Update web-platform-tests to revision 7ed322c3132993bcb5734702b40621448670fc76

This commit is contained in:
WPT Sync Bot 2019-12-24 08:23:56 +00:00
parent 10fa5fa68a
commit 110ca49f65
52 changed files with 1682 additions and 485 deletions

View file

@ -7,7 +7,7 @@
expected: FAIL
[Opening a blob URL in a new window immediately before revoking it works.]
expected: TIMEOUT
expected: FAIL
[Opening a blob URL in a noopener about:blank window immediately before revoking it works.]
expected: FAIL

View file

@ -280382,6 +280382,15 @@
"native-file-system/script-tests/FileSystemFileHandle-getFile.js": [
[]
],
"native-file-system/script-tests/FileSystemWritableFileStream-piped.js": [
[]
],
"native-file-system/script-tests/FileSystemWritableFileStream-write.js": [
[]
],
"native-file-system/script-tests/FileSystemWritableFileStream.js": [
[]
],
"native-file-system/script-tests/FileSystemWriter.js": [
[]
],
@ -374719,6 +374728,12 @@
{}
]
],
"loading/lazyload/move-element-and-scroll.tentative.html": [
[
"loading/lazyload/move-element-and-scroll.tentative.html",
{}
]
],
"loading/lazyload/not-rendered-below-viewport-image-loading-lazy.tentative.html": [
[
"loading/lazyload/not-rendered-below-viewport-image-loading-lazy.tentative.html",
@ -374767,6 +374782,12 @@
{}
]
],
"loading/lazyload/remove-element-and-scroll.tentative.html": [
[
"loading/lazyload/remove-element-and-scroll.tentative.html",
{}
]
],
"loading/preloader-css-import-no-quote.tentative.html": [
[
"loading/preloader-css-import-no-quote.tentative.html",
@ -379592,6 +379613,81 @@
}
]
],
"native-file-system/native_FileSystemWritableFileStream-piped.tentative.https.manual.window.js": [
[
"native-file-system/native_FileSystemWritableFileStream-piped.tentative.https.manual.window.html",
{
"script_metadata": [
[
"script",
"/resources/testdriver.js"
],
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/native-fs-test-helpers.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream-piped.js"
]
]
}
]
],
"native-file-system/native_FileSystemWritableFileStream-write.tentative.https.manual.window.js": [
[
"native-file-system/native_FileSystemWritableFileStream-write.tentative.https.manual.window.html",
{
"script_metadata": [
[
"script",
"/resources/testdriver.js"
],
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/native-fs-test-helpers.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream-write.js"
]
]
}
]
],
"native-file-system/native_FileSystemWritableFileStream.tentative.https.manual.window.js": [
[
"native-file-system/native_FileSystemWritableFileStream.tentative.https.manual.window.html",
{
"script_metadata": [
[
"script",
"/resources/testdriver.js"
],
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/native-fs-test-helpers.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream.js"
]
]
}
]
],
"native-file-system/native_FileSystemWriter.tentative.https.manual.window.js": [
[
"native-file-system/native_FileSystemWriter.tentative.https.manual.window.html",
@ -379971,6 +380067,134 @@
}
]
],
"native-file-system/sandboxed_FileSystemWritableFileStream-piped.tentative.https.any.js": [
[
"native-file-system/sandboxed_FileSystemWritableFileStream-piped.tentative.https.any.html",
{
"script_metadata": [
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/sandboxed-fs-test-helpers.js"
],
[
"script",
"../streams/resources/recording-streams.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream-piped.js"
]
]
}
],
[
"native-file-system/sandboxed_FileSystemWritableFileStream-piped.tentative.https.any.worker.html",
{
"script_metadata": [
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/sandboxed-fs-test-helpers.js"
],
[
"script",
"../streams/resources/recording-streams.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream-piped.js"
]
]
}
]
],
"native-file-system/sandboxed_FileSystemWritableFileStream-write.tentative.https.any.js": [
[
"native-file-system/sandboxed_FileSystemWritableFileStream-write.tentative.https.any.html",
{
"script_metadata": [
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/sandboxed-fs-test-helpers.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream-write.js"
]
]
}
],
[
"native-file-system/sandboxed_FileSystemWritableFileStream-write.tentative.https.any.worker.html",
{
"script_metadata": [
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/sandboxed-fs-test-helpers.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream-write.js"
]
]
}
]
],
"native-file-system/sandboxed_FileSystemWritableFileStream.tentative.https.any.js": [
[
"native-file-system/sandboxed_FileSystemWritableFileStream.tentative.https.any.html",
{
"script_metadata": [
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/sandboxed-fs-test-helpers.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream.js"
]
]
}
],
[
"native-file-system/sandboxed_FileSystemWritableFileStream.tentative.https.any.worker.html",
{
"script_metadata": [
[
"script",
"resources/test-helpers.js"
],
[
"script",
"resources/sandboxed-fs-test-helpers.js"
],
[
"script",
"script-tests/FileSystemWritableFileStream.js"
]
]
}
]
],
"native-file-system/sandboxed_FileSystemWriter.tentative.https.any.js": [
[
"native-file-system/sandboxed_FileSystemWriter.tentative.https.any.html",
@ -671141,6 +671365,10 @@
"f7d887b18a228e648a5de45b890bfce371963cec",
"testharness"
],
"loading/lazyload/move-element-and-scroll.tentative.html": [
"f9d89807b8c5575982e4d28f156cb604ae0bbd8b",
"testharness"
],
"loading/lazyload/not-rendered-below-viewport-image-loading-lazy.tentative.html": [
"0c40d7dbcd832b379a3d8427f9390fca842439cd",
"testharness"
@ -671173,6 +671401,10 @@
"58f8c3a4d5a1e21ce2afd9def3ab9b5870cc272f",
"testharness"
],
"loading/lazyload/remove-element-and-scroll.tentative.html": [
"53708ceac75fa3421c4ecc1a8b91034c5a91c396",
"testharness"
],
"loading/lazyload/resources/image.png": [
"b712825093805d1052b01047b1dbb102f0af8f0f",
"support"
@ -676393,6 +676625,18 @@
"16c68c59b273663fb16847f121f38e03bb94cf19",
"testharness"
],
"native-file-system/native_FileSystemWritableFileStream-piped.tentative.https.manual.window.js": [
"2c0299df12e9577261cce2f68d12a1bf744158c2",
"testharness"
],
"native-file-system/native_FileSystemWritableFileStream-write.tentative.https.manual.window.js": [
"0efacf9e607d990bccf185b7d84d5f45220338d8",
"testharness"
],
"native-file-system/native_FileSystemWritableFileStream.tentative.https.manual.window.js": [
"caf6fbd1c52a3cef603896f8ee98d9618e4dc92e",
"testharness"
],
"native-file-system/native_FileSystemWriter.tentative.https.manual.window.js": [
"25d8ee995857fa67be2c41f048e882ec473f739d",
"testharness"
@ -676485,6 +676729,18 @@
"fb93858fe7934b27244fa0ff828eac75c34b6629",
"testharness"
],
"native-file-system/sandboxed_FileSystemWritableFileStream-piped.tentative.https.any.js": [
"eed6a561dc40e658b6b6c8d51766cdacc42a024c",
"testharness"
],
"native-file-system/sandboxed_FileSystemWritableFileStream-write.tentative.https.any.js": [
"7ef0ea0ef82626eae74f152b94f898859aca6832",
"testharness"
],
"native-file-system/sandboxed_FileSystemWritableFileStream.tentative.https.any.js": [
"16dbbe6a808a603c9b81482f733dcf09e84670ff",
"testharness"
],
"native-file-system/sandboxed_FileSystemWriter.tentative.https.any.js": [
"8352e2487fe0823a2d353372757d833d85e98c4b",
"testharness"
@ -676525,6 +676781,18 @@
"6b7d9f9a3171c96aaa2e1312451b3a9cac6c2e9b",
"support"
],
"native-file-system/script-tests/FileSystemWritableFileStream-piped.js": [
"59fc1e3d1362b3ec90e3ef7a4f0981bdd4a21340",
"support"
],
"native-file-system/script-tests/FileSystemWritableFileStream-write.js": [
"f14d79fd040a0e7fc7f19216fec1aec0ca23dec8",
"support"
],
"native-file-system/script-tests/FileSystemWritableFileStream.js": [
"5acf93aec8cee8ef3a8389979e2a402acf5cbc7f",
"support"
],
"native-file-system/script-tests/FileSystemWriter.js": [
"1c51d0b4eb771eec5a606263e72719be589aa317",
"support"
@ -710978,7 +711246,7 @@
"support"
],
"tools/requirements_mypy.txt": [
"50afed2bb2dec184caee69a1074e428d115c5b40",
"f3a56a6dd715ac6d6b01057a609a3e470ba8e303",
"support"
],
"tools/runner/css/bootstrap-theme.min.css": [
@ -715338,7 +715606,7 @@
"support"
],
"tools/wptrunner/wptrunner/browsers/base.py": [
"655344581973c15b657729feb736dd2073d79a30",
"fef052dd5ab495dad4342e086491723c761edcfd",
"support"
],
"tools/wptrunner/wptrunner/browsers/chrome.py": [
@ -715366,7 +715634,7 @@
"support"
],
"tools/wptrunner/wptrunner/browsers/epiphany.py": [
"8a0e5f578b2feb871ddd50ab08168c6b9d45e4b0",
"f6c4c602a38c043637cf9dbc8bbb5350ec94527e",
"support"
],
"tools/wptrunner/wptrunner/browsers/firefox.py": [
@ -715410,11 +715678,11 @@
"support"
],
"tools/wptrunner/wptrunner/browsers/webkit.py": [
"aa2862139450f682e6051c5f617ab8caf1960784",
"1be683ed26e43a9e10a1f82520811c33e90dad05",
"support"
],
"tools/wptrunner/wptrunner/browsers/webkitgtk_minibrowser.py": [
"d735f3c0792a6356c6270143205c55b7c02eef47",
"d8b9744bd743e39ce19f008e7f40cbc80e80107c",
"support"
],
"tools/wptrunner/wptrunner/config.py": [
@ -715422,7 +715690,7 @@
"support"
],
"tools/wptrunner/wptrunner/environment.py": [
"e79ae3750647f88bba44703181c208229800f658",
"7dcea4aeafabb9c481dccdc1aaf9bd153572bb7d",
"support"
],
"tools/wptrunner/wptrunner/executors/__init__.py": [
@ -715466,7 +715734,7 @@
"support"
],
"tools/wptrunner/wptrunner/executors/executorservo.py": [
"9cef1fb2d7f78e8729e51ff400b3df2402a2e94e",
"9eebfa59febf991bd41db25f2b02ea4c8c00195e",
"support"
],
"tools/wptrunner/wptrunner/executors/executorservodriver.py": [
@ -715522,7 +715790,7 @@
"support"
],
"tools/wptrunner/wptrunner/expectedtree.py": [
"4d505086bd8d991c953c34d38d8e1fa0cb920f99",
"7521f25b1344f5c50b8182a2b7a68278858f9b47",
"support"
],
"tools/wptrunner/wptrunner/font.py": [
@ -715550,23 +715818,23 @@
"support"
],
"tools/wptrunner/wptrunner/manifestexpected.py": [
"eae85b1f6661a67dff59ef78a9ed627e3e3603d8",
"65b53f0ab97b581b2b71277bd8f260f79a1afb12",
"support"
],
"tools/wptrunner/wptrunner/manifestinclude.py": [
"d302831a57abbaadd75fe49e094482dc14223ea3",
"b3ab2c0776571ffe4ca49e599e0a898c4a7c79a3",
"support"
],
"tools/wptrunner/wptrunner/manifestupdate.py": [
"2f2a8d543352d4c2128afd0314e8261668964d01",
"3cb1b5107924c4d897efccaf8f5de93df2311609",
"support"
],
"tools/wptrunner/wptrunner/metadata.py": [
"bf4d7a558abb7da117c95f896c1c8c3babd77219",
"aafc7d52250f62fdcd7025858f2273290c77d49e",
"support"
],
"tools/wptrunner/wptrunner/products.py": [
"e3117042709f99f4e0443dc0dfaf561ad0b548b0",
"abd84094bb33dbd13b3594a7acbe8467512a99ce",
"support"
],
"tools/wptrunner/wptrunner/stability.py": [
@ -715598,7 +715866,7 @@
"support"
],
"tools/wptrunner/wptrunner/testloader.py": [
"fa54ca361576318cb35ec716a1a159bdb532e6c8",
"f16cc14ceada70f781a2aaabb1c2f547cb9d61d1",
"support"
],
"tools/wptrunner/wptrunner/testrunner.py": [
@ -715626,7 +715894,7 @@
"support"
],
"tools/wptrunner/wptrunner/tests/test_expectedtree.py": [
"2308be9590e9004f41a492682d187a7b4fc57231",
"d71237a42dad58c69b686e6040b65f40064c9437",
"support"
],
"tools/wptrunner/wptrunner/tests/test_formatters.py": [
@ -715634,7 +715902,7 @@
"support"
],
"tools/wptrunner/wptrunner/tests/test_manifestexpected.py": [
"525915d1832ac8af1957a799615969e058eefca5",
"f3e4ce796a45c472a88fe022277e0347b4e98948",
"support"
],
"tools/wptrunner/wptrunner/tests/test_products.py": [
@ -715646,15 +715914,15 @@
"support"
],
"tools/wptrunner/wptrunner/tests/test_testloader.py": [
"e857cd43db6d281f95414230f52c984aad915118",
"836003d106038ab4303035eecca31774c9a26ce1",
"support"
],
"tools/wptrunner/wptrunner/tests/test_update.py": [
"5ed366788067c94245a8f7e1dadccb93da0493ab",
"a24e4a733dae576bf82f5b6e17a7bbe3f6d351ef",
"support"
],
"tools/wptrunner/wptrunner/tests/test_wpttest.py": [
"9bb3e1fd34e37c4430f752b87e4bb4e3bfa7f959",
"1a94a2f3303a7b5a1d5b2c553af6bbd1d8b45bc7",
"support"
],
"tools/wptrunner/wptrunner/update/__init__.py": [
@ -715670,7 +715938,7 @@
"support"
],
"tools/wptrunner/wptrunner/update/state.py": [
"64dbf1180604cd8df7a468e036447f95b80371b0",
"f8a83525cbd4706bdfbc99a518d2bac123d34e96",
"support"
],
"tools/wptrunner/wptrunner/update/sync.py": [
@ -715694,11 +715962,11 @@
"support"
],
"tools/wptrunner/wptrunner/wptcommandline.py": [
"923bdaa55857e793e8d6e8c587167360aed4ae94",
"91f1161b01b99f31ef1d3dde05333627bf4365b2",
"support"
],
"tools/wptrunner/wptrunner/wptlogging.py": [
"69cee744879eb6780cb99ec93123922e166d9e16",
"444d1d962d25873109977b937d96c86cb293cd8f",
"support"
],
"tools/wptrunner/wptrunner/wptmanifest/__init__.py": [
@ -715758,11 +716026,11 @@
"support"
],
"tools/wptrunner/wptrunner/wptrunner.py": [
"cac172a5940ec696e3a3b279170f9712aaf4668d",
"7409dc26560af0be3a397bb73184137d8715811f",
"support"
],
"tools/wptrunner/wptrunner/wpttest.py": [
"67b57583424d08779114dc2fa030390e584d716f",
"4cbaedcdb0163776dfcf55d13afca4cdba05dc92",
"support"
],
"tools/wptserve/.gitignore": [
@ -721278,7 +721546,7 @@
"testharness"
],
"wasm/jsapi/constructor/multi-value.any.js": [
"7fbac5b24f1a50568170e257552bdc7a7783ae25",
"2c53e3611e044c8ba44a5c0436d4409bc9f1af42",
"testharness"
],
"wasm/jsapi/constructor/validate.any.js": [
@ -721326,7 +721594,7 @@
"testharness"
],
"wasm/jsapi/instanceTestFactory.js": [
"7ccf06c234a7ce5c17353f3bd74565b66c289952",
"c81672f208b1505430dd1ee909afaf12d9b2db20",
"support"
],
"wasm/jsapi/interface.any.js": [
@ -721354,7 +721622,7 @@
"testharness"
],
"wasm/jsapi/module/customSections.any.js": [
"8e9732e5512d3295c445c110e949905cec0efbe6",
"09355979d84ade5385e7b3a5ac265eaa0da500cf",
"testharness"
],
"wasm/jsapi/module/exports.any.js": [
@ -721394,7 +721662,7 @@
"testharness"
],
"wasm/jsapi/wasm-module-builder.js": [
"09ff891f52e2b4e9dd80fbc88586129cd0a910b6",
"82c6e04135f1b86df1a1d8e72c5f829c3297bb10",
"support"
],
"wasm/resources/load_wasm.js": [

View file

@ -1,2 +1,2 @@
[no-transition-from-ua-to-blocking-stylesheet.html]
expected: FAIL
expected: TIMEOUT

View file

@ -309,24 +309,21 @@
[<iframe>: separate response Content-Type: */* text/html]
expected: FAIL
[<iframe>: combined response Content-Type: text/html;charset=gbk text/plain text/html]
expected: FAIL
[<iframe>: combined response Content-Type: text/html;" \\" text/plain]
expected: FAIL
[<iframe>: combined response Content-Type: text/html;" text/plain]
expected: FAIL
[<iframe>: separate response Content-Type: text/plain */*]
expected: FAIL
[<iframe>: combined response Content-Type: text/html */*]
expected: FAIL
[<iframe>: combined response Content-Type: text/html */*;charset=gbk]
expected: FAIL
[<iframe>: combined response Content-Type: text/html;x=" text/plain]
expected: FAIL
[<iframe>: combined response Content-Type: */* text/html]
expected: FAIL
[<iframe>: separate response Content-Type: text/html;" text/plain]
expected: FAIL
[<iframe>: separate response Content-Type: text/plain */*;charset=gbk]
expected: FAIL

View file

@ -56,6 +56,3 @@
[separate text/javascript x/x]
expected: FAIL
[separate text/javascript;charset=windows-1252 error text/javascript]
expected: FAIL

View file

@ -11,6 +11,6 @@
[X-Content-Type-Options%3A%20nosniff%0C]
expected: FAIL
[X-Content-Type-Options%3A%20%40%23%24%23%25%25%26%5E%26%5E*()()11!%2Cnosniff]
[X-Content-Type-Options%3A%20'NosniFF']
expected: FAIL

View file

@ -1,6 +1,6 @@
[embedded-credentials.tentative.sub.html]
type: testharness
expected: TIMEOUT
expected: CRASH
[Embedded credentials are treated as network errors.]
expected: FAIL

View file

@ -0,0 +1,4 @@
[traverse_the_history_1.html]
[Multiple history traversals from the same task]
expected: FAIL

View file

@ -1,4 +1,8 @@
[skip-document-with-fragment.html]
expected: TIMEOUT
[Autofocus elements in iframed documents with URL fragments should be skipped.]
expected: FAIL
[Autofocus elements in top-level browsing context's documents with URI fragments should be skipped.]
expected: TIMEOUT

View file

@ -1,16 +1,20 @@
[supported-elements.html]
expected: TIMEOUT
[Contenteditable element should support autofocus]
expected: FAIL
[Element with tabindex should support autofocus]
expected: FAIL
expected: TIMEOUT
[Host element with delegatesFocus including no focusable descendants should be skipped]
expected: FAIL
expected: NOTRUN
[Area element should support autofocus]
expected: FAIL
expected: NOTRUN
[Host element with delegatesFocus should support autofocus]
expected: FAIL
expected: NOTRUN
[Non-HTMLElement should not support autofocus]
expected: NOTRUN

View file

@ -1,4 +1,5 @@
[crossorigin-sandwich-TAO.sub.html]
expected: ERROR
[There should be one entry.]
expected: FAIL

View file

@ -1,4 +1,5 @@
[realtimeanalyser-fft-scaling.html]
expected: TIMEOUT
[X 2048-point FFT peak position is not equal to 64. Got 0.]
expected: FAIL

View file

@ -1,5 +1,4 @@
[005.html]
expected: ERROR
[dedicated worker in shared worker in dedicated worker]
expected: FAIL

View file

@ -0,0 +1,40 @@
<!DOCTYPE html>
<head>
<title>Images with loading='lazy' load being moved to another document
and then scrolled to</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="common.js"></script>
</head>
<!--
Marked as tentative until https://github.com/whatwg/html/pull/3752 is landed.
-->
<body>
<!-- Tall spacer keeps the lazy-loading image far below the initial viewport. -->
<div id="tall_div" style="height:1000vh"></div>
<div id="below_viewport_div"></div>
<img id="below_viewport" src='resources/image.png?below_viewport' loading="lazy">
<script>
const tall_div = document.getElementById("tall_div");
const below_viewport_element = document.getElementById("below_viewport");
const below_viewport_div = document.getElementById("below_viewport_div");
async_test(function(t) {
// The image must never fire a load event once it has been adopted into
// the hidden iframe's document, even after its original position in the
// outer document is scrolled into view.
below_viewport_element.onload =
t.unreached_func("The below viewport image should not load");
// Pass if no load event has fired within 1s of the scroll below.
t.step_timeout(t.step_func_done(), 1000);
const iframe = document.createElement('iframe');
iframe.setAttribute("style", "display:none");
iframe.srcdoc = "<body></body>";
iframe.onload = () => {
// Move the <img> into the iframe's document, then scroll the outer
// document to the marker div where the image used to sit.
const adopted_img = iframe.contentDocument.adoptNode(below_viewport_element);
iframe.contentDocument.body.appendChild(adopted_img);
below_viewport_div.scrollIntoView();
};
document.body.insertBefore(iframe, tall_div);
}, "Test that <img> below viewport is not loaded when moved to another " +
"document and then scrolled to");
</script>
</body>

View file

@ -0,0 +1,36 @@
<!DOCTYPE html>
<head>
<title>Images with loading='lazy' load being removed and then scrolled to</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="common.js"></script>
</head>
<!--
Marked as tentative until https://github.com/whatwg/html/pull/3752 is landed.
-->
<body>
<!-- The in-viewport image uses pipe=trickle(d1) so its load is delayed,
giving the test a signal for when to perform the scroll. -->
<img id="in_viewport" src='resources/image.png?in_viewport&pipe=trickle(d1)'>
<!-- Tall spacer keeps the lazy-loading image far below the initial viewport. -->
<div style="height:1000vh"></div>
<div id="below_viewport_div"></div>
<img id="below_viewport" src='resources/image.png?below_viewport' loading="lazy">
<script>
const in_viewport_element = document.getElementById("in_viewport");
const below_viewport_element = document.getElementById("below_viewport");
const below_viewport_div = document.getElementById("below_viewport_div");
async_test(t => {
// Once removed from the document, the lazy image must never load, even
// when the position it used to occupy is scrolled into view.
below_viewport_element.onload = t.unreached_func("Removed loading=lazy image " +
"should not load when its old position is scrolled to.");
below_viewport_element.remove();
in_viewport_element.onload = () => {
// Scroll to the marker div; pass if no load fires within 2s.
below_viewport_div.scrollIntoView();
t.step_timeout(t.step_func_done(), 2000);
};
}, "Test that <img> below viewport is not loaded when removed from the " +
"document and then scrolled to");
</script>
</body>

View file

@ -0,0 +1,4 @@
// META: script=/resources/testdriver.js
// META: script=resources/test-helpers.js
// META: script=resources/native-fs-test-helpers.js
// META: script=script-tests/FileSystemWritableFileStream-piped.js

View file

@ -0,0 +1,4 @@
// META: script=/resources/testdriver.js
// META: script=resources/test-helpers.js
// META: script=resources/native-fs-test-helpers.js
// META: script=script-tests/FileSystemWritableFileStream-write.js

View file

@ -0,0 +1,4 @@
// META: script=/resources/testdriver.js
// META: script=resources/test-helpers.js
// META: script=resources/native-fs-test-helpers.js
// META: script=script-tests/FileSystemWritableFileStream.js

View file

@ -0,0 +1,4 @@
// META: script=resources/test-helpers.js
// META: script=resources/sandboxed-fs-test-helpers.js
// META: script=../streams/resources/recording-streams.js
// META: script=script-tests/FileSystemWritableFileStream-piped.js

View file

@ -0,0 +1,3 @@
// META: script=resources/test-helpers.js
// META: script=resources/sandboxed-fs-test-helpers.js
// META: script=script-tests/FileSystemWritableFileStream-write.js

View file

@ -0,0 +1,3 @@
// META: script=resources/test-helpers.js
// META: script=resources/sandboxed-fs-test-helpers.js
// META: script=script-tests/FileSystemWritableFileStream.js

View file

@ -0,0 +1,135 @@
// Web Platform Tests: piping a ReadableStream into a
// FileSystemWritableFileStream (Native File System API).
// NOTE(review): relies on harness helpers loaded by the wrapper test file —
// directory_test, createEmptyFile, createDirectory, getFileContents and
// getFileSize (resources/test-helpers.js) plus recordingReadableStream
// (../streams/resources/recording-streams.js); none are defined here.

// Piping a single string chunk writes that string verbatim.
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'foo_string.txt', root);
const wfs = await handle.createWritable();
const rs = recordingReadableStream({
start(controller) {
controller.enqueue('foo_string');
controller.close();
}
});
// preventCancel: the writable must not be cancelled if the source errors;
// here the source closes normally, so pipeTo resolves after the write.
await rs.pipeTo(wfs, { preventCancel: true });
assert_equals(await getFileContents(handle), 'foo_string');
assert_equals(await getFileSize(handle), 10);
}, 'can be piped to with a string');
// An ArrayBuffer chunk is written as raw bytes (0x66 0x6f 0x6f = "foo").
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'foo_arraybuf.txt', root);
const wfs = await handle.createWritable();
const buf = new ArrayBuffer(3);
const intView = new Uint8Array(buf);
intView[0] = 0x66;
intView[1] = 0x6f;
intView[2] = 0x6f;
const rs = recordingReadableStream({
start(controller) {
controller.enqueue(buf);
controller.close();
}
});
await rs.pipeTo(wfs, { preventCancel: true });
assert_equals(await getFileContents(handle), 'foo');
assert_equals(await getFileSize(handle), 3);
}, 'can be piped to with an ArrayBuffer');
// A Blob chunk is written as its contents.
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'foo_blob.txt', root);
const wfs = await handle.createWritable();
const rs = recordingReadableStream({
start(controller) {
controller.enqueue(new Blob(['foo']));
controller.close();
}
});
await rs.pipeTo(wfs, { preventCancel: true });
assert_equals(await getFileContents(handle), 'foo');
assert_equals(await getFileSize(handle), 3);
}, 'can be piped to with a Blob');
// A WriteParams-style object ({type: 'write', data}) is accepted as a chunk.
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'foo_write_param.txt', root);
const wfs = await handle.createWritable();
const rs = recordingReadableStream({
start(controller) {
controller.enqueue({type: 'write', data: 'foobar'});
controller.close();
}
});
await rs.pipeTo(wfs, { preventCancel: true });
assert_equals(await getFileContents(handle), 'foobar');
assert_equals(await getFileSize(handle), 6);
}, 'can be piped to with a param object with write command');
// Mixed commands: write 'foobar', truncate(10) zero-pads to 10 bytes, then
// an overwrite at position 0 yields 'bazbar' followed by four NUL bytes.
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'foo_write_param.txt', root);
const wfs = await handle.createWritable();
const rs = recordingReadableStream({
start(controller) {
controller.enqueue({type: 'write', data: 'foobar'});
controller.enqueue({type: 'truncate', size: 10});
controller.enqueue({type: 'write', position: 0, data: 'baz'});
controller.close();
}
});
await rs.pipeTo(wfs, { preventCancel: true });
assert_equals(await getFileContents(handle), 'bazbar\0\0\0\0');
assert_equals(await getFileSize(handle), 10);
}, 'can be piped to with a param object with multiple commands');
// Several queued chunks are written in order and appended.
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'foo_write_queued.txt', root);
const wfs = await handle.createWritable();
const rs = recordingReadableStream({
start(controller) {
controller.enqueue('foo');
controller.enqueue('bar');
controller.enqueue('baz');
controller.close();
}
});
await rs.pipeTo(wfs, { preventCancel: true });
assert_equals(await getFileContents(handle), 'foobarbaz');
assert_equals(await getFileSize(handle), 9);
}, 'multiple operations can be queued');
// A fetch() response body (a ReadableStream) can be piped straight to a file.
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'fetched.txt', root);
const wfs = await handle.createWritable();
const response = await fetch('data:text/plain,fetched from far');
// NOTE(review): response.body is not a promise; this `await` is a no-op.
const body = await response.body;
await body.pipeTo(wfs, { preventCancel: true });
assert_equals(await getFileContents(handle), 'fetched from far');
assert_equals(await getFileSize(handle), 16);
}, 'plays well with fetch');
// Aborting an in-flight pipeTo rejects the pipe, leaves the stream unusable
// for close(), and discards any partially written data.
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'aborted should_be_empty.txt', root);
const wfs = await handle.createWritable();
const response = await fetch('data:text/plain,fetched from far');
// NOTE(review): response.body is not a promise; this `await` is a no-op.
const body = await response.body;
const abortController = new AbortController();
const signal = abortController.signal;
const promise = body.pipeTo(wfs, { signal });
// NOTE(review): abort() returns undefined, so this `await` only yields a
// microtask tick; the rejection itself is awaited on the next line.
await abortController.abort();
await promise_rejects(t, 'AbortError', promise, 'stream is aborted');
await promise_rejects(t, TypeError(), wfs.close(), 'stream cannot be closed to flush writes');
assert_equals(await getFileContents(handle), '');
assert_equals(await getFileSize(handle), 0);
}, 'abort() aborts write');

View file

@ -0,0 +1,337 @@
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'empty_blob', root);
const stream = await handle.createWritable();
await stream.write(new Blob([]));
await stream.close();
assert_equals(await getFileContents(handle), '');
assert_equals(await getFileSize(handle), 0);
}, 'write() with an empty blob to an empty file');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'valid_blob', root);
const stream = await handle.createWritable();
await stream.write(new Blob(['1234567890']));
await stream.close();
assert_equals(await getFileContents(handle), '1234567890');
assert_equals(await getFileSize(handle), 10);
}, 'write() a blob to an empty file');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'write_param_empty', root);
const stream = await handle.createWritable();
await stream.write({type: 'write', data: '1234567890'});
await stream.close();
assert_equals(await getFileContents(handle), '1234567890');
assert_equals(await getFileSize(handle), 10);
}, 'write() with WriteParams without position to an empty file');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'string_zero_offset', root);
const stream = await handle.createWritable();
await stream.write({type: 'write', position: 0, data: '1234567890'});
await stream.close();
assert_equals(await getFileContents(handle), '1234567890');
assert_equals(await getFileSize(handle), 10);
}, 'write() a string to an empty file with zero offset');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'blob_zero_offset', root);
const stream = await handle.createWritable();
await stream.write({type: 'write', position: 0, data: new Blob(['1234567890'])});
await stream.close();
assert_equals(await getFileContents(handle), '1234567890');
assert_equals(await getFileSize(handle), 10);
}, 'write() a blob to an empty file with zero offset');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'write_appends', root);
const stream = await handle.createWritable();
await stream.write('12345');
await stream.write('67890');
await stream.close();
assert_equals(await getFileContents(handle), '1234567890');
assert_equals(await getFileSize(handle), 10);
}, 'write() called consecutively appends');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'write_appends_object_string', root);
const stream = await handle.createWritable();
await stream.write('12345');
await stream.write({type: 'write', data: '67890'});
await stream.close();
assert_equals(await getFileContents(handle), '1234567890');
assert_equals(await getFileSize(handle), 10);
}, 'write() WriteParams without position and string appends');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'write_appends_object_blob', root);
const stream = await handle.createWritable();
await stream.write('12345');
await stream.write({type: 'write', data: new Blob(['67890'])});
await stream.close();
assert_equals(await getFileContents(handle), '1234567890');
assert_equals(await getFileSize(handle), 10);
}, 'write() WriteParams without position and blob appends');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'string_with_offset', root);
const stream = await handle.createWritable();
await stream.write('1234567890');
await stream.write({type: 'write', position: 4, data: 'abc'});
await stream.close();
assert_equals(await getFileContents(handle), '1234abc890');
assert_equals(await getFileSize(handle), 10);
}, 'write() called with a string and a valid offset');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'blob_with_offset', root);
const stream = await handle.createWritable();
await stream.write('1234567890');
await stream.write({type: 'write', position: 4, data: new Blob(['abc'])});
await stream.close();
assert_equals(await getFileContents(handle), '1234abc890');
assert_equals(await getFileSize(handle), 10);
}, 'write() called with a blob and a valid offset');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'bad_offset', root);
const stream = await handle.createWritable();
await promise_rejects(
t, 'InvalidStateError', stream.write({type: 'write', position: 4, data: new Blob(['abc'])}));
await promise_rejects(
t, TypeError(), stream.close(), 'stream is already closed');
assert_equals(await getFileContents(handle), '');
assert_equals(await getFileSize(handle), 0);
}, 'write() called with an invalid offset');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'empty_string', root);
const stream = await handle.createWritable();
await stream.write('');
await stream.close();
assert_equals(await getFileContents(handle), '');
assert_equals(await getFileSize(handle), 0);
}, 'write() with an empty string to an empty file');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'valid_utf8_string', root);
const stream = await handle.createWritable();
await stream.write('foo🤘');
await stream.close();
assert_equals(await getFileContents(handle), 'foo🤘');
assert_equals(await getFileSize(handle), 7);
}, 'write() with a valid utf-8 string');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'string_with_unix_line_ending', root);
const stream = await handle.createWritable();
await stream.write('foo\n');
await stream.close();
assert_equals(await getFileContents(handle), 'foo\n');
assert_equals(await getFileSize(handle), 4);
}, 'write() with a string with unix line ending preserved');
directory_test(async (t, root) => {
const handle =
await createEmptyFile(t, 'string_with_windows_line_ending', root);
const stream = await handle.createWritable();
await stream.write('foo\r\n');
await stream.close();
assert_equals(await getFileContents(handle), 'foo\r\n');
assert_equals(await getFileSize(handle), 5);
}, 'write() with a string with windows line ending preserved');
directory_test(async (t, root) => {
const handle = await createEmptyFile(t, 'empty_array_buffer', root);
const stream = await handle.createWritable();
const buf = new ArrayBuffer(0);
await stream.write(buf);
await stream.close();
assert_equals(await getFileContents(handle), '');
assert_equals(await getFileSize(handle), 0);
}, 'write() with an empty array buffer to an empty file');
directory_test(async (t, root) => {
const handle =
await createEmptyFile(t, 'valid_string_typed_byte_array', root);
const stream = await handle.createWritable();
const buf = new ArrayBuffer(3);
const intView = new Uint8Array(buf);
intView[0] = 0x66;
intView[1] = 0x6f;
intView[2] = 0x6f;
await stream.write(buf);
await stream.close();
assert_equals(await getFileContents(handle), 'foo');
assert_equals(await getFileSize(handle), 3);
}, 'write() with a valid typed array buffer');
// Removing the parent directory while an atomic write is pending must make
// close() reject with NotFoundError (there is no destination to commit to).
directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'close_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);
  const stream = await handle.createWritable();
  await stream.write('foo');
  // Delete the directory out from under the still-open stream.
  await root.removeEntry('parent_dir', {recursive: true});
  await promise_rejects(t, 'NotFoundError', stream.close());
}, 'atomic writes: close() fails when parent directory is removed');
// Two independent writable streams on the same file: neither stream's data
// is visible until that stream is closed, and each close() replaces the file
// wholesale (last close wins).
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'atomic_writes.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foox');
  const stream2 = await handle.createWritable();
  await stream2.write('bar');
  // Nothing committed yet: the file on disk is still empty.
  assert_equals(await getFileSize(handle), 0);
  await stream2.close();
  assert_equals(await getFileContents(handle), 'bar');
  assert_equals(await getFileSize(handle), 3);
  // Closing the first stream clobbers the contents stream2 committed.
  await stream.close();
  assert_equals(await getFileContents(handle), 'foox');
  assert_equals(await getFileSize(handle), 4);
}, 'atomic writes: writable file streams make atomic changes on close');
// Once close() has committed the data, any further write() must reject with
// a TypeError and leave the committed contents untouched.
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'atomic_write_after_close.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');
  await stream.close();
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);
  await promise_rejects(
      t, TypeError(), stream.write('abc'));
}, 'atomic writes: write() after close() fails');
// truncate() on an already-closed stream must reject with a TypeError and
// leave the committed contents untouched.
directory_test(async (t, root) => {
  const handle =
      await createEmptyFile(t, 'atomic_truncate_after_close.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');
  await stream.close();
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);
  await promise_rejects(t, TypeError(), stream.truncate(0));
}, 'atomic writes: truncate() after close() fails');
// A second close() on an already-closed stream must reject with a TypeError.
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'atomic_close_after_close.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');
  await stream.close();
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);
  await promise_rejects(t, TypeError(), stream.close());
}, 'atomic writes: close() after close() fails');
// Race many concurrent close() calls on one stream: exactly one of them may
// succeed; the rest must reject.
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'there_can_be_only_one.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');
  // This test might be flaky if there is a race condition allowing
  // close() to be called multiple times.
  // Each attempt resolves to 1 on success and 0 on rejection.
  const success_promises =
      [...Array(100)].map(() => stream.close().then(() => 1).catch(() => 0));
  const close_attempts = await Promise.all(success_promises);
  // Seed reduce with 0 so the sum is well-defined even for an empty array
  // (reduce without an initial value throws on []).
  const success_count = close_attempts.reduce((x, y) => x + y, 0);
  assert_equals(success_count, 1);
}, 'atomic writes: only one close() operation may succeed');
// The atomic-write snapshot must survive the underlying file being removed:
// reads fail while the stream is open, but close() re-creates the file with
// the data written to the stream.
directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'atomic_writable_file_stream_persists_removed.txt';
  const handle = await createFileWithContents(t, file_name, 'foo', dir);
  const stream = await handle.createWritable();
  await stream.write('bar');
  // Removing the file makes reads through the handle fail...
  await dir.removeEntry(file_name);
  await promise_rejects(t, 'NotFoundError', getFileContents(handle));
  // ...but close() still commits the buffered data to a fresh file.
  await stream.close();
  assert_equals(await getFileContents(handle), 'bar');
  assert_equals(await getFileSize(handle), 3);
}, 'atomic writes: writable file stream persists file on close, even if file is removed');
// The stream's default writer accepts raw chunks (string, Blob) as well as
// WriteParams dictionaries for seek/write commands.
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'writer_written', root);
  const stream = await handle.createWritable();
  const writer = stream.getWriter();
  await writer.write('foo');
  await writer.write(new Blob(['bar']));
  // Seek back to the start, then overwrite the initial 'foo' with 'baz'.
  await writer.write({type: 'seek', position: 0});
  await writer.write({type: 'write', data: 'baz'});
  await writer.close();
  assert_equals(await getFileContents(handle), 'bazbar');
  assert_equals(await getFileSize(handle), 6);
}, 'getWriter() can be used');
// A {type: 'truncate'} WriteParams without a size member must reject with
// SyntaxError and leave the file contents untouched.
directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'content.txt', 'very long string', root);
  const stream = await handle.createWritable();
  await promise_rejects(
      t, SyntaxError(), stream.write({type: 'truncate'}), 'truncate without size');
}, 'WriteParams: truncate missing size param');
// A {type: 'write'} WriteParams without a data member must reject with a
// SyntaxError rather than writing anything.
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'content.txt', root);
  const wfs = await handle.createWritable();
  const attempt = wfs.write({type: 'write'});
  await promise_rejects(t, SyntaxError(), attempt, 'write without data');
}, 'WriteParams: write missing data param');
// A {type: 'seek'} WriteParams without a position member must reject with
// SyntaxError.
directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'content.txt', 'seekable', root);
  const stream = await handle.createWritable();
  await promise_rejects(
      t, SyntaxError(), stream.write({type: 'seek'}), 'seek without position');
}, 'WriteParams: seek missing position param');

View file

@ -0,0 +1,117 @@
// truncate() below the current size discards the trailing bytes.
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'trunc_shrink', root);
  const wfs = await handle.createWritable();
  await wfs.write('1234567890');
  await wfs.truncate(5);
  await wfs.close();
  assert_equals(await getFileSize(handle), 5);
  assert_equals(await getFileContents(handle), '12345');
}, 'truncate() to shrink a file');
// truncate() beyond the current size zero-fills the extension.
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'trunc_grow', root);
  const stream = await handle.createWritable();
  await stream.write('abc');
  await stream.truncate(5);
  await stream.close();
  assert_equals(await getFileContents(handle), 'abc\0\0');
  assert_equals(await getFileSize(handle), 5);
}, 'truncate() to grow a file');
// createWritable() on a handle whose parent directory has been removed must
// reject with NotFoundError.
directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'create_writable_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);
  await root.removeEntry('parent_dir', {recursive: true});
  await promise_rejects(t, 'NotFoundError', handle.createWritable());
}, 'createWritable() fails when parent directory is removed');
// Removing the parent directory while a writable stream is open makes a
// subsequent write() reject with NotFoundError.
directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'write_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);
  const wfs = await handle.createWritable();
  await root.removeEntry('parent_dir', {recursive: true});
  const attempt = wfs.write('foo');
  await promise_rejects(t, 'NotFoundError', attempt);
}, 'write() fails when parent directory is removed');
// truncate() on a stream whose parent directory has been removed must reject
// with NotFoundError.
directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'truncate_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);
  const stream = await handle.createWritable();
  await root.removeEntry('parent_dir', {recursive: true});
  await promise_rejects(t, 'NotFoundError', stream.truncate(0));
}, 'truncate() fails when parent directory is removed');
// With keepExistingData: true the stream starts from a copy of the current
// file, so a 3-byte write only overwrites the first 3 of the 5 bytes.
directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'atomic_file_is_copied.txt', 'fooks', root);
  const stream = await handle.createWritable({keepExistingData: true});
  await stream.write('bar');
  await stream.close();
  assert_equals(await getFileContents(handle), 'barks');
  assert_equals(await getFileSize(handle), 5);
}, 'createWritable({keepExistingData: true}): atomic writable file stream initialized with source contents');
// With keepExistingData: false the stream starts empty; the original
// contents remain visible to readers until close() commits the replacement.
directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'atomic_file_is_not_copied.txt', 'very long string', root);
  const stream = await handle.createWritable({keepExistingData: false});
  await stream.write('bar');
  assert_equals(await getFileContents(handle), 'very long string');
  await stream.close();
  assert_equals(await getFileContents(handle), 'bar');
  assert_equals(await getFileSize(handle), 3);
}, 'createWritable({keepExistingData: false}): atomic writable file stream initialized with empty file');
// Truncating to a size larger than the current cursor leaves the cursor in
// place: the following write lands at offset 0, yielding 'abc45'.
directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'trunc_smaller_offset.txt', '1234567890', root);
  const stream = await handle.createWritable({keepExistingData: true});
  await stream.truncate(5);
  await stream.write('abc');
  await stream.close();
  assert_equals(await getFileContents(handle), 'abc45');
  assert_equals(await getFileSize(handle), 5);
}, 'cursor position: truncate size > offset');
// Truncating below the current cursor clamps the cursor to the new size, so
// the following write appends at offset 5, yielding '12345abc'.
directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'trunc_bigger_offset.txt', '1234567890', root);
  const stream = await handle.createWritable({keepExistingData: true});
  await stream.seek(6);
  await stream.truncate(5);
  await stream.write('abc');
  await stream.close();
  assert_equals(await getFileContents(handle), '12345abc');
  assert_equals(await getFileSize(handle), 8);
}, 'cursor position: truncate size < offset');
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'contents', root);
  const stream = await handle.createWritable();
  // Deliberately NOT awaited: the stream must queue these commands and run
  // them in order; the awaited close() resolves only after they complete.
  stream.write('abc');
  stream.write('def');
  stream.truncate(9);
  stream.seek(0);
  stream.write('xyz');
  await stream.close();
  assert_equals(await getFileContents(handle), 'xyzdef\0\0\0');
  assert_equals(await getFileSize(handle), 9);
}, 'commands are queued');

View file

@ -1,3 +1,3 @@
mypy==0.760
mypy==0.761
mypy-extensions==0.4.3
typed-ast==1.4.0

View file

@ -3,6 +3,7 @@ import platform
import socket
from abc import ABCMeta, abstractmethod
from copy import deepcopy
from six import iteritems
from ..wptcommandline import require_arg # noqa: F401
@ -199,5 +200,5 @@ class ExecutorBrowser(object):
up the browser from the runner process.
"""
def __init__(self, **kwargs):
for k, v in kwargs.iteritems():
for k, v in iteritems(kwargs):
setattr(self, k, v)

View file

@ -2,7 +2,8 @@ from .base import get_timeout_multiplier, maybe_add_args, certificate_domain_lis
from .webkit import WebKitBrowser
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
WebDriverRefTestExecutor) # noqa: F401
WebDriverRefTestExecutor, # noqa: F401
WebDriverCrashtestExecutor) # noqa: F401
from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401
__wptrunner__ = {"product": "epiphany",
@ -11,7 +12,8 @@ __wptrunner__ = {"product": "epiphany",
"browser_kwargs": "browser_kwargs",
"executor": {"testharness": "WebDriverTestharnessExecutor",
"reftest": "WebDriverRefTestExecutor",
"wdspec": "WebKitDriverWdspecExecutor"},
"wdspec": "WebKitDriverWdspecExecutor",
"crashtest": "WebDriverCrashtestExecutor"},
"executor_kwargs": "executor_kwargs",
"env_extras": "env_extras",
"env_options": "env_options",

View file

@ -2,7 +2,8 @@ from .base import Browser, ExecutorBrowser, require_arg
from .base import get_timeout_multiplier, certificate_domain_list # noqa: F401
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
WebDriverRefTestExecutor) # noqa: F401
WebDriverRefTestExecutor, # noqa: F401
WebDriverCrashtestExecutor) # noqa: F401
from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401
from ..webdriver_server import WebKitDriverServer
@ -13,7 +14,8 @@ __wptrunner__ = {"product": "webkit",
"browser_kwargs": "browser_kwargs",
"executor": {"testharness": "WebDriverTestharnessExecutor",
"reftest": "WebDriverRefTestExecutor",
"wdspec": "WebKitDriverWdspecExecutor"},
"wdspec": "WebKitDriverWdspecExecutor",
"crashtest": "WebDriverCrashtestExecutor"},
"executor_kwargs": "executor_kwargs",
"env_extras": "env_extras",
"env_options": "env_options",

View file

@ -2,7 +2,8 @@ from .base import get_timeout_multiplier, maybe_add_args, certificate_domain_lis
from .webkit import WebKitBrowser
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
WebDriverRefTestExecutor) # noqa: F401
WebDriverRefTestExecutor, # noqa: F401
WebDriverCrashtestExecutor) # noqa: F401
from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401
__wptrunner__ = {"product": "webkitgtk_minibrowser",
@ -11,7 +12,8 @@ __wptrunner__ = {"product": "webkitgtk_minibrowser",
"browser_kwargs": "browser_kwargs",
"executor": {"testharness": "WebDriverTestharnessExecutor",
"reftest": "WebDriverRefTestExecutor",
"wdspec": "WebKitDriverWdspecExecutor"},
"wdspec": "WebKitDriverWdspecExecutor",
"crashtest": "WebDriverCrashtestExecutor"},
"executor_kwargs": "executor_kwargs",
"env_extras": "env_extras",
"env_options": "env_options",

View file

@ -5,6 +5,7 @@ import signal
import socket
import sys
import time
from six import iteritems
from mozlog import get_default_logger, handlers, proxy
@ -98,7 +99,7 @@ class TestEnvironment(object):
def __exit__(self, exc_type, exc_val, exc_tb):
self.process_interrupts()
for scheme, servers in self.servers.iteritems():
for scheme, servers in iteritems(self.servers):
for port, server in servers:
server.kill()
for cm in self.env_extras_cms:
@ -194,7 +195,7 @@ class TestEnvironment(object):
route_builder.add_handler(b"GET", b"/resources/testdriver.js",
StringHandler(data, "text/javascript"))
for url_base, paths in self.test_paths.iteritems():
for url_base, paths in iteritems(self.test_paths):
if url_base == "/":
continue
route_builder.add_mount_point(url_base, paths["tests_path"])
@ -220,7 +221,7 @@ class TestEnvironment(object):
def test_servers(self):
failed = []
host = self.config["server_host"]
for scheme, servers in self.servers.iteritems():
for scheme, servers in iteritems(self.servers):
for port, server in servers:
if self.test_server_port:
s = socket.socket()

View file

@ -7,6 +7,7 @@ import tempfile
import threading
import traceback
import uuid
from six import iteritems
from mozprocess import ProcessHandler
@ -45,7 +46,7 @@ def build_servo_command(test, test_url_func, browser, binary, pause_after_test,
args += ["-Z", debug_opts]
for stylesheet in browser.user_stylesheets:
args += ["--user-stylesheet", stylesheet]
for pref, value in test.environment.get('prefs', {}).iteritems():
for pref, value in iteritems(test.environment.get('prefs', {})):
args += ["--pref", "%s=%s" % (pref, value)]
if browser.ca_certificate_path:
args += ["--certificate-path", browser.ca_certificate_path]

View file

@ -1,5 +1,6 @@
from math import log
from collections import defaultdict
from six import iteritems, itervalues
class Node(object):
def __init__(self, prop, value):
@ -33,7 +34,7 @@ def entropy(results):
result_counts = defaultdict(int)
total = float(len(results))
for values in results.itervalues():
for values in itervalues(results):
# Not sure this is right, possibly want to treat multiple values as
# distinct from multiple of the same value?
for value in values:
@ -41,7 +42,7 @@ def entropy(results):
entropy_sum = 0
for count in result_counts.itervalues():
for count in itervalues(result_counts):
prop = float(count) / total
entropy_sum -= prop * log(prop, 2)
@ -52,7 +53,7 @@ def split_results(prop, results):
"""Split a dictionary of results into a dictionary of dictionaries where
each sub-dictionary has a specific value of the given property"""
by_prop = defaultdict(dict)
for run_info, value in results.iteritems():
for run_info, value in iteritems(results):
by_prop[run_info[prop]][run_info] = value
return by_prop
@ -77,13 +78,13 @@ def build_tree(properties, dependent_props, results, tree=None):
prop_index = {prop: i for i, prop in enumerate(properties)}
all_results = defaultdict(int)
for result_values in results.itervalues():
for result_value, count in result_values.iteritems():
for result_values in itervalues(results):
for result_value, count in iteritems(result_values):
all_results[result_value] += count
# If there is only one result we are done
if not properties or len(all_results) == 1:
for value, count in all_results.iteritems():
for value, count in iteritems(all_results):
tree.result_values[value] += count
tree.run_info |= set(results.keys())
return tree
@ -99,7 +100,7 @@ def build_tree(properties, dependent_props, results, tree=None):
continue
new_entropy = 0.
results_sets_entropy = []
for prop_value, result_set in result_sets.iteritems():
for prop_value, result_set in iteritems(result_sets):
results_sets_entropy.append((entropy(result_set), prop_value, result_set))
new_entropy += (float(len(result_set)) / len(results)) * results_sets_entropy[-1][0]
@ -109,7 +110,7 @@ def build_tree(properties, dependent_props, results, tree=None):
# In the case that no properties partition the space
if not results_partitions:
for value, count in all_results.iteritems():
for value, count in iteritems(all_results):
tree.result_values[value] += count
tree.run_info |= set(results.keys())
return tree

View file

@ -1,6 +1,7 @@
import os
from six.moves.urllib.parse import urljoin
from collections import deque
from six import text_type
from .wptmanifest.backends import static
from .wptmanifest.backends.base import ManifestItem
@ -57,7 +58,7 @@ def tags(node):
"""Set of tags that have been applied to the test"""
try:
value = node.get("tags")
if isinstance(value, (str, unicode)):
if isinstance(value, text_type):
return {value}
return set(value)
except KeyError:
@ -66,7 +67,7 @@ def tags(node):
def prefs(node):
def value(ini_value):
if isinstance(ini_value, (str, unicode)):
if isinstance(ini_value, text_type):
return tuple(pref_piece.strip() for pref_piece in ini_value.split(':', 1))
else:
# this should be things like @Reset, which are apparently type 'object'
@ -74,7 +75,7 @@ def prefs(node):
try:
node_prefs = node.get("prefs")
if type(node_prefs) in (str, unicode):
if isinstance(node_prefs, text_type):
rv = dict(value(node_prefs))
else:
rv = dict(value(item) for item in node_prefs)
@ -86,7 +87,7 @@ def prefs(node):
def set_prop(name, node):
try:
node_items = node.get(name)
if isinstance(node_items, (str, unicode)):
if isinstance(node_items, text_type):
rv = {node_items}
else:
rv = set(node_items)
@ -99,7 +100,7 @@ def leak_threshold(node):
rv = {}
try:
node_items = node.get("leak-threshold")
if isinstance(node_items, (str, unicode)):
if isinstance(node_items, text_type):
node_items = [node_items]
for item in node_items:
process, value = item.rsplit(":", 1)
@ -156,7 +157,7 @@ def fuzzy_prop(node):
if not isinstance(value, list):
value = [value]
for item in value:
if not isinstance(item, (str, unicode)):
if not isinstance(item, text_type):
rv.append(item)
continue
parts = item.rsplit(":", 1)
@ -478,7 +479,7 @@ def get_manifest(metadata_root, test_path, url_base, run_info):
"""
manifest_path = expected.expected_path(metadata_root, test_path)
try:
with open(manifest_path) as f:
with open(manifest_path, "rb") as f:
return static.compile(f,
run_info,
data_cls_getter=data_cls_getter,
@ -497,7 +498,7 @@ def get_dir_manifest(path, run_info):
values should be computed.
"""
try:
with open(path) as f:
with open(path, "rb") as f:
return static.compile(f,
run_info,
data_cls_getter=lambda x,y: DirectoryManifest)

View file

@ -150,5 +150,5 @@ class IncludeManifest(ManifestItem):
def get_manifest(manifest_path):
with open(manifest_path) as f:
with open(manifest_path, "rb") as f:
return conditional.compile(f, data_cls_getter=lambda x, y: IncludeManifest)

View file

@ -3,7 +3,7 @@ import os
from six.moves.urllib.parse import urljoin, urlsplit
from collections import namedtuple, defaultdict, deque
from math import ceil
from six import iterkeys, itervalues, iteritems
from six import integer_types, iterkeys, itervalues, iteritems, string_types, text_type
from .wptmanifest import serialize
from .wptmanifest.node import (DataNode, ConditionalNode, BinaryExpressionNode,
@ -411,7 +411,7 @@ class PropertyUpdate(object):
for e in errors:
if disable_intermittent:
condition = e.cond.children[0] if e.cond else None
msg = disable_intermittent if isinstance(disable_intermittent, (str, unicode)) else "unstable"
msg = disable_intermittent if isinstance(disable_intermittent, string_types+(text_type,)) else "unstable"
self.node.set("disabled", msg, condition)
self.node.new_disabled = True
else:
@ -774,7 +774,7 @@ class AppendOnlyListUpdate(PropertyUpdate):
for item in new:
if item is None:
continue
elif isinstance(item, (str, unicode)):
elif isinstance(item, text_type):
rv.add(item)
else:
rv |= item
@ -897,10 +897,10 @@ def make_expr(prop_set, rhs):
def make_node(value):
if type(value) in (int, float, long):
if isinstance(value, integer_types+(float,)):
node = NumberNode(value)
elif type(value) in (str, unicode):
node = StringNode(unicode(value))
elif isinstance(value, text_type):
node = StringNode(text_type(value))
elif hasattr(value, "__iter__"):
node = ListNode()
for item in value:
@ -909,10 +909,10 @@ def make_node(value):
def make_value_node(value):
if type(value) in (int, float, long):
if isinstance(value, integer_types+(float,)):
node = ValueNode(value)
elif type(value) in (str, unicode):
node = ValueNode(unicode(value))
elif isinstance(value, text_type):
node = ValueNode(text_type(value))
elif hasattr(value, "__iter__"):
node = ListNode()
for item in value:

View file

@ -4,7 +4,7 @@ import os
from collections import defaultdict, namedtuple
from mozlog import structuredlog
from six import itervalues
from six import ensure_str, ensure_text, iteritems, iterkeys, itervalues, text_type
from six.moves import intern
from . import manifestupdate
@ -44,11 +44,11 @@ class RunInfo(object):
return self.canonical_repr == other.canonical_repr
def iteritems(self):
for key, value in self.data.iteritems():
for key, value in iteritems(self.data):
yield key, value
def items(self):
return list(self.iteritems())
return list(iteritems(self))
def update_expected(test_paths, serve_root, log_file_names,
@ -239,7 +239,7 @@ def pack_result(data):
def unpack_result(data):
if isinstance(data, int):
return (status_intern.get(data), None)
if isinstance(data, unicode):
if isinstance(data, text_type):
return (data, None)
# Unpack multiple statuses into a tuple to be used in the Results named tuple below,
# separating `status` and `known_intermittent`.
@ -289,7 +289,7 @@ def update_results(id_test_map,
test_file_items = set(itervalues(id_test_map))
default_expected_by_type = {}
for test_type, test_cls in wpttest.manifest_test_cls.iteritems():
for test_type, test_cls in iteritems(wpttest.manifest_test_cls):
if test_cls.result_cls:
default_expected_by_type[(test_type, False)] = test_cls.result_cls.default_expected
if test_cls.subtest_result_cls:
@ -427,7 +427,7 @@ class ExpectedUpdater(object):
action_map["lsan_leak"](item)
mozleak_data = data.get("mozleak", {})
for scope, scope_data in mozleak_data.iteritems():
for scope, scope_data in iteritems(mozleak_data):
for key, action in [("objects", "mozleak_object"),
("total", "mozleak_total")]:
for item in scope_data.get(key, []):
@ -439,7 +439,7 @@ class ExpectedUpdater(object):
self.run_info = run_info_intern.store(RunInfo(data["run_info"]))
def test_start(self, data):
test_id = intern(data["test"].encode("utf8"))
test_id = intern(ensure_str(data["test"]))
try:
self.id_test_map[test_id]
except KeyError:
@ -449,8 +449,8 @@ class ExpectedUpdater(object):
self.tests_visited[test_id] = set()
def test_status(self, data):
test_id = intern(data["test"].encode("utf8"))
subtest = intern(data["subtest"].encode("utf8"))
test_id = intern(ensure_str(data["test"]))
subtest = intern(ensure_str(data["subtest"]))
test_data = self.id_test_map.get(test_id)
if test_data is None:
return
@ -467,7 +467,7 @@ class ExpectedUpdater(object):
if data["status"] == "SKIP":
return
test_id = intern(data["test"].encode("utf8"))
test_id = intern(ensure_str(data["test"]))
test_data = self.id_test_map.get(test_id)
if test_data is None:
return
@ -480,7 +480,7 @@ class ExpectedUpdater(object):
del self.tests_visited[test_id]
def assertion_count(self, data):
test_id = intern(data["test"].encode("utf8"))
test_id = intern(ensure_str(data["test"]))
test_data = self.id_test_map.get(test_id)
if test_data is None:
return
@ -491,7 +491,7 @@ class ExpectedUpdater(object):
def test_for_scope(self, data):
dir_path = data.get("scope", "/")
dir_id = intern(os.path.join(dir_path, "__dir__").replace(os.path.sep, "/").encode("utf8"))
dir_id = intern(ensure_str(os.path.join(dir_path, "__dir__").replace(os.path.sep, "/")))
if dir_id.startswith("/"):
dir_id = dir_id[1:]
return dir_id, self.id_test_map[dir_id]
@ -530,13 +530,13 @@ def create_test_tree(metadata_path, test_manifest):
assert all_types > exclude_types
include_types = all_types - exclude_types
for item_type, test_path, tests in test_manifest.itertypes(*include_types):
test_file_data = TestFileData(intern(test_manifest.url_base.encode("utf8")),
intern(item_type.encode("utf8")),
test_file_data = TestFileData(intern(ensure_str(test_manifest.url_base)),
intern(ensure_str(item_type)),
metadata_path,
test_path,
tests)
for test in tests:
id_test_map[intern(test.id.encode("utf8"))] = test_file_data
id_test_map[intern(ensure_str(test.id))] = test_file_data
dir_path = os.path.split(test_path)[0].replace(os.path.sep, "/")
while True:
@ -544,9 +544,9 @@ def create_test_tree(metadata_path, test_manifest):
dir_id = dir_path + "/__dir__"
else:
dir_id = "__dir__"
dir_id = intern((test_manifest.url_base + dir_id).lstrip("/").encode("utf8"))
dir_id = intern(ensure_str((test_manifest.url_base + dir_id).lstrip("/")))
if dir_id not in id_test_map:
test_file_data = TestFileData(intern(test_manifest.url_base.encode("utf8")),
test_file_data = TestFileData(intern(ensure_str(test_manifest.url_base)),
None,
metadata_path,
dir_id,
@ -615,7 +615,7 @@ class TestFileData(object):
self.item_type = item_type
self.test_path = test_path
self.metadata_path = metadata_path
self.tests = {intern(item.id.encode("utf8")) for item in tests}
self.tests = {intern(ensure_str(item.id)) for item in tests}
self._requires_update = False
self.data = defaultdict(lambda: defaultdict(PackedResultList))
@ -656,11 +656,11 @@ class TestFileData(object):
# Return subtest nodes present in the expected file, but missing from the data
rv = []
for test_id, subtests in self.data.iteritems():
test = expected.get_test(test_id.decode("utf8"))
for test_id, subtests in iteritems(self.data):
test = expected.get_test(ensure_text(test_id))
if not test:
continue
seen_subtests = set(item.decode("utf8") for item in subtests.iterkeys() if item is not None)
seen_subtests = set(ensure_text(item) for item in iterkeys(subtests) if item is not None)
missing_subtests = set(test.subtests.keys()) - seen_subtests
for item in missing_subtests:
expected_subtest = test.get_subtest(item)
@ -729,9 +729,9 @@ class TestFileData(object):
test_expected = expected.get_test(test_id)
expected_by_test[test_id] = test_expected
for test_id, test_data in self.data.iteritems():
test_id = test_id.decode("utf8")
for subtest_id, results_list in test_data.iteritems():
for test_id, test_data in iteritems(self.data):
test_id = ensure_str(test_id)
for subtest_id, results_list in iteritems(test_data):
for prop, run_info, value in results_list:
# Special case directory metadata
if subtest_id is None and test_id.endswith("__dir__"):
@ -747,8 +747,7 @@ class TestFileData(object):
if subtest_id is None:
item_expected = test_expected
else:
if isinstance(subtest_id, str):
subtest_id = subtest_id.decode("utf8")
subtest_id = ensure_text(subtest_id)
item_expected = test_expected.get_subtest(subtest_id)
if prop == "status":

View file

@ -1,5 +1,6 @@
import importlib
import imp
from six import iteritems
from .browsers import product_list
@ -44,7 +45,7 @@ class Product(object):
self.get_timeout_multiplier = getattr(module, data["timeout_multiplier"])
self.executor_classes = {}
for test_type, cls_name in data["executor"].iteritems():
for test_type, cls_name in iteritems(data["executor"]):
cls = getattr(module, cls_name)
self.executor_classes[test_type] = cls

View file

@ -5,6 +5,8 @@ from abc import ABCMeta, abstractmethod
from six.moves.queue import Empty
from collections import defaultdict, deque
from multiprocessing import Queue
from six import iteritems
from six.moves import xrange
from . import manifestinclude
from . import manifestexpected
@ -124,7 +126,7 @@ class ManifestLoader(object):
def load(self):
rv = {}
for url_base, paths in self.test_paths.iteritems():
for url_base, paths in iteritems(self.test_paths):
manifest_file = self.load_manifest(url_base=url_base,
**paths)
path_data = {"url_base": url_base}
@ -236,7 +238,7 @@ class TestLoader(object):
manifest_items = self.chunker(manifest_items)
for test_type, test_path, tests in manifest_items:
manifest_file = manifests_by_url_base[iter(tests).next().url_base]
manifest_file = manifests_by_url_base[next(iter(tests)).url_base]
metadata_path = self.manifests[manifest_file]["metadata_path"]
inherit_metadata, test_metadata = self.load_metadata(manifest_file, metadata_path, test_path)

View file

@ -1,7 +1,3 @@
import sys
import pytest
from .. import expectedtree, metadata
from collections import defaultdict
@ -32,8 +28,6 @@ def results_object(results):
return results_obj
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_build_tree_0():
# Pass if debug
results = [({"os": "linux", "version": "18.04", "debug": True}, "FAIL"),
@ -53,8 +47,6 @@ def test_build_tree_0():
assert dump_tree(tree) == expected
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_build_tree_1():
# Pass if linux or windows 10
results = [({"os": "linux", "version": "18.04", "debug": True}, "PASS"),
@ -77,8 +69,6 @@ def test_build_tree_1():
assert dump_tree(tree) == expected
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_build_tree_2():
# Fails in a specific configuration
results = [({"os": "linux", "version": "18.04", "debug": True}, "PASS"),
@ -104,8 +94,6 @@ def test_build_tree_2():
assert dump_tree(tree) == expected
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_build_tree_3():
results = [({"os": "linux", "version": "18.04", "debug": True, "unused": False}, "PASS"),
@ -118,8 +106,6 @@ def test_build_tree_3():
assert dump_tree(tree) == expected
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_build_tree_4():
# Check counts for multiple statuses
results = [({"os": "linux", "version": "18.04", "debug": False}, "FAIL"),

View file

@ -1,4 +1,3 @@
import sys
from io import BytesIO
import pytest
@ -6,8 +5,6 @@ import pytest
from .. import manifestexpected
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
@pytest.mark.parametrize("fuzzy, expected", [
(b"ref.html:1;200", [("ref.html", ((1, 1), (200, 200)))]),
(b"ref.html:0-1;100-200", [("ref.html", ((0, 1), (100, 200)))]),
@ -25,7 +22,7 @@ from .. import manifestexpected
((u"test.html", u"ref1.html", "=="), ((5,10), (100, 100)))]),
])
def test_fuzzy(fuzzy, expected):
data = """
data = b"""
[test.html]
fuzzy: %s""" % fuzzy
f = BytesIO(data)

View file

@ -20,8 +20,6 @@ skip: true
@pytest.mark.xfail(sys.platform == "win32",
reason="NamedTemporaryFile cannot be reopened on Win32")
@pytest.mark.xfail(sys.version[0] == "3",
reason="wptmanifest.parser doesn't support py3")
def test_filter_unicode():
tests = make_mock_manifest(("test", "a", 10), ("test", "a/b", 10),
("test", "c", 10))

View file

@ -108,7 +108,7 @@ def create_log(entries):
getattr(logger, action)(**kwargs)
logger.remove_handler(handler)
else:
json.dump(entries, data)
data.write(json.dumps(entries).encode())
data.seek(0)
return data
@ -132,11 +132,9 @@ def create_test_manifest(tests, url_base="/"):
return m
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_0():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: FAIL""")]
@ -154,11 +152,9 @@ def test_update_0():
assert updated[0][1].is_empty
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_1():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: ERROR""")]
@ -177,11 +173,9 @@ def test_update_1():
assert new_manifest.get_test(test_id).children[0].get("expected", default_run_info) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_known_intermittent_1():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: PASS""")]
@ -218,11 +212,9 @@ def test_update_known_intermittent_1():
"expected", default_run_info) == ["PASS", "FAIL"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_known_intermittent_2():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: PASS""")]
@ -243,11 +235,9 @@ def test_update_known_intermittent_2():
"expected", default_run_info) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_existing_known_intermittent():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: [PASS, FAIL]""")]
@ -286,11 +276,9 @@ def test_update_existing_known_intermittent():
"expected", default_run_info) == ["PASS", "ERROR", "FAIL"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_remove_previous_intermittent():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: [PASS, FAIL]""")]
@ -334,8 +322,6 @@ def test_update_remove_previous_intermittent():
"expected", default_run_info) == ["PASS", "ERROR"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_new_test_with_intermittent():
tests = [("path/to/test.htm", [test_id], "testharness", None)]
@ -373,8 +359,6 @@ def test_update_new_test_with_intermittent():
"expected", default_run_info) == ["PASS", "FAIL"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_expected_tie_resolution():
tests = [("path/to/test.htm", [test_id], "testharness", None)]
@ -402,11 +386,9 @@ def test_update_expected_tie_resolution():
"expected", default_run_info) == ["PASS", "FAIL"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_reorder_expected():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: [PASS, FAIL]""")]
@ -445,10 +427,8 @@ def test_update_reorder_expected():
"expected", default_run_info) == ["FAIL", "PASS"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_and_preserve_unchanged_expected_intermittent():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected:
if os == "android": [PASS, FAIL]
@ -488,11 +468,9 @@ def test_update_and_preserve_unchanged_expected_intermittent():
"expected", default_run_info) == "PASS"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_test_with_intermittent_to_one_expected_status():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: [PASS, FAIL]""")]
@ -513,10 +491,8 @@ def test_update_test_with_intermittent_to_one_expected_status():
"expected", default_run_info) == "ERROR"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_intermittent_with_conditions():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected:
if os == "android": [PASS, FAIL]""")]
@ -548,10 +524,8 @@ def test_update_intermittent_with_conditions():
"expected", run_info_1) == ["PASS", "TIMEOUT", "FAIL"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_and_remove_intermittent_with_conditions():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected:
if os == "android": [PASS, FAIL]""")]
@ -583,11 +557,9 @@ def test_update_and_remove_intermittent_with_conditions():
"expected", run_info_1) == ["PASS", "TIMEOUT"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_intermittent_full():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected:
if os == "mac": [FAIL, TIMEOUT]
@ -623,11 +595,9 @@ def test_update_intermittent_full():
"expected", default_run_info) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_intermittent_full_remove():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected:
if os == "mac": [FAIL, TIMEOUT, PASS]
@ -674,11 +644,9 @@ def test_update_intermittent_full_remove():
"expected", default_run_info) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_full_update():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected:
if os == "mac": [FAIL, TIMEOUT]
@ -714,11 +682,9 @@ def test_full_update():
"expected", default_run_info) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_full_orphan():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: FAIL
[subsub test]
@ -747,11 +713,9 @@ def test_full_orphan():
assert len(new_manifest.get_test(test_id).children) == 1
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_reorder_expected_full_conditions():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected:
if os == "mac": [FAIL, TIMEOUT]
@ -808,11 +772,9 @@ def test_update_reorder_expected_full_conditions():
"expected", default_run_info) == ["PASS", "FAIL"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_skip_0():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: FAIL""")]
@ -828,10 +790,8 @@ def test_skip_0():
assert not updated
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_new_subtest():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected: FAIL""")]
@ -853,10 +813,8 @@ def test_new_subtest():
assert new_manifest.get_test(test_id).children[1].get("expected", default_run_info) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_multiple_0():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected: FAIL""")]
@ -892,10 +850,8 @@ def test_update_multiple_0():
"expected", {"debug": False, "os": "linux"}) == "TIMEOUT"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_multiple_1():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected: FAIL""")]
@ -936,10 +892,8 @@ def test_update_multiple_1():
"expected", run_info_3) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_multiple_2():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected: FAIL""")]
@ -976,10 +930,8 @@ def test_update_multiple_2():
"expected", run_info_2) == "TIMEOUT"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_multiple_3():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected:
if debug: FAIL
@ -1018,10 +970,8 @@ def test_update_multiple_3():
"expected", run_info_2) == "TIMEOUT"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_ignore_existing():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected:
if debug: TIMEOUT
@ -1060,8 +1010,6 @@ def test_update_ignore_existing():
"expected", run_info_2) == "NOTRUN"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_new_test():
tests = [("path/to/test.htm", [test_id], "testharness", None)]
@ -1084,10 +1032,8 @@ def test_update_new_test():
"expected", run_info_1) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_duplicate():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected: ERROR""")]
@ -1106,10 +1052,8 @@ def test_update_duplicate():
"expected", run_info_1) == "ERROR"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_disable_intermittent():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected: ERROR""")]
@ -1128,10 +1072,8 @@ def test_update_disable_intermittent():
"disabled", run_info_1) == "Some message"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_stability_conditional_instability():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected: ERROR""")]
@ -1164,10 +1106,8 @@ def test_update_stability_conditional_instability():
"expected", run_info_2) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_full():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected:
if debug: TIMEOUT
@ -1216,10 +1156,8 @@ def test_update_full():
"expected", run_info_2) == "ERROR"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_full_unknown():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected:
if release_or_beta: ERROR
@ -1259,10 +1197,8 @@ def test_update_full_unknown():
"expected", run_info_2) == "ERROR"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_full_unknown_missing():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[subtest_deleted]
expected:
if release_or_beta: ERROR
@ -1282,10 +1218,8 @@ def test_update_full_unknown_missing():
assert len(updated) == 0
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_default():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
[test1]
expected:
if os == "mac": FAIL
@ -1315,10 +1249,8 @@ def test_update_default():
assert new_manifest.is_empty
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_default_1():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected:
if os == "mac": TIMEOUT
@ -1347,10 +1279,8 @@ def test_update_default_1():
"expected", run_info_2) == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_default_2():
tests = [("path/to/test.htm", [test_id], "testharness", """
tests = [("path/to/test.htm", [test_id], "testharness", b"""
[test.htm]
expected:
if os == "mac": TIMEOUT
@ -1379,10 +1309,8 @@ def test_update_default_2():
"expected", run_info_2) == "TIMEOUT"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_assertion_count_0():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
max-asserts: 4
min-asserts: 2
""")]
@ -1403,10 +1331,8 @@ def test_update_assertion_count_0():
assert new_manifest.get_test(test_id).get("min-asserts") == "2"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_assertion_count_1():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
max-asserts: 4
min-asserts: 2
""")]
@ -1427,10 +1353,8 @@ def test_update_assertion_count_1():
assert new_manifest.get_test(test_id).has_key("min-asserts") is False
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_assertion_count_2():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
max-asserts: 4
min-asserts: 2
""")]
@ -1447,10 +1371,8 @@ def test_update_assertion_count_2():
assert not updated
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_assertion_count_3():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]
max-asserts: 4
min-asserts: 2
""")]
@ -1481,10 +1403,8 @@ def test_update_assertion_count_3():
assert new_manifest.get_test(test_id).get("min-asserts") == "2"
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_assertion_count_4():
tests = [("path/to/test.htm", [test_id], "testharness", """[test.htm]""")]
tests = [("path/to/test.htm", [test_id], "testharness", b"""[test.htm]""")]
log_0 = suite_log([("test_start", {"test": test_id}),
("assertion_count", {"test": test_id,
@ -1512,11 +1432,9 @@ def test_update_assertion_count_4():
assert new_manifest.get_test(test_id).has_key("min-asserts") is False
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_lsan_0():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, "")]
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"")]
log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
"frames": ["foo", "bar"]})])
@ -1529,11 +1447,9 @@ def test_update_lsan_0():
assert new_manifest.get("lsan-allowed") == ["foo"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_lsan_1():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, """
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"""
lsan-allowed: [foo]""")]
log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
@ -1549,13 +1465,11 @@ lsan-allowed: [foo]""")]
assert new_manifest.get("lsan-allowed") == ["baz", "foo"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_lsan_2():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/__dir__", ["path/__dir__"], None, """
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/__dir__", ["path/__dir__"], None, b"""
lsan-allowed: [foo]"""),
("path/to/__dir__", [dir_id], None, "")]
("path/to/__dir__", [dir_id], None, b"")]
log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
"frames": ["foo", "bar"],
@ -1571,11 +1485,9 @@ lsan-allowed: [foo]"""),
assert new_manifest.get("lsan-allowed") == ["baz"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_lsan_3():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, "")]
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"")]
log_0 = suite_log([("lsan_leak", {"scope": "path/to/",
"frames": ["foo", "bar"]})],
@ -1593,11 +1505,9 @@ def test_update_lsan_3():
assert new_manifest.get("lsan-allowed") == ["baz", "foo"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_wptreport_0():
tests = [("path/to/test.htm", [test_id], "testharness",
"""[test.htm]
b"""[test.htm]
[test1]
expected: FAIL""")]
@ -1615,11 +1525,9 @@ def test_update_wptreport_0():
assert updated[0][1].is_empty
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_wptreport_1():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, "")]
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"")]
log = {"run_info": default_run_info.copy(),
"results": [],
@ -1632,11 +1540,9 @@ def test_update_wptreport_1():
assert updated[0][1].get("lsan-allowed") == ["baz"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_leak_total_0():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, "")]
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"")]
log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
"process": "default",
@ -1651,11 +1557,9 @@ def test_update_leak_total_0():
assert new_manifest.get("leak-threshold") == ['default:51200']
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_leak_total_1():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, "")]
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"")]
log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
"process": "default",
@ -1667,11 +1571,9 @@ def test_update_leak_total_1():
assert not updated
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_leak_total_2():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, """
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"""
leak-total: 110""")]
log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
@ -1684,11 +1586,9 @@ leak-total: 110""")]
assert not updated
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_leak_total_3():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, """
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"""
leak-total: 100""")]
log_0 = suite_log([("mozleak_total", {"scope": "path/to/",
@ -1704,11 +1604,9 @@ leak-total: 100""")]
assert new_manifest.get("leak-threshold") == ['default:51200']
@pytest.mark.xfail(sys.version[0] == "3",
reason="metadata doesn't support py3")
def test_update_leak_total_4():
tests = [("path/to/test.htm", [test_id], "testharness", ""),
("path/to/__dir__", [dir_id], None, """
tests = [("path/to/test.htm", [test_id], "testharness", b""),
("path/to/__dir__", [dir_id], None, b"""
leak-total: 110""")]
log_0 = suite_log([
@ -1737,8 +1635,6 @@ class UpdateRunner(StepRunner):
steps = [TestStep]
@pytest.mark.xfail(sys.version[0] == "3",
reason="update.state doesn't support py3")
def test_update_pickle():
logger = structuredlog.StructuredLogger("expected_test")
args = {

View file

@ -1,5 +1,3 @@
import pytest
import sys
from io import BytesIO
from mock import Mock
@ -7,29 +5,29 @@ from manifest import manifest as wptmanifest
from manifest.item import TestharnessTest
from .. import manifestexpected, wpttest
dir_ini_0 = """\
dir_ini_0 = b"""\
prefs: [a:b]
"""
dir_ini_1 = """\
dir_ini_1 = b"""\
prefs: [@Reset, b:c]
max-asserts: 2
min-asserts: 1
tags: [b, c]
"""
dir_ini_2 = """\
dir_ini_2 = b"""\
lsan-max-stack-depth: 42
"""
test_0 = """\
test_0 = b"""\
[0.html]
prefs: [c:d]
max-asserts: 3
tags: [a, @Reset]
"""
test_1 = """\
test_1 = b"""\
[1.html]
prefs:
if os == 'win': [a:b, c:d]
@ -37,12 +35,12 @@ test_1 = """\
if os == 'win': FAIL
"""
test_2 = """\
test_2 = b"""\
[2.html]
lsan-max-stack-depth: 42
"""
test_3 = """\
test_3 = b"""\
[3.html]
[subtest1]
expected: [PASS, FAIL]
@ -54,32 +52,32 @@ test_3 = """\
expected: FAIL
"""
test_4 = """\
test_4 = b"""\
[4.html]
expected: FAIL
"""
test_5 = """\
test_5 = b"""\
[5.html]
"""
test_6 = """\
test_6 = b"""\
[6.html]
expected: [OK, FAIL]
"""
test_7 = """\
test_7 = b"""\
[7.html]
blink_expect_any_subtest_status: yep
"""
test_fuzzy = """\
test_fuzzy = b"""\
[fuzzy.html]
fuzzy: fuzzy-ref.html:1;200
"""
testharness_test = """<script src="/resources/testharness.js"></script>
testharness_test = b"""<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>"""
@ -117,8 +115,6 @@ def make_test_object(test_name,
return wpttest.from_manifest(tests, test, inherit_metadata, test_metadata.get_test(test.id))
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_metadata_inherit():
items = [("test", "a", 10), ("test", "a/b", 10), ("test", "c", 10)]
inherit_metadata = [
@ -136,8 +132,6 @@ def test_metadata_inherit():
assert test_obj.tags == {"a", "dir:a"}
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_conditional():
items = [("test", "a", 10), ("test", "a/b", 10), ("test", "c", 10)]
@ -147,8 +141,6 @@ def test_conditional():
assert test_obj.expected() == "FAIL"
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_metadata_lsan_stack_depth():
items = [("test", "a", 10), ("test", "a/b", 10)]
@ -172,8 +164,6 @@ def test_metadata_lsan_stack_depth():
assert test_obj.lsan_max_stack_depth == 42
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_subtests():
test_obj = make_test_object(test_3, "a/3.html", 3, ("test", "a", 4), None, False)
assert test_obj.expected("subtest1") == "PASS"
@ -184,40 +174,30 @@ def test_subtests():
assert test_obj.known_intermittent("subtest3") == []
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_expected_fail():
test_obj = make_test_object(test_4, "a/4.html", 4, ("test", "a", 5), None, False)
assert test_obj.expected() == "FAIL"
assert test_obj.known_intermittent() == []
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_no_expected():
test_obj = make_test_object(test_5, "a/5.html", 5, ("test", "a", 6), None, False)
assert test_obj.expected() == "OK"
assert test_obj.known_intermittent() == []
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_known_intermittent():
test_obj = make_test_object(test_6, "a/6.html", 6, ("test", "a", 7), None, False)
assert test_obj.expected() == "OK"
assert test_obj.known_intermittent() == ["FAIL"]
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_expect_any_subtest_status():
test_obj = make_test_object(test_7, "a/7.html", 7, ("test", "a", 8), None, False)
assert test_obj.expected() == "OK"
assert test_obj.expect_any_subtest_status() is True
@pytest.mark.xfail(sys.version[0] == "3",
reason="bytes/text confusion in py3")
def test_metadata_fuzzy():
manifest_data = {
"items": {"reftest": {"a/fuzzy.html": [["a/fuzzy.html",
@ -234,7 +214,7 @@ def test_metadata_fuzzy():
test_path="a/fuzzy.html",
url_base="/")
test = manifest.iterpath("a/fuzzy.html").next()
test = next(manifest.iterpath("a/fuzzy.html"))
test_obj = wpttest.from_manifest(manifest, test, [], test_metadata.get_test(test.id))
assert test_obj.fuzzy == {('/a/fuzzy.html', '/a/fuzzy-ref.html', '=='): [[2, 3], [10, 15]]}

View file

@ -11,7 +11,7 @@ class BaseState(object):
return rv
logger.debug("No existing state found")
return object.__new__(cls, logger)
return super(BaseState, cls).__new__(cls)
def __init__(self, logger):
"""Object containing state variables created when running Steps.

View file

@ -37,7 +37,7 @@ def require_arg(kwargs, name, value_func=None):
def create_parser(product_choices=None):
from mozlog import commandline
import products
from . import products
if product_choices is None:
config_data = config.load()

View file

@ -14,7 +14,7 @@ def setup(args, defaults, formatter_defaults=None):
formatter_defaults=formatter_defaults)
setup_stdlib_logger()
for name in args.keys():
for name in list(args.keys()):
if name.startswith("log_"):
args.pop(name)

View file

@ -3,19 +3,20 @@ from __future__ import print_function, unicode_literals
import json
import os
import sys
from six import iteritems, itervalues
from wptserve import sslutils
import environment as env
import products
import testloader
import wptcommandline
import wptlogging
import wpttest
from . import environment as env
from . import products
from . import testloader
from . import wptcommandline
from . import wptlogging
from . import wpttest
from mozlog import capture, handlers
from font import FontInstaller
from testrunner import ManagerGroup
from browsers.base import NullBrowser
from .font import FontInstaller
from .testrunner import ManagerGroup
from .browsers.base import NullBrowser
here = os.path.split(__file__)[0]
@ -102,7 +103,7 @@ def list_disabled(test_paths, product, **kwargs):
run_info, test_loader = get_loader(test_paths, product,
run_info_extras=run_info_extras, **kwargs)
for test_type, tests in test_loader.disabled_tests.iteritems():
for test_type, tests in iteritems(test_loader.disabled_tests):
for test in tests:
rv.append({"test": test.id, "reason": test.disabled()})
print(json.dumps(rv, indent=2))
@ -127,7 +128,7 @@ def get_pause_after_test(test_loader, **kwargs):
if kwargs["headless"]:
return False
tests = test_loader.tests
is_single_testharness = (sum(len(item) for item in tests.itervalues()) == 1 and
is_single_testharness = (sum(len(item) for item in itervalues(tests)) == 1 and
len(tests.get("testharness", [])) == 1)
if kwargs["repeat"] == 1 and kwargs["rerun"] == 1 and is_single_testharness:
return True

View file

@ -2,6 +2,7 @@ import os
import subprocess
from six.moves.urllib.parse import urljoin
from collections import defaultdict
from six import string_types
from .wptmanifest.parser import atoms
@ -337,7 +338,7 @@ class Test(object):
try:
expected = metadata.get("expected")
if isinstance(expected, (basestring)):
if isinstance(expected, string_types):
return expected
elif isinstance(expected, list):
return expected[0]

View file

@ -10,8 +10,8 @@ promise_test(async () => {
builder
.addFunction("swap", type_if_fi)
.addBody([
kExprGetLocal, 1,
kExprGetLocal, 0,
kExprLocalGet, 1,
kExprLocalGet, 0,
kExprReturn,
])
.exportFunc();
@ -31,8 +31,8 @@ promise_test(async () => {
const swap = builder
.addFunction("swap", type_if_fi)
.addBody([
kExprGetLocal, 1,
kExprGetLocal, 0,
kExprLocalGet, 1,
kExprLocalGet, 0,
kExprReturn,
]);
builder

View file

@ -183,7 +183,7 @@ const instanceTestFactory = [
builder
.addFunction("fn", kSig_i_v)
.addBody([
kExprGetGlobal,
kExprGlobalGet,
index,
kExprReturn,
])

View file

@ -81,22 +81,10 @@ test(() => {
const bytes1 = [87, 101, 98, 65, 115, 115, 101, 109, 98, 108, 121];
const bytes2 = [74, 83, 65, 80, 73];
const binary = new Binary;
binary.emit_section(kUnknownSectionCode, section => {
section.emit_string("name");
section.emit_bytes(bytes1);
});
binary.emit_section(kUnknownSectionCode, section => {
section.emit_string("name");
section.emit_bytes(bytes2);
});
binary.emit_section(kUnknownSectionCode, section => {
section.emit_string("foo");
section.emit_bytes(bytes1);
});
const builder = new WasmModuleBuilder();
builder.addExplicitSection(binary.trunc_buffer());
builder.addCustomSection("name", bytes1);
builder.addCustomSection("name", bytes2);
builder.addCustomSection("foo", bytes1);
const buffer = builder.toBuffer()
const module = new WebAssembly.Module(buffer);
@ -119,14 +107,8 @@ test(() => {
const bytes = [87, 101, 98, 65, 115, 115, 101, 109, 98, 108, 121];
const name = "yee\uD801\uDC37eey"
const binary = new Binary;
binary.emit_section(kUnknownSectionCode, section => {
section.emit_string(name);
section.emit_bytes(bytes);
});
const builder = new WasmModuleBuilder();
builder.addExplicitSection(binary.trunc_buffer());
builder.addCustomSection(name, bytes);
const buffer = builder.toBuffer();
const module = new WebAssembly.Module(buffer);
@ -140,14 +122,8 @@ test(() => {
test(() => {
const bytes = [87, 101, 98, 65, 115, 115, 101, 109, 98, 108, 121];
const binary = new Binary;
binary.emit_section(kUnknownSectionCode, section => {
section.emit_string("na\uFFFDme");
section.emit_bytes(bytes);
});
const builder = new WasmModuleBuilder();
builder.addExplicitSection(binary.trunc_buffer());
builder.addCustomSection("na\uFFFDme", bytes);
const buffer = builder.toBuffer();
const module = new WebAssembly.Module(buffer);

View file

@ -66,6 +66,8 @@ let kStartSectionCode = 8; // Start function declaration
let kElementSectionCode = 9; // Elements section
let kCodeSectionCode = 10; // Function code
let kDataSectionCode = 11; // Data segments
let kDataCountSectionCode = 12; // Data segment count (between Element & Code)
let kExceptionSectionCode = 13; // Exception section (between Global & Export)
// Name section types
let kModuleNameCode = 0;
@ -76,7 +78,13 @@ let kWasmFunctionTypeForm = 0x60;
let kWasmAnyFunctionTypeForm = 0x70;
let kHasMaximumFlag = 1;
let kResizableMaximumFlag = 1;
let kSharedHasMaximumFlag = 3;
// Segment flags
let kActiveNoIndex = 0;
let kPassive = 1;
let kActiveWithIndex = 2;
let kPassiveWithElements = 5;
// Function declaration flags
let kDeclFunctionName = 0x01;
@ -91,14 +99,21 @@ let kWasmI64 = 0x7e;
let kWasmF32 = 0x7d;
let kWasmF64 = 0x7c;
let kWasmS128 = 0x7b;
let kWasmAnyRef = 0x6f;
let kWasmAnyFunc = 0x70;
let kWasmExnRef = 0x68;
let kExternalFunction = 0;
let kExternalTable = 1;
let kExternalMemory = 2;
let kExternalGlobal = 3;
let kExternalException = 4;
let kTableZero = 0;
let kMemoryZero = 0;
let kSegmentZero = 0;
let kExceptionAttribute = 0;
// Useful signatures
let kSig_i_i = makeSig([kWasmI32], [kWasmI32]);
@ -123,11 +138,30 @@ let kSig_v_l = makeSig([kWasmI64], []);
let kSig_v_d = makeSig([kWasmF64], []);
let kSig_v_dd = makeSig([kWasmF64, kWasmF64], []);
let kSig_v_ddi = makeSig([kWasmF64, kWasmF64, kWasmI32], []);
let kSig_ii_v = makeSig([], [kWasmI32, kWasmI32]);
let kSig_iii_v = makeSig([], [kWasmI32, kWasmI32, kWasmI32]);
let kSig_ii_i = makeSig([kWasmI32], [kWasmI32, kWasmI32]);
let kSig_iii_i = makeSig([kWasmI32], [kWasmI32, kWasmI32, kWasmI32]);
let kSig_ii_ii = makeSig([kWasmI32, kWasmI32], [kWasmI32, kWasmI32]);
let kSig_iii_ii = makeSig([kWasmI32, kWasmI32], [kWasmI32, kWasmI32, kWasmI32]);
let kSig_v_f = makeSig([kWasmF32], []);
let kSig_f_f = makeSig([kWasmF32], [kWasmF32]);
let kSig_f_d = makeSig([kWasmF64], [kWasmF32]);
let kSig_d_d = makeSig([kWasmF64], [kWasmF64]);
let kSig_r_r = makeSig([kWasmAnyRef], [kWasmAnyRef]);
let kSig_a_a = makeSig([kWasmAnyFunc], [kWasmAnyFunc]);
let kSig_e_e = makeSig([kWasmExnRef], [kWasmExnRef]);
let kSig_i_r = makeSig([kWasmAnyRef], [kWasmI32]);
let kSig_v_r = makeSig([kWasmAnyRef], []);
let kSig_v_a = makeSig([kWasmAnyFunc], []);
let kSig_v_e = makeSig([kWasmExnRef], []);
let kSig_v_rr = makeSig([kWasmAnyRef, kWasmAnyRef], []);
let kSig_v_aa = makeSig([kWasmAnyFunc, kWasmAnyFunc], []);
let kSig_r_v = makeSig([], [kWasmAnyRef]);
let kSig_a_v = makeSig([], [kWasmAnyFunc]);
let kSig_a_i = makeSig([kWasmI32], [kWasmAnyFunc]);
let kSig_e_v = makeSig([], [kWasmExnRef]);
function makeSig(params, results) {
return {params: params, results: results};
@ -163,6 +197,8 @@ let kExprElse = 0x05;
let kExprTry = 0x06;
let kExprCatch = 0x07;
let kExprThrow = 0x08;
let kExprRethrow = 0x09;
let kExprBrOnExn = 0x0a;
let kExprEnd = 0x0b;
let kExprBr = 0x0c;
let kExprBrIf = 0x0d;
@ -170,13 +206,17 @@ let kExprBrTable = 0x0e;
let kExprReturn = 0x0f;
let kExprCallFunction = 0x10;
let kExprCallIndirect = 0x11;
let kExprReturnCall = 0x12;
let kExprReturnCallIndirect = 0x13;
let kExprDrop = 0x1a;
let kExprSelect = 0x1b;
let kExprGetLocal = 0x20;
let kExprSetLocal = 0x21;
let kExprTeeLocal = 0x22;
let kExprGetGlobal = 0x23;
let kExprSetGlobal = 0x24;
let kExprLocalGet = 0x20;
let kExprLocalSet = 0x21;
let kExprLocalTee = 0x22;
let kExprGlobalGet = 0x23;
let kExprGlobalSet = 0x24;
let kExprTableGet = 0x25;
let kExprTableSet = 0x26;
let kExprI32LoadMem = 0x28;
let kExprI64LoadMem = 0x29;
let kExprF32LoadMem = 0x2a;
@ -329,6 +369,108 @@ let kExprI32ReinterpretF32 = 0xbc;
let kExprI64ReinterpretF64 = 0xbd;
let kExprF32ReinterpretI32 = 0xbe;
let kExprF64ReinterpretI64 = 0xbf;
let kExprI32SExtendI8 = 0xc0;
let kExprI32SExtendI16 = 0xc1;
let kExprI64SExtendI8 = 0xc2;
let kExprI64SExtendI16 = 0xc3;
let kExprI64SExtendI32 = 0xc4;
let kExprRefNull = 0xd0;
let kExprRefIsNull = 0xd1;
let kExprRefFunc = 0xd2;
// Prefix opcodes
let kNumericPrefix = 0xfc;
let kSimdPrefix = 0xfd;
let kAtomicPrefix = 0xfe;
// Numeric opcodes.
let kExprMemoryInit = 0x08;
let kExprDataDrop = 0x09;
let kExprMemoryCopy = 0x0a;
let kExprMemoryFill = 0x0b;
let kExprTableInit = 0x0c;
let kExprElemDrop = 0x0d;
let kExprTableCopy = 0x0e;
let kExprTableGrow = 0x0f;
let kExprTableSize = 0x10;
let kExprTableFill = 0x11;
// Atomic opcodes.
let kExprAtomicNotify = 0x00;
let kExprI32AtomicWait = 0x01;
let kExprI64AtomicWait = 0x02;
let kExprI32AtomicLoad = 0x10;
let kExprI32AtomicLoad8U = 0x12;
let kExprI32AtomicLoad16U = 0x13;
let kExprI32AtomicStore = 0x17;
let kExprI32AtomicStore8U = 0x19;
let kExprI32AtomicStore16U = 0x1a;
let kExprI32AtomicAdd = 0x1e;
let kExprI32AtomicAdd8U = 0x20;
let kExprI32AtomicAdd16U = 0x21;
let kExprI32AtomicSub = 0x25;
let kExprI32AtomicSub8U = 0x27;
let kExprI32AtomicSub16U = 0x28;
let kExprI32AtomicAnd = 0x2c;
let kExprI32AtomicAnd8U = 0x2e;
let kExprI32AtomicAnd16U = 0x2f;
let kExprI32AtomicOr = 0x33;
let kExprI32AtomicOr8U = 0x35;
let kExprI32AtomicOr16U = 0x36;
let kExprI32AtomicXor = 0x3a;
let kExprI32AtomicXor8U = 0x3c;
let kExprI32AtomicXor16U = 0x3d;
let kExprI32AtomicExchange = 0x41;
let kExprI32AtomicExchange8U = 0x43;
let kExprI32AtomicExchange16U = 0x44;
let kExprI32AtomicCompareExchange = 0x48;
let kExprI32AtomicCompareExchange8U = 0x4a;
let kExprI32AtomicCompareExchange16U = 0x4b;
let kExprI64AtomicLoad = 0x11;
let kExprI64AtomicLoad8U = 0x14;
let kExprI64AtomicLoad16U = 0x15;
let kExprI64AtomicLoad32U = 0x16;
let kExprI64AtomicStore = 0x18;
let kExprI64AtomicStore8U = 0x1b;
let kExprI64AtomicStore16U = 0x1c;
let kExprI64AtomicStore32U = 0x1d;
let kExprI64AtomicAdd = 0x1f;
let kExprI64AtomicAdd8U = 0x22;
let kExprI64AtomicAdd16U = 0x23;
let kExprI64AtomicAdd32U = 0x24;
let kExprI64AtomicSub = 0x26;
let kExprI64AtomicSub8U = 0x29;
let kExprI64AtomicSub16U = 0x2a;
let kExprI64AtomicSub32U = 0x2b;
let kExprI64AtomicAnd = 0x2d;
let kExprI64AtomicAnd8U = 0x30;
let kExprI64AtomicAnd16U = 0x31;
let kExprI64AtomicAnd32U = 0x32;
let kExprI64AtomicOr = 0x34;
let kExprI64AtomicOr8U = 0x37;
let kExprI64AtomicOr16U = 0x38;
let kExprI64AtomicOr32U = 0x39;
let kExprI64AtomicXor = 0x3b;
let kExprI64AtomicXor8U = 0x3e;
let kExprI64AtomicXor16U = 0x3f;
let kExprI64AtomicXor32U = 0x40;
let kExprI64AtomicExchange = 0x42;
let kExprI64AtomicExchange8U = 0x45;
let kExprI64AtomicExchange16U = 0x46;
let kExprI64AtomicExchange32U = 0x47;
let kExprI64AtomicCompareExchange = 0x49
let kExprI64AtomicCompareExchange8U = 0x4c;
let kExprI64AtomicCompareExchange16U = 0x4d;
let kExprI64AtomicCompareExchange32U = 0x4e;
// Simd opcodes.
let kExprS128LoadMem = 0x00;
let kExprS128StoreMem = 0x01;
let kExprI32x4Splat = 0x0c;
let kExprI32x4Eq = 0x2c;
let kExprS1x4AllTrue = 0x75;
let kExprF32x4Min = 0x9e;
class Binary {
constructor() {
@ -346,7 +488,7 @@ class Binary {
}
trunc_buffer() {
return this.buffer = this.buffer.slice(0, this.length);
return new Uint8Array(this.buffer.buffer, 0, this.length);
}
reset() {
@ -372,7 +514,7 @@ class Binary {
this.buffer[this.length++] = val >> 24;
}
emit_leb(val, max_len) {
emit_leb_u(val, max_len) {
this.ensure_space(max_len);
for (let i = 0; i < max_len; ++i) {
let v = val & 0xff;
@ -387,11 +529,11 @@ class Binary {
}
emit_u32v(val) {
this.emit_leb(val, kMaxVarInt32Size);
this.emit_leb_u(val, kMaxVarInt32Size);
}
emit_u64v(val) {
this.emit_leb(val, kMaxVarInt64Size);
this.emit_leb_u(val, kMaxVarInt64Size);
}
emit_bytes(data) {
@ -443,6 +585,16 @@ class WasmFunctionBuilder {
this.name = name;
this.type_index = type_index;
this.body = [];
this.locals = [];
this.local_names = [];
}
numLocalNames() {
let num_local_names = 0;
for (let loc_name of this.local_names) {
if (loc_name !== undefined) ++num_local_names;
}
return num_local_names;
}
exportAs(name) {
@ -456,9 +608,14 @@ class WasmFunctionBuilder {
}
addBody(body) {
const bodyCopy = body.slice();
bodyCopy.push(kExprEnd);
return this.addBodyWithEnd(bodyCopy);
for (let b of body) {
if (typeof b !== 'number' || (b & (~0xFF)) !== 0 )
throw new Error('invalid body (entries must be 8 bit numbers): ' + body);
}
this.body = body.slice();
// Automatically add the end for the function block to the body.
this.body.push(kExprEnd);
return this;
}
addBodyWithEnd(body) {
@ -466,8 +623,23 @@ class WasmFunctionBuilder {
return this;
}
addLocals(locals) {
this.locals = locals;
getNumLocals() {
let total_locals = 0;
for (let l of this.locals) {
for (let type of ["i32", "i64", "f32", "f64", "s128"]) {
total_locals += l[type + "_count"] || 0;
}
}
return total_locals;
}
addLocals(locals, names) {
const old_num_locals = this.getNumLocals();
this.locals.push(locals);
if (names) {
const missing_names = old_num_locals - this.local_names.length;
this.local_names.push(...new Array(missing_names), ...names);
}
return this;
}
@ -491,21 +663,38 @@ class WasmGlobalBuilder {
}
}
class WasmTableBuilder {
constructor(module, type, initial_size, max_size) {
this.module = module;
this.type = type;
this.initial_size = initial_size;
this.has_max = max_size != undefined;
this.max_size = max_size;
}
exportAs(name) {
this.module.exports.push({name: name, kind: kExternalTable,
index: this.index});
return this;
}
}
class WasmModuleBuilder {
constructor() {
this.types = [];
this.imports = [];
this.exports = [];
this.globals = [];
this.tables = [];
this.exceptions = [];
this.functions = [];
this.table_length_min = 0;
this.table_length_max = undefined;
this.element_segments = [];
this.data_segments = [];
this.segments = [];
this.explicit = [];
this.num_imported_funcs = 0;
this.num_imported_globals = 0;
this.num_imported_tables = 0;
this.num_imported_exceptions = 0;
return this;
}
@ -514,8 +703,8 @@ class WasmModuleBuilder {
return this;
}
addMemory(min, max, exp) {
this.memory = {min: min, max: max, exp: exp};
addMemory(min, max, exp, shared) {
this.memory = {min: min, max: max, exp: exp, shared: shared};
return this;
}
@ -524,6 +713,26 @@ class WasmModuleBuilder {
return this;
}
stringToBytes(name) {
var result = new Binary();
result.emit_string(name);
return result.trunc_buffer()
}
createCustomSection(name, bytes) {
name = this.stringToBytes(name);
var section = new Binary();
section.emit_u8(kUnknownSectionCode);
section.emit_u32v(name.length + bytes.length);
section.emit_bytes(name);
section.emit_bytes(bytes);
return section.trunc_buffer();
}
addCustomSection(name, bytes) {
this.explicit.push(this.createCustomSection(name, bytes));
}
addType(type) {
this.types.push(type);
var pl = type.params.length; // should have params
@ -538,6 +747,24 @@ class WasmModuleBuilder {
return glob;
}
addTable(type, initial_size, max_size = undefined) {
if (type != kWasmAnyRef && type != kWasmAnyFunc && type != kWasmExnRef) {
throw new Error(
'Tables must be of type kWasmAnyRef, kWasmAnyFunc, or kWasmExnRef');
}
let table = new WasmTableBuilder(this, type, initial_size, max_size);
table.index = this.tables.length + this.num_imported_tables;
this.tables.push(table);
return table;
}
addException(type) {
let type_index = (typeof type) == "number" ? type : this.addType(type);
let except_index = this.exceptions.length + this.num_imported_exceptions;
this.exceptions.push(type_index);
return except_index;
}
addFunction(name, type) {
let type_index = (typeof type) == "number" ? type : this.addType(type);
let func = new WasmFunctionBuilder(this, name, type_index);
@ -547,6 +774,9 @@ class WasmModuleBuilder {
}
addImport(module, name, type) {
if (this.functions.length != 0) {
throw new Error('Imported functions must be declared before local ones');
}
let type_index = (typeof type) == "number" ? type : this.addType(type);
this.imports.push({module: module, name: name, kind: kExternalFunction,
type: type_index});
@ -554,23 +784,40 @@ class WasmModuleBuilder {
}
addImportedGlobal(module, name, type, mutable = false) {
if (this.globals.length != 0) {
throw new Error('Imported globals must be declared before local ones');
}
let o = {module: module, name: name, kind: kExternalGlobal, type: type,
mutable: mutable};
this.imports.push(o);
return this.num_imported_globals++;
}
addImportedMemory(module, name, initial = 0, maximum) {
addImportedMemory(module, name, initial = 0, maximum, shared) {
let o = {module: module, name: name, kind: kExternalMemory,
initial: initial, maximum: maximum};
initial: initial, maximum: maximum, shared: shared};
this.imports.push(o);
return this;
}
addImportedTable(module, name, initial, maximum) {
addImportedTable(module, name, initial, maximum, type) {
if (this.tables.length != 0) {
throw new Error('Imported tables must be declared before local ones');
}
let o = {module: module, name: name, kind: kExternalTable, initial: initial,
maximum: maximum};
maximum: maximum, type: type || kWasmAnyFunctionTypeForm};
this.imports.push(o);
return this.num_imported_tables++;
}
addImportedException(module, name, type) {
if (this.exceptions.length != 0) {
throw new Error('Imported exceptions must be declared before local ones');
}
let type_index = (typeof type) == "number" ? type : this.addType(type);
let o = {module: module, name: name, kind: kExternalException, type: type_index};
this.imports.push(o);
return this.num_imported_exceptions++;
}
addExport(name, index) {
@ -585,7 +832,12 @@ class WasmModuleBuilder {
addDataSegment(addr, data, is_global = false) {
this.data_segments.push(
{addr: addr, data: data, is_global: is_global});
{addr: addr, data: data, is_global: is_global, is_active: true});
return this.data_segments.length - 1;
}
addPassiveDataSegment(data) {
this.data_segments.push({data: data, is_active: false});
return this.data_segments.length - 1;
}
@ -593,18 +845,14 @@ class WasmModuleBuilder {
this.exports.push({name: name, kind: kExternalMemory, index: 0});
}
addElementSegment(base, is_global, array, is_import = false) {
this.element_segments.push({base: base, is_global: is_global,
array: array});
if (!is_global) {
var length = base + array.length;
if (length > this.table_length_min && !is_import) {
this.table_length_min = length;
}
if (length > this.table_length_max && !is_import) {
this.table_length_max = length;
}
}
addElementSegment(table, base, is_global, array) {
this.element_segments.push({table: table, base: base, is_global: is_global,
array: array, is_active: true});
return this;
}
addPassiveElementSegment(array, is_import = false) {
this.element_segments.push({array: array, is_active: false});
return this;
}
@ -613,12 +861,30 @@ class WasmModuleBuilder {
if (typeof n != 'number')
throw new Error('invalid table (entries have to be numbers): ' + array);
}
return this.addElementSegment(this.table_length_min, false, array);
if (this.tables.length == 0) {
this.addTable(kWasmAnyFunc, 0);
}
// Adjust the table to the correct size.
let table = this.tables[0];
const base = table.initial_size;
const table_size = base + array.length;
table.initial_size = table_size;
if (table.has_max && table_size > table.max_size) {
table.max_size = table_size;
}
return this.addElementSegment(0, base, false, array);
}
setTableBounds(min, max = undefined) {
this.table_length_min = min;
this.table_length_max = max;
if (this.tables.length != 0) {
throw new Error("The table bounds of table '0' have already been set.");
}
this.addTable(kWasmAnyFunc, min, max);
return this;
}
setName(name) {
this.name = name;
return this;
}
@ -664,15 +930,23 @@ class WasmModuleBuilder {
section.emit_u8(imp.mutable);
} else if (imp.kind == kExternalMemory) {
var has_max = (typeof imp.maximum) != "undefined";
section.emit_u8(has_max ? 1 : 0); // flags
var is_shared = (typeof imp.shared) != "undefined";
if (is_shared) {
section.emit_u8(has_max ? 3 : 2); // flags
} else {
section.emit_u8(has_max ? 1 : 0); // flags
}
section.emit_u32v(imp.initial); // initial
if (has_max) section.emit_u32v(imp.maximum); // maximum
} else if (imp.kind == kExternalTable) {
section.emit_u8(kWasmAnyFunctionTypeForm);
section.emit_u8(imp.type);
var has_max = (typeof imp.maximum) != "undefined";
section.emit_u8(has_max ? 1 : 0); // flags
section.emit_u32v(imp.initial); // initial
if (has_max) section.emit_u32v(imp.maximum); // maximum
} else if (imp.kind == kExternalException) {
section.emit_u32v(kExceptionAttribute);
section.emit_u32v(imp.type);
} else {
throw new Error("unknown/unsupported import kind " + imp.kind);
}
@ -681,31 +955,27 @@ class WasmModuleBuilder {
}
// Add functions declarations
let has_names = false;
let names = false;
if (wasm.functions.length > 0) {
if (debug) print("emitting function decls @ " + binary.length);
binary.emit_section(kFunctionSectionCode, section => {
section.emit_u32v(wasm.functions.length);
for (let func of wasm.functions) {
has_names = has_names || (func.name != undefined &&
func.name.length > 0);
section.emit_u32v(func.type_index);
}
});
}
// Add table section
if (wasm.table_length_min > 0) {
if (debug) print("emitting table @ " + binary.length);
if (wasm.tables.length > 0) {
if (debug) print ("emitting tables @ " + binary.length);
binary.emit_section(kTableSectionCode, section => {
section.emit_u8(1); // one table entry
section.emit_u8(kWasmAnyFunctionTypeForm);
const max = wasm.table_length_max;
const has_max = max !== undefined;
section.emit_u8(has_max ? kHasMaximumFlag : 0);
section.emit_u32v(wasm.table_length_min);
if (has_max) section.emit_u32v(max);
section.emit_u32v(wasm.tables.length);
for (let table of wasm.tables) {
section.emit_u8(table.type);
section.emit_u8(table.has_max);
section.emit_u32v(table.initial_size);
if (table.has_max) section.emit_u32v(table.max_size);
}
});
}
@ -715,7 +985,13 @@ class WasmModuleBuilder {
binary.emit_section(kMemorySectionCode, section => {
section.emit_u8(1); // one memory entry
const has_max = wasm.memory.max !== undefined;
section.emit_u8(has_max ? 1 : 0);
const is_shared = wasm.memory.shared !== undefined;
// Emit flags (bit 0: reszeable max, bit 1: shared memory)
if (is_shared) {
section.emit_u8(has_max ? kSharedHasMaximumFlag : 2);
} else {
section.emit_u8(has_max ? kHasMaximumFlag : 0);
}
section.emit_u32v(wasm.memory.min);
if (has_max) section.emit_u32v(wasm.memory.max);
});
@ -738,7 +1014,7 @@ class WasmModuleBuilder {
break;
case kWasmI64:
section.emit_u8(kExprI64Const);
section.emit_u8(global.init);
section.emit_u64v(global.init);
break;
case kWasmF32:
section.emit_u8(kExprF32Const);
@ -750,10 +1026,22 @@ class WasmModuleBuilder {
f64_view[0] = global.init;
section.emit_bytes(f64_bytes_view);
break;
case kWasmAnyFunc:
case kWasmAnyRef:
if (global.function_index !== undefined) {
section.emit_u8(kExprRefFunc);
section.emit_u32v(global.function_index);
} else {
section.emit_u8(kExprRefNull);
}
break;
case kWasmExnRef:
section.emit_u8(kExprRefNull);
break;
}
} else {
// Emit a global-index initializer.
section.emit_u8(kExprGetGlobal);
section.emit_u8(kExprGlobalGet);
section.emit_u32v(global.init_index);
}
section.emit_u8(kExprEnd); // end of init expression
@ -761,6 +1049,18 @@ class WasmModuleBuilder {
});
}
// Add exceptions.
if (wasm.exceptions.length > 0) {
if (debug) print("emitting exceptions @ " + binary.length);
binary.emit_section(kExceptionSectionCode, section => {
section.emit_u32v(wasm.exceptions.length);
for (let type of wasm.exceptions) {
section.emit_u32v(kExceptionAttribute);
section.emit_u32v(type);
}
});
}
// Add export table.
var mem_export = (wasm.memory !== undefined && wasm.memory.exp);
var exports_count = wasm.exports.length + (mem_export ? 1 : 0);
@ -797,22 +1097,55 @@ class WasmModuleBuilder {
section.emit_u32v(inits.length);
for (let init of inits) {
section.emit_u8(0); // table index / flags
if (init.is_global) {
section.emit_u8(kExprGetGlobal);
if (init.is_active) {
// Active segment.
if (init.table == 0) {
section.emit_u32v(kActiveNoIndex);
} else {
section.emit_u32v(kActiveWithIndex);
section.emit_u32v(init.table);
}
if (init.is_global) {
section.emit_u8(kExprGlobalGet);
} else {
section.emit_u8(kExprI32Const);
}
section.emit_u32v(init.base);
section.emit_u8(kExprEnd);
if (init.table != 0) {
section.emit_u8(kExternalFunction);
}
section.emit_u32v(init.array.length);
for (let index of init.array) {
section.emit_u32v(index);
}
} else {
section.emit_u8(kExprI32Const);
}
section.emit_u32v(init.base);
section.emit_u8(kExprEnd);
section.emit_u32v(init.array.length);
for (let index of init.array) {
section.emit_u32v(index);
// Passive segment.
section.emit_u8(kPassiveWithElements); // flags
section.emit_u8(kWasmAnyFunc);
section.emit_u32v(init.array.length);
for (let index of init.array) {
if (index === null) {
section.emit_u8(kExprRefNull);
section.emit_u8(kExprEnd);
} else {
section.emit_u8(kExprRefFunc);
section.emit_u32v(index);
section.emit_u8(kExprEnd);
}
}
}
}
});
}
// If there are any passive data segments, add the DataCount section.
if (wasm.data_segments.some(seg => !seg.is_active)) {
binary.emit_section(kDataCountSectionCode, section => {
section.emit_u32v(wasm.data_segments.length);
});
}
// Add function bodies.
if (wasm.functions.length > 0) {
// emit function bodies
@ -824,9 +1157,7 @@ class WasmModuleBuilder {
header.reset();
// Function body length will be patched later.
let local_decls = [];
let l = func.locals;
if (l != undefined) {
let local_decls_count = 0;
for (let l of func.locals || []) {
if (l.i32_count > 0) {
local_decls.push({count: l.i32_count, type: kWasmI32});
}
@ -839,6 +1170,18 @@ class WasmModuleBuilder {
if (l.f64_count > 0) {
local_decls.push({count: l.f64_count, type: kWasmF64});
}
if (l.s128_count > 0) {
local_decls.push({count: l.s128_count, type: kWasmS128});
}
if (l.anyref_count > 0) {
local_decls.push({count: l.anyref_count, type: kWasmAnyRef});
}
if (l.anyfunc_count > 0) {
local_decls.push({count: l.anyfunc_count, type: kWasmAnyFunc});
}
if (l.except_count > 0) {
local_decls.push({count: l.except_count, type: kWasmExnRef});
}
}
header.emit_u32v(local_decls.length);
@ -860,17 +1203,21 @@ class WasmModuleBuilder {
binary.emit_section(kDataSectionCode, section => {
section.emit_u32v(wasm.data_segments.length);
for (let seg of wasm.data_segments) {
section.emit_u8(0); // linear memory index 0 / flags
if (seg.is_global) {
// initializer is a global variable
section.emit_u8(kExprGetGlobal);
section.emit_u32v(seg.addr);
if (seg.is_active) {
section.emit_u8(0); // linear memory index 0 / flags
if (seg.is_global) {
// initializer is a global variable
section.emit_u8(kExprGlobalGet);
section.emit_u32v(seg.addr);
} else {
// initializer is a constant
section.emit_u8(kExprI32Const);
section.emit_u32v(seg.addr);
}
section.emit_u8(kExprEnd);
} else {
// initializer is a constant
section.emit_u8(kExprI32Const);
section.emit_u32v(seg.addr);
section.emit_u8(kPassive); // flags
}
section.emit_u8(kExprEnd);
section.emit_u32v(seg.data.length);
section.emit_bytes(seg.data);
}
@ -883,21 +1230,50 @@ class WasmModuleBuilder {
binary.emit_bytes(exp);
}
// Add function names.
if (has_names) {
if (debug) print("emitting names @ " + binary.length);
// Add names.
let num_function_names = 0;
let num_functions_with_local_names = 0;
for (let func of wasm.functions) {
if (func.name !== undefined) ++num_function_names;
if (func.numLocalNames() > 0) ++num_functions_with_local_names;
}
if (num_function_names > 0 || num_functions_with_local_names > 0 ||
wasm.name !== undefined) {
if (debug) print('emitting names @ ' + binary.length);
binary.emit_section(kUnknownSectionCode, section => {
section.emit_string("name");
var count = wasm.functions.length + wasm.num_imported_funcs;
section.emit_u32v(count);
for (var i = 0; i < wasm.num_imported_funcs; i++) {
section.emit_u8(0); // empty string
section.emit_u8(0); // local names count == 0
section.emit_string('name');
// Emit module name.
if (wasm.name !== undefined) {
section.emit_section(kModuleNameCode, name_section => {
name_section.emit_string(wasm.name);
});
}
for (let func of wasm.functions) {
var name = func.name == undefined ? "" : func.name;
section.emit_string(name);
section.emit_u8(0); // local names count == 0
// Emit function names.
if (num_function_names > 0) {
section.emit_section(kFunctionNamesCode, name_section => {
name_section.emit_u32v(num_function_names);
for (let func of wasm.functions) {
if (func.name === undefined) continue;
name_section.emit_u32v(func.index);
name_section.emit_string(func.name);
}
});
}
// Emit local names.
if (num_functions_with_local_names > 0) {
section.emit_section(kLocalNamesCode, name_section => {
name_section.emit_u32v(num_functions_with_local_names);
for (let func of wasm.functions) {
if (func.numLocalNames() == 0) continue;
name_section.emit_u32v(func.index);
name_section.emit_u32v(func.numLocalNames());
for (let i = 0; i < func.local_names.length; ++i) {
if (func.local_names[i] === undefined) continue;
name_section.emit_u32v(i);
name_section.emit_string(func.local_names[i]);
}
}
});
}
});
}
@ -925,13 +1301,24 @@ class WasmModuleBuilder {
}
}
function wasmI32Const(val) {
let bytes = [kExprI32Const];
for (let i = 0; i < 4; ++i) {
bytes.push(0x80 | ((val >> (7 * i)) & 0x7f));
function wasmSignedLeb(val, max_len = 5) {
let res = [];
for (let i = 0; i < max_len; ++i) {
let v = val & 0x7f;
// If {v} sign-extended from 7 to 32 bits is equal to val, we are done.
if (((v << 25) >> 25) == val) {
res.push(v);
return res;
}
res.push(v | 0x80);
val = val >> 7;
}
bytes.push((val >> (7 * 4)) & 0x7f);
return bytes;
throw new Error(
'Leb value <' + val + '> exceeds maximum length of ' + max_len);
}
function wasmI32Const(val) {
return [kExprI32Const, ...wasmSignedLeb(val, 5)];
}
function wasmF32Const(f) {