Update web-platform-tests to revision bf71b1f245ce34e447b7bde8ed46694574a63da7

WPT Sync Bot 2019-01-19 20:34:46 -05:00
parent 7256d123ff
commit e17a773b4e
35 changed files with 1567 additions and 467 deletions

View file

@ -37,6 +37,3 @@
[Revoke blob URL after creating Request, will fetch]
  expected: FAIL
- [Revoke blob URL after calling fetch, fetch should succeed]
-   expected: FAIL

View file

@ -303979,26 +303979,6 @@
{} {}
] ]
], ],
"portals/resources/portal-forward-with-broadcast.sub.html": [
[
{}
]
],
"portals/resources/portal-host-cross-origin-navigate.sub.html": [
[
{}
]
],
"portals/resources/portal-host-cross-origin.sub.html": [
[
{}
]
],
"portals/resources/portal-host.html": [
[
{}
]
],
"portals/resources/portals-rendering-portal.html": [ "portals/resources/portals-rendering-portal.html": [
[ [
{} {}
@ -344697,6 +344677,12 @@
{} {}
] ]
], ],
"css/css-logical/logical-box-border-radius.html": [
[
"/css/css-logical/logical-box-border-radius.html",
{}
]
],
"css/css-logical/logical-box-border-shorthands.html": [ "css/css-logical/logical-box-border-shorthands.html": [
[ [
"/css/css-logical/logical-box-border-shorthands.html", "/css/css-logical/logical-box-border-shorthands.html",
@ -345957,6 +345943,12 @@
{} {}
] ]
], ],
"css/css-scroll-anchoring/heuristic-with-offset-update.html": [
[
"/css/css-scroll-anchoring/heuristic-with-offset-update.html",
{}
]
],
"css/css-scroll-anchoring/inheritance.html": [ "css/css-scroll-anchoring/inheritance.html": [
[ [
"/css/css-scroll-anchoring/inheritance.html", "/css/css-scroll-anchoring/inheritance.html",
@ -382811,12 +382803,36 @@
{} {}
] ]
], ],
"kv-storage/cause-errors-via-idb.https.html": [
[
"/kv-storage/cause-errors-via-idb.https.html",
{}
]
],
"kv-storage/entries.https.html": [
[
"/kv-storage/entries.https.html",
{}
]
],
"kv-storage/key-types.https.html": [ "kv-storage/key-types.https.html": [
[ [
"/kv-storage/key-types.https.html", "/kv-storage/key-types.https.html",
{} {}
] ]
], ],
"kv-storage/keys-values-entries.https.html": [
[
"/kv-storage/keys-values-entries.https.html",
{}
]
],
"kv-storage/keys.https.html": [
[
"/kv-storage/keys.https.html",
{}
]
],
"kv-storage/non-secure-context-dynamic-import.html": [ "kv-storage/non-secure-context-dynamic-import.html": [
[ [
"/kv-storage/non-secure-context-dynamic-import.html", "/kv-storage/non-secure-context-dynamic-import.html",
@ -382847,6 +382863,12 @@
{} {}
] ]
], ],
"kv-storage/values.https.html": [
[
"/kv-storage/values.https.html",
{}
]
],
"lifecycle/freeze.html": [ "lifecycle/freeze.html": [
[ [
"/lifecycle/freeze.html", "/lifecycle/freeze.html",
@ -397669,12 +397691,6 @@
{} {}
] ]
], ],
"portals/portals-host-exposure.sub.html": [
[
"/portals/portals-host-exposure.sub.html",
{}
]
],
"portals/portals-host-null.html": [ "portals/portals-host-null.html": [
[ [
"/portals/portals-host-null.html", "/portals/portals-host-null.html",
@ -457209,7 +457225,7 @@
"manual" "manual"
], ],
"clipboard-apis/async-write-image-read-image-manual.https.html": [ "clipboard-apis/async-write-image-read-image-manual.https.html": [
"ac7fb0863e75a1a33451033db054d2bf812d8450", "6117e469792ff61ff30015f2d94f1ceb2e3332ac",
"manual" "manual"
], ],
"clipboard-apis/async-write-text-read-dttext-manual.https.html": [ "clipboard-apis/async-write-text-read-dttext-manual.https.html": [
@ -563332,6 +563348,10 @@
"b33528d9cd16b6de169cbd03e98b867403f090a6", "b33528d9cd16b6de169cbd03e98b867403f090a6",
"testharness" "testharness"
], ],
"css/css-logical/logical-box-border-radius.html": [
"81b8fa0fece70e6f20f3b51ff9011e4775305a33",
"testharness"
],
"css/css-logical/logical-box-border-shorthands.html": [ "css/css-logical/logical-box-border-shorthands.html": [
"d05d864f59261bd1dd0ff2cbd9278a8535a5910e", "d05d864f59261bd1dd0ff2cbd9278a8535a5910e",
"testharness" "testharness"
@ -563445,7 +563465,7 @@
"support" "support"
], ],
"css/css-logical/resources/test-box-properties.js": [ "css/css-logical/resources/test-box-properties.js": [
"1f17ff296ff2c3dcf81db1a112bda24ef04eb126", "ef1854f97de4c93c3156540ed81101fcc9993578",
"support" "support"
], ],
"css/css-logical/resources/test-logical-values.js": [ "css/css-logical/resources/test-logical-values.js": [
@ -570236,6 +570256,10 @@
"cea6b61dfe8b60754f656c860fe4d6f2dfff0c18", "cea6b61dfe8b60754f656c860fe4d6f2dfff0c18",
"testharness" "testharness"
], ],
"css/css-scroll-anchoring/heuristic-with-offset-update.html": [
"7fcbd983ed569025e5f46fe69002ca91e86fd21a",
"testharness"
],
"css/css-scroll-anchoring/inheritance.html": [ "css/css-scroll-anchoring/inheritance.html": [
"035d4ffd2e2c8955d4e8f80af3aff5db9285c8ae", "035d4ffd2e2c8955d4e8f80af3aff5db9285c8ae",
"testharness" "testharness"
@ -615229,7 +615253,7 @@
"support" "support"
], ],
"feature-policy/resources/featurepolicy.js": [ "feature-policy/resources/featurepolicy.js": [
"a0756e385de6534821d3c15048e922384e4610ae", "e2577f35c3fb53abda3934274c3e6a281ba617d9",
"support" "support"
], ],
"feature-policy/resources/picture-in-picture.js": [ "feature-policy/resources/picture-in-picture.js": [
@ -638269,25 +638293,41 @@
"support" "support"
], ],
"kv-storage/api-surface.https.html": [ "kv-storage/api-surface.https.html": [
"65452f55be044aa5ec722da26717f527ee81a4e3", "90e705862d599f2920ebdf5fa07cc3e4ba1f6d46",
"testharness"
],
"kv-storage/cause-errors-via-idb.https.html": [
"21fe36b36cb1dbedca5383abb526b9b4f6c8ce3e",
"testharness"
],
"kv-storage/entries.https.html": [
"0d1ab849a709bc8361bca88de34aa91c1ee3e23b",
"testharness" "testharness"
], ],
"kv-storage/helpers/class-assert.js": [ "kv-storage/helpers/class-assert.js": [
"31b25cab9f2d88d8df59a0b4ecb35eef3765e380", "89f0889c56d3a990a812be9208377090607335a2",
"support" "support"
], ],
"kv-storage/helpers/equality-asserters.js": [ "kv-storage/helpers/equality-asserters.js": [
"ad4623c179d75c8d4ce8b1fa8503f943bf6a7c77", "448ab31348cee50be8820185d8bdfb8f626eb9dc",
"support" "support"
], ],
"kv-storage/helpers/kvs-tests.js": [ "kv-storage/helpers/kvs-tests.js": [
"0ffe71fad780f599a11d915d3b3512c95844f7bd", "a6c4d58dfa5e928768d483df11c7d06180bac9fb",
"support" "support"
], ],
"kv-storage/key-types.https.html": [ "kv-storage/key-types.https.html": [
"0dc930258f8b554c6cae4398df3dba930dcdf03c", "0dc930258f8b554c6cae4398df3dba930dcdf03c",
"testharness" "testharness"
], ],
"kv-storage/keys-values-entries.https.html": [
"b26323809bb3d33551ee9630bcf841fa0246262b",
"testharness"
],
"kv-storage/keys.https.html": [
"a6be29725bf3274f6b5bb92b370962507a29b692",
"testharness"
],
"kv-storage/non-secure-context-dynamic-import.html": [ "kv-storage/non-secure-context-dynamic-import.html": [
"6ccbf84ba1dc6acd4931da279c887635b7f8a771", "6ccbf84ba1dc6acd4931da279c887635b7f8a771",
"testharness" "testharness"
@ -638305,7 +638345,11 @@
"testharness" "testharness"
], ],
"kv-storage/undefined-value.https.html": [ "kv-storage/undefined-value.https.html": [
"89da5d5c44f353bc1f5f93eaeaf3acd89eee386c", "4cb483a3d982ab150fb28c6aee3a1398c273e82c",
"testharness"
],
"kv-storage/values.https.html": [
"64756bf195fc3319d9dd21abad4c5d86fa266cfe",
"testharness" "testharness"
], ],
"lifecycle/META.yml": [ "lifecycle/META.yml": [
@ -639809,7 +639853,7 @@
"testharness" "testharness"
], ],
"mediacapture-streams/MediaStream-default-feature-policy.https.html": [ "mediacapture-streams/MediaStream-default-feature-policy.https.html": [
"21e3f5b9af8567cb015604bbcb021cc04216e4c2", "2e38b9e6864d4525d8a649a7093b9f82a30a10cd",
"testharness" "testharness"
], ],
"mediacapture-streams/MediaStream-finished-add.https.html": [ "mediacapture-streams/MediaStream-finished-add.https.html": [
@ -640429,7 +640473,7 @@
"support" "support"
], ],
"mixed-content/generic/tools/generate.py": [ "mixed-content/generic/tools/generate.py": [
"e7a315d59ceab0d56d19d409c6f82ba84592bc37", "1e0a404a709add4a31404f874ca4fc035c9c5702",
"support" "support"
], ],
"mixed-content/generic/tools/regenerate": [ "mixed-content/generic/tools/regenerate": [
@ -640437,7 +640481,7 @@
"support" "support"
], ],
"mixed-content/generic/tools/spec_validator.py": [ "mixed-content/generic/tools/spec_validator.py": [
"0ae2990f4e1b3c89fbc142d0428272bbd7d462d1", "686579ece5797abfdb5441e080db85758fe1b220",
"support" "support"
], ],
"mixed-content/generic/worker.js": [ "mixed-content/generic/worker.js": [
@ -650876,10 +650920,6 @@
"ac1505d2a5b2fe1df083eae75893483e025a2ad7", "ac1505d2a5b2fe1df083eae75893483e025a2ad7",
"testharness" "testharness"
], ],
"portals/portals-host-exposure.sub.html": [
"83e31bd4735131d35b2a03ae82d07be364497689",
"testharness"
],
"portals/portals-host-null.html": [ "portals/portals-host-null.html": [
"e0f1d63743c54c687d62f86abe278873fa823430", "e0f1d63743c54c687d62f86abe278873fa823430",
"testharness" "testharness"
@ -650904,22 +650944,6 @@
"cf09caebc0ff9ac38facde84075a7af5be19fd48", "cf09caebc0ff9ac38facde84075a7af5be19fd48",
"support" "support"
], ],
"portals/resources/portal-forward-with-broadcast.sub.html": [
"39bda69b0eef9b0062809507bfb91d9fc3401d95",
"support"
],
"portals/resources/portal-host-cross-origin-navigate.sub.html": [
"44c6c16c5771f1027c3cc82e966342bbaa80ad8d",
"support"
],
"portals/resources/portal-host-cross-origin.sub.html": [
"aa369d39f0bd674a5cb1a9ad8954e3106a807687",
"support"
],
"portals/resources/portal-host.html": [
"5043a158ea74ef173f166c0580f9c1a27242bd14",
"support"
],
"portals/resources/portals-rendering-portal.html": [ "portals/resources/portals-rendering-portal.html": [
"1b6f23f512da5bb7d1c7b5b85e48277470d2e146", "1b6f23f512da5bb7d1c7b5b85e48277470d2e146",
"support" "support"
@ -651857,11 +651881,11 @@
"support" "support"
], ],
"referrer-policy/generic/tools/common_paths.py": [ "referrer-policy/generic/tools/common_paths.py": [
"9d73401387fe8eea101f76c241b08d2b5e9adca8", "1066fb5bb238ed5dfd57bc5ba0e98af624bfd9b4",
"support" "support"
], ],
"referrer-policy/generic/tools/generate.py": [ "referrer-policy/generic/tools/generate.py": [
"84aa9feab424d062a05f9a00b914760a1f788dbf", "46314d445c15897147285a1ce9a826505014f9f5",
"support" "support"
], ],
"referrer-policy/generic/tools/regenerate": [ "referrer-policy/generic/tools/regenerate": [
@ -651869,7 +651893,7 @@
"support" "support"
], ],
"referrer-policy/generic/tools/spec_validator.py": [ "referrer-policy/generic/tools/spec_validator.py": [
"b59532060a3ef3c2f9c5f6b8b5490e5f8e968be7", "b1749d2c39911eb90b96479042c4dc2a4ba1360e",
"support" "support"
], ],
"referrer-policy/generic/unsupported-csp-referrer-directive.html": [ "referrer-policy/generic/unsupported-csp-referrer-directive.html": [
@ -682789,11 +682813,11 @@
"testharness" "testharness"
], ],
"webrtc-quic/RTCQuicTransport-helper.js": [ "webrtc-quic/RTCQuicTransport-helper.js": [
"7e28feae0937d4a28710be5f0e807c4af0f7c039", "b8d9eaed5aad3a18a583200bc958fc375f2b24da",
"support" "support"
], ],
"webrtc-quic/RTCQuicTransport.https.html": [ "webrtc-quic/RTCQuicTransport.https.html": [
"081f0b4d976e54301953621a9ef43d3f2c57aa8e", "c64ed6af093c690ece59a5c68e949c4ae4f5e6af",
"testharness" "testharness"
], ],
"webrtc-stats/META.yml": [ "webrtc-stats/META.yml": [

View file

@ -3,6 +3,3 @@
[scroll-behavior: smooth on DIV element]
  expected: FAIL
- [Instant scrolling while doing history navigation.]
-   expected: FAIL

View file

@ -32,7 +32,7 @@
[single-byte-decoder.html?XMLHttpRequest]
- expected: TIMEOUT
+ expected: CRASH
[ISO-8859-2: iso_8859-2:1987 (XMLHttpRequest)]
  expected: FAIL

View file

@ -0,0 +1,4 @@
[traverse_the_history_5.html]
[Multiple history traversals, last would be aborted]
expected: FAIL

View file

@ -12,6 +12,3 @@
[Verifies the resolution of entry.startTime is at least 5 microseconds.]
  expected: TIMEOUT
- [Verifies the resolution of performance.now() is at least 5 microseconds.]
-   expected: FAIL

View file

@ -1,4 +1,5 @@
[005.html]
+ expected: ERROR
[dedicated worker in shared worker in dedicated worker]
  expected: FAIL

View file

@ -6,17 +6,20 @@
<script src="/resources/testharness.js"></script> <script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script> <script src="/resources/testharnessreport.js"></script>
<div>Original Image:</div> <p>
<image id='image-to-copy' src="resources/greenbox.png"></image> <p>The bottom image should display the same image as the top image.</p>
<div>Image after copy/paste:</div> <p>Original Image:</p>
<image id='image-on-clipboard'></image> <image id='image-to-copy' width='20' height='20'
src="resources/greenbox.png"></image>
<canvas id="canvas" width="30" height="30"></canvas> <p>Image after copy/paste:</p>
<image id='image-on-clipboard'></image>
<canvas id='canvas' width='20' height='20'></canvas>
</p>
<script> <script>
// Must compare a bitmap as opposed to simply blob data, because an encoded // Must compare a bitmap as opposed to simply blob data, because an encoded
// image may have different contents depending on encoder. // image may have different contents depending on encoder.
const getBitmapString = async blob => { async function getBitmapString(blob) {
const imageBitmap = await createImageBitmap(blob); const imageBitmap = await createImageBitmap(blob);
const canvas = document.getElementById('canvas'); const canvas = document.getElementById('canvas');
const ctx = canvas.getContext('2d'); const ctx = canvas.getContext('2d');
@ -28,14 +31,13 @@ const getBitmapString = async blob => {
return imageData.data.toString(); return imageData.data.toString();
}; };
const loadBlob = async fileName => { async function loadBlob(fileName) {
const fetched = await fetch(fileName); const fetched = await fetch(fileName);
return await fetched.blob(); return await fetched.blob();
} }
promise_test(async t => { promise_test(async t => {
const input = await loadBlob( const input = await loadBlob('resources/greenbox.png');
'http://localhost:8001/clipboard-apis/resources/greenbox.png');
await navigator.clipboard.writeImageExperimental(input); await navigator.clipboard.writeImageExperimental(input);
const output = await navigator.clipboard.readImageExperimental(); const output = await navigator.clipboard.readImageExperimental();
@ -49,9 +51,8 @@ promise_test(async t => {
assert_equals(comparableOutput, comparableInput); assert_equals(comparableOutput, comparableInput);
}, "Verify write and read clipboard (DOMString)"); }, "Verify write and read clipboard (DOMString)");
</script> </script>
<br/><br/> <p>
Note: This is a manual test because it writes/reads to the shared system Note: This is a manual test because it writes/reads to the shared system
clipboard and thus cannot be run async with other tests that might interact clipboard and thus cannot be run async with other tests that might interact
with the clipboard. with the clipboard.
<br/><br/> </p>
The bottom image should display the same image as the top image.

View file

@ -0,0 +1,18 @@
<!DOCTYPE html>
<meta charset="utf-8" />
<title>CSS Logical Properties: flow-relative border-radius</title>
<link rel="author" title="Mats Palmgren" href="mailto:mats@mozilla.com" />
<link rel="help" href="https://drafts.csswg.org/css-logical-1/#border-radius-properties">
<meta name="assert" content="This test checks the interaction of the flow-relative border-*-radius properties with the physical ones in different writing modes." />
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<div id="log"></div>
<script type="module">
import {runTests, createCornerPropertyGroup} from "./resources/test-box-properties.js";
runTests(createCornerPropertyGroup("border-*-radius", {
type: "length",
prerequisites: {"border-style": "solid"},
}));
</script>

View file

@ -66,6 +66,41 @@ export function createBoxPropertyGroup(property, descriptor) {
  return {logical, physical, shorthands, type, prerequisites, property};
}
+ /**
+  * Creates a group of physical and logical box-corner properties.
+  *
+  * @param {string} property
+  *        A string representing the property names, like "border-*-radius".
+  * @param {Object} descriptor
+  * @param {string|string[]} descriptor.type
+  *        Describes the kind of values accepted by the property, like "length".
+  *        Must be a key or a collection of keys from the `testValues` object.
+  * @param {Object={}} descriptor.prerequisites
+  *        Represents property declarations that are needed by `property` to work.
+  *        For example, border-width properties require a border style.
+  */
+ export function createCornerPropertyGroup(property, descriptor) {
+   const logical = {};
+   const physical = {};
+   const shorthands = {};
+   for (const logicalCorner of ["start-start", "start-end", "end-start", "end-end"]) {
+     const prop = property.replace("*", logicalCorner);
+     const [block_side, inline_side] = logicalCorner.split("-");
+     const b = "block" + block_side.charAt(0).toUpperCase() + block_side.slice(1);
+     const i = "inline" + inline_side.charAt(0).toUpperCase() + inline_side.slice(1);
+     const index = b + "-" + i; // e.g. "blockStart-inlineEnd"
+     logical[index] = prop;
+   }
+   let prerequisites = "";
+   for (const physicalCorner of ["top-left", "top-right", "bottom-left", "bottom-right"]) {
+     const prop = property.replace("*", physicalCorner);
+     physical[physicalCorner] = prop;
+     prerequisites += makeDeclaration(descriptor.prerequisites, physicalCorner);
+   }
+   const type = [].concat(descriptor.type);
+   return {logical, physical, shorthands, type, prerequisites, property};
+ }
/**
 * Creates a group of physical and logical sizing properties.
 *
@ -101,6 +136,7 @@ export function runTests(group) {
  const logicals = Object.values(group.logical);
  const physicals = Object.values(group.physical);
  const shorthands = group.shorthands ? Object.entries(group.shorthands) : null;
+ const is_corner = group.property == "border-*-radius";
test(function() {
  const expected = [];
@ -141,7 +177,22 @@ export function runTests(group) {
  const associated = {};
  for (const [logicalSide, logicalProp] of Object.entries(group.logical)) {
-   const physicalProp = group.physical[writingMode[logicalSide]];
+   let physicalProp;
+   if (is_corner) {
+     const [block_side, inline_side] = logicalSide.split("-");
+     const physicalSide1 = writingMode[block_side];
+     const physicalSide2 = writingMode[inline_side];
+     let physicalCorner;
+     // mirror "left-top" to "top-left" etc
+     if (["top", "bottom"].includes(physicalSide1)) {
+       physicalCorner = physicalSide1 + "-" + physicalSide2;
+     } else {
+       physicalCorner = physicalSide2 + "-" + physicalSide1;
+     }
+     physicalProp = group.physical[physicalCorner];
+   } else {
+     physicalProp = group.physical[writingMode[logicalSide]];
+   }
    associated[logicalProp] = physicalProp;
    associated[physicalProp] = logicalProp;
  }

View file

@ -0,0 +1,58 @@
<!DOCTYPE html>
<meta charset="utf-8">
<link rel="help" href="https://drafts.csswg.org/css-scroll-anchoring-1/">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<html>
<head>
<style type="text/css">
#scroller {
overflow: scroll;
height: 500px;
height: 500px;
}
#before {
height: 200px;
}
#anchor {
position: relative;
width: 200px;
height: 200px;
margin-bottom: 500px;
background-color: blue;
/*
* To trigger the Gecko bug that's being regression-tested here, we
* need 'top' to start out at a non-'auto' value, so that the
* dynamic change can trigger Gecko's "RecomputePosition" fast path
*/
top: 0px;
}
</style>
</head>
<body>
<div id="scroller">
<div id="before">
</div>
<div id="anchor">
</div>
</div>
<script type="text/javascript">
test(() => {
let scroller = document.querySelector('#scroller');
let before = document.querySelector('#before');
let anchor = document.querySelector('#anchor');
// Scroll down to select #anchor as a scroll anchor
scroller.scrollTop = 200;
// Adjust the 'top' of #anchor, which should trigger a suppression
anchor.style.top = '10px';
// Expand #before and make sure we don't apply an adjustment
before.style.height = '300px';
assert_equals(scroller.scrollTop, 200);
}, 'Positioned ancestors with dynamic changes to offsets trigger scroll suppressions.');
</script>
</body>
</html>

View file

@ -83,7 +83,7 @@ function test_feature_availability_with_post_message_result(
// tests the feature availability and posts the result back to the parent.
// Otherwise, does nothing.
function test_feature_in_iframe(feature_name, feature_promise_factory) {
- if (location.hash.includes(feature_name)) {
+ if (location.hash.endsWith(`#${feature_name}`)) {
    feature_promise_factory().then(
        () => window.parent.postMessage('#OK', '*'),
        (e) => window.parent.postMessage('#' + e.name, '*'));

View file

@ -23,7 +23,9 @@ test(() => {
  classAssert.propertyKeys(StorageArea.prototype, [
    "constructor", "set", "get", "delete", "clear",
    "keys", "values", "entries", "backingStore"
- ], []);
+ ], [
+   Symbol.asyncIterator
+ ]);
  classAssert.methods(StorageArea.prototype, {
    set: 2,
@ -40,6 +42,10 @@ test(() => {
  });
}, "StorageArea.prototype methods and properties");
+ test(() => {
+   assert_equals(StorageArea.prototype[Symbol.asyncIterator], StorageArea.prototype.entries);
+ }, "[Symbol.asyncIterator]() and entries() must be the same function");
testWithArea(async area => {
  classAssert.propertyKeys(area, [], []);
}, "Instances don't have any properties");

View file

@ -0,0 +1,53 @@
<!DOCTYPE html>
<meta charset="utf-8">
<title>KV Storage: causing errors by directly manipulating the IDB</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/IndexedDB/support-promises.js"></script>
<script type="module">
import { testWithArea } from "./helpers/kvs-tests.js";
const mustFail = {
"set()": area => area.set(1, "value 1"),
"get()": area => area.get(1),
"delete()": area => area.delete(1),
"keys()": area => {
const iter = area.keys();
return iter.next();
},
"values()": area => {
const iter = area.values();
return iter.next();
},
"entries()": area => {
const iter = area.entries();
return iter.next();
}
};
for (const [method, testFn] of Object.entries(mustFail)) {
testWithArea(async (area, t) => {
const { database, store, version } = area.backingStore;
const db = await migrateNamedDatabase(t, database, version + 1, () => {});
const result = testFn(area);
await promise_rejects(t, "VersionError", result);
}, `${method}: upgrading the database must cause a "VersionError" DOMException`);
testWithArea(async (area, t) => {
const { database, store } = area.backingStore;
// Set up a new database with that name, but with no object stores!
// NB: this depends on the fact that createNamedDatabase sets the initial version to 1, which is
// the same as the database version used/expected by KV Storage.
const db = await createNamedDatabase(t, database, () => {});
const result = testFn(area);
await promise_rejects(t, "NotFoundError", result);
}, `${method}: creating a same-named database with no object store must cause a "NotFoundError" DOMException`);
}
</script>

View file

@ -0,0 +1,290 @@
<!DOCTYPE html>
<meta charset="utf-8">
<title>KV Storage: entries() trickier tests</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script type="module">
import { testWithArea } from "./helpers/kvs-tests.js";
import * as classAssert from "./helpers/class-assert.js";
import {
assertAsyncIteratorEquals,
assertAsyncIteratorCustomEquals,
assertArrayCustomEquals,
assertEqualPostKeyRoundtripping
} from "./helpers/equality-asserters.js";
function assertEqualsArrayOrUndefined(actual, expected, label) {
if (expected === undefined) {
return assert_equals(actual, expected, label);
}
return assert_array_equals(actual, expected, label);
}
testWithArea(async area => {
await area.set(1, "value 1");
await area.set(2, "value 2");
await area.set(3, "value 3");
await assertAsyncIteratorCustomEquals(
area.entries(),
[[1, "value 1"], [2, "value 2"], [3, "value 3"]],
assert_array_equals
);
}, "Using for-await-of to collect the results works");
testWithArea(async area => {
// We're not testing every key type since this isn't a test of IndexedDB.
await area.set(1, "value 1");
await area.set(new Date(500), "value date 500");
await area.set(-1, "value -1");
await area.set(new Date(-20), "value date -20");
await area.set("aaa", "value aaa");
await area.set("a", "value a");
await area.set(-Infinity, "value -Infinity");
await assertAsyncIteratorCustomEquals(
area.entries(),
[
[-Infinity, "value -Infinity"],
[-1, "value -1"],
[1, "value 1"],
[new Date(-20), "value date -20"],
[new Date(500), "value date 500"],
["a", "value a"],
["aaa", "value aaa"]
],
(actual, expected, label) => {
return assertArrayCustomEquals(actual, expected, assertEqualPostKeyRoundtripping, label);
}
);
}, "Results are returned in IndexedDB order");
testWithArea(async area => {
await area.set(1, "value 1");
await area.set(2, "value 2");
await area.set(3, "value 3");
const iter = area.entries();
const iterResults = [
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next()
];
classAssert.iterResultsCustom(
iterResults,
[
[[1, "value 1"], false],
[[2, "value 2"], false],
[[3, "value 3"], false],
[undefined, true],
[undefined, true],
[undefined, true]
],
assertEqualsArrayOrUndefined
);
}, "Manual testing of .next() calls, with awaiting");
testWithArea(async area => {
area.set(1, "value 1");
area.set(2, "value 2");
area.set(3, "value 3");
const iter = area.entries();
const promises = [
iter.next(),
iter.next(),
iter.next(),
iter.next(),
iter.next(),
iter.next()
];
const iterResults = await Promise.all(promises);
classAssert.iterResultsCustom(
iterResults,
[
[[1, "value 1"], false],
[[2, "value 2"], false],
[[3, "value 3"], false],
[undefined, true],
[undefined, true],
[undefined, true]
],
assertEqualsArrayOrUndefined
);
}, "Manual testing of .next() calls, no awaiting");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.set(15, "value 15");
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [30, "value 30"], [40, "value 40"]],
assert_array_equals
);
}, "Inserting an entry before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.set(25, "value 25");
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [25, "value 25"], [30, "value 30"], [40, "value 40"]],
assert_array_equals
);
}, "Inserting an entry after the current entry must show up in iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.delete(10);
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [30, "value 30"], [40, "value 40"]],
assert_array_equals
);
}, "Deleting an entry before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.delete(20);
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [30, "value 30"], [40, "value 40"]],
assert_array_equals
);
}, "Deleting the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.delete(30);
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [40, "value 40"]],
assert_array_equals
);
}, "Deleting an entry after the current entry must show up in iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.set(10, "value 10, but changed!!");
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [30, "value 30"], [40, "value 40"]],
assert_array_equals
);
}, "Modifying a value before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.set(20, "value 20, but changed!!");
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [30, "value 30"], [40, "value 40"]],
assert_array_equals
);
}, "Modifying a value at the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const entry of area.entries()) {
seen.push(entry);
if (entry[0] === 20) {
await area.set(30, "value 30, but changed!!");
}
}
assertArrayCustomEquals(
seen,
[[10, "value 10"], [20, "value 20"], [30, "value 30, but changed!!"], [40, "value 40"]],
assert_array_equals
);
}, "Modifying a value after the current entry must show up in iteration");
</script>

View file

@ -38,6 +38,38 @@ export function propertyKeys(o, expectedNames, expectedSymbols, label) {
    `${label}property symbols`);
}
+ export function iterResultCustom(o, expectedValue, expectedDone, valueAsserter, label) {
+   label = formatLabel(label);
+   assert_equals(typeof expectedDone, "boolean",
+     `${label} iterResult assert usage check: expectedDone must be a boolean`);
+   propertyKeys(o, ["value", "done"], [], label);
+   assert_equals(Object.getPrototypeOf(o), Object.prototype, `${label}prototype must be Object.prototype`);
+   valueAsserter(o.value, expectedValue, `${label}value`);
+   assert_equals(o.done, expectedDone, `${label}done`);
+ }
+ export function iterResult(o, expectedValue, expectedDone, label) {
+   return iterResultCustom(o, expectedValue, expectedDone, assert_equals, label);
+ }
+ export function iterResultsCustom(actualArray, expectedArrayOfArrays, valueAsserter, label) {
+   label = formatLabel(label);
+   assert_equals(actualArray.length, expectedArrayOfArrays.length,
+     `${label} iterResults assert usage check: actual and expected must have the same length`);
+   for (let i = 0; i < actualArray.length; ++i) {
+     const [expectedValue, expectedDone] = expectedArrayOfArrays[i];
+     iterResultCustom(actualArray[i], expectedValue, expectedDone, valueAsserter, `${label}iter result ${i}`);
+   }
+ }
+ export function iterResults(actualArray, expectedArrayOfArrays, label) {
+   return iterResultsCustom(actualArray, expectedArrayOfArrays, assert_equals, label);
+ }
export function methods(o, expectedMethods) {
  for (const [name, length] of Object.entries(expectedMethods)) {
    method(o, name, length);
@ -103,5 +135,5 @@ function isConstructorTest(o) {
}
function formatLabel(label) {
- return label !== undefined ? ` ${label}` : "";
+ return label !== undefined ? `${label} ` : "";
}

View file

@ -1,37 +1,91 @@
export function assertEqualDates(actual, expected, label) {
-   assert_equals(expected.constructor, Date,
-     "assertEqualDates usage check: expected must be a Date");
-   const labelPart = label === undefined ? "" : `${label}: `;
-   assert_equals(actual.constructor, Date, `${labelPart}must be a Date`);
-   assert_equals(actual.valueOf(), expected.valueOf(), `${labelPart}timestamps must match`);
+   label = formatLabel(label);
+   assert_equals(expected.constructor, Date,
+     `${label}assertEqualDates usage check: expected must be a Date`);
+   assert_equals(actual.constructor, Date, `${label}must be a Date`);
+   assert_equals(actual.valueOf(), expected.valueOf(), `${label}timestamps must match`);
+ }
+ export function assertEqualPostKeyRoundtripping(actual, expected, label) {
+   label = formatLabel(label);
+   // Please extend this to support other types as needed!
+   assert_true(
+     typeof expected === "number" || typeof expected === "string" || expected.constructor === Date,
+     `${label}assertEqualPostKeyRoundtripping usage check: currently only supports numbers, strings, and dates`
+   );
+   if (expected.constructor === Date) {
+     assert_equals(actual.constructor, Date, `${label}comparing to Date(${Number(expected)}) (actual = ${actual})`);
+     actual = Number(actual);
+     expected = Number(expected);
+   }
+   assert_equals(actual, expected, label);
}
export function assertEqualArrayBuffers(actual, expected, label) {
-   assert_equals(expected.constructor, ArrayBuffer,
-     "assertEqualArrayBuffers usage check: expected must be an ArrayBuffer");
-   const labelPart = label === undefined ? "" : `${label}: `;
-   assert_equals(actual.constructor, ArrayBuffer, `${labelPart}must be an ArrayBuffer`);
-   assert_array_equals(new Uint8Array(actual), new Uint8Array(expected), `${labelPart}must match`);
+   label = formatLabel(label);
+   assert_equals(expected.constructor, ArrayBuffer,
+     `${label}assertEqualArrayBuffers usage check: expected must be an ArrayBuffer`);
+   assert_equals(actual.constructor, ArrayBuffer, `${label}must be an ArrayBuffer`);
+   assert_array_equals(new Uint8Array(actual), new Uint8Array(expected), `${label}must match`);
}
export function assertArrayBufferEqualsABView(actual, expected, label) {
+   label = formatLabel(label);
  assert_true(ArrayBuffer.isView(expected),
-     "assertArrayBufferEqualsABView usage check: expected must be an ArrayBuffer view");
+     `${label}assertArrayBufferEqualsABView usage check: expected must be an ArrayBuffer view`);
  assertEqualArrayBuffers(actual, expected.buffer, label);
}
+ export function assertAsyncIteratorEquals(actual, expected, label) {
+   return assertAsyncIteratorCustomEquals(actual, expected, Object.is, label);
+ }
export function assertArrayCustomEquals(actual, expected, equalityAsserter, label) {
+   label = formatLabel(label);
  assert_true(Array.isArray(expected),
-     "assertArrayCustomEquals usage check: expected must be an Array");
-   const labelPart = label === undefined ? "" : `${label}: `;
-   assert_true(Array.isArray(actual), `${labelPart}must be an array`);
-   assert_equals(actual.length, expected.length, `${labelPart}length must be as expected`);
+     `${label} assertArrayCustomEquals usage check: expected must be an Array`);
+   assert_true(Array.isArray(actual), `${label}must be an array`);
+   assert_equals(actual.length, expected.length, `${label}length must be as expected`);
  for (let i = 0; i < actual.length; ++i) {
-     equalityAsserter(actual[i], expected[i], `${labelPart}index ${i}`);
+     equalityAsserter(actual[i], expected[i], `${label}index ${i}`);
  }
}
+ export async function assertAsyncIteratorCustomEquals(actual, expected, equalityAsserter, label) {
+   label = formatLabel(label);
+   assert_true(Array.isArray(expected),
+     `${label} assertAsyncIteratorCustomEquals usage check: expected must be an Array`);
+   const collected = await collectAsyncIterator(actual);
+   assert_equals(collected.length, expected.length, `${label}length must be as expected`);
+   for (let i = 0; i < collected.length; ++i) {
+     equalityAsserter(collected[i], expected[i], `${label}index ${i}`);
+   }
+ }
+ async function collectAsyncIterator(asyncIterator) {
+   const array = [];
+   for await (const entry of asyncIterator) {
+     array.push(entry);
+   }
+   return array;
+ }
+ function formatLabel(label) {
+   return label !== undefined ? `${label} ` : "";
+ }

View file

@ -1,5 +1,5 @@
import { StorageArea, storage as defaultArea } from "std:kv-storage";
- import { assertArrayCustomEquals } from "./equality-asserters.js";
+ import { assertAsyncIteratorEquals, assertAsyncIteratorCustomEquals } from "./equality-asserters.js";
export function testWithArea(testFn, description) {
  promise_test(t => {
@ -36,23 +36,24 @@ function testVariousMethodsInner(key, value, keyEqualityAsserter) {
  await assertPromiseEquals(area.get(key), value, "get()", "the set value");
-   const keysPromise = area.keys();
-   assertIsPromise(keysPromise, "keys()");
-   assertArrayCustomEquals(await keysPromise, [key], keyEqualityAsserter, "keys() must have the key");
-   const valuesPromise = area.values();
-   assertIsPromise(valuesPromise);
-   assert_array_equals(await valuesPromise, [value], "values() must have the value");
-   const entriesPromise = area.entries();
-   assertIsPromise(entriesPromise, "entries()");
-   const entries = await entriesPromise;
-   assert_true(Array.isArray(entries), "entries() must give an array");
-   assert_equals(entries.length, 1, "entries() must have only one value");
-   assert_true(Array.isArray(entries[0]), "entries() 0th element must be an array");
-   assert_equals(entries[0].length, 2, "entries() 0th element must have 2 elements");
-   keyEqualityAsserter(entries[0][0], key, "entries() 0th element's 0th element must be the key");
-   assert_equals(entries[0][1], value, "entries() 0th element's 1st element must be the value");
+   const keysIter = area.keys();
+   await assertAsyncIteratorCustomEquals(keysIter, [key], keyEqualityAsserter, "keys() must have the key");
+   const valuesIter = area.values();
+   await assertAsyncIteratorEquals(valuesIter, [value], "values() must have the value");
+   const entriesIter = area.entries();
+   const entry0 = await entriesIter.next();
+   assert_false(entry0.done, "entries() 0th iter-result must not be done");
+   assert_true(Array.isArray(entry0.value), "entries() 0th iter-result value must be an array");
+   assert_equals(entry0.value.length, 2, "entries() 0th iter-result value must have 2 elements");
+   keyEqualityAsserter(entry0.value[0], key, "entries() 0th iter-result value's 0th element must be the key");
+   assert_equals(entry0.value[1], value, "entries() 0th iter-result value's 1st element must be the value");
+   const entry1 = await entriesIter.next();
+   assert_true(entry1.done, "entries() 1st iter-result must be done");
+   assert_equals(entry1.value, undefined, "entries() 1st iter-result must have undefined value");
  await assertPromiseEquals(area.delete(key), undefined, "delete()", "undefined");

View file

@ -0,0 +1,95 @@
<!DOCTYPE html>
<meta charset="utf-8">
<title>KV Storage: keys()/values()/entries()</title>
<!--
This file contains tests that are easy to generalize over all three methods.
See sibling files for more complicated tests which are not worth generalizing.
-->
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/IndexedDB/support-promises.js"></script>
<script type="module">
import { testWithArea } from "./helpers/kvs-tests.js";
import * as classAssert from "./helpers/class-assert.js";
import { assertAsyncIteratorEquals } from "./helpers/equality-asserters.js";
// Also uses some global functions included via support-promises.js.
const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function*() {}).prototype);
testWithArea(async area => {
const keysProto = Object.getPrototypeOf(area.keys());
const valuesProto = Object.getPrototypeOf(area.values());
const entriesProto = Object.getPrototypeOf(area.entries());
  assert_equals(keysProto, valuesProto, "keys() and values() return values must have the same [[Prototype]]");
  assert_equals(valuesProto, entriesProto, "values() and entries() return values must have the same [[Prototype]]");
}, "keys()/values()/entries() all use the same prototype object");
for (const method of ["keys", "values", "entries"]) {
testWithArea(async area => {
const iter = area[method]();
const proto = Object.getPrototypeOf(iter);
assert_equals(Object.getPrototypeOf(proto), AsyncIteratorPrototype,
"[[Prototype]] must be the async iterator prototype");
classAssert.propertyKeys(proto, ["next"], [], "must only have a next() method");
}, `${method}() return value is an async iterator of the expected shape`);
testWithArea(async area => {
const iter = area[method]();
const promise = iter.next();
await area.set(1, "value 1");
const iterResults = [
await promise,
await iter.next()
];
classAssert.iterResults(iterResults, [
[undefined, true],
[undefined, true]
]);
}, `${method}(): .next() on empty means forever done, even if you set more`);
testWithArea(async area => {
for await (const key of area[method]()) {
assert_unreached("Loop body must not be entered");
}
}, `${method}(): for-await-of loop body never executes`);
testWithArea(async (area, t) => {
await area.set(1, "value 1");
const iter = area[method]();
const { database, store, version } = area.backingStore;
await migrateNamedDatabase(t, database, version + 1, () => {});
const iterResultPromise1 = iter.next();
const iterResultPromise2 = iter.next();
await promise_rejects(t, "VersionError", iterResultPromise1, "first next()");
await promise_rejects(t, "VersionError", iterResultPromise2, "second next()");
const iterResultPromise3 = iter.next();
assert_not_equals(iterResultPromise1, iterResultPromise2,
"Two promises retrieved from synchronous next() calls must be different (1 vs 2)");
assert_not_equals(iterResultPromise1, iterResultPromise3,
"Two promises, one retrieved after waiting for the other, must be different (1 vs 3)");
assert_not_equals(iterResultPromise2, iterResultPromise3,
"Two promises, one retrieved after waiting for the other, must be different (2 vs 3)");
await promise_rejects(t, "VersionError", iterResultPromise2, "third next()");
const reason1 = await iterResultPromise1.catch(r => r);
const reason2 = await iterResultPromise2.catch(r => r);
const reason3 = await iterResultPromise3.catch(r => r);
assert_equals(reason1, reason2, "reasons must be the same (1 vs 2)");
assert_equals(reason2, reason3, "reasons must be the same (2 vs 3)");
}, `${method}(): error path: returns new rejected promises, each with the same reason`);
}
</script>

View file

@ -0,0 +1,236 @@
<!DOCTYPE html>
<meta charset="utf-8">
<title>KV Storage: keys() trickier tests</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script type="module">
import { testWithArea } from "./helpers/kvs-tests.js";
import * as classAssert from "./helpers/class-assert.js";
import {
assertAsyncIteratorEquals,
assertAsyncIteratorCustomEquals,
assertEqualPostKeyRoundtripping
} from "./helpers/equality-asserters.js";
testWithArea(async area => {
await area.set(1, "value 1");
await area.set(2, "value 2");
await area.set(3, "value 3");
await assertAsyncIteratorEquals(area.keys(), [1, 2, 3]);
}, "Using for-await-of to collect the results works");
testWithArea(async area => {
// We're not testing every key type since this isn't a test of IndexedDB.
await area.set(1, "value 1");
await area.set(new Date(500), "value date 500");
await area.set(-1, "value -1");
await area.set(new Date(-20), "value date -20");
await area.set("aaa", "value aaa");
await area.set("a", "value a");
await area.set(-Infinity, "value -Infinity");
await assertAsyncIteratorCustomEquals(
area.keys(),
[
-Infinity,
-1,
1,
new Date(-20),
new Date(500),
"a",
"aaa"
],
assertEqualPostKeyRoundtripping
);
}, "Results are returned in IndexedDB order");
testWithArea(async area => {
await area.set(1, "value 1");
await area.set(2, "value 2");
await area.set(3, "value 3");
const iter = area.keys();
const iterResults = [
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next()
];
classAssert.iterResults(iterResults, [
[1, false],
[2, false],
[3, false],
[undefined, true],
[undefined, true],
[undefined, true]
]);
}, "Manual testing of .next() calls, with awaiting");
testWithArea(async area => {
area.set(1, "value 1");
area.set(2, "value 2");
area.set(3, "value 3");
const iter = area.keys();
const promises = [
iter.next(),
iter.next(),
iter.next(),
iter.next(),
iter.next(),
iter.next()
];
const iterResults = await Promise.all(promises);
classAssert.iterResults(iterResults, [
[1, false],
[2, false],
[3, false],
[undefined, true],
[undefined, true],
[undefined, true]
]);
}, "Manual testing of .next() calls, no awaiting");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.set(15, "value 15");
}
}
assert_array_equals(seen, [10, 20, 30, 40]);
}, "Inserting an entry before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.set(25, "value 25");
}
}
assert_array_equals(seen, [10, 20, 25, 30, 40]);
}, "Inserting an entry after the current entry must show up in iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.delete(10);
}
}
assert_array_equals(seen, [10, 20, 30, 40]);
}, "Deleting an entry before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.delete(20);
}
}
assert_array_equals(seen, [10, 20, 30, 40]);
}, "Deleting the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.delete(30);
}
}
assert_array_equals(seen, [10, 20, 40]);
}, "Deleting an entry after the current entry must show up in iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.set(10, "value 10, but changed!!");
}
}
assert_array_equals(seen, [10, 20, 30, 40]);
}, "Modifying a value before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.set(20, "value 20, but changed!!");
}
}
assert_array_equals(seen, [10, 20, 30, 40]);
}, "Modifying a value at the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const key of area.keys()) {
seen.push(key);
if (key === 20) {
await area.set(30, "value 30, but changed!!");
}
}
assert_array_equals(seen, [10, 20, 30, 40]);
}, "Modifying a value after the current entry must have no effect on iteration (since we're iterating keys)");
</script>

View file

@ -9,6 +9,7 @@
<script type="module"> <script type="module">
import { StorageArea } from "std:kv-storage"; import { StorageArea } from "std:kv-storage";
import { testWithArea } from "./helpers/kvs-tests.js"; import { testWithArea } from "./helpers/kvs-tests.js";
import { assertAsyncIteratorEquals } from "./helpers/equality-asserters.js";
testWithArea(async (area) => { testWithArea(async (area) => {
assert_equals(await area.get("key"), undefined); assert_equals(await area.get("key"), undefined);
@ -18,9 +19,9 @@ testWithArea(async (area) => {
await area.set("key", undefined); await area.set("key", undefined);
assert_equals(await area.get("key"), undefined); assert_equals(await area.get("key"), undefined);
assert_equals((await area.keys()).length, 0, "number of keys"); await assertAsyncIteratorEquals(area.keys(), [], "keys");
assert_equals((await area.values()).length, 0, "number of values"); await assertAsyncIteratorEquals(area.values(), [], "values");
assert_equals((await area.entries()).length, 0, "number of entries"); await assertAsyncIteratorEquals(area.entries(), [], "entries");
}, "Setting undefined as a value when nothing was present is a no-op"); }, "Setting undefined as a value when nothing was present is a no-op");
testWithArea(async (area) => { testWithArea(async (area) => {
@ -29,8 +30,8 @@ testWithArea(async (area) => {
assert_equals(await area.get("key"), undefined); assert_equals(await area.get("key"), undefined);
assert_equals((await area.keys()).length, 0, "number of keys"); await assertAsyncIteratorEquals(area.keys(), [], "keys");
assert_equals((await area.values()).length, 0, "number of values"); await assertAsyncIteratorEquals(area.values(), [], "values");
assert_equals((await area.entries()).length, 0, "number of entries"); await assertAsyncIteratorEquals(area.entries(), [], "entries");
}, "Setting undefined as a value deletes what was previously there"); }, "Setting undefined as a value deletes what was previously there");
</script> </script>

View file

@ -0,0 +1,231 @@
<!DOCTYPE html>
<meta charset="utf-8">
<title>KV Storage: values() trickier tests</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script type="module">
import { testWithArea } from "./helpers/kvs-tests.js";
import * as classAssert from "./helpers/class-assert.js";
import { assertAsyncIteratorEquals } from "./helpers/equality-asserters.js";
testWithArea(async area => {
await area.set(1, "value 1");
await area.set(2, "value 2");
await area.set(3, "value 3");
await assertAsyncIteratorEquals(area.values(), ["value 1", "value 2", "value 3"]);
}, "Using for-await-of to collect the results works");
testWithArea(async area => {
// We're not testing every key type since this isn't a test of IndexedDB.
await area.set(1, "value 1");
await area.set(new Date(500), "value date 500");
await area.set(-1, "value -1");
await area.set(new Date(-20), "value date -20");
await area.set("aaa", "value aaa");
await area.set("a", "value a");
await area.set(-Infinity, "value -Infinity");
await assertAsyncIteratorEquals(
area.values(),
[
"value -Infinity",
"value -1",
"value 1",
"value date -20",
"value date 500",
"value a",
"value aaa"
]
);
}, "Results are returned in IndexedDB key order");
testWithArea(async area => {
await area.set(1, "value 1");
await area.set(2, "value 2");
await area.set(3, "value 3");
const iter = area.values();
const iterResults = [
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next(),
await iter.next()
];
classAssert.iterResults(iterResults, [
["value 1", false],
["value 2", false],
["value 3", false],
[undefined, true],
[undefined, true],
[undefined, true]
]);
}, "Manual testing of .next() calls, with awaiting");
testWithArea(async area => {
area.set(1, "value 1");
area.set(2, "value 2");
area.set(3, "value 3");
const iter = area.values();
const promises = [
iter.next(),
iter.next(),
iter.next(),
iter.next(),
iter.next(),
iter.next()
];
const iterResults = await Promise.all(promises);
classAssert.iterResults(iterResults, [
["value 1", false],
["value 2", false],
["value 3", false],
[undefined, true],
[undefined, true],
[undefined, true]
]);
}, "Manual testing of .next() calls, no awaiting");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.set(15, "value 15");
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 30", "value 40"]);
}, "Inserting an entry before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.set(25, "value 25");
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 25", "value 30", "value 40"]);
}, "Inserting an entry after the current entry must show up in iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.delete(10);
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 30", "value 40"]);
}, "Deleting an entry before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.delete(20);
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 30", "value 40"]);
}, "Deleting the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.delete(30);
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 40"]);
}, "Deleting an entry after the current entry must show up in iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.set(10, "value 10, but changed!!");
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 30", "value 40"]);
}, "Modifying a value before the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.set(20, "value 20, but changed!!");
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 30", "value 40"]);
}, "Modifying a value at the current entry must have no effect on iteration");
testWithArea(async area => {
await area.set(10, "value 10");
await area.set(20, "value 20");
await area.set(30, "value 30");
await area.set(40, "value 40");
let seen = [];
for await (const value of area.values()) {
seen.push(value);
if (value === "value 20") {
await area.set(30, "value 30, but changed!!");
}
}
assert_array_equals(seen, ["value 10", "value 20", "value 30, but changed!!", "value 40"]);
}, "Modifying a value after the current entry must show up in iteration");
</script>
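The mutation cases above all follow one rule: the async iterator visits keys in ascending order and consults the backing store lazily, so changes at or before the cursor are not observed while changes past it are. A minimal consumption sketch of that contract, assuming the std:kv-storage built-in module these tests target (the loop body is illustrative, not part of the suite):

import { storage } from "std:kv-storage";

async function dumpEntries() {
  // Each next() re-reads the database, so entries inserted ahead of the
  // cursor while the loop is running will still be visited.
  for await (const [key, value] of storage.entries()) {
    console.log(key, value);
  }
}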

@@ -7,56 +7,73 @@
<script>
'use strict';
-// The promise_factory must return a promise that runs the feature and
-// resolves if feature usage is successful, otherwise rejects. Using
-// getUserMedia is successful if at least one mic/camera is returned when
-// mic/camera has been explicitly allowed by feature policy.
-function promise_factory(allowed_features) {
-  return new Promise((resolve, reject) => {
-    navigator.mediaDevices.getUserMedia({video: true, audio: true}).then(
-      function(stream) {
-        // If microphone is allowed, there should be at least one microphone
-        // in the result. If camera is allowed, there should be at least one
-        // camera in the result.
-        if ((allowed_features.includes('microphone') &&
-             stream.getAudioTracks().length == 0) ||
-            (allowed_features.includes('camera') &&
-             stream.getVideoTracks().length == 0)) {
-          reject('Feature policy allowed feature but devices not ' +
-              'present.');
-        } else {
-          // Otherwise the result is expected.
-          resolve();
-        }
-      },
-      function(error) { reject(error); });
-  });
-};
+async function gUM({audio, video}) {
+  let stream;
+  try {
+    stream = await navigator.mediaDevices.getUserMedia({audio, video});
+    // getUserMedia must guarantee the number of tracks requested or fail.
+    if ((audio && stream.getAudioTracks().length == 0) ||
+        (video && stream.getVideoTracks().length == 0)) {
+      throw {name: `All requested devices must be present with ` +
+                   `audio ${audio} and video ${video}, or fail`};
+    }
+  } finally {
+    if (stream) {
+      stream.getTracks().forEach(track => track.stop());
+    }
+  }
+}
+
+async function must_disallow_gUM({audio, video}) {
+  try {
+    await gUM({audio, video});
+  } catch (e) {
+    if (e.name == 'NotAllowedError') {
+      return;
+    }
+    throw e;
+  }
+  throw {name: `audio ${audio} and video ${video} constraints must not be ` +
+               `allowed.`};
+}

-var cross_domain = get_host_info().HTTPS_REMOTE_ORIGIN;
+const cross_domain = get_host_info().HTTPS_REMOTE_ORIGIN;
run_all_fp_tests_allow_self(
    cross_domain,
    'microphone',
    'NotAllowedError',
-    function() {
-      return promise_factory('microphone');
-    });
+    async () => {
+      await gUM({audio: true});
+      if (window.location.href.includes(cross_domain)) {
+        await must_disallow_gUM({video: true});
+        await must_disallow_gUM({audio: true, video: true});
+      }
+    }
+);

run_all_fp_tests_allow_self(
    cross_domain,
    'camera',
    'NotAllowedError',
-    function() {
-      return promise_factory('camera');
-    });
+    async () => {
+      await gUM({video: true});
+      if (window.location.href.includes(cross_domain)) {
+        await must_disallow_gUM({audio: true});
+        await must_disallow_gUM({audio: true, video: true});
+      }
+    }
+);

run_all_fp_tests_allow_self(
    cross_domain,
-    'camera; microphone',
+    'camera;microphone',
    'NotAllowedError',
-    function() {
-      return promise_factory('camera; microphone');
-    });
+    async () => {
+      await gUM({audio: true, video: true});
+      await gUM({audio: true});
+      await gUM({video: true});
+    }
+);
</script>
</body>
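The rewritten test performs the same probe a page can use to learn whether policy grants a device: request it and treat only NotAllowedError as a denial. A standalone sketch of that pattern (the helper name is illustrative, not part of the harness):

async function policyAllows(constraints) {
  try {
    const stream = await navigator.mediaDevices.getUserMedia(constraints);
    // Release the devices immediately; the probe only cares about success.
    stream.getTracks().forEach(track => track.stop());
    return true;
  } catch (e) {
    if (e.name === 'NotAllowedError') return false; // denied by policy or the user
    throw e; // NotFoundError and similar are real failures, not policy decisions
  }
}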

@@ -2,6 +2,7 @@
from __future__ import print_function
+import copy
import os, sys, json
from common_paths import *
import spec_validator
@@ -67,10 +68,11 @@ def generate_selection(selection, spec, test_html_template_basename):
                                          test_html_template_basename)
    generated_disclaimer = disclaimer_template \
        % {'generating_script_filename': os.path.relpath(__file__,
                                                         test_root_directory),
           'html_template_filename': os.path.relpath(html_template_filename,
                                                     test_root_directory)}
+    # Adjust the template for the test invoking JS. Indent it to look nice.
    selection['generated_disclaimer'] = generated_disclaimer.rstrip()
    test_description_template = \
        test_description_template.rstrip().replace("\n", "\n" + " " * 33)
@@ -104,6 +106,7 @@ def generate_selection(selection, spec, test_html_template_basename):
    # Write out the generated HTML file.
    write_file(test_filename, test_html_template % selection)

def generate_test_source_files(spec_json, target):
    test_expansion_schema = spec_json['test_expansion_schema']
    specification = spec_json['specification']
@@ -116,33 +119,45 @@ def generate_test_source_files(spec_json, target):
    html_template = "test.%s.html.template" % target

    artifact_order = test_expansion_schema.keys() + ['name']
+    artifact_order.remove('expansion')

    # Create list of excluded tests.
    exclusion_dict = {}
    for excluded_pattern in spec_json['excluded_tests']:
        excluded_expansion = \
-            expand_pattern(excluded_pattern,
-                           test_expansion_schema)
-        for excluded_selection in permute_expansion(excluded_expansion, artifact_order):
+            expand_pattern(excluded_pattern, test_expansion_schema)
+        for excluded_selection in permute_expansion(excluded_expansion,
+                                                    artifact_order):
            excluded_selection_path = selection_pattern % excluded_selection
            exclusion_dict[excluded_selection_path] = True

    for spec in specification:
+        # Used to make entries with expansion="override" override preceding
+        # entries with the same |selection_path|.
+        output_dict = {}
+
        for expansion_pattern in spec['test_expansion']:
-            expansion = expand_pattern(expansion_pattern,
-                                       test_expansion_schema)
+            expansion = expand_pattern(expansion_pattern, test_expansion_schema)
            for selection in permute_expansion(expansion, artifact_order):
                selection_path = selection_pattern % selection
                if not selection_path in exclusion_dict:
-                    generate_selection(selection,
-                                       spec,
-                                       html_template)
+                    if selection_path in output_dict:
+                        if expansion_pattern['expansion'] != 'override':
+                            print("Error: %s's expansion is default but overrides %s" % (selection['name'], output_dict[selection_path]['name']))
+                            sys.exit(1)
+                    output_dict[selection_path] = copy.deepcopy(selection)
                else:
                    print('Excluding selection:', selection_path)
+
+        for selection_path in output_dict:
+            selection = output_dict[selection_path]
+            generate_selection(selection,
+                               spec,
+                               html_template)

def main(target, spec_filename):
-    spec_json = load_spec_json(spec_filename);
+    spec_json = load_spec_json(spec_filename)
    spec_validator.assert_valid_spec_json(spec_json)
    generate_test_source_files(spec_json, target)

@@ -30,17 +30,17 @@ def assert_contains(obj, field):
    assert field in obj, 'Must contain field "%s"' % field

-def assert_string_from(obj, field, items):
+def assert_value_from(obj, field, items):
    assert obj[field] in items, \
        'Field "%s" must be from: %s' % (field, str(items))

-def assert_string_or_list_items_from(obj, field, items):
-    if isinstance(obj[field], basestring):
-        assert_string_from(obj, field, items)
+def assert_atom_or_list_items_from(obj, field, items):
+    if isinstance(obj[field], basestring) or isinstance(obj[field], int):
+        assert_value_from(obj, field, items)
        return

-    assert isinstance(obj[field], list), "%s must be a list!" % field
+    assert isinstance(obj[field], list), '%s must be a list' % field
    for allowed_value in obj[field]:
        assert allowed_value != '*', "Wildcard is not supported for lists!"
        assert allowed_value in items, \
@@ -63,8 +63,8 @@ def assert_value_unique_in(value, used_values):
def assert_valid_artifact(exp_pattern, artifact_key, schema):
    if isinstance(schema, list):
-        assert_string_or_list_items_from(exp_pattern, artifact_key,
-                                         ["*"] + schema)
+        assert_atom_or_list_items_from(exp_pattern, artifact_key,
+                                       ["*"] + schema)
        return

    for sub_artifact_key, sub_schema in schema.iteritems():
@@ -110,7 +110,7 @@ def validate(spec_json, details):
        for spec_exp in spec['test_expansion']:
            details['object'] = spec_exp
            assert_non_empty_string(spec_exp, 'name')
            # The name is unique in same expansion group.
            assert_value_unique_in((spec_exp['expansion'], spec_exp['name']),
                                   used_spec_names)
            assert_contains_only_fields(spec_exp, valid_test_expansion_fields)
@@ -136,7 +136,14 @@ def validate(spec_json, details):
    for excluded_test_expansion in excluded_tests:
        assert_contains_only_fields(excluded_test_expansion,
                                    valid_test_expansion_fields)
+        details['object'] = excluded_test_expansion
+        for artifact in test_expansion_schema:
+            details['test_expansion_field'] = artifact
+            assert_valid_artifact(
+                excluded_test_expansion,
+                artifact,
+                test_expansion_schema[artifact])
+        del details['test_expansion_field']
    del details['object']

@@ -1,44 +0,0 @@
<!DOCTYPE html>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<body>
<script>
let channelIndex = 0;
async function openPortalAndReceiveMessage(portalSrc) {
let channelName = `portals-host-exposure-${channelIndex++}`
let broadcastChannel = new BroadcastChannel(channelName);
try {
let received = new Promise((resolve, reject) => {
broadcastChannel.addEventListener('message', e => {
resolve(e.data);
}, {once: true})
});
let portal = document.createElement('portal');
portal.src = `${portalSrc}?broadcastchannel=${channelName}`;
document.body.appendChild(portal);
return await received;
} finally {
broadcastChannel.close();
}
}
promise_test(async t => {
let {hasHost} = await openPortalAndReceiveMessage(
'resources/portal-host.html');
assert_true(hasHost, "window.portalHost should be defined");
}, "window.portalHost should be exposed in same-origin portal");
promise_test(async t => {
let {hasHost} = await openPortalAndReceiveMessage(
'http://{{hosts[alt][www]}}:{{ports[http][0]}}/portals/resources/portal-host-cross-origin.sub.html');
assert_true(hasHost, "window.portalHost should be defined");
}, "window.portalHost should be exposed in cross-origin portal");
promise_test(async t => {
let {hasHost} = await openPortalAndReceiveMessage(
'resources/portal-host-cross-origin-navigate.sub.html');
assert_true(hasHost, "window.portalHost should be defined");
}, "window.portalHost should be exposed in portal after cross-origin navigation");
</script>
</body>
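The removed harness funnels its result out of the portal through a uniquely named BroadcastChannel. The relay itself, stripped of the portal specifics (channel name and payload are illustrative):

// Test page: resolve with the first message posted on the channel.
function receiveOnce(channelName) {
  const channel = new BroadcastChannel(channelName);
  return new Promise(resolve => {
    channel.addEventListener('message', e => {
      channel.close();
      resolve(e.data);
    }, {once: true});
  });
}

// Embedded document: post a single result, then close the channel.
function reportOnce(channelName, data) {
  const channel = new BroadcastChannel(channelName);
  try {
    channel.postMessage(data);
  } finally {
    channel.close();
  }
}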

@@ -1,14 +0,0 @@
<!DOCTYPE html>
<body>
<script>
function forwardMessage(e) {
let broadcastChannel = new BroadcastChannel(new URL(location).searchParams.get('broadcastchannel'));
try {
broadcastChannel.postMessage(e.data);
} finally {
broadcastChannel.close();
}
}
window.addEventListener("message", forwardMessage);
</script>
</body>

@@ -1,7 +0,0 @@
<!DOCTYPE html>
<body>
<script>
let channelName = new URL(location).searchParams.get('broadcastchannel');
window.location.href = `http://{{hosts[alt][www]}}:{{ports[http][0]}}/portals/resources/portal-host-cross-origin.sub.html?broadcastchannel=${channelName}`;
</script>
</body>

@@ -1,15 +0,0 @@
<!DOCTYPE html>
<body>
<script>
let message = {
hasHost: !!window.portalHost
};
let forwardingIframe = document.createElement('iframe');
let channelName = new URL(location).searchParams.get('broadcastchannel');
forwardingIframe.src = `http://{{host}}:{{ports[http][0]}}/portals/resources/portal-forward-with-broadcast.sub.html?broadcastchannel=${channelName}`;
forwardingIframe.onload = () => {
forwardingIframe.contentWindow.postMessage(message, '*');
}
document.body.appendChild(forwardingIframe);
</script>
</body>
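Because the cross-origin portal cannot post on the test's BroadcastChannel partition directly, the removed helper bounces its result through a same-site iframe. The generic shape of that hop (URL and payload are placeholders):

function forwardViaIframe(relayUrl, payload) {
  const iframe = document.createElement('iframe');
  iframe.src = relayUrl; // a same-site page that re-broadcasts the message
  iframe.onload = () => {
    // '*' mirrors the removed helper; production code would pin the target origin.
    iframe.contentWindow.postMessage(payload, '*');
  };
  document.body.appendChild(iframe);
}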

@@ -1,14 +0,0 @@
<!DOCTYPE html>
<body>
<script>
let message = {
hasHost: !!window.portalHost
};
let broadcastChannel = new BroadcastChannel(new URL(location).searchParams.get('broadcastchannel'));
try {
broadcastChannel.postMessage(message);
} finally {
broadcastChannel.close();
}
</script>
</body>

@@ -30,6 +30,11 @@ def get_template(basename):
        return f.read()

+def write_file(filename, contents):
+    with open(filename, "w") as f:
+        f.write(contents)
+
def read_nth_line(fp, line_number):
    fp.seek(0)
    for i, line in enumerate(fp):
@@ -37,11 +42,14 @@ def read_nth_line(fp, line_number):
            return line

-def load_spec_json():
+def load_spec_json(path_to_spec = None):
+    if path_to_spec is None:
+        path_to_spec = spec_filename
    re_error_location = re.compile('line ([0-9]+) column ([0-9]+)')
-    with open(spec_filename, "r") as f:
+    with open(path_to_spec, "r") as f:
        try:
-            spec_json = json.load(f)
+            return json.load(f)
        except ValueError as ex:
            print(ex.message)
            match = re_error_location.search(ex.message)
@@ -49,7 +57,4 @@ def load_spec_json():
                line_number, column = int(match.group(1)), int(match.group(2))
                print(read_nth_line(f, line_number).rstrip())
                print(" " * (column - 1) + "^")
            sys.exit(1)
-
-    return spec_json

@@ -9,24 +9,29 @@ import spec_validator
import argparse

-def expand_test_expansion_pattern(spec_test_expansion, test_expansion_schema):
+def expand_pattern(expansion_pattern, test_expansion_schema):
    expansion = {}
-    for artifact in spec_test_expansion:
-        artifact_value = spec_test_expansion[artifact]
+    for artifact_key in expansion_pattern:
+        artifact_value = expansion_pattern[artifact_key]
        if artifact_value == '*':
-            expansion[artifact] = test_expansion_schema[artifact]
+            expansion[artifact_key] = test_expansion_schema[artifact_key]
        elif isinstance(artifact_value, list):
-            expansion[artifact] = artifact_value
+            expansion[artifact_key] = artifact_value
+        elif isinstance(artifact_value, dict):
+            # Flattened expansion.
+            expansion[artifact_key] = []
+            values_dict = expand_pattern(artifact_value,
+                                         test_expansion_schema[artifact_key])
+            for sub_key in values_dict.keys():
+                expansion[artifact_key] += values_dict[sub_key]
        else:
-            expansion[artifact] = [artifact_value]
+            expansion[artifact_key] = [artifact_value]

    return expansion

-def permute_expansion(expansion, selection = {}, artifact_index = 0):
-    artifact_order = ['delivery_method', 'redirection', 'origin',
-                      'source_protocol', 'target_protocol', 'subresource',
-                      'referrer_url', 'name']
+def permute_expansion(expansion, artifact_order, selection = {}, artifact_index = 0):
+    assert isinstance(artifact_order, list), "artifact_order should be a list"

    if artifact_index >= len(artifact_order):
        yield selection
@@ -37,6 +42,7 @@ def permute_expansion(expansion, selection = {}, artifact_index = 0):
    for artifact_value in expansion[artifact_key]:
        selection[artifact_key] = artifact_value
        for next_selection in permute_expansion(expansion,
+                                                artifact_order,
                                                selection,
                                                artifact_index + 1):
            yield next_selection
@@ -116,8 +122,8 @@ def generate_selection(selection, spec, subresource_path,
        selection['meta_delivery_method'] = "\n " + \
                                            selection['meta_delivery_method']

-    with open(test_filename, 'w') as f:
-        f.write(test_html_template % selection)
+    # Write out the generated HTML file.
+    write_file(test_filename, test_html_template % selection)

def generate_test_source_files(spec_json, target):
@@ -125,20 +131,22 @@ def generate_test_source_files(spec_json, target):
    specification = spec_json['specification']

    spec_json_js_template = get_template('spec_json.js.template')
-    with open(generated_spec_json_filename, 'w') as f:
-        f.write(spec_json_js_template
-                % {'spec_json': json.dumps(spec_json)})
+    write_file(generated_spec_json_filename,
+               spec_json_js_template % {'spec_json': json.dumps(spec_json)})

    # Choose a debug/release template depending on the target.
    html_template = "test.%s.html.template" % target

+    artifact_order = test_expansion_schema.keys() + ['name']
+    artifact_order.remove('expansion')
+
    # Create list of excluded tests.
    exclusion_dict = {}
    for excluded_pattern in spec_json['excluded_tests']:
        excluded_expansion = \
-            expand_test_expansion_pattern(excluded_pattern,
-                                          test_expansion_schema)
-        for excluded_selection in permute_expansion(excluded_expansion):
+            expand_pattern(excluded_pattern, test_expansion_schema)
+        for excluded_selection in permute_expansion(excluded_expansion,
+                                                    artifact_order):
            excluded_selection_path = selection_pattern % excluded_selection
            exclusion_dict[excluded_selection_path] = True
@@ -147,14 +155,13 @@ def generate_test_source_files(spec_json, target):
        # entries with the same |selection_path|.
        output_dict = {}

-        for spec_test_expansion in spec['test_expansion']:
-            expansion = expand_test_expansion_pattern(spec_test_expansion,
-                                                      test_expansion_schema)
-            for selection in permute_expansion(expansion):
+        for expansion_pattern in spec['test_expansion']:
+            expansion = expand_pattern(expansion_pattern, test_expansion_schema)
+            for selection in permute_expansion(expansion, artifact_order):
                selection_path = selection_pattern % selection
                if not selection_path in exclusion_dict:
                    if selection_path in output_dict:
-                        if spec_test_expansion['expansion'] != 'override':
+                        if expansion_pattern['expansion'] != 'override':
                            print("Error: %s's expansion is default but overrides %s" % (selection['name'], output_dict[selection_path]['name']))
                            sys.exit(1)
                    output_dict[selection_path] = copy.deepcopy(selection)
@@ -166,13 +173,13 @@ def generate_test_source_files(spec_json, target):
            subresource_path = \
                spec_json["subresource_path"][selection["subresource"]]
            generate_selection(selection,
                               spec,
                               subresource_path,
                               html_template)

-def main(target):
-    spec_json = load_spec_json();
+def main(target, spec_filename):
+    spec_json = load_spec_json(spec_filename)
    spec_validator.assert_valid_spec_json(spec_json)
    generate_test_source_files(spec_json, target)
@@ -182,6 +189,8 @@ if __name__ == '__main__':
    parser.add_argument('-t', '--target', type = str,
                        choices = ("release", "debug"), default = "release",
                        help = 'Sets the appropriate template for generating tests')
+    parser.add_argument('-s', '--spec', type = str, default = None,
+                        help = 'Specify a file used for describing and generating the tests')
    # TODO(kristijanburnik): Add option for the spec_json file.
    args = parser.parse_args()
-    main(args.target)
+    main(args.target, args.spec)

@@ -11,36 +11,42 @@ def assert_non_empty_string(obj, field):
        'Field "%s" must be a string' % field
    assert len(obj[field]) > 0, 'Field "%s" must not be empty' % field

def assert_non_empty_list(obj, field):
    assert isinstance(obj[field], list), \
        '%s must be a list' % field
    assert len(obj[field]) > 0, \
        '%s list must not be empty' % field

def assert_non_empty_dict(obj, field):
    assert isinstance(obj[field], dict), \
        '%s must be a dict' % field
    assert len(obj[field]) > 0, \
        '%s dict must not be empty' % field

def assert_contains(obj, field):
    assert field in obj, 'Must contain field "%s"' % field

def assert_value_from(obj, field, items):
    assert obj[field] in items, \
        'Field "%s" must be from: %s' % (field, str(items))

def assert_atom_or_list_items_from(obj, field, items):
    if isinstance(obj[field], basestring) or isinstance(obj[field], int):
        assert_value_from(obj, field, items)
        return

-    assert_non_empty_list(obj, field)
+    assert isinstance(obj[field], list), '%s must be a list' % field
    for allowed_value in obj[field]:
        assert allowed_value != '*', "Wildcard is not supported for lists!"
        assert allowed_value in items, \
            'Field "%s" must be from: %s' % (field, str(items))

def assert_contains_only_fields(obj, expected_fields):
    for expected_field in expected_fields:
        assert_contains(obj, expected_field)
@@ -49,11 +55,22 @@ def assert_contains_only_fields(obj, expected_fields):
        assert actual_field in expected_fields, \
            'Unexpected field "%s".' % actual_field

def assert_value_unique_in(value, used_values):
    assert value not in used_values, 'Duplicate value "%s"!' % str(value)
    used_values[value] = True

+def assert_valid_artifact(exp_pattern, artifact_key, schema):
+    if isinstance(schema, list):
+        assert_atom_or_list_items_from(exp_pattern, artifact_key,
+                                       ["*"] + schema)
+        return
+
+    for sub_artifact_key, sub_schema in schema.iteritems():
+        assert_valid_artifact(exp_pattern[artifact_key], sub_artifact_key,
+                              sub_schema)
+
def validate(spec_json, details):
    """ Validates the json specification for generating tests. """
@@ -102,13 +119,13 @@ def validate(spec_json, details):
            assert_non_empty_string(spec_exp, 'name')
            # The name is unique in same expansion group.
            assert_value_unique_in((spec_exp['expansion'], spec_exp['name']),
                                   used_spec_names)
            assert_contains_only_fields(spec_exp, valid_test_expansion_fields)
            for artifact in test_expansion_schema:
                details['test_expansion_field'] = artifact
-                assert_atom_or_list_items_from(
-                    spec_exp, artifact, ['*'] + test_expansion_schema[artifact])
+                assert_valid_artifact(spec_exp, artifact,
+                                      test_expansion_schema[artifact])
                del details['test_expansion_field']

    # Validate the test_expansion schema members.
@@ -129,10 +146,10 @@ def validate(spec_json, details):
        details['object'] = excluded_test_expansion
        for artifact in test_expansion_schema:
            details['test_expansion_field'] = artifact
-            assert_atom_or_list_items_from(
+            assert_valid_artifact(
                excluded_test_expansion,
                artifact,
-                ['*'] + test_expansion_schema[artifact])
+                test_expansion_schema[artifact])
            del details['test_expansion_field']

    # Validate subresource paths.

@@ -7,21 +7,11 @@
//   makeIceTransport
//   makeGatherAndStartTwoIceTransports

-// Return a promise to generate an RTCCertificate with the given keygen
-// algorithm or a default one if none provided.
-function generateCertificate(keygenAlgorithm) {
-  return RTCPeerConnection.generateCertificate({
-    name: 'ECDSA',
-    namedCurve: 'P-256',
-    ...keygenAlgorithm,
-  });
-}
-
// Construct an RTCQuicTransport instance with the given RTCIceTransport
// instance and the given certificates. The RTCQuicTransport instance will be
// automatically cleaned up when the test finishes.
-function makeQuicTransport(t, iceTransport, certificates) {
-  const quicTransport = new RTCQuicTransport(iceTransport, certificates);
+function makeQuicTransport(t, iceTransport) {
+  const quicTransport = new RTCQuicTransport(iceTransport);
  t.add_cleanup(() => quicTransport.stop());
  return quicTransport;
}
@@ -30,9 +20,8 @@ function makeQuicTransport(t, iceTransport, certificates) {
// and a single, newly-generated certificate. The RTCQuicTransport and
// RTCIceTransport instances will be automatically cleaned up when the test
// finishes.
-async function makeStandaloneQuicTransport(t) {
-  const certificate = await generateCertificate();
-  return makeQuicTransport(t, makeIceTransport(t), [ certificate ]);
+function makeStandaloneQuicTransport(t) {
+  return makeQuicTransport(t, makeIceTransport(t));
}

// Construct two RTCQuicTransport instances and each call start() with the other
@@ -40,17 +29,16 @@ async function makeStandaloneQuicTransport(t) {
// Returns a 2-list:
//   [ server RTCQuicTransport,
//     client RTCQuicTransport ]
-async function makeAndStartTwoQuicTransports(t) {
-  const [ localCertificate, remoteCertificate ] =
-      await Promise.all([ generateCertificate(), generateCertificate() ]);
+function makeAndStartTwoQuicTransports(t) {
  const [ localIceTransport, remoteIceTransport ] =
      makeGatherAndStartTwoIceTransports(t);
  const localQuicTransport =
-      makeQuicTransport(t, localIceTransport, [ localCertificate ]);
+      makeQuicTransport(t, localIceTransport);
  const remoteQuicTransport =
-      makeQuicTransport(t, remoteIceTransport, [ remoteCertificate ]);
-  localQuicTransport.start(remoteQuicTransport.getLocalParameters());
-  remoteQuicTransport.start(localQuicTransport.getLocalParameters());
+      makeQuicTransport(t, remoteIceTransport);
+  const remote_key = remoteQuicTransport.getKey();
+  localQuicTransport.listen(remote_key);
+  remoteQuicTransport.connect();
  return [ localQuicTransport, remoteQuicTransport ];
}
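The updated helper replaces the certificate/start() handshake with a pre-shared key: the connecting side exports its key, the other side listens on it, and the exporter then connects. A compact restatement of that call order, using the same RTCQuicTransport surface the helper exercises (the two transport variables are illustrative):

const presharedKey = clientQuic.getKey(); // 16-byte key, per the tests below
serverQuic.listen(presharedKey);          // wait for the peer holding that key
clientQuic.connect();                     // both transports report 'connecting'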

@@ -17,153 +17,50 @@
//   makeAndGatherTwoIceTransports
// The following helper functions are called from RTCQuicTransport-helper.js:
-//   generateCertificate
//   makeQuicTransport
//   makeStandaloneQuicTransport
//   makeAndStartTwoQuicTransports
//   makeTwoConnectedQuicTransports

-promise_test(async t => {
-  const certificate = await generateCertificate();
+test(t => {
  const iceTransport = makeIceTransport(t);
-  const quicTransport = makeQuicTransport(t, iceTransport, [ certificate ]);
+  const quicTransport = makeQuicTransport(t, iceTransport);
  assert_equals(quicTransport.transport, iceTransport,
      'Expect transport to be the same as the one passed in the constructor.');
  assert_equals(quicTransport.state, 'new', `Expect state to be 'new'.`);
-  assert_object_equals(quicTransport.getLocalParameters(),
-      { role: 'auto', fingerprints: certificate.getFingerprints() },
-      'Expect local parameters to be initialized.');
-  assert_equals(quicTransport.getRemoteParameters(), null,
-      'Expect no remote parameters.');
-  assert_array_equals(quicTransport.getCertificates(), [ certificate ],
-      'Expect one certificate.');
-  assert_array_equals(quicTransport.getRemoteCertificates(), [],
-      'Expect no remote certificates.');
}, 'RTCQuicTransport initial properties are set.');

-promise_test(async t => {
-  const [ firstCertificate, secondCertificate ] =
-      await Promise.all([ generateCertificate(), generateCertificate() ]);
-  const quicTransport =
-      makeQuicTransport(t, makeIceTransport(t),
-          [ firstCertificate, secondCertificate ]);
-  assert_array_equals(quicTransport.getCertificates(),
-      [ firstCertificate, secondCertificate ]);
-}, 'getCertificates() returns the certificates passed in the constructor.');
-
-promise_test(async t => {
-  const [ firstCertificate, secondCertificate ] =
-      await Promise.all([ generateCertificate(), generateCertificate() ]);
-  const quicTransport =
-      makeQuicTransport(t, makeIceTransport(t),
-          [ firstCertificate, secondCertificate ]);
-  assert_object_equals(quicTransport.getLocalParameters(), {
-    role: 'auto',
-    fingerprints:
-        [ firstCertificate.getFingerprints()[0],
-          secondCertificate.getFingerprints()[0] ],
-  });
-  assert_array_equals(quicTransport.getCertificates(),
-      [ firstCertificate, secondCertificate ]);
-}, 'getLocalParameters() has fingerprints for all certificates passed in the ' +
-    'constructor.');
-
-promise_test(async t => {
-  const expiredCertificate = await generateCertificate({ expires: 0 });
-  assert_throws(new TypeError(),
-      () => makeQuicTransport(t, makeIceTransport(t), [ expiredCertificate ]));
-}, 'RTCQuicTransport constructor throws if passed an expired certificate.');
-
-promise_test(async t => {
-  const certificate = await generateCertificate();
+test(t => {
  const iceTransport = makeIceTransport(t);
  iceTransport.stop();
  assert_throws('InvalidStateError',
-      () => makeQuicTransport(t, iceTransport, [ certificate ]));
+      () => makeQuicTransport(t, iceTransport));
}, 'RTCQuicTransport constructor throws if passed a closed RTCIceTransport.');

-promise_test(async t => {
-  const certificate = await generateCertificate();
+test(t => {
  const iceTransport = makeIceTransport(t);
  const firstQuicTransport =
-      makeQuicTransport(t, iceTransport, [ certificate ]);
+      makeQuicTransport(t, iceTransport);
  assert_throws('InvalidStateError',
-      () => makeQuicTransport(t, iceTransport, [ certificate ]));
+      () => makeQuicTransport(t, iceTransport));
}, 'RTCQuicTransport constructor throws if passed an RTCIceTransport that ' +
    'already has an active RTCQuicTransport.');

-promise_test(async t => {
-  const quicTransport = await makeStandaloneQuicTransport(t);
+test(t => {
+  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.stop();
  assert_equals(quicTransport.state, 'closed');
}, `stop() changes state to 'closed'.`);

-promise_test(async t => {
-  const quicTransport = await makeStandaloneQuicTransport(t);
+test(t => {
+  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.transport.stop();
  assert_equals(quicTransport.state, 'closed');
}, `RTCIceTransport.stop() changes RTCQuicTransport.state to 'closed'.`);

-promise_test(async t => {
-  const quicTransport = await makeStandaloneQuicTransport(t);
-  quicTransport.start(quicTransport.getLocalParameters());
-  assert_equals(quicTransport.state, 'new');
-}, 'start() with a non-started RTCIceTransport does not change state.');
-
-promise_test(async t => {
-  const certificate = await generateCertificate();
-  const [ localIceTransport, remoteIceTransport ] =
-      makeAndGatherTwoIceTransports(t);
-  const quicTransport =
-      makeQuicTransport(t, localIceTransport, [ certificate ]);
-  quicTransport.start(quicTransport.getLocalParameters());
-  const iceTransportWatcher =
-      new EventWatcher(t, remoteIceTransport, 'icecandidate');
-  await iceTransportWatcher.wait_for('icecandidate');
-  localIceTransport.start(remoteIceTransport.getLocalParameters(),
-      'controlling');
-  assert_equals(quicTransport.state, 'connecting');
-}, 'start() with a non-started RTCIceTransport later changes state to ' +
-    `'connecting' once the RTCIceTransport.start() is called.`);
-
-promise_test(async t => {
-  const certificate = await generateCertificate();
-  const [ localIceTransport, remoteIceTransport ] =
-      makeAndGatherTwoIceTransports(t);
-  const quicTransport =
-      makeQuicTransport(t, localIceTransport, [ certificate ]);
-  const iceTransportWatcher =
-      new EventWatcher(t, remoteIceTransport, 'icecandidate');
-  await iceTransportWatcher.wait_for('icecandidate');
-  localIceTransport.start(remoteIceTransport.getLocalParameters());
-  quicTransport.start(quicTransport.getLocalParameters());
-  assert_equals(quicTransport.state, 'connecting');
-}, `start() with a started RTCIceTransport changes state to 'connecting'.`);
-
-promise_test(async t => {
-  const quicTransport = await makeStandaloneQuicTransport(t);
-  quicTransport.stop();
-  assert_throws('InvalidStateError',
-      () => quicTransport.start(quicTransport.getLocalParameters()));
-}, 'start() throws if called after stop().');
-
-promise_test(async t => {
-  const quicTransport = await makeStandaloneQuicTransport(t);
-  quicTransport.transport.stop();
-  assert_throws('InvalidStateError',
-      () => quicTransport.start(quicTransport.getLocalParameters()));
-}, 'start() throws if called after the RTCIceTransport has stopped.');
-
-promise_test(async t => {
-  const quicTransport = await makeStandaloneQuicTransport(t);
-  quicTransport.start(quicTransport.getLocalParameters());
-  assert_throws('InvalidStateError',
-      () => quicTransport.start(quicTransport.getLocalParameters()));
-}, 'start() throws if called twice.');
-
promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
-      await makeAndStartTwoQuicTransports(t);
+      makeAndStartTwoQuicTransports(t);
  const localWatcher = new EventWatcher(t, localQuicTransport, 'statechange');
  const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'statechange');
  await Promise.all([
@@ -185,5 +82,97 @@ promise_test(async t => {
  assert_equals(remoteQuicTransport.state, 'closed');
}, `stop() fires a statechange event to 'closed' on the remote transport`);
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.connect();
assert_equals(quicTransport.state, 'connecting');
}, `connect() changes state to 'connecting'.`);
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.connect();
assert_throws('InvalidStateError',
() => quicTransport.connect());
}, 'connect() throws if already called connect().');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.listen(new Uint8Array([12345]));
assert_throws('InvalidStateError',
() => quicTransport.connect());
}, 'connect() throws if already called listen().');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.stop();
assert_throws('InvalidStateError',
() => quicTransport.connect());
}, 'connect() throws after stop().');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.transport.stop();
assert_throws('InvalidStateError',
() => quicTransport.connect());
}, 'connect() throws if called after RTCIceTransport has stopped.');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.listen(new Uint8Array([12345]));
assert_equals(quicTransport.state, 'connecting');
}, `listen() changes state to 'connecting'.`);
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.connect();
assert_throws('InvalidStateError',
() => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws if already called connect().');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.listen(new Uint8Array([12345]));
assert_throws('InvalidStateError',
() => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws if already called listen().');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.stop();
assert_throws('InvalidStateError',
() => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws after stop().');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
quicTransport.transport.stop();
assert_throws('InvalidStateError',
() => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws if called after RTCIceTransport has stopped.');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
const key = quicTransport.getKey();
assert_equals(key.byteLength, 16);
}, 'RTCQuicTransport.getKey() attribute is 16 bytes.');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
const key = new Uint8Array();
assert_throws('NotSupportedError',
() => quicTransport.listen(key));
}, 'listen() throws if given an empty key.');
test(t => {
const quicTransport = makeStandaloneQuicTransport(t);
const key = quicTransport.getKey();
let update_key = new Uint8Array(key);
for (let i = 0; i < update_key.length; i++) {
update_key[i] = 0;
}
const new_key = quicTransport.getKey();
assert_not_equals(update_key, new Uint8Array(new_key));
}, 'Cannot mutate key retrieved from getKey().');
</script>
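Taken together, the new cases pin down a small state machine: 'new' moves to 'connecting' via either connect() or listen(key), any further connect()/listen() call then throws InvalidStateError, and stop() (or stopping the underlying RTCIceTransport) moves the transport to 'closed'. A compressed sketch using the helpers above:

const quic = makeQuicTransport(t, makeIceTransport(t)); // state: 'new'
quic.connect();                                         // state: 'connecting'
// quic.listen(new Uint8Array([1])) would now throw InvalidStateError.
quic.stop();                                            // state: 'closed'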