Auto merge of #27690 - servo-wpt-sync:wpt_update_08-10-2020, r=servo-wpt-sync

Sync WPT with upstream (08-10-2020)

Automated downstream sync of changes from upstream as of 08-10-2020.

[no-wpt-sync]
r? @servo-wpt-sync

Commit 5fcc10a263
90 changed files with 1701 additions and 1510 deletions
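Most of the hunks below edit Servo's WPT expectation metadata (.ini) files, which record how each test and subtest is expected to behave. As a reading key, here is a minimal sketch of that format; the test and subtest names are hypothetical, not taken from this commit:

[some-test.html]
  expected: TIMEOUT
  [name of a subtest]
    expected: FAIL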
@@ -1,4 +0,0 @@
-[hit-test-floats-002.html]
-  [Hit test float]
-    expected: FAIL
-
@@ -0,0 +1,4 @@
+[hit-test-floats-005.html]
+  [Miss clipped float]
+    expected: FAIL
+
@@ -1,4 +0,0 @@
-[elementFromPoint-001.html]
-  [CSSOM View - 5 - extensions to the Document interface]
-    expected: FAIL
-
@@ -17,6 +17,3 @@
   [test the top of layer]
     expected: FAIL
 
-  [test some point of the element: top left corner]
-    expected: FAIL
-
@@ -0,0 +1,2 @@
+[matchMedia-display-none-iframe.html]
+  expected: ERROR
@@ -312,12 +312,6 @@
   [Response: combined response Content-Type: text/html;" \\" text/plain ";charset=GBK]
     expected: NOTRUN
 
-  [<iframe>: separate response Content-Type: text/html;" text/plain]
-    expected: FAIL
-
-  [<iframe>: separate response Content-Type: text/html */*;charset=gbk]
-    expected: FAIL
-
   [<iframe>: combined response Content-Type: text/html;" text/plain]
     expected: FAIL
 
@@ -327,3 +321,15 @@
   [<iframe>: separate response Content-Type: text/plain */*;charset=gbk]
     expected: FAIL
 
+  [<iframe>: combined response Content-Type: text/html;charset=gbk text/plain text/html]
+    expected: FAIL
+
+  [<iframe>: combined response Content-Type: text/html */*]
+    expected: FAIL
+
+  [<iframe>: combined response Content-Type: text/html */*;charset=gbk]
+    expected: FAIL
+
+  [<iframe>: separate response Content-Type: text/html;" \\" text/plain]
+    expected: FAIL
+
@@ -11,9 +11,6 @@
   [X-Content-Type-Options%3A%20nosniff%2C%2C%40%23%24%23%25%25%26%5E%26%5E*()()11!]
     expected: FAIL
 
-  [X-Content-Type-Options%3A%20%22nosniFF%22]
-    expected: FAIL
-
   [X-Content-Type-Options%3A%20no%0D%0AX-Content-Type-Options%3A%20nosniff]
     expected: FAIL
 
@@ -1,4 +1,4 @@
-[traverse_the_history_5.html]
+[traverse_the_history_4.html]
   [Multiple history traversals, last would be aborted]
     expected: FAIL
 
@@ -1,4 +1,5 @@
 [supported-elements.html]
+  expected: TIMEOUT
   [Contenteditable element should support autofocus]
     expected: FAIL
 
@@ -9,7 +10,7 @@
     expected: FAIL
 
   [Area element should support autofocus]
-    expected: FAIL
+    expected: TIMEOUT
 
   [Host element with delegatesFocus should support autofocus]
     expected: FAIL
@@ -1,5 +1,5 @@
 [iframe_sandbox_popups_escaping-1.html]
-  expected: CRASH
+  expected: TIMEOUT
   [Check that popups from a sandboxed iframe escape the sandbox if\n allow-popups-to-escape-sandbox is used]
    expected: TIMEOUT
 
@@ -1,5 +1,5 @@
 [iframe_sandbox_popups_nonescaping-2.html]
-  expected: TIMEOUT
+  expected: CRASH
   [Check that popups from a sandboxed iframe do not escape the sandbox]
    expected: NOTRUN
 
@@ -1,4 +1,5 @@
 [iframe_sandbox_popups_nonescaping-3.html]
+  expected: TIMEOUT
   [Check that popups from a sandboxed iframe do not escape the sandbox]
-    expected: FAIL
+    expected: NOTRUN
 
@@ -1,2 +0,0 @@
-[script-onerror-insertion-point-2.html]
-  expected: TIMEOUT
@@ -3,3 +3,6 @@
   [The incumbent settings object while executing the compiled callback via Web IDL's invoke must be that of the node document]
     expected: TIMEOUT
 
+  [The entry settings object while executing the compiled callback via Web IDL's invoke must be that of the node document]
+    expected: FAIL
+
@@ -1,4 +1,5 @@
 [realtimeanalyser-fft-scaling.html]
+  expected: TIMEOUT
   [X 2048-point FFT peak position is not equal to 64. Got 0.]
     expected: FAIL
 
@@ -1,5 +1,4 @@
 [audiocontext-not-fully-active.html]
-  expected: TIMEOUT
   [frame in navigated remote-site frame]
     expected: FAIL
 
@@ -0,0 +1,4 @@
+[transferred-buffer-output.html]
+  [Executing "Test Convolver with transferred buffer"]
+    expected: FAIL
+
@@ -1,5 +0,0 @@
-[017.html]
-  expected: TIMEOUT
-  [origin of the script that invoked the method, about:blank]
-    expected: TIMEOUT
-
@@ -1,2 +0,0 @@
-[Worker-constructor.html]
-  expected: ERROR
@@ -304,13 +304,20 @@
 {}
 ]
 ],
-"overflow-clip-crash.html": [
+"overflow-clip-001-crash.html": [
 "156343954deba47d0276ea482285048d773b74ab",
 [
 null,
 {}
 ]
 ],
+"overflow-clip-002-crash.html": [
+"e60239655a07ca6ea3173356420c934ec10b2231",
+[
+null,
+{}
+]
+],
 "shrink-to-fit-auto-overflow-relayout-crash.html": [
 "c0f0214ee186a0b13787f6f86bd199e7d6df7ac0",
 [
@@ -133132,7 +133139,7 @@
 ]
 ],
 "content-visibility-075.html": [
-"cecbf9bf9c5489bb6b8eb96da42f57943ee254f0",
+"8400dd3ddcd41aa9e95e2893239fd48ae701042c",
 [
 null,
 [
@@ -133145,7 +133152,7 @@
 ]
 ],
 "content-visibility-076.html": [
-"8ba2bbe82e67cd65e5fe6b773eb71abfae63f4ad",
+"93181c675bbfb33d1862e377612440a4cde335cd",
 [
 null,
 [
@@ -161402,6 +161409,19 @@
 {}
 ]
 ],
+"multicol-span-all-015.html": [
+"041c211b855e50ad70e65c98af73567848e3534e",
+[
+null,
+[
+[
+"/css/reference/ref-filled-green-100px-square.xht",
+"=="
+]
+],
+{}
+]
+],
 "multicol-span-all-block-sibling-003.xht": [
 "abaa45f0a85023f3f07a9db483629b74d2b09d71",
 [
@@ -163122,6 +163142,19 @@
 {}
 ]
 ],
+"overflow-clip-cant-scroll.html": [
+"529ef1fad94432769428aad25ebcb27e28124d85",
+[
+null,
+[
+[
+"/css/css-overflow/overflow-clip-cant-scroll-ref.html",
+"=="
+]
+],
+{}
+]
+],
 "overflow-clip-content-visual-overflow.html": [
 "8ea8e2c3334e17cfa5c5225df837135e64546f7d",
 [
@@ -299423,11 +299456,11 @@
 []
 ],
 "content-visibility-075-ref.html": [
-"05799e0ba1d3b40dabe2997cc9d2e849be35a149",
+"1a7f152b61fd153637dbe84f5b062ad8efaf0377",
 []
 ],
 "content-visibility-076-ref.html": [
-"6ea5fcf5aef7c5f9026016c57c390956238f3ff2",
+"4b37070bd7e03bfcef0baaf9440d978859d1e780",
 []
 ],
 "inline-container-with-child-ref.html": [
@@ -311989,6 +312022,10 @@
 "9e502e7e237de7ffb6dfc754450c70f5f03d297e",
 []
 ],
+"overflow-clip-cant-scroll-ref.html": [
+"d52fa0e9574cdbdb067106bccfd2014b9559b921",
+[]
+],
 "overflow-clip-content-visual-overflow-ref.html": [
 "378da736ecfcec921fa257273f7369cb253e3b81",
 []
@@ -347509,7 +347546,7 @@
 []
 ],
 "remote-playback.idl": [
-"724f3dfa8d2159aaa90546c5e6787f54daa3b0ef",
+"252241086755aee14d9b938181c025fbfced363f",
 []
 ],
 "reporting.idl": [
@@ -347529,7 +347566,7 @@
 []
 ],
 "sanitizer-api.tentative.idl": [
-"6778c51bdfd70d13d9e8bdb05a7b87797ec07aa5",
+"27e4ecf26c302610ef69ab4be8843cb59f9978d2",
 []
 ],
 "savedata.idl": [
@@ -347544,6 +347581,10 @@
 "9e9dbc67ba73753851f158340803a62e0178e893",
 []
 ],
+"screen-wake-lock.idl": [
+"56bc8f43e4d08ad81f48eff802f46f7951cbe0a9",
+[]
+],
 "scroll-animations.idl": [
 "3ef9f5452906509e5c9bc88bdaf31fbf2b3ff8a8",
 []
@@ -347632,10 +347673,6 @@
 "f077afe8f6aae64415d0cdaaafef4ce8de827647",
 []
 ],
-"wake-lock.idl": [
-"56bc8f43e4d08ad81f48eff802f46f7951cbe0a9",
-[]
-],
 "wasm-js-api.idl": [
 "71d7207a712161d34e5dbe676991dd8c01aa7c06",
 []
@@ -353624,7 +353661,7 @@
 ],
 "support": {
 "testcases.sub.js": [
-"a3cc6d38c8cd3d98a93a082fa4216a83de00d471",
+"029d30bcf54779dae702e85ee36fe5ef3cdb5a06",
 []
 ]
 }
@@ -356719,6 +356756,42 @@
 "deserialize-error-frame.html": [
 "5ec2fcda2cdd8dba66ed97240cb836d45596c905",
 []
+],
+"echo-iframe.html": [
+"68f68503439fdbcddad14a6960adb451cdddf8f7",
+[]
+],
+"echo-worker.js": [
+"806c23710833997ef913ec46d64c2d5e2c438e01",
+[]
+],
+"helpers.js": [
+"05d481f277a38719b7a691ff50c8569076625424",
+[]
+],
+"receiving-shared-worker.js": [
+"84f779c3db6e135f172855e8e35684a2668fa87a",
+[]
+],
+"receiving-worker.js": [
+"4ebb9c5f8fcec29cee822ac24187912fb913af09",
+[]
+],
+"sending-shared-worker.js": [
+"e579077894d5b945b34954e9bb367e118b01270c",
+[]
+],
+"sending-worker.js": [
+"0b79733f74d97baf6337f49a41a85b0a567cca07",
+[]
+],
+"service-worker-iframe.html": [
+"348d067c926f58356d058f1571a47aacb0f13666",
+[]
+],
+"service-worker.js": [
+"af76b6c11b4ed103b8fdfe5ca206f22fe686a78f",
+[]
 ]
 }
 }
@@ -358158,7 +358231,7 @@
 []
 ],
 "lint.py": [
-"5027da705774f8d306e4d491608e562daa378d0f",
+"38225ec958862ec793656013226cc03d469b853d",
 []
 ],
 "rules.py": [
@@ -358363,7 +358436,7 @@
 []
 ],
 "test_lint.py": [
-"e9925180f20a84f9653c67daad5fd366738f754a",
+"55dbbe92dfc3f4df88c594ca16116eb2714edf7f",
 []
 ],
 "test_path_lints.py": [
@@ -358620,7 +358693,7 @@
 []
 ],
 "serve.py": [
-"8243acc5eb8ec21e2d1d322ec4d93dc536976e08",
+"b1b9e931317f4e99c7110b2096a0318a8f9edf4e",
 []
 ],
 "test_functional.py": [
@@ -365324,7 +365397,7 @@
 []
 ],
 "base.py": [
-"8b34eb7cd0cdfb0a4a3a9356549e7d1c8f06851b",
+"d3ffff6176260115a464fce7d36fe1923f42fcd9",
 []
 ],
 "docroot": {
@@ -365484,7 +365557,7 @@
 []
 ],
 "test_handlers.py": [
-"965a214f166cfcb31baaeada3ca2bb747b86342a",
+"7866e641e6aa2d5d98c9d674d79838235e71929d",
 []
 ],
 "test_input_file.py": [
@@ -365547,7 +365620,7 @@
 []
 ],
 "handlers.py": [
-"6677ab394b7a31d4fe0253e8fbfbb54995a7e006",
+"d1e9614c031279f990cff1c1787088449e59cc43",
 []
 ],
 "logger.py": [
@@ -367929,20 +368002,6 @@
 ]
 }
 },
-"webrtc-quic": {
-"META.yml": [
-"740c8d2fe84beafbcb7f98cef29aff2378721132",
-[]
-],
-"RTCQuicStream-helper.js": [
-"5e1f6030bd8b138fd881264a494d6e35c23c7667",
-[]
-],
-"RTCQuicTransport-helper.js": [
-"b8d9eaed5aad3a18a583200bc958fc375f2b24da",
-[]
-]
-},
 "webrtc-stats": {
 "META.yml": [
 "10bcf856eb9258e0845f2fdcb6e08c43ebcdf78a",
@@ -486185,14 +486244,21 @@
 ]
 ],
 "child-shift-with-parent-overflow-hidden.html": [
-"d69e55a51b8088ca0a94f3975155646d21792d88",
+"ba67f7ed50a54434ce8c39e3b937ec4a0159d3e6",
+[
+null,
+{}
+]
+],
+"child-shift-with-parent-overflow-x-clip.html": [
+"381727c9501217d02dda41a42bcebdff5e9c8e2f",
 [
 null,
 {}
 ]
 ],
 "child-shift-with-parent.html": [
-"7271af6d4a04fbbafebe2b20b372f6f41b31b3a9",
+"e23bfd0c94a8bea0aef47bf10c279c26b2e868ca",
 [
 null,
 {}
@@ -488885,7 +488951,7 @@
 ]
 ],
 "MediaRecorder-error.html": [
-"4daf2d9cd1a6de82165152facf88c4742eb00536",
+"1c93ed3e3a6ab82504e926486e4edb741cc2e4b6",
 [
 null,
 {}
@@ -488929,7 +488995,7 @@
 ]
 ],
 "MediaRecorder-stop.html": [
-"bb39b027f0fcac0a4aa63918d56d62296607b5e9",
+"ad6fa8809f6ad3af26969bbcb6a761280e5d705c",
 [
 null,
 {}
@@ -509799,21 +509865,21 @@
 ]
 ],
 "sanitizer-config.tentative.html": [
-"5c115a467cb64e1e93e9293692d122271d64df8a",
+"e4dc40a8c632098a7f47b72a3b5a783dffa02119",
 [
 null,
 {}
 ]
 ],
 "sanitizer-sanitize.tentative.html": [
-"44961e02f4e8dcd876b250f2826cea4c7fe487ae",
+"32b7424a22507c341454807e7b7f96e64e809775",
 [
 null,
 {}
 ]
 ],
 "sanitizer-sanitizeToString.tentative.html": [
-"7efbc3ca7bd7d8c84595744d622dc4c92c87e347",
+"bb805a4c919209e404261cd94062b505975f0b52",
 [
 null,
 {}
@@ -509998,7 +510064,7 @@
 },
 "screen-wake-lock": {
 "idlharness.https.window.js": [
-"b5eaa14efe2bcd6f686467e86d7832e4804f8710",
+"11b83c29bd0dae7207fd633860772d44bdac6d97",
 [
 "screen-wake-lock/idlharness.https.window.html",
 {
@@ -518203,6 +518269,62 @@
 "timeout": "long"
 }
 ]
+],
+"readable-stream.html": [
+"59b57ce6723c10be746e2b3b478dcf81df105db1",
+[
+null,
+{}
+]
+],
+"reason.html": [
+"4251aa85b816bb24a00143b642cf8410a4132366",
+[
+null,
+{}
+]
+],
+"service-worker.https.html": [
+"2ca7f19c910f767a9482ff15ff88bda11971a9fe",
+[
+null,
+{}
+]
+],
+"shared-worker.html": [
+"cd0415402d5018e2f76f4996edd4e7827adc8ab1",
+[
+null,
+{}
+]
+],
+"transform-stream.html": [
+"fbfbfe8fc1347ab9211845f439a9a7e105a33539",
+[
+null,
+{}
+]
+],
+"window.html": [
+"beaf548fe641c51408fd3e1083ea1c005c9c2500",
+[
+null,
+{}
+]
+],
+"worker.html": [
+"c5dc9fc62f8cf24c9b45289651724fe3774f305d",
+[
+null,
+{}
+]
+],
+"writable-stream.html": [
+"adc6f457c27e87569be2fd32bfe723d29aef7135",
+[
+null,
+{}
+]
 ]
 },
 "transform-streams": {
@@ -532984,6 +533106,13 @@
 null,
 {}
 ]
+],
+"transferred-buffer-output.html": [
+"e37a98c3861b6c2f0139b870d745ad0977fff10b",
+[
+null,
+{}
+]
 ]
 },
 "the-delaynode-interface": {
@@ -536011,22 +536140,6 @@
 ]
 ]
 },
-"webrtc-quic": {
-"RTCQuicStream.https.html": [
-"e99914ad40387316e82ed32ed4626888f1011e43",
-[
-null,
-{}
-]
-],
-"RTCQuicTransport.https.html": [
-"cdccaac6c866c970d5620b8f70943b69412cb861",
-[
-null,
-{}
-]
-]
-},
 "webrtc-stats": {
 "getStats-remote-candidate-address.html": [
 "08e2aec90e6836cdcd05b448d2d581eec3bd5535",
@@ -1,4 +0,0 @@
-[hit-test-floats-002.html]
-  [Hit test float]
-    expected: FAIL
-
@@ -0,0 +1,4 @@
+[hit-test-floats-005.html]
+  [Miss clipped float]
+    expected: FAIL
+
@@ -1,4 +0,0 @@
-[elementFromPoint-001.html]
-  [CSSOM View - 5 - extensions to the Document interface]
-    expected: FAIL
-
@@ -21,6 +21,3 @@
   [test the top of layer]
     expected: FAIL
 
-  [test some point of the element: top left corner]
-    expected: FAIL
-
@@ -0,0 +1,2 @@
+[matchMedia-display-none-iframe.html]
+  expected: ERROR
@@ -312,12 +312,6 @@
   [fetch(): separate response Content-Type: text/plain ]
     expected: NOTRUN
 
-  [<iframe>: separate response Content-Type: text/html;" text/plain]
-    expected: FAIL
-
-  [<iframe>: separate response Content-Type: text/html */*;charset=gbk]
-    expected: FAIL
-
   [<iframe>: combined response Content-Type: text/html;" text/plain]
     expected: FAIL
 
@@ -327,3 +321,15 @@
   [<iframe>: separate response Content-Type: text/plain */*;charset=gbk]
     expected: FAIL
 
+  [<iframe>: combined response Content-Type: text/html;charset=gbk text/plain text/html]
+    expected: FAIL
+
+  [<iframe>: combined response Content-Type: text/html */*]
+    expected: FAIL
+
+  [<iframe>: combined response Content-Type: text/html */*;charset=gbk]
+    expected: FAIL
+
+  [<iframe>: separate response Content-Type: text/html;" \\" text/plain]
+    expected: FAIL
+
@@ -11,9 +11,6 @@
   [X-Content-Type-Options%3A%20nosniff%2C%2C%40%23%24%23%25%25%26%5E%26%5E*()()11!]
     expected: FAIL
 
-  [X-Content-Type-Options%3A%20%22nosniFF%22]
-    expected: FAIL
-
   [X-Content-Type-Options%3A%20no%0D%0AX-Content-Type-Options%3A%20nosniff]
     expected: FAIL
 
@@ -1,4 +1,4 @@
-[traverse_the_history_5.html]
+[traverse_the_history_4.html]
   [Multiple history traversals, last would be aborted]
     expected: FAIL
 
@@ -1,4 +1,5 @@
 [supported-elements.html]
+  expected: TIMEOUT
   [Contenteditable element should support autofocus]
     expected: FAIL
 
@@ -9,7 +10,7 @@
     expected: FAIL
 
   [Area element should support autofocus]
-    expected: FAIL
+    expected: TIMEOUT
 
   [Host element with delegatesFocus should support autofocus]
     expected: FAIL
@@ -1,6 +1,6 @@
 [iframe_sandbox_popups_escaping-1.html]
   type: testharness
-  expected: CRASH
+  expected: TIMEOUT
   [Check that popups from a sandboxed iframe escape the sandbox if\n allow-popups-to-escape-sandbox is used]
    expected: TIMEOUT
 
@@ -1,6 +1,6 @@
 [iframe_sandbox_popups_nonescaping-2.html]
   type: testharness
-  expected: TIMEOUT
+  expected: CRASH
   [Check that popups from a sandboxed iframe do not escape the sandbox]
    expected: NOTRUN
 
@@ -1,5 +1,6 @@
 [iframe_sandbox_popups_nonescaping-3.html]
   type: testharness
+  expected: TIMEOUT
   [Check that popups from a sandboxed iframe do not escape the sandbox]
-    expected: FAIL
+    expected: NOTRUN
 
@@ -1,2 +0,0 @@
-[script-onerror-insertion-point-2.html]
-  expected: TIMEOUT
@@ -4,3 +4,6 @@
   [The incumbent settings object while executing the compiled callback via Web IDL's invoke must be that of the node document]
     expected: TIMEOUT
 
+  [The entry settings object while executing the compiled callback via Web IDL's invoke must be that of the node document]
+    expected: FAIL
+
@@ -1,4 +1,5 @@
 [realtimeanalyser-fft-scaling.html]
+  expected: TIMEOUT
   [X 2048-point FFT peak position is not equal to 64. Got 0.]
     expected: FAIL
 
@@ -1,5 +1,4 @@
 [audiocontext-not-fully-active.html]
-  expected: TIMEOUT
   [frame in navigated remote-site frame]
     expected: FAIL
 
@@ -0,0 +1,4 @@
+[transferred-buffer-output.html]
+  [Executing "Test Convolver with transferred buffer"]
+    expected: FAIL
+
@@ -1,5 +0,0 @@
-[017.html]
-  expected: TIMEOUT
-  [origin of the script that invoked the method, about:blank]
-    expected: TIMEOUT
-
@@ -1,2 +0,0 @@
-[Worker-constructor.html]
-  expected: ERROR
@@ -9,10 +9,10 @@
 
 <style>
 .small_child {
-height: 500px;
+height: 1000px;
 }
 .large_child {
-height: 5000px;
+height: 20000px;
 position: relative;
 }
 #target {
@@ -12,10 +12,10 @@
 <style>
 .auto {
 content-visibility: auto;
-contain-intrinsic-size: 1px 500px;
+contain-intrinsic-size: 1px 1000px;
 }
 .child {
-height: 5000px;
+height: 20000px;
 position: relative;
 }
 #target {
@@ -9,10 +9,10 @@
 
 <style>
 .small_child {
-height: 500px;
+height: 1000px;
 }
 .large_child {
-height: 5000px;
+height: 20000px;
 position: relative;
 }
 #target {
@@ -12,10 +12,10 @@
 <style>
 .auto {
 content-visibility: auto;
-contain-intrinsic-size: 1px 500px;
+contain-intrinsic-size: 1px 1000px;
 }
 .child {
-height: 5000px;
+height: 20000px;
 position: relative;
 }
 #target {
@@ -0,0 +1,16 @@
+<!DOCTYPE html>
+<link rel="author" title="Morten Stenshorne" href="mailto:mstensho@chromium.org">
+<link rel="help" href="https://www.w3.org/TR/css-multicol-1/#spanning-columns">
+<link rel="help" href="https://www.w3.org/TR/css-break-3/#break-margins">
+<link rel="match" href="../reference/ref-filled-green-100px-square.xht">
+<p>Test passes if there is a filled green square and <strong>no red</strong>.</p>
+<div style="columns:2; column-gap:0; width:100px; background:red;">
+<div style="height:30px;"></div>
+<div style="height:30px; background:green;"></div>
+<div>
+<div style="column-span:all; height:40px; background:green;">
+<div style="width:50px; height:40px; background:red;"></div>
+</div>
+</div>
+<div style="margin-top:-70px; height:130px; background:green;"></div>
+</div>
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<link rel="author" title="Scott Violet" href="mailto:sky@chromium.org">
+<link rel="help" href="https://bugs.chromium.org/p/chromium/issues/detail?id=1134937">
+<style>
+.item {
+background: cyan;
+background-attachment: local;
+overflow: clip;
+border-style: solid
+}
+</style>
+<div class="item">
@@ -0,0 +1,15 @@
+<!doctype html>
+<meta charset="utf-8">
+<title>Overflow: clip can not be scrolled</title>
+<link rel="help" href="https://www.w3.org/TR/css-overflow-3/#valdef-overflow-clip">
+<link rel="author" title="Scott Violet" href="mailto:sky@chromium.org">
+<style>
+.child {
+width: 100px;
+height: 100px;
+background-color: green;
+}
+</style>
+<p>You should see two green squares (no red).
+<div class="child"></div>
+<div class="child"></div>
@@ -0,0 +1,46 @@
+<!doctype html>
+<html class="reftest-wait">
+<meta charset="utf-8">
+<title>Overflow: clip can not be scrolled</title>
+<link rel="help" href="https://www.w3.org/TR/css-overflow-3/#valdef-overflow-clip">
+<link rel="author" title="Scott Violet" href="mailto:sky@chromium.org">
+<link rel="match" href="overflow-clip-cant-scroll-ref.html">
+<style>
+.parent {
+width: 100px;
+height: 100px;
+overflow: clip;
+}
+
+.child1, .child2 {
+width: 100px;
+height: 100px;
+flex: none;
+}
+
+.child1 {
+background-color: green;
+}
+
+.child2 {
+background-color: red;
+}
+</style>
+<p>You should see two green squares (no red).
+<div id="parent1" class="parent" style="display: flex">
+<div class="child1"></div>
+<div class="child2"></div>
+</div>
+
+<div id="parent2" class="parent">
+<div class="child1"></div>
+<div class="child2"></div>
+</div>
+
+<script>
+onload = function() {
+document.getElementById("parent1").scrollLeft = 100;
+document.getElementById("parent2").scrollTop = 100;
+document.documentElement.removeAttribute("class");
+}
+</script>
@@ -6,7 +6,7 @@
 [Exposed=Window]
 interface RemotePlayback : EventTarget {
 Promise<long> watchAvailability(RemotePlaybackAvailabilityCallback callback);
-Promise<void> cancelWatchAvailability(optional long id);
+Promise<undefined> cancelWatchAvailability(optional long id);
 
 readonly attribute RemotePlaybackState state;
 
@@ -14,7 +14,7 @@ interface RemotePlayback : EventTarget {
 attribute EventHandler onconnect;
 attribute EventHandler ondisconnect;
 
-Promise<void> prompt();
+Promise<undefined> prompt();
 };
 
 enum RemotePlaybackState {
@@ -23,7 +23,7 @@ enum RemotePlaybackState {
 "disconnected"
 };
 
-callback RemotePlaybackAvailabilityCallback = void(boolean available);
+callback RemotePlaybackAvailabilityCallback = undefined(boolean available);
 
 partial interface HTMLMediaElement {
 [SameObject] readonly attribute RemotePlayback remote;
@@ -3,12 +3,18 @@
 [
 Exposed=Window
 ] interface Sanitizer {
-[RaisesException] constructor(optional SanitizerConfig sanitizerConfig = {});
-[CallWith=ScriptState, RaisesException] DocumentFragment sanitize(DOMString input);
-[CallWith=ScriptState, RaisesException] DOMString sanitizeToString(DOMString input);
+constructor(optional SanitizerConfig sanitizerConfig = {});
+DocumentFragment sanitize(DOMString input);
+DOMString sanitizeToString(DOMString input);
+
+readonly attribute SanitizerConfig creationOptions;
 };
 
 dictionary SanitizerConfig {
-sequence<DOMString>? dropElements;
-sequence<DOMString>? dropAttributes;
+sequence<DOMString> allowElements;
+sequence<DOMString> blockElements;
+sequence<DOMString> dropElements;
+sequence<DOMString> allowAttributes;
+sequence<DOMString> blockAttributes;
+sequence<DOMString> dropAttributes;
 };
@@ -6,7 +6,7 @@
 <script src="resources/util.js"></script>
 <div id="parent" style="position: relative; width: 200px; height: 200px;
 border: 50px solid blue; overflow: hidden">
-<div id="child" style="height: 400px"></div>
+<div id="child" style="width: 400px; height: 400px"></div>
 </div>
 <script>
 
@@ -0,0 +1,30 @@
+<!DOCTYPE html>
+<title>Layout Instability: parent/child moved together with overflow-x: clip</title>
+<link rel="help" href="https://wicg.github.io/layout-instability/" />
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script src="resources/util.js"></script>
+<div id="parent" style="position: relative; width: 100px; height: 100px; border: 100px solid blue; overflow-x: clip">
+<div id="child" style="width: 1000px; height: 300px"></div>
+</div>
+<script>
+
+promise_test(async () => {
+const watcher = new ScoreWatcher;
+
+// Wait for the initial render to complete.
+await waitForAnimationFrames(2);
+
+// Modify the position of the div.
+const parent = document.querySelector("#parent");
+parent.style.top = '100px';
+
+const expectedScore = computeExpectedScore(300 * (400 + 100), 100);
+
+// Observer fires after the frame is painted.
+assert_equals(watcher.score, 0);
+await watcher.promise;
+assert_equals(watcher.score, expectedScore);
+}, 'Parent/child movement with overflow-x: clip.');
+
+</script>
@@ -19,16 +19,12 @@ promise_test(async () => {
 const parent = document.querySelector("#parent");
 parent.style.top = '100px';
 
-// If the implementation reports child and parent separately
-// (overlapping are should be excluded):
-const expectedScoreMin = computeExpectedScore(300 * (300 + 100) + 100 * 100, 100);
-// If the implementation reports parent bounding box (including child):
-const expectedScoreMax = computeExpectedScore(300 * (400 + 100), 100);
+const expectedScore = computeExpectedScore(300 * (400 + 100), 100);
 
 // Observer fires after the frame is painted.
 assert_equals(watcher.score, 0);
 await watcher.promise;
-assert_between_inclusive(watcher.score, expectedScoreMin, expectedScoreMax);
+assert_equals(watcher.score, expectedScore);
 }, 'Parent/child movement.');
 
 </script>
@@ -38,7 +38,7 @@
 let recorder = new MediaRecorder(video);
 recorder.onerror = t.step_func(mediaRecorderErrorEvent => {
 assert_true(mediaRecorderErrorEvent instanceof MediaRecorderErrorEvent, 'the type of event should be MediaRecorderErrorEvent');
-assert_equals(mediaRecorderErrorEvent.error.name, 'UnknownError', 'the type of error should be UnknownError when track has been added or removed');
+assert_equals(mediaRecorderErrorEvent.error.name, 'InvalidModificationError', 'the type of error should be InvalidModificationError when track has been added or removed');
 assert_true(mediaRecorderErrorEvent.isTrusted, 'isTrusted should be true when the event is created by C++');
 assert_equals(recorder.state, "inactive", "MediaRecorder has been stopped after adding a track to stream");
 t.done();
@@ -57,7 +57,7 @@
 let recorder = new MediaRecorder(video);
 recorder.onerror = t.step_func(mediaRecorderErrorEvent => {
 assert_true(mediaRecorderErrorEvent instanceof MediaRecorderErrorEvent, 'the type of event should be MediaRecorderErrorEvent');
-assert_equals(mediaRecorderErrorEvent.error.name, 'UnknownError', 'the type of error should be UnknownError when track has been added or removed');
+assert_equals(mediaRecorderErrorEvent.error.name, 'InvalidModificationError', 'the type of error should be InvalidModificationError when track has been added or removed');
 assert_true(mediaRecorderErrorEvent.isTrusted, 'isTrusted should be true when the event is created by C++');
 assert_equals(recorder.state, "inactive", "MediaRecorder has been stopped after removing a track from stream");
 t.done();
@@ -115,6 +115,44 @@
 new Promise(r => t.step_timeout(r, 0))]);
 }, "MediaRecorder will fire an exception when stopped after having just been spontaneously stopped");
 
+promise_test(async t => {
+const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: true });
+const recorder = new MediaRecorder(stream);
+let events = [];
+const startPromise = new Promise(resolve => recorder.onstart = resolve);
+const stopPromise = new Promise(resolve => recorder.onstop = resolve);
+
+startPromise.then(() => events.push("start"));
+stopPromise.then(() => events.push("stop"));
+
+recorder.start();
+recorder.stop();
+
+await stopPromise;
+assert_array_equals(events, ["start", "stop"]);
+}, "MediaRecorder will fire start event even if stopped synchronously");
+
+promise_test(async t => {
+const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: true });
+const recorder = new MediaRecorder(stream);
+let events = [];
+const startPromise = new Promise(resolve => recorder.onstart = resolve);
+const stopPromise = new Promise(resolve => recorder.onstop = resolve);
+const errorPromise = new Promise(resolve => recorder.onerror = resolve);
+const dataPromise = new Promise(resolve => recorder.ondataavailable = resolve);
+
+startPromise.then(() => events.push("start"));
+stopPromise.then(() => events.push("stop"));
+errorPromise.then(() => events.push("error"));
+dataPromise.then(() => events.push("data"));
+
+recorder.start();
+stream.removeTrack(stream.getAudioTracks()[0]);
+
+await stopPromise;
+assert_array_equals(events, ["start", "error", "data", "stop"]);
+}, "MediaRecorder will fire start event even if a track is removed synchronously");
+
 </script>
 </body>
 </html>
@@ -3,85 +3,35 @@
 <head>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
-<script src="support/testcases.sub.js"></script>
 </head>
 
 <body>
 <script>
-const default_option ={dropElements: null};
 test(t => {
 let s = new Sanitizer();
 assert_true(s instanceof Sanitizer);
-assert_object_equals(s.creationOptions, default_option);
 }, "SanitizerAPI creator without config.");
 
 test(t => {
 let s = new Sanitizer({});
 assert_true(s instanceof Sanitizer);
-assert_object_equals(s.creationOptions, default_option);
 }, "SanitizerAPI creator with empty config.");
 
-test(t => {
-let s = new Sanitizer(null);
-assert_true(s instanceof Sanitizer);
-assert_object_equals(s.creationOptions, default_option);
-}, "SanitizerAPI creator with null as config.");
-
-test(t => {
-let s = new Sanitizer(undefined);
-assert_true(s instanceof Sanitizer);
-assert_object_equals(s.creationOptions, default_option);
-}, "SanitizerAPI creator with undefined as config.");
-
 test(t => {
 let s = new Sanitizer({testConfig: [1,2,3], attr: ["test", "i", "am"]});
 assert_true(s instanceof Sanitizer);
-assert_object_equals(s.creationOptions, default_option);
 }, "SanitizerAPI creator with config ignore unknown values.");
 
+const config_names = ["dropElements", "blockElements", "allowElements", "dropAttributes", "blockAttributes", "allowAttributes"];
+config_names.forEach(cname => {
+let options = {};
+options[cname] = [];
 test(t => {
-let options = {dropElements: ["div"]};
 let s = new Sanitizer(options);
 assert_true(s instanceof Sanitizer);
-assert_object_equals(s.creationOptions, {dropElements: ["DIV"]});
-
-options.dropElements.push("test");
-assert_object_equals(s.creationOptions, {dropElements: ["DIV"]});
-
-s.creationOptions = {dropElements: ["test", "t"]};
-assert_object_equals(s.creationOptions, {dropElements: ["DIV"]});
-
-s.creationOptions['dropElements'] = [1,2,3];
-assert_object_equals(s.creationOptions, {dropElements: ["DIV"]});
-}, "SanitizerAPI config is not editable.");
-
-test(t => {
-let s = new Sanitizer({dropElements: []});
-assert_true(s instanceof Sanitizer);
 assert_equals(s.sanitizeToString("<div>balabala<i>test</i></div>"), "<div>balabala<i>test</i></div>");
-}, "SanitizerAPI creator with config {dropElements: []}.")
+}, "SanitizerAPI creator with config " + JSON.stringify(options) + ".");
+});
-
-test(t => {
-let s = new Sanitizer({dropElements: null});
-assert_true(s instanceof Sanitizer);
-assert_true(s.creationOptions instanceof Object);
-assert_object_equals(s.creationOptions, default_option);
-}, "SanitizerAPI creator with config {dropElements: null}.")
-
-test(t => {
-let s = new Sanitizer({dropElements: undefined});
-assert_true(s instanceof Sanitizer);
-assert_true(s.creationOptions instanceof Object);
-assert_object_equals(s.creationOptions, default_option);
-}, "SanitizerAPI creator with config {dropElements: undefined}.");
-
-test(t => {
-assert_throws_js(TypeError, _ => {let s = new Sanitizer({dropElements: 123})});
-}, "SanitizerAPI creator with config {dropElements: 123}.");
-
-test(t => {
-assert_throws_js(TypeError, _ => {let s = new Sanitizer({dropElements: "div"})});
-}, "SanitizerAPI creator with config {dropElements: div}.");
-
 
 </script>
 </body>
@ -16,8 +16,6 @@
|
||||||
|
|
||||||
testcases.forEach(c => test(t => {
|
testcases.forEach(c => test(t => {
|
||||||
let s = new Sanitizer(c.config_input);
|
let s = new Sanitizer(c.config_input);
|
||||||
assert_true(s.creationOptions instanceof Object);
|
|
||||||
assert_object_equals(s.creationOptions, c.config_value);
|
|
||||||
|
|
||||||
fragment = s.sanitize(c.value);
|
fragment = s.sanitize(c.value);
|
||||||
assert_true(fragment instanceof DocumentFragment);
|
assert_true(fragment instanceof DocumentFragment);
|
||||||
|
|
|
@ -10,8 +10,6 @@
|
||||||
<script>
|
<script>
|
||||||
testcases.forEach(c => test(t => {
|
testcases.forEach(c => test(t => {
|
||||||
let s = new Sanitizer(c.config_input);
|
let s = new Sanitizer(c.config_input);
|
||||||
assert_true(s.creationOptions instanceof Object);
|
|
||||||
assert_object_equals(s.creationOptions, c.config_value);
|
|
||||||
assert_equals(s.sanitizeToString(c.value), c.result);
|
assert_equals(s.sanitizeToString(c.value), c.result);
|
||||||
}, "SanitizerAPI sanitizeToString function for " + c.message));
|
}, "SanitizerAPI sanitizeToString function for " + c.message));
|
||||||
</script>
|
</script>
|
||||||
|
|
|
@ -1,24 +1,12 @@
|
||||||
const testcases = [
|
const testcases = [
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "test", result: "test", message: "string"},
|
{config_input: {}, value: "test", result: "test", message: "string"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "<b>bla</b>", result: "<b>bla</b>", message: "html fragment"},
|
{config_input: {}, value: "<b>bla</b>", result: "<b>bla</b>", message: "html fragment"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "<a<embla", result: "", message: "broken html"},
|
{config_input: {}, value: "<a<embla", result: "", message: "broken html"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: {}, result: "[object Object]", message: "empty object"},
|
{config_input: {}, value: {}, result: "[object Object]", message: "empty object"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: 1, result: "1", message: "number"},
|
{config_input: {}, value: 1, result: "1", message: "number"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: 000, result: "0", message: "zeros"},
|
{config_input: {}, value: 000, result: "0", message: "zeros"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: 1+2, result: "3", message: "arithmetic"},
|
{config_input: {}, value: 1+2, result: "3", message: "arithmetic"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "", result: "", message: "empty string"},
|
{config_input: {}, value: "", result: "", message: "empty string"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: undefined, result: "undefined", message: "undefined"},
|
{config_input: {}, value: "<html><head></head><body>test</body></html>", result: "test", message: "document"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: null, result: "null", message: "null"},
|
{config_input: {}, value: "<div>test", result: "<div>test</div>", message: "html without close tag"},
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "<html><head></head><body>test</body></html>", result: "test", message: "document"},
|
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "<div>test", result: "<div>test</div>", message: "html without close tag"},
|
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "<script>alert('i am a test')<\/script>", result: "", message: "scripts"},
|
|
||||||
{config_input: {}, config_value: {dropElements: null}, value: "<p onclick='a= 123'>Click.</p>", result: "<p>Click.</p>", message: "onclick scripts"},
|
|
||||||
{config_input: {test: 123}, config_value: {dropElements: null}, value: "test", result: "test", message: "invalid config_input"},
|
|
||||||
{config_input: {dropElements: []}, config_value: {dropElements:[]}, value: "test", result: "test", message: "empty dropElements list"},
|
|
||||||
{config_input: {dropElements: ["div"]}, config_value: {dropElements:["DIV"]}, value: "<div>test</div><c>bla", result: "<c>bla</c>", message: "test html without close tag with dropElements list ['div']"},
|
|
||||||
{config_input: {dropElements: ["script"]}, config_value: {dropElements:["SCRIPT"]}, value: "<script>alert('i am a test')<\/script>", result: "", message: "test script with [\"script\"] as dropElements list"},
|
|
||||||
{config_input: {dropElements: ["test", "i"]}, config_value: {dropElements:["TEST","I"]}, value: "<div>balabala<i>test</i></div><test>t</test>", result: "<div>balabala</div>", message: "dropElements list [\"test\", \"i\"]}"},
|
|
||||||
{config_input: {dropElements: ["I", "AM"]}, config_value: {dropElements:["I", "AM"]}, value: "<div>balabala<am>test</am></div>", result: "<div>balabala</div>", message: "dropElements list [\"I\", \"AM\"]}"},
|
|
||||||
{config_input: {dropElements: ["am", "p"]}, config_value: {dropElements:["AM","P"]}, value: "<div>balabala<i>i</i><p>t</p><test>a</test></div>", result: "<div>balabala<i>i</i><test>a</test></div>", message: "dropElements list [\"am\", \"p\"]}"},
|
|
||||||
{config_input: {dropElements: [123, [], "test", "i"]}, config_value: {dropElements:["123","","TEST","I"]}, value: "<div>balabala<i>test</i></div><test>t</test>", result: "<div>balabala</div>", message: "dropElements list with invalid values}"}
|
|
||||||
];
|
];
|
||||||
|
|
|
@ -9,7 +9,7 @@
|
||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
idl_test(
|
idl_test(
|
||||||
['wake-lock'],
|
['screen-wake-lock'],
|
||||||
['dom', 'html'],
|
['dom', 'html'],
|
||||||
async idl_array => {
|
async idl_array => {
|
||||||
idl_array.add_objects({ Navigator: ['navigator'] });
|
idl_array.add_objects({ Navigator: ['navigator'] });
|
||||||
|
|
|
@@ -0,0 +1,255 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/helpers.js"></script>
<script src="../resources/recording-streams.js"></script>
<script src="../resources/test-utils.js"></script>
<script>
'use strict';

promise_test(async () => {
  const rs = await createTransferredReadableStream({
    start(controller) {
      controller.enqueue('a');
      controller.close();
    }
  });
  const reader = rs.getReader();
  {
    const {value, done} = await reader.read();
    assert_false(done, 'should not be done yet');
    assert_equals(value, 'a', 'first chunk should be a');
  }
  {
    const {done} = await reader.read();
    assert_true(done, 'should be done now');
  }
}, 'sending one chunk through a transferred stream should work');

promise_test(async () => {
  let controller;
  const rs = await createTransferredReadableStream({
    start(c) {
      controller = c;
    }
  });
  for (let i = 0; i < 10; ++i) {
    controller.enqueue(i);
  }
  controller.close();
  const reader = rs.getReader();
  for (let i = 0; i < 10; ++i) {
    const {value, done} = await reader.read();
    assert_false(done, 'should not be done yet');
    assert_equals(value, i, 'chunk content should match index');
  }
  const {done} = await reader.read();
  assert_true(done, 'should be done now');
}, 'sending ten chunks through a transferred stream should work');

promise_test(async () => {
  let controller;
  const rs = await createTransferredReadableStream({
    start(c) {
      controller = c;
    }
  });
  const reader = rs.getReader();
  for (let i = 0; i < 10; ++i) {
    controller.enqueue(i);
    const {value, done} = await reader.read();
    assert_false(done, 'should not be done yet');
    assert_equals(value, i, 'chunk content should match index');
  }
  controller.close();
  const {done} = await reader.read();
  assert_true(done, 'should be done now');
}, 'sending ten chunks one at a time should work');

promise_test(async () => {
  let controller;
  const rs = await createTransferredReadableStream({
    start() {
      this.counter = 0;
    },
    pull(controller) {
      controller.enqueue(this.counter);
      ++this.counter;
      if (this.counter === 10)
        controller.close();
    }
  });
  const reader = rs.getReader();
  for (let i = 0; i < 10; ++i) {
    const {value, done} = await reader.read();
    assert_false(done, 'should not be done yet');
    assert_equals(value, i, 'chunk content should match index');
  }
  const {done} = await reader.read();
  assert_true(done, 'should be done now');
}, 'sending ten chunks on demand should work');

promise_test(async () => {
  const rs = recordingReadableStream({}, { highWaterMark: 0 });
  await delay(0);
  assert_array_equals(rs.events, [], 'pull() should not have been called');
  // Eat the message so it can't interfere with other tests.
  addEventListener('message', () => {}, {once: true});
  // The transfer is done manually to verify that it is posting the stream that
  // relieves backpressure, not receiving it.
  postMessage(rs, '*', [rs]);
  await delay(0);
  assert_array_equals(rs.events, ['pull'], 'pull() should have been called');
}, 'transferring a stream should relieve backpressure');

promise_test(async () => {
  const rs = await recordingTransferredReadableStream({
    pull(controller) {
      controller.enqueue('a');
    }
  }, { highWaterMark: 2 });
  await delay(0);
  assert_array_equals(rs.events, ['pull', 'pull', 'pull'],
                      'pull() should have been called three times');
}, 'transferring a stream should add one chunk to the queue size');

promise_test(async () => {
  const rs = await recordingTransferredReadableStream({
    start(controller) {
      controller.enqueue(new Uint8Array(1024));
      controller.enqueue(new Uint8Array(1024));
    }
  }, new ByteLengthQueuingStrategy({highWaterMark: 512}));
  await delay(0);
  // At this point the queue contains 1024/512 bytes and 1/1 chunk, so it's full
  // and pull() is not called.
  assert_array_equals(rs.events, [], 'pull() should not have been called');
  const reader = rs.getReader();
  const {value, done} = await reader.read();
  assert_false(done, 'we should not be done');
  assert_equals(value.byteLength, 1024, 'expected chunk should be returned');
  // Now the queue contains 0/512 bytes and 1/1 chunk, so pull() is called. If
  // the implementation erroneously counted the extra queue space in bytes, then
  // the queue would contain 1024/513 bytes and pull() wouldn't be called.
  assert_array_equals(rs.events, ['pull'], 'pull() should have been called');
}, 'the extra queue from transferring is counted in chunks');

promise_test(async () => {
  const rs = await recordingTransferredReadableStream();
  rs.cancel('message');
  await delay(0);
  assert_array_equals(rs.events, ['pull', 'cancel', 'message'],
                      'cancel() should have been called');
  const reader = rs.getReader();
  // Check the stream really got closed.
  await reader.closed;
}, 'cancel should be propagated to the original');

promise_test(async () => {
  let resolveCancelCalled;
  const cancelCalled = new Promise(resolve => {
    resolveCancelCalled = resolve;
  });
  const rs = await recordingTransferredReadableStream({
    cancel() {
      resolveCancelCalled();
    }
  });
  const reader = rs.getReader();
  const readPromise = reader.read();
  reader.cancel('done');
  const { done } = await readPromise;
  assert_true(done, 'should be done');
  await cancelCalled;
  assert_array_equals(rs.events, ['pull', 'cancel', 'done'],
                      'events should match');
}, 'cancel should abort a pending read()');

promise_test(async () => {
  let cancelComplete = false;
  const rs = await createTransferredReadableStream({
    async cancel() {
      await flushAsyncEvents();
      cancelComplete = true;
    }
  });
  await rs.cancel();
  assert_false(cancelComplete,
               'cancel() on the underlying sink should not have completed');
}, 'stream cancel should not wait for underlying source cancel');

promise_test(async t => {
  const rs = await recordingTransferredReadableStream();
  const reader = rs.getReader();
  let serializationHappened = false;
  rs.controller.enqueue({
    get getter() {
      serializationHappened = true;
      return 'a';
    }
  });
  await flushAsyncEvents();
  assert_false(serializationHappened,
               'serialization should not have happened yet');
  const {value, done} = await reader.read();
  assert_false(done, 'should not be done');
  assert_equals(value.getter, 'a', 'getter should be a');
  assert_true(serializationHappened,
              'serialization should have happened');
}, 'serialization should not happen until the value is read');

promise_test(async t => {
  const rs = await recordingTransferredReadableStream();
  const reader = rs.getReader();
  rs.controller.enqueue(new ReadableStream());
  await promise_rejects_dom(t, 'DataCloneError', reader.read(),
                            'closed promise should reject');
  assert_throws_js(TypeError, () => rs.controller.enqueue(),
                   'original stream should be errored');
}, 'transferring a non-serializable chunk should error both sides');

promise_test(async t => {
  const rs = await createTransferredReadableStream({
    start(controller) {
      controller.error('foo');
    }
  });
  const reader = rs.getReader();
  return promise_rejects_exactly(t, 'foo', reader.read(),
                                 'error should be passed through');
}, 'errors should be passed through');

promise_test(async () => {
  const rs = await recordingTransferredReadableStream();
  await delay(0);
  const reader = rs.getReader();
  reader.cancel();
  rs.controller.error();
  const {done} = await reader.read();
  assert_true(done, 'should be done');
  assert_throws_js(TypeError, () => rs.controller.enqueue(),
                   'enqueue should throw');
}, 'race between cancel() and error() should leave sides in different states');

promise_test(async () => {
  const rs = await recordingTransferredReadableStream();
  await delay(0);
  const reader = rs.getReader();
  reader.cancel();
  rs.controller.close();
  const {done} = await reader.read();
  assert_true(done, 'should be done');
}, 'race between cancel() and close() should be benign');

promise_test(async () => {
  const rs = await recordingTransferredReadableStream();
  await delay(0);
  const reader = rs.getReader();
  reader.cancel();
  rs.controller.enqueue('a');
  const {done} = await reader.read();
  assert_true(done, 'should be done');
}, 'race between cancel() and enqueue() should be benign');

</script>
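Note: the tests above all rest on ReadableStream objects being allowed in the transfer list of postMessage(). A minimal sketch of the round trip they exercise, assuming `otherWindow` is some same-page window reference (the name is illustrative, not part of the test suite):

  // Producer realm: create a stream and transfer it away.
  const rs = new ReadableStream({
    start(controller) {
      controller.enqueue('a');
      controller.close();
    }
  });
  otherWindow.postMessage(rs, '*', [rs]);  // rs.locked becomes true here

  // Receiver realm: evt.data is a ReadableStream belonging to that realm.
  addEventListener('message', async evt => {
    const reader = evt.data.getReader();
    const {value, done} = await reader.read();  // value === 'a', done === false
  }, {once: true});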
132 tests/wpt/web-platform-tests/streams/transferable/reason.html Normal file
@@ -0,0 +1,132 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/helpers.js"></script>
<script>
'use strict';

// Chrome used to special-case the reason for cancel() and abort() in order to
// handle exceptions correctly. This is no longer necessary. These tests are
// retained to avoid regressions.

async function getTransferredReason(originalReason) {
  let resolvePromise;
  const rv = new Promise(resolve => {
    resolvePromise = resolve;
  });
  const rs = await createTransferredReadableStream({
    cancel(reason) {
      resolvePromise(reason);
    }
  });
  await rs.cancel(originalReason);
  return rv;
}

for (const value of ['hi', '\t\r\n', 7, 3.0, undefined, null, true, false,
                     NaN, Infinity]) {
  promise_test(async () => {
    const reason = await getTransferredReason(value);
    assert_equals(reason, value, 'reason should match');
  }, `reason with a simple value of '${value}' should be preserved`);
}

for (const badType of [Symbol('hi'), _ => 'hi']) {
  promise_test(async t => {
    return promise_rejects_dom(t, 'DataCloneError',
                               getTransferredReason(badType),
                               'cancel() should reject');
  }, `reason with a type of '${typeof badType}' should be rejected and ` +
     `error the stream`);
}

promise_test(async () => {
  const reasonAsJson =
        `{"foo":[1,"col"],"bar":{"hoge":0.2,"baz":{},"shan":null}}`;
  const reason = await getTransferredReason(JSON.parse(reasonAsJson));
  assert_equals(JSON.stringify(reason), reasonAsJson,
                'object should be preserved');
}, 'objects that can be completely expressed in JSON should be preserved');

promise_test(async () => {
  const circularObject = {};
  circularObject.self = circularObject;
  const reason = await getTransferredReason(circularObject);
  assert_true(reason instanceof Object, 'an Object should be output');
  assert_equals(reason.self, reason,
                'the object should have a circular reference');
}, 'objects that cannot be expressed in JSON should also be preserved');

promise_test(async () => {
  const originalReason = new TypeError('hi');
  const reason = await getTransferredReason(originalReason);
  assert_true(reason instanceof TypeError,
              'type should be preserved');
  assert_equals(reason.message, originalReason.message,
                'message should be preserved');
}, 'the type and message of a TypeError should be preserved');

promise_test(async () => {
  const originalReason = new TypeError('hi');
  originalReason.foo = 'bar';
  const reason = await getTransferredReason(originalReason);
  assert_false('foo' in reason,
               'foo should not be preserved');
}, 'other attributes of a TypeError should not be preserved');

promise_test(async () => {
  const originalReason = new TypeError();
  originalReason.message = [1, 2, 3];
  const reason = await getTransferredReason(originalReason);
  assert_equals(reason.message, '1,2,3', 'message should be stringified');
}, 'a TypeError message should be converted to a string');

promise_test(async () => {
  const originalReason = new TypeError();
  Object.defineProperty(originalReason, 'message', {
    get() { return 'words'; }
  });
  const reason = await getTransferredReason(originalReason);
  assert_equals(reason.message, '', 'message should not be preserved');
}, 'a TypeError message should not be preserved if it is a getter');

promise_test(async () => {
  const originalReason = new TypeError();
  delete originalReason.message;
  TypeError.prototype.message = 'inherited message';
  const reason = await getTransferredReason(originalReason);
  delete TypeError.prototype.message;
  assert_equals(reason.message, '', 'message should not be preserved');
}, 'a TypeError message should not be preserved if it is inherited');

promise_test(async () => {
  const originalReason = new DOMException('yes', 'AbortError');
  const reason = await getTransferredReason(originalReason);
  assert_true(reason instanceof DOMException,
              'reason should be a DOMException');
  assert_equals(reason.message, originalReason.message,
                'the messages should match');
  assert_equals(reason.name, originalReason.name,
                'the names should match');
}, 'DOMException errors should be preserved');

for (const errorConstructor of [EvalError, RangeError,
                                ReferenceError, SyntaxError, TypeError,
                                URIError]) {
  promise_test(async () => {
    const originalReason = new errorConstructor('nope');
    const reason = await getTransferredReason(originalReason);
    assert_equals(typeof reason, 'object', 'reason should have type object');
    assert_true(reason instanceof errorConstructor,
                `reason should inherit ${errorConstructor.name}`);
    assert_true(reason instanceof Error, 'reason should inherit Error');
    assert_equals(reason.constructor, errorConstructor,
                  'reason should have the right constructor');
    assert_equals(reason.name, errorConstructor.name,
                  `name should match constructor name`);
    assert_equals(reason.message, 'nope', 'message should match');
  }, `${errorConstructor.name} should be preserved`);
}

</script>
@@ -0,0 +1,7 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script>
addEventListener('message', evt => {
  evt.source.postMessage(evt.data, '*', [evt.data]);
});
</script>
@@ -0,0 +1,2 @@
// A worker that just transfers back any message that is sent to it.
onmessage = evt => postMessage(evt.data, [evt.data]);
@@ -0,0 +1,121 @@
'use strict';

// Create a ReadableStream that will pass the tests in
// testTransferredReadableStream(), below.
function createOriginalReadableStream() {
  return new ReadableStream({
    start(controller) {
      controller.enqueue('a');
      controller.close();
    }
  });
}

// Common tests to roughly determine that |rs| is a correctly transferred
// version of a stream created by createOriginalReadableStream().
function testTransferredReadableStream(rs) {
  assert_equals(rs.constructor, ReadableStream,
                'rs should be a ReadableStream in this realm');
  assert_true(rs instanceof ReadableStream,
              'instanceof check should pass');

  // Perform a brand-check on |rs| in the process of calling getReader().
  const reader = ReadableStream.prototype.getReader.call(rs);

  return reader.read().then(({value, done}) => {
    assert_false(done, 'done should be false');
    assert_equals(value, 'a', 'value should be "a"');
    return reader.read();
  }).then(({done}) => {
    assert_true(done, 'done should be true');
  });
}

function testMessage(msg) {
  assert_array_equals(msg.ports, [], 'there should be no ports in the event');
  return testTransferredReadableStream(msg.data);
}

function testMessageEvent(target) {
  return new Promise((resolve, reject) => {
    target.addEventListener('message', ev => {
      try {
        resolve(testMessage(ev));
      } catch (e) {
        reject(e);
      }
    }, {once: true});
  });
}

function testMessageEventOrErrorMessage(target) {
  return new Promise((resolve, reject) => {
    target.addEventListener('message', ev => {
      if (typeof ev.data === 'string') {
        // Assume it's an error message and reject with it.
        reject(ev.data);
        return;
      }

      try {
        resolve(testMessage(ev));
      } catch (e) {
        reject(e);
      }
    }, {once: true});
  });
}

function checkTestResults(target) {
  return new Promise((resolve, reject) => {
    target.onmessage = msg => {
      // testharness.js sends us objects which we need to ignore.
      if (typeof msg.data !== 'string')
        return;

      if (msg.data === 'OK') {
        resolve();
      } else {
        reject(msg.data);
      }
    };
  });
}

// These tests assume that a transferred ReadableStream will behave the same
// regardless of how it was transferred. This enables us to simply transfer the
// stream to ourselves.
function createTransferredReadableStream(underlyingSource) {
  const original = new ReadableStream(underlyingSource);
  const promise = new Promise((resolve, reject) => {
    addEventListener('message', msg => {
      const rs = msg.data;
      if (rs instanceof ReadableStream) {
        resolve(rs);
      } else {
        reject(new Error(`what is this thing: "${rs}"?`));
      }
    }, {once: true});
  });
  postMessage(original, '*', [original]);
  return promise;
}

function recordingTransferredReadableStream(underlyingSource, strategy) {
  const original = recordingReadableStream(underlyingSource, strategy);
  const promise = new Promise((resolve, reject) => {
    addEventListener('message', msg => {
      const rs = msg.data;
      if (rs instanceof ReadableStream) {
        rs.events = original.events;
        rs.eventsWithoutPulls = original.eventsWithoutPulls;
        rs.controller = original.controller;
        resolve(rs);
      } else {
        reject(new Error(`what is this thing: "${rs}"?`));
      }
    }, {once: true});
  });
  postMessage(original, '*', [original]);
  return promise;
}
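Note: createTransferredReadableStream() and recordingTransferredReadableStream() above rely on a window being able to post a message to itself, so the stream is transferred "to ourselves"; the comment in the file assumes this behaves the same as a cross-realm transfer. A hedged usage sketch, written as a testharness.js test in the same style as the suite (illustrative only, not part of the upstream files):

  promise_test(async () => {
    const rs = await createTransferredReadableStream({
      start(controller) {
        controller.enqueue('x');
        controller.close();
      }
    });
    // rs is the transferred copy; the original stream is now locked.
    const reader = rs.getReader();
    const {value, done} = await reader.read();
    assert_false(done, 'should not be done');
    assert_equals(value, 'x', 'the transferred stream should deliver the chunk');
  }, 'illustrative use of the self-transfer helper');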
@@ -0,0 +1,11 @@
'use strict';
importScripts('/resources/testharness.js', 'helpers.js');

onconnect = evt => {
  const port = evt.source;
  const promise = testMessageEvent(port);
  port.start();
  promise
      .then(() => port.postMessage('OK'))
      .catch(err => port.postMessage(`BAD: ${err}`));
};
@@ -0,0 +1,7 @@
'use strict';
importScripts('/resources/testharness.js', 'helpers.js');

const promise = testMessageEvent(self);
promise
    .then(() => postMessage('OK'))
    .catch(err => postMessage(`BAD: ${err}`));
@@ -0,0 +1,12 @@
'use strict';
importScripts('helpers.js');

onconnect = msg => {
  const port = msg.source;
  const orig = createOriginalReadableStream();
  try {
    port.postMessage(orig, [orig]);
  } catch (e) {
    port.postMessage(e.message);
  }
};
@@ -0,0 +1,5 @@
'use strict';
importScripts('helpers.js');

const orig = createOriginalReadableStream();
postMessage(orig, [orig]);
@@ -0,0 +1,39 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="helpers.js"></script>
<script>
'use strict';

setup({
  explicit_done: true
});

function startTests() {
  promise_test(() => {
    const orig = createOriginalReadableStream();
    const promise = checkTestResults(navigator.serviceWorker);
    navigator.serviceWorker.controller.postMessage(orig, [orig]);
    assert_true(orig.locked, 'the original stream should be locked');
    return promise;
  }, 'serviceWorker.controller.postMessage should be able to transfer a ' +
     'ReadableStream');

  promise_test(() => {
    const promise = testMessageEventOrErrorMessage(navigator.serviceWorker);
    navigator.serviceWorker.controller.postMessage('SEND');
    return promise;
  }, 'postMessage in a service worker should be able to transfer ReadableStream');

  done();
}

// Delay running the tests until we get a message from the page telling us to.
// This is to work around an issue where testharness.js doesn't detect
// completion of the tests if they fail too early.
onmessage = msg => {
  if (msg.data === 'explicit trigger')
    startTests();
};

</script>
@@ -0,0 +1,30 @@
'use strict';
importScripts('/resources/testharness.js', 'helpers.js');

onmessage = msg => {
  const client = msg.source;
  if (msg.data === 'SEND') {
    sendingTest(client);
  } else {
    receivingTest(msg, client);
  }
};

function sendingTest(client) {
  const orig = createOriginalReadableStream();
  try {
    client.postMessage(orig, [orig]);
  } catch (e) {
    client.postMessage(e.message);
  }
}

function receivingTest(msg, client) {
  try {
    msg.waitUntil(testMessage(msg)
                  .then(() => client.postMessage('OK'))
                  .catch(e => client.postMessage(`BAD: ${e}`)));
  } catch (e) {
    client.postMessage(`BAD: ${e}`);
  }
}
@@ -0,0 +1,28 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/service-workers/service-worker/resources/test-helpers.sub.js"></script>
<script>
'use strict';

const kServiceWorkerUrl = 'resources/service-worker.js';
const kIframeUrl = 'resources/service-worker-iframe.html';

// A dummy test so that we can use the test-helpers.sub.js functions
const test = async_test('service-worker');

async function registerAndStart() {
  const reg = await service_worker_unregister_and_register(
      test, kServiceWorkerUrl, kIframeUrl);
  await wait_for_state(test, reg.installing, 'activated');
  const iframe = await with_iframe(kIframeUrl);
  fetch_tests_from_window(iframe.contentWindow);
  add_completion_callback(() => iframe.remove());
  iframe.contentWindow.postMessage('explicit trigger', '*');
  return service_worker_unregister_and_done(test, kIframeUrl);
}

onload = registerAndStart;

</script>
@@ -0,0 +1,25 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/helpers.js"></script>
<script>
'use strict';

promise_test(t => {
  const orig = createOriginalReadableStream();
  const w = new SharedWorker('resources/receiving-shared-worker.js');
  const promise = checkTestResults(w.port);
  w.port.postMessage(orig, [orig]);
  assert_true(orig.locked, 'the original stream should be locked');
  return promise;
}, 'worker.postMessage should be able to transfer a ReadableStream');

promise_test(t => {
  const w = new SharedWorker('resources/sending-shared-worker.js');
  const promise = testMessageEventOrErrorMessage(w.port);
  w.port.start();
  return promise;
}, 'postMessage in a worker should be able to transfer a ReadableStream');

</script>
@@ -0,0 +1,104 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../resources/test-utils.js"></script>
<script>
'use strict';

promise_test(t => {
  const orig = new TransformStream();
  const promise = new Promise(resolve => {
    addEventListener('message', t.step_func(evt => {
      const transferred = evt.data;
      assert_equals(transferred.constructor, TransformStream,
                    'transferred should be a TransformStream in this realm');
      assert_true(transferred instanceof TransformStream,
                  'instanceof check should pass');

      // Perform a brand-check on |transferred|.
      const readableGetter = Object.getOwnPropertyDescriptor(
          TransformStream.prototype, 'readable').get;
      assert_true(readableGetter.call(transferred) instanceof ReadableStream,
                  'brand check should pass and readable stream should result');
      const writableGetter = Object.getOwnPropertyDescriptor(
          TransformStream.prototype, 'writable').get;
      assert_true(writableGetter.call(transferred) instanceof WritableStream,
                  'brand check should pass and writable stream should result');
      resolve();
    }), {once: true});
  });
  postMessage(orig, '*', [orig]);
  assert_true(orig.readable.locked, 'the readable side should be locked');
  assert_true(orig.writable.locked, 'the writable side should be locked');
  return promise;
}, 'window.postMessage should be able to transfer a TransformStream');

test(() => {
  const ts = new TransformStream();
  const writer = ts.writable.getWriter();
  assert_throws_dom('DataCloneError', () => postMessage(ts, '*', [ts]),
                    'postMessage should throw');
  assert_false(ts.readable.locked, 'readable side should not get locked');
}, 'a TransformStream with a locked writable should not be transferable');

test(() => {
  const ts = new TransformStream();
  const reader = ts.readable.getReader();
  assert_throws_dom('DataCloneError', () => postMessage(ts, '*', [ts]),
                    'postMessage should throw');
  assert_false(ts.writable.locked, 'writable side should not get locked');
}, 'a TransformStream with a locked readable should not be transferable');

test(() => {
  const ts = new TransformStream();
  const reader = ts.readable.getReader();
  const writer = ts.writable.getWriter();
  assert_throws_dom('DataCloneError', () => postMessage(ts, '*', [ts]),
                    'postMessage should throw');
}, 'a TransformStream with both sides locked should not be transferable');

promise_test(t => {
  const source = new ReadableStream({
    start(controller) {
      controller.enqueue('hello ');
      controller.enqueue('there ');
      controller.close();
    }
  });
  let result = '';
  const sink = new WritableStream({
    write(chunk) {
      result += chunk;
    }
  });
  const transform1 = new TransformStream({
    transform(chunk, controller) {
      controller.enqueue(chunk.toUpperCase());
    }
  });
  const transform2 = new TransformStream({
    transform(chunk, controller) {
      controller.enqueue(chunk + chunk);
    }
  });
  const promise = new Promise(resolve => {
    addEventListener('message', t.step_func(evt => {
      const data = evt.data;
      resolve(data.source
              .pipeThrough(data.transform1)
              .pipeThrough(data.transform2)
              .pipeTo(data.sink));
    }));
  });
  postMessage({source, sink, transform1, transform2}, '*',
              [source, transform1, sink, transform2]);
  return promise
      .then(() => delay(0))
      .then(() => {
        assert_equals(result, 'HELLO HELLO THERE THERE ',
                      'transforms should have been applied');
      });
}, 'piping through transferred transforms should work');

</script>
@@ -0,0 +1,60 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/helpers.js"></script>
<script>
'use strict';

promise_test(t => {
  const orig = createOriginalReadableStream();
  const promise = testMessageEvent(window);
  postMessage(orig, '*', [orig]);
  assert_true(orig.locked, 'the original stream should be locked');
  return promise;
}, 'window.postMessage should be able to transfer a ReadableStream');

promise_test(t => {
  const orig = createOriginalReadableStream();
  const promise = new Promise(resolve => {
    window.addEventListener('message', msg => {
      const port = msg.data;
      resolve(testMessageEvent(port));
      port.start();
    }, {once: true});
  });
  const mc = new MessageChannel();
  postMessage(mc.port1, '*', [mc.port1]);
  mc.port2.postMessage(orig, [orig]);
  mc.port2.close();
  assert_true(orig.locked, 'the original stream should be locked');
  return promise;
}, 'port.postMessage should be able to transfer a ReadableStream');

promise_test(t => {
  const orig = createOriginalReadableStream();
  const promise = new Promise(resolve => {
    addEventListener('message', t.step_func(evt => {
      const [rs1, rs2] = evt.data;
      assert_equals(rs1, rs2, 'both ReadableStreams should be the same object');
      resolve();
    }), {once: true});
  });
  postMessage([orig, orig], '*', [orig]);
  return promise;
}, 'the same ReadableStream posted multiple times should arrive together');

const onloadPromise = new Promise(resolve => onload = resolve);

promise_test(() => {
  const orig = createOriginalReadableStream();
  const promise = testMessageEvent(window);
  return onloadPromise.then(() => {
    const echoIframe = document.querySelector('#echo');
    echoIframe.contentWindow.postMessage(orig, '*', [orig]);
    return promise;
  });
}, 'transfer to and from an iframe should work');
</script>

<iframe id=echo src="resources/echo-iframe.html" style="display:none"></iframe>
@@ -0,0 +1,76 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/helpers.js"></script>
<script src="../resources/test-utils.js"></script>
<script>
'use strict';

promise_test(t => {
  const orig = createOriginalReadableStream();
  const w = new Worker('resources/receiving-worker.js');
  t.add_cleanup(() => {
    w.terminate();
  });
  const promise = new Promise((resolve, reject) => {
    checkTestResults(w).then(resolve, reject);
    w.onerror = () => reject('error in worker');
  });
  w.postMessage(orig, [orig]);
  assert_true(orig.locked, 'the original stream should be locked');
  return promise;
}, 'worker.postMessage should be able to transfer a ReadableStream');

promise_test(t => {
  const w = new Worker('resources/sending-worker.js');
  t.add_cleanup(() => {
    w.terminate();
  });
  return new Promise((resolve, reject) => {
    testMessageEvent(w).then(resolve, reject);
    w.onerror = () => reject('error in worker');
  });
}, 'postMessage in a worker should be able to transfer a ReadableStream');

promise_test(async t => {
  const w = new Worker('resources/echo-worker.js');
  let controller;
  const orig = new ReadableStream({
    start(c) {
      controller = c;
    }
  });
  const targetStream = await new Promise((resolve, reject) => {
    w.onmessage = evt => resolve(evt.data);
    w.onerror = () => reject('error in worker');
    w.postMessage(orig, [orig]);
  });
  const reader = targetStream.getReader();
  const reads = [];
  // Place a lot of chunks "in transit". This should increase the likelihood
  // that there is a chunk at each relevant step when the worker is terminated.
  for (let i = 0; i < 50; ++i) {
    await delay(0);
    controller.enqueue(i);
    const expected = i;
    reads.push(reader.read().then(({value, done}) => {
      assert_false(done, 'we should not be done');
      assert_equals(value, expected, 'value should match expectation');
    }));
  }
  w.terminate();
  for (let i = 50; i < 60; ++i) {
    controller.enqueue(i);
    reads.push(
        reader.read().then(t.unreached_func('read() should not resolve')));
    await delay(0);
  }
  // We don't expect every read() to complete, but we want to give them a chance
  // to reject if they're going to.
  return Promise.race([
    Promise.all(reads),
    flushAsyncEvents()
  ]);
}, 'terminating a worker should not error the stream');
</script>
@@ -0,0 +1,136 @@
<!DOCTYPE html>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/helpers.js"></script>
<script src="../resources/test-utils.js"></script>
<script src="../resources/recording-streams.js"></script>
<script>
'use strict';

promise_test(t => {
  const orig = new WritableStream();
  const promise = new Promise(resolve => {
    addEventListener('message', t.step_func(evt => {
      const transferred = evt.data;
      assert_equals(transferred.constructor, WritableStream,
                    'transferred should be a WritableStream in this realm');
      assert_true(transferred instanceof WritableStream,
                  'instanceof check should pass');

      // Perform a brand-check on |transferred|.
      const writer = WritableStream.prototype.getWriter.call(transferred);
      resolve();
    }), {once: true});
  });
  postMessage(orig, '*', [orig]);
  assert_true(orig.locked, 'the original stream should be locked');
  return promise;
}, 'window.postMessage should be able to transfer a WritableStream');

test(() => {
  const ws = new WritableStream();
  const writer = ws.getWriter();
  assert_throws_dom('DataCloneError', () => postMessage(ws, '*', [ws]),
                    'postMessage should throw');
}, 'a locked WritableStream should not be transferable');

promise_test(t => {
  const {writable, readable} = new TransformStream();
  const promise = new Promise(resolve => {
    addEventListener('message', t.step_func(async evt => {
      const {writable, readable} = evt.data;
      const reader = readable.getReader();
      const writer = writable.getWriter();
      const writerPromises = Promise.all([
        writer.write('hi'),
        writer.close(),
      ]);
      const {value, done} = await reader.read();
      assert_false(done, 'we should not be done');
      assert_equals(value, 'hi', 'chunk should have been delivered');
      const readResult = await reader.read();
      assert_true(readResult.done, 'readable should be closed');
      await writerPromises;
      resolve();
    }), {once: true});
  });
  postMessage({writable, readable}, '*', [writable, readable]);
  return promise;
}, 'window.postMessage should be able to transfer a {readable, writable} pair');

function transfer(stream) {
  return new Promise(resolve => {
    addEventListener('message', evt => resolve(evt.data), { once: true });
    postMessage(stream, '*', [stream]);
  });
}

promise_test(async () => {
  const orig = new WritableStream(
      {}, new ByteLengthQueuingStrategy({ highWaterMark: 65536 }));
  const transferred = await transfer(orig);
  const writer = transferred.getWriter();
  assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
}, 'desiredSize for a newly-transferred stream should be 1');

promise_test(async () => {
  const orig = new WritableStream({
    write() {
      return new Promise(() => {});
    }
  });
  const transferred = await transfer(orig);
  const writer = transferred.getWriter();
  await writer.write('a');
  assert_equals(writer.desiredSize, 1, 'desiredSize should be 1');
}, 'effective queue size of a transferred writable should be 2');

promise_test(async () => {
  let resolveWrite;
  const orig = new WritableStream({
    write() {
      return new Promise(resolve => {
        resolveWrite = resolve;
      });
    }
  });
  const transferred = await transfer(orig);
  const writer = transferred.getWriter();
  await writer.write('a');
  let writeDone = false;
  writer.write('b').then(() => {
    writeDone = true;
  });
  await flushAsyncEvents();
  assert_false(writeDone, 'second write should not have resolved yet');
  resolveWrite();
  await delay(0);
  assert_true(writeDone, 'second write should have resolved');
}, 'second write should wait for first underlying write to complete');

promise_test(async t => {
  const orig = recordingWritableStream();
  const transferred = await transfer(orig);
  transferred.abort('p');
  await delay(0);
  assert_array_equals(orig.events, ['abort', 'p'],
                      'abort() should have been called');
}, 'abort() should work');

promise_test(async t => {
  const orig = recordingWritableStream();
  const transferred = await transfer(orig);
  const writer = transferred.getWriter();
  // A WritableStream object cannot be cloned.
  await promise_rejects_dom(t, 'DataCloneError', writer.write(new WritableStream()),
                            'the write should reject');
  await promise_rejects_dom(t, 'DataCloneError', writer.closed,
                            'the stream should be errored');
  await delay(0);
  assert_equals(orig.events.length, 2, 'abort should have been called');
  assert_equals(orig.events[0], 'abort', 'first event should be abort');
  assert_equals(orig.events[1].name, 'DataCloneError',
                'reason should be a DataCloneError');
}, 'writing a unclonable object should error the stream');
</script>
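Note: the writable-stream tests above use a small transfer() helper that posts the stream to the window itself and resolves with the copy carried by the resulting message event. The same self-transfer pattern should work for any transferable stream; a sketch under that assumption (the name transferToSelf is illustrative):

  function transferToSelf(stream) {
    return new Promise(resolve => {
      // The transferred copy arrives as the data of the echoed message event.
      addEventListener('message', evt => resolve(evt.data), {once: true});
      postMessage(stream, '*', [stream]);
    });
  }

  // e.g. const ws = await transferToSelf(new WritableStream({...}));
  // Operations on the transferred side are forwarded to the original stream,
  // which is consistent with the abort test above observing ['abort', 'p'] on
  // the original's event log.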
@@ -957,6 +957,8 @@ def create_parser():
                         "working directory, not just files that changed")
     parser.add_argument("--github-checks-text-file", type=ensure_text,
                         help="Path to GitHub checks output file for Taskcluster runs")
+    parser.add_argument("-j", "--jobs", type=int, default=0,
+                        help="Level to parallelism to use (defaults to 0, which detects the number of CPUs)")
     return parser


@@ -984,18 +986,23 @@ def main(**kwargs_str):

     github_checks_outputter = get_gh_checks_outputter(kwargs["github_checks_text_file"])

-    return lint(repo_root, paths, output_format, ignore_glob, github_checks_outputter)
+    jobs = kwargs.get("jobs", 0)
+
+    return lint(repo_root, paths, output_format, ignore_glob, github_checks_outputter, jobs)


 # best experimental guess at a decent cut-off for using the parallel path
 MIN_FILES_FOR_PARALLEL = 80


-def lint(repo_root, paths, output_format, ignore_glob=None, github_checks_outputter=None):
-    # type: (Text, List[Text], Text, Optional[List[Text]], Optional[GitHubChecksOutputter]) -> int
+def lint(repo_root, paths, output_format, ignore_glob=None, github_checks_outputter=None, jobs=0):
+    # type: (Text, List[Text], Text, Optional[List[Text]], Optional[GitHubChecksOutputter], int) -> int
     error_count = defaultdict(int)  # type: Dict[Text, int]
     last = None

+    if jobs == 0:
+        jobs = multiprocessing.cpu_count()
+
     with io.open(os.path.join(repo_root, "lint.ignore"), "r") as f:
         ignorelist, skipped_files = parse_ignorelist(f)

@@ -1053,8 +1060,8 @@ def lint(repo_root, paths, output_format, ignore_glob=None, github_checks_output
     paths = [p for p in paths if p not in skip]

-    if len(to_check_content) >= MIN_FILES_FOR_PARALLEL:
-        pool = multiprocessing.Pool()
+    if jobs > 1 and len(to_check_content) >= MIN_FILES_FOR_PARALLEL:
+        pool = multiprocessing.Pool(jobs)
         # submit this job first, as it's the longest running
         all_paths_result = pool.apply_async(check_all_paths, (repo_root, paths))
         # each item tends to be quick, so pass things in large chunks to avoid too much IPC overhead
@@ -531,7 +531,8 @@ def test_main_with_args():
                                       for x in ['a', 'b', 'c']],
                                      "normal",
                                      None,
-                                     None)
+                                     None,
+                                     0)
     finally:
         sys.argv = orig_argv

@@ -543,7 +544,7 @@ def test_main_no_args():
         with _mock_lint('lint', return_value=True) as m:
             with _mock_lint('changed_files', return_value=['foo', 'bar']):
                 lint_mod.main(**vars(create_parser().parse_args()))
-                m.assert_called_once_with(repo_root, ['foo', 'bar'], "normal", None, None)
+                m.assert_called_once_with(repo_root, ['foo', 'bar'], "normal", None, None, 0)
     finally:
         sys.argv = orig_argv

@@ -555,6 +556,6 @@ def test_main_all():
         with _mock_lint('lint', return_value=True) as m:
             with _mock_lint('all_filesystem_paths', return_value=['foo', 'bar']):
                 lint_mod.main(**vars(create_parser().parse_args()))
-                m.assert_called_once_with(repo_root, ['foo', 'bar'], "normal", None, None)
+                m.assert_called_once_with(repo_root, ['foo', 'bar'], "normal", None, None, 0)
     finally:
         sys.argv = orig_argv
@@ -73,7 +73,9 @@ class WrapperHandler(object):
         self.handler(request, response)

     def handle_request(self, request, response):
-        for header_name, header_value in self.headers:
+        headers = self.headers + handlers.load_headers(
+            request, self._get_filesystem_path(request))
+        for header_name, header_value in headers:
             response.headers.set(header_name, header_value)

         self.check_exposure(request)
@@ -111,13 +113,17 @@ class WrapperHandler(object):
             path = replace_end(path, src, dest)
         return path

+    def _get_filesystem_path(self, request):
+        """Get the path of the underlying resource file on disk."""
+        return self._get_path(filesystem_path(self.base_path, request, self.url_base), False)
+
     def _get_metadata(self, request):
         """Get an iterator over script metadata based on // META comments in the
         associated js file.

         :param request: The Request being processed.
         """
-        path = self._get_path(filesystem_path(self.base_path, request, self.url_base), False)
+        path = self._get_filesystem_path(request)
         try:
             with open(path, "rb") as f:
                 for key, value in read_script_metadata(f, js_meta_re):
@@ -85,6 +85,7 @@ class TestUsingServer(unittest.TestCase):
         else:
             assert resp.info()[name] == ", ".join(values)

+
 @pytest.mark.skipif(not wptserve.utils.http2_compatible(), reason="h2 server only works in python 2.7.15")
 class TestUsingH2Server:
     def setup_method(self, test_method):
@@ -114,36 +115,39 @@ class TestWrapperHandlerUsingServer(TestUsingServer):
     the html file. This class extends the TestUsingServer and do some some
     extra work: it tries to generate the dummy .js file in setUp and
     remove it in tearDown.'''
-    dummy_js_files = {}
+    dummy_files = {}

-    def gen_js_file(self, filename, empty=True, content=b''):
-        self.remove_js_file(filename)
+    def gen_file(self, filename, empty=True, content=b''):
+        self.remove_file(filename)

         with open(filename, 'wb') as fp:
             if not empty:
                 fp.write(content)

-    def remove_js_file(self, filename):
+    def remove_file(self, filename):
         if os.path.exists(filename):
             os.remove(filename)

     def setUp(self):
         super(TestWrapperHandlerUsingServer, self).setUp()

-        for filename, content in self.dummy_js_files.items():
+        for filename, content in self.dummy_files.items():
             filepath = os.path.join(doc_root, filename)
             if content == '':
-                self.gen_js_file(filepath)
+                self.gen_file(filepath)
             else:
-                self.gen_js_file(filepath, False, content)
+                self.gen_file(filepath, False, content)

-    def run_wrapper_test(self, req_file, header_data, wrapper_handler):
+    def run_wrapper_test(self, req_file, content_type, wrapper_handler,
+                         headers=None):
         route = ('GET', req_file, wrapper_handler())
         self.server.router.register(*route)

         resp = self.request(route[1])
         self.assertEqual(200, resp.getcode())
-        self.assertEqual(header_data, resp.info()['Content-Type'])
+        self.assertEqual(content_type, resp.info()['Content-Type'])
+        for key, val in headers or []:
+            self.assertEqual(val, resp.info()[key])

         with open(os.path.join(doc_root, req_file), 'rb') as fp:
             self.assertEqual(fp.read(), resp.read())
@@ -151,6 +155,6 @@ class TestWrapperHandlerUsingServer(TestUsingServer):
     def tearDown(self):
         super(TestWrapperHandlerUsingServer, self).tearDown()

-        for filename, _ in self.dummy_js_files.items():
+        for filename, _ in self.dummy_files.items():
             filepath = os.path.join(doc_root, filename)
-            self.remove_js_file(filepath)
+            self.remove_file(filepath)
@ -13,6 +13,7 @@ from .base import TestWrapperHandlerUsingServer
|
||||||
|
|
||||||
from serve import serve
|
from serve import serve
|
 class TestFileHandler(TestUsingServer):
     def test_GET(self):
         resp = self.request("/document.txt")

@@ -388,7 +389,7 @@ class TestH2Handler(TestUsingH2Server):


 class TestWorkersHandler(TestWrapperHandlerUsingServer):
-    dummy_js_files = {'foo.worker.js': b'',
+    dummy_files = {'foo.worker.js': b'',
                    'foo.any.js': b''}

     def test_any_worker_html(self):
@@ -401,7 +402,7 @@ class TestWorkersHandler(TestWrapperHandlerUsingServer):


 class TestWindowHandler(TestWrapperHandlerUsingServer):
-    dummy_js_files = {'foo.window.js': b''}
+    dummy_files = {'foo.window.js': b''}

     def test_window_html(self):
         self.run_wrapper_test('foo.window.html',
@@ -409,15 +410,19 @@ class TestWindowHandler(TestWrapperHandlerUsingServer):


 class TestAnyHtmlHandler(TestWrapperHandlerUsingServer):
-    dummy_js_files = {'foo.any.js': b''}
+    dummy_files = {'foo.any.js': b'',
+                   'foo.any.js.headers': b'X-Foo: 1',
+                   '__dir__.headers': b'X-Bar: 2'}

     def test_any_html(self):
         self.run_wrapper_test('foo.any.html',
-                              'text/html', serve.AnyHtmlHandler)
+                              'text/html',
+                              serve.AnyHtmlHandler,
+                              headers=[('X-Foo', '1'), ('X-Bar', '2')])


 class TestSharedWorkersHandler(TestWrapperHandlerUsingServer):
-    dummy_js_files = {'foo.any.js': b'// META: global=sharedworker\n'}
+    dummy_files = {'foo.any.js': b'// META: global=sharedworker\n'}

     def test_any_sharedworkers_html(self):
         self.run_wrapper_test('foo.any.sharedworker.html',
@@ -425,7 +430,7 @@ class TestSharedWorkersHandler(TestWrapperHandlerUsingServer):


 class TestServiceWorkersHandler(TestWrapperHandlerUsingServer):
-    dummy_js_files = {'foo.any.js': b'// META: global=serviceworker\n'}
+    dummy_files = {'foo.any.js': b'// META: global=serviceworker\n'}

     def test_serviceworker_html(self):
         self.run_wrapper_test('foo.any.serviceworker.html',
@@ -433,7 +438,7 @@ class TestServiceWorkersHandler(TestWrapperHandlerUsingServer):


 class TestAnyWorkerHandler(TestWrapperHandlerUsingServer):
-    dummy_js_files = {'bar.any.js': b''}
+    dummy_files = {'bar.any.js': b''}

     def test_any_work_js(self):
         self.run_wrapper_test('bar.any.worker.js', 'text/javascript',
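The updated wrapper-handler tests thread an expected headers= list through run_wrapper_test, matching the dummy foo.any.js.headers and __dir__.headers files above. A minimal sketch of the kind of check this enables; the request helper and assertion style here are illustrative assumptions, not code from this commit:

# Sketch only: assumes a `request(path)` helper returning an object with a
# `headers` mapping, similar in spirit to the TestUsingServer fixtures above.
def check_wrapper_headers(request):
    resp = request('/foo.any.html')
    # Expected to come from foo.any.js.headers and the enclosing __dir__.headers.
    assert resp.headers.get('X-Foo') == '1'
    assert resp.headers.get('X-Bar') == '2'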
@@ -32,7 +32,6 @@ def guess_content_type(path):
     return "application/octet-stream"


-
 def filesystem_path(base_path, request, url_base="/"):
     if base_path is None:
         base_path = request.doc_root
@@ -53,6 +52,7 @@ def filesystem_path(base_path, request, url_base="/"):
     return new_path


+
 class DirectoryHandler(object):
     def __init__(self, base_path=None, url_base="/"):
         self.base_path = base_path
@@ -121,6 +121,7 @@ class DirectoryHandler(object):
                  {"link": link, "name": escape(item), "class": class_,
                   "headers": dot_headers_markup})

+
 def parse_qs(qs):
     """Parse a query string given as a string argument (data of type
     application/x-www-form-urlencoded). Data are returned as a dictionary. The
@@ -140,7 +141,12 @@ def parse_qs(qs):
         rv[name].append(value)
     return dict(rv)


 def wrap_pipeline(path, request, response):
+    """Applies pipelines to a response.
+
+    Pipelines are specified in the filename (.sub.) or the query param (?pipe).
+    """
     query = parse_qs(request.url_parts.query)
     pipe_string = ""
@@ -161,6 +167,36 @@ def wrap_pipeline(path, request, response):
     return response


+def load_headers(request, path):
+    """Loads headers from files for a given path.
+
+    Attempts to load both the neighbouring __dir__{.sub}.headers and
+    PATH{.sub}.headers (applying template substitution if needed); results are
+    concatenated in that order.
+    """
+    def _load(request, path):
+        headers_path = path + ".sub.headers"
+        if os.path.exists(headers_path):
+            use_sub = True
+        else:
+            headers_path = path + ".headers"
+            use_sub = False
+
+        try:
+            with open(headers_path, "rb") as headers_file:
+                data = headers_file.read()
+        except IOError:
+            return []
+        else:
+            if use_sub:
+                data = template(request, data, escape_type="none")
+            return [tuple(item.strip() for item in line.split(b":", 1))
+                    for line in data.splitlines() if line]
+
+    return (_load(request, os.path.join(os.path.dirname(path), "__dir__")) +
+            _load(request, path))
+
+
 class FileHandler(object):
     def __init__(self, base_path=None, url_base="/"):
         self.base_path = base_path
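For reference, a small self-contained sketch of the parsing step inside the new load_headers: the sample header bytes below are hypothetical, but the list comprehension is the same one shown in the hunk above.

# Hypothetical contents of a *.headers file next to a served document.
data = b"Content-Type: text/html\nX-Foo: 1\n"

# Same parsing as load_headers: one (name, value) byte tuple per non-empty line.
headers = [tuple(item.strip() for item in line.split(b":", 1))
           for line in data.splitlines() if line]

assert headers == [(b"Content-Type", b"text/html"), (b"X-Foo", b"1")]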
@@ -197,33 +233,13 @@ class FileHandler(object):
             raise HTTPException(404)

     def get_headers(self, request, path):
-        rv = (self.load_headers(request, os.path.join(os.path.dirname(path), "__dir__")) +
-              self.load_headers(request, path))
+        rv = load_headers(request, path)

         if not any(key.lower() == b"content-type" for (key, _) in rv):
             rv.insert(0, (b"Content-Type", guess_content_type(path).encode("ascii")))

         return rv

-    def load_headers(self, request, path):
-        headers_path = path + ".sub.headers"
-        if os.path.exists(headers_path):
-            use_sub = True
-        else:
-            headers_path = path + ".headers"
-            use_sub = False
-
-        try:
-            with open(headers_path, "rb") as headers_file:
-                data = headers_file.read()
-        except IOError:
-            return []
-        else:
-            if use_sub:
-                data = template(request, data, escape_type="none")
-            return [tuple(item.strip() for item in line.split(b":", 1))
-                    for line in data.splitlines() if line]
-
     def get_data(self, response, path, byte_ranges):
         """Return either the handle to a file, or a string containing
         the content of a chunk of the file, if we have a range request."""
@@ -312,7 +328,6 @@ class PythonScriptHandler(object):
         self._set_path_and_load_file(request, response, func)

-
     def frame_handler(self, request):
         """
         This creates a FunctionHandler with one or more of the handling functions.
@@ -340,8 +355,10 @@ class PythonScriptHandler(object):
             return handler
         return self._set_path_and_load_file(request, None, func)

+
 python_script_handler = PythonScriptHandler()

+
 class FunctionHandler(object):
     def __init__(self, func):
         self.func = func
@@ -370,10 +387,11 @@ class FunctionHandler(object):
         wrap_pipeline('', request, response)


-#The generic name here is so that this can be used as a decorator
+# The generic name here is so that this can be used as a decorator
 def handler(func):
     return FunctionHandler(func)

+
 class JsonHandler(object):
     def __init__(self, func):
         self.func = func
@@ -395,9 +413,11 @@ class JsonHandler(object):
         response.headers.set("Content-Length", length)
         return value

+
 def json_handler(func):
     return JsonHandler(func)

+
 class AsIsHandler(object):
     def __init__(self, base_path=None, url_base="/"):
         self.base_path = base_path
@@ -414,8 +434,10 @@ class AsIsHandler(object):
         except IOError:
             raise HTTPException(404)

+
 as_is_handler = AsIsHandler()

+
 class BasicAuthHandler(object):
     def __init__(self, handler, user, password):
         """
@@ -442,8 +464,10 @@ class BasicAuthHandler(object):
             return response
         return self.handler(request, response)

+
 basic_auth_handler = BasicAuthHandler(file_handler, None, None)

+
 class ErrorHandler(object):
     def __init__(self, status):
         self.status = status
@@ -454,7 +478,7 @@ class ErrorHandler(object):

 class StringHandler(object):
     def __init__(self, data, content_type, **headers):
-        """Hander that reads a file from a path and substitutes some fixed data
+        """Handler that returns a fixed data string and headers

         :param data: String to use
         :param content_type: Content type header to server the response with
@@ -478,7 +502,9 @@ class StringHandler(object):

 class StaticHandler(StringHandler):
     def __init__(self, path, format_args, content_type, **headers):
-        """Hander that reads a file from a path and substitutes some fixed data
+        """Handler that reads a file from a path and substitutes some fixed data
+
+        Note that *.headers files have no effect in this handler.

         :param path: Path to the template file to use
         :param format_args: Dictionary of values to substitute into the template file
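Taken together, FileHandler.get_headers now concatenates the directory-level and per-file headers from load_headers and only falls back to a guessed Content-Type when none was supplied. A rough worked example under assumed file contents (the paths, values, and guessed type are illustrative):

# Assumed headers parsed from __dir__.headers and document.txt.headers.
dir_headers = [(b"X-Bar", b"2")]
file_headers = [(b"X-Foo", b"1")]

rv = dir_headers + file_headers  # what load_headers(request, path) would return

# Mirrors get_headers: add a Content-Type only if the headers files did not set one.
if not any(key.lower() == b"content-type" for (key, _) in rv):
    rv.insert(0, (b"Content-Type", b"text/plain"))  # guess_content_type(...) assumed

assert rv == [(b"Content-Type", b"text/plain"), (b"X-Bar", b"2"), (b"X-Foo", b"1")]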
@@ -0,0 +1,107 @@
<!doctype html>
<html>
  <head>
    <title>
      Test Convolver Output with Transferred Buffer
    </title>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="/webaudio/resources/audit-util.js"></script>
    <script src="/webaudio/resources/audit.js"></script>
  </head>

  <body>
    <script>
      // Arbitrary sample rate.
      const sampleRate = 16000;

      // Number of frames to render.  Just need to have at least 2 render
      // quanta.
      const lengthInFrames = 10 * RENDER_QUANTUM_FRAMES;

      let audit = Audit.createTaskRunner();

      // Buffer to use for the impulse response of a ConvolverNode.
      let impulseBuffer;

      // This sets up a worker to receive one channel of an AudioBuffer.
      function setUpWorkerForTest() {
        impulseBuffer = new AudioBuffer({
          numberOfChannels: 2,
          length: 2 * RENDER_QUANTUM_FRAMES,
          sampleRate: sampleRate
        });

        // Just fill the buffer with a constant value; the contents shouldn't
        // matter for this test since we're transferring one of the channels.
        impulseBuffer.getChannelData(0).fill(1);
        impulseBuffer.getChannelData(1).fill(2);

        // We're going to transfer channel 0 to the worker, making it
        // unavailable for the convolver
        let data = impulseBuffer.getChannelData(0).buffer;

        let string = [
          'onmessage = function(e) {', '  postMessage(\'done\');', '};'
        ].join('\n');

        let blobURL = URL.createObjectURL(new Blob([string]));
        let worker = new Worker(blobURL);
        worker.onmessage = workerReply;
        worker.postMessage(data, [data]);
      }

      function workerReply() {
        // Worker has received the message.  Run the test.
        audit.run();
      }

      audit.define(
          {
            label: 'Test Convolver with transferred buffer',
            description: 'Output should be all zeroes'
          },
          async (task, should) => {
            // Two channels so we can capture the output of the convolver with a
            // stereo convolver.
            let context = new OfflineAudioContext({
              numberOfChannels: 2,
              length: lengthInFrames,
              sampleRate: sampleRate
            });

            // Use a simple constant source so we easily check that the
            // convolver output is correct.
            let source = new ConstantSourceNode(context);

            // Create the convolver with the desired impulse response and
            // disable normalization so we can easily check the output.
            let conv = new ConvolverNode(
                context, {disableNormalization: true, buffer: impulseBuffer});

            source.connect(conv).connect(context.destination);

            source.start();

            let renderedBuffer = await context.startRendering();

            // Get the actual data
            let c0 = renderedBuffer.getChannelData(0);
            let c1 = renderedBuffer.getChannelData(1);

            // Since one channel was transferred, we must behave as if all were
            // transferred.  Hence, the output should be all zeroes for both
            // channels.
            should(c0, `Convolver channel 0 output[0:${c0.length - 1}]`)
                .beConstantValueOf(0);

            should(c1, `Convolver channel 1 output[0:${c1.length - 1}]`)
                .beConstantValueOf(0);

            task.done();
          });

      setUpWorkerForTest();
    </script>
  </body>
</html>
@@ -1,5 +0,0 @@
spec: https://github.com/w3c/webrtc-quic
suggested_reviewers:
  - aboba
  - henbos
  - steveanton
@@ -1,98 +0,0 @@
'use strict';

// This file depends on RTCQuicTransport-helper.js which should be loaded from
// the main HTML file.
// The following helper methods are called from RTCQuicTransport-helper.js:
//   makeTwoConnectedQuicTransports

// Run a test function for as many ways as an RTCQuicStream can transition to
// the 'closed' state.
// |test_func| will be called with the test as the first argument and the closed
// RTCQuicStream as the second argument.
function closed_stream_test(test_func, description) {
  promise_test(async t => {
    const [ localQuicTransport, remoteQuicTransport ] =
        await makeTwoConnectedQuicTransports(t);
    const localStream = localQuicTransport.createStream();
    localStream.reset();
    assert_equals(localStream.state, 'closed');
    return test_func(t, localStream);
  }, 'Stream closed by local reset(): ' + description);

  promise_test(async t => {
    const [ localQuicTransport, remoteQuicTransport ] =
        await makeTwoConnectedQuicTransports(t);
    const localStream = localQuicTransport.createStream();
    localStream.write({ data: new Uint8Array(1) });
    const remoteWatcher =
        new EventWatcher(t, remoteQuicTransport, 'quicstream');
    const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
    localStream.reset();
    const remoteStreamWatcher =
        new EventWatcher(t, remoteStream, 'statechange');
    await remoteStreamWatcher.wait_for('statechange');
    assert_equals(remoteStream.state, 'closed');
    return test_func(t, remoteStream);
  }, 'Stream closed by remote reset(): ' + description);

  promise_test(async t => {
    const [ localQuicTransport, remoteQuicTransport ] =
        await makeTwoConnectedQuicTransports(t);
    const localStream = localQuicTransport.createStream();
    localStream.write({ finish: true });
    const remoteWatcher =
        new EventWatcher(t, remoteQuicTransport, 'quicstream');
    const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
    remoteStream.write({ finish: true });
    await localStream.waitForReadable(localStream.maxReadBufferedAmount);
    assert_object_equals(
        localStream.readInto(new Uint8Array(10)),
        { amount: 0, finished: true });
    assert_equals(localStream.state, 'closed');
    return test_func(t, localStream);
  }, 'Stream closed by writing a finish, followed by reading remote finish: ' +
     description);

  promise_test(async t => {
    const [ localQuicTransport, remoteQuicTransport ] =
        await makeTwoConnectedQuicTransports(t);
    const localStream = localQuicTransport.createStream();
    localStream.write({ finish: true });
    const remoteWatcher =
        new EventWatcher(t, remoteQuicTransport, 'quicstream');
    const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
    await remoteStream.waitForReadable(10);
    assert_object_equals(
        remoteStream.readInto(new Uint8Array(10)),
        { amount: 0, finished: true });
    remoteStream.write({ finish: true });
    assert_equals(remoteStream.state, 'closed');
    return test_func(t, remoteStream);
  }, 'Stream closed by by reading remote finish, followed by writing a ' +
     'finish: ' + description);

  promise_test(async t => {
    const [ localQuicTransport, remoteQuicTransport ] =
        await makeTwoConnectedQuicTransports(t);
    const localStream = localQuicTransport.createStream();
    localQuicTransport.stop();
    assert_equals(localStream.state, 'closed');
    return test_func(t, localStream);
  }, 'Stream closed by local RTCQuicTransport stop(): ' + description);

  promise_test(async t => {
    const [ localQuicTransport, remoteQuicTransport ] =
        await makeTwoConnectedQuicTransports(t);
    const localStream = localQuicTransport.createStream();
    localStream.write({ data: new Uint8Array(1) });
    const remoteWatcher =
        new EventWatcher(t, remoteQuicTransport,
            [ 'quicstream', 'statechange' ]);
    const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
    localQuicTransport.stop();
    await remoteWatcher.wait_for('statechange');
    assert_equals(remoteStream.state, 'closed');
    return test_func(t, remoteStream);
  }, 'Stream closed by remote RTCQuicTransport stop(): ' + description);
}
@ -1,646 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<meta charset=utf-8>
|
|
||||||
<title>RTCQuicStream.https.html</title>
|
|
||||||
<script src="/resources/testharness.js"></script>
|
|
||||||
<script src="/resources/testharnessreport.js"></script>
|
|
||||||
<script src="../webrtc/RTCIceTransport-extension-helper.js"></script>
|
|
||||||
<script src="RTCQuicTransport-helper.js"></script>
|
|
||||||
<script src="RTCQuicStream-helper.js"></script>
|
|
||||||
<script>
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
// These tests are based on the following specification:
|
|
||||||
// https://w3c.github.io/webrtc-quic/
|
|
||||||
|
|
||||||
// The following helper functions are called from RTCQuicTransport-helper.js:
|
|
||||||
// makeStandaloneQuicTransport
|
|
||||||
// makeTwoConnectedQuicTransports
|
|
||||||
// The following helper functions are called from RTCQuicStream-helper.js:
|
|
||||||
// closed_stream_test
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ quicTransport, ] = await makeTwoConnectedQuicTransports(t);
|
|
||||||
const quicStream = quicTransport.createStream();
|
|
||||||
assert_equals(quicStream.transport, quicTransport,
|
|
||||||
'Expect transport to be set to the creating RTCQuicTransport.');
|
|
||||||
assert_equals(quicStream.state, 'open', `Expect state to be 'open'.`);
|
|
||||||
assert_equals(quicStream.readBufferedAmount, 0,
|
|
||||||
'Expect read buffered amount to be 0.');
|
|
||||||
assert_equals(quicStream.writeBufferedAmount, 0,
|
|
||||||
'Expect write buffered amount to be 0.');
|
|
||||||
assert_greater_than(quicStream.maxWriteBufferedAmount, 0,
|
|
||||||
'Expect max write buffered amount to be greater than 0.');
|
|
||||||
}, 'createStream() returns an RTCQuicStream with initial properties set.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const quicTransport = await makeStandaloneQuicTransport(t);
|
|
||||||
assert_throws_dom('InvalidStateError', () => quicTransport.createStream());
|
|
||||||
}, 'createStream() throws if the transport is not connected.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const quicTransport = await makeStandaloneQuicTransport(t);
|
|
||||||
quicTransport.stop();
|
|
||||||
assert_throws_dom('InvalidStateError', () => quicTransport.createStream());
|
|
||||||
}, 'createStream() throws if the transport is closed.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ quicTransport, ] = await makeTwoConnectedQuicTransports(t);
|
|
||||||
const firstQuicStream = quicTransport.createStream();
|
|
||||||
const secondQuicStream = quicTransport.createStream();
|
|
||||||
quicTransport.stop();
|
|
||||||
assert_equals(firstQuicStream.state, 'closed');
|
|
||||||
assert_equals(secondQuicStream.state, 'closed');
|
|
||||||
}, 'RTCQuicTransport.stop() closes all local streams.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
localQuicTransport.createStream().write({ data: new Uint8Array([42]) });
|
|
||||||
localQuicTransport.createStream().write({ data: new Uint8Array([42]) });
|
|
||||||
const remoteWatcher =
|
|
||||||
new EventWatcher(t, remoteQuicTransport, [ 'quicstream', 'statechange' ]);
|
|
||||||
const { stream: firstRemoteStream } =
|
|
||||||
await remoteWatcher.wait_for('quicstream');
|
|
||||||
const { stream: secondRemoteStream } =
|
|
||||||
await remoteWatcher.wait_for('quicstream');
|
|
||||||
localQuicTransport.stop();
|
|
||||||
await remoteWatcher.wait_for('statechange');
|
|
||||||
assert_equals(firstRemoteStream.state, 'closed');
|
|
||||||
assert_equals(secondRemoteStream.state, 'closed');
|
|
||||||
}, 'RTCQuicTransport.stop() closes all remote streams.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
assert_equals(localStream.state, 'closing');
|
|
||||||
}, `write() with a finish changes state to 'closing'.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array(), finish: true });
|
|
||||||
assert_equals(localStream.state, 'closing');
|
|
||||||
}, 'write() with a finish and an empty array changes state ' +
|
|
||||||
`to 'closing'.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
assert_throws_dom('InvalidStateError', () => {
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
});
|
|
||||||
}, `write() with finish twice throws InvalidStateError.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.reset();
|
|
||||||
assert_equals(localStream.state, 'closed');
|
|
||||||
}, `reset() changes state to 'closed'.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
localStream.reset();
|
|
||||||
assert_equals(localStream.state, 'closed');
|
|
||||||
}, `reset() following write with finish changes state to 'closed'.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.reset();
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
assert_equals(remoteStream.state, 'open');
|
|
||||||
const remoteStreamWatcher = new EventWatcher(t, remoteStream, 'statechange');
|
|
||||||
await remoteStreamWatcher.wait_for('statechange');
|
|
||||||
assert_equals(remoteStream.state, 'closed');
|
|
||||||
}, 'createStream() followed by reset() fires a quicstream event followed ' +
|
|
||||||
`by a statechange event to 'closed' on the remote side.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
assert_equals(remoteStream.state, 'open');
|
|
||||||
const remoteStreamWatcher = new EventWatcher(t, remoteStream, 'statechange');
|
|
||||||
await remoteStream.waitForReadable(remoteStream.maxReadBufferedAmount);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: true } );
|
|
||||||
assert_equals(remoteStream.state, 'closing');
|
|
||||||
}, 'waitForReadable() promise resolves with remote finish');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(remoteStream.maxReadBufferedAmount);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: true } );
|
|
||||||
assert_equals(remoteStream.state, 'closing');
|
|
||||||
remoteStream.write({ finish: true });
|
|
||||||
assert_equals(remoteStream.state, 'closed');
|
|
||||||
}, 'write() with a finish on a stream that has already read out finish ' +
|
|
||||||
`changes state to 'closed'.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
remoteStream.write({ finish: true });
|
|
||||||
assert_equals(localStream.state, 'closing');
|
|
||||||
await localStream.waitForReadable(localStream.maxReadBufferedAmount);
|
|
||||||
assert_object_equals(
|
|
||||||
localStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: true } );
|
|
||||||
assert_equals(localStream.state, 'closed');
|
|
||||||
}, 'Reading out finish on stream that has already called write() with a ' +
|
|
||||||
`finish state to 'closed'.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(remoteStream.maxReadBufferedAmount);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: true } );
|
|
||||||
assert_equals(remoteStream.state, 'closing');
|
|
||||||
localStream.reset();
|
|
||||||
const remoteStreamWatcher = new EventWatcher(t, remoteStream, 'statechange');
|
|
||||||
await remoteStreamWatcher.wait_for('statechange');
|
|
||||||
assert_equals(remoteStream.state, 'closed');
|
|
||||||
}, 'Reading out finish then a getting a remote reset fires a statechange event ' +
|
|
||||||
`to 'closed'.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
assert_throws_dom('NotSupportedError', () =>
|
|
||||||
localStream.write({ data: new Uint8Array() }));
|
|
||||||
}, 'write() without finish and an empty array throws NotSupportedError.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
assert_throws_dom('NotSupportedError', () =>
|
|
||||||
localStream.write({}));
|
|
||||||
}, 'write() without finish and no data throws NotSupportedError.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array([65]) });
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 1);
|
|
||||||
localStream.write({ data: new Uint8Array([66, 67]) });
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 3);
|
|
||||||
localStream.write({ data: new Uint8Array([68, 69, 70]) });
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 6);
|
|
||||||
}, 'write() adds to writeBufferedAmount each call.');
|
|
||||||
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array([42, 43]), finish: true });
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 2);
|
|
||||||
}, `write() data with a finish adds to writeBufferedAmount.`);
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({
|
|
||||||
data: new Uint8Array(localStream.maxWriteBufferedAmount)
|
|
||||||
});
|
|
||||||
assert_equals(localStream.writeBufferedAmount,
|
|
||||||
localStream.maxWriteBufferedAmount);
|
|
||||||
}, 'write() can write exactly maxWriteBufferedAmount.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
assert_throws_dom('OperationError', () => localStream.write({
|
|
||||||
data: new Uint8Array(localStream.maxWriteBufferedAmount + 1)
|
|
||||||
}));
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 0);
|
|
||||||
}, 'write() throws if data longer than maxWriteBufferedAmount.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write(
|
|
||||||
{ data: new Uint8Array(10)});
|
|
||||||
assert_throws_dom('OperationError', () => localStream.write({
|
|
||||||
data: new Uint8Array(localStream.maxWriteBufferedAmount)
|
|
||||||
}));
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 10);
|
|
||||||
}, 'write() throws if total write buffered amount would be greater than ' +
|
|
||||||
'maxWriteBufferedAmount.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array(10) });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
await remoteWatcher.wait_for('quicstream');
|
|
||||||
}, 'write() causes quicstream event to fire on the remote transport.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });;
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
await remoteWatcher.wait_for('quicstream');
|
|
||||||
}, 'write() with a finish causes quicstream event to fire on the ' +
|
|
||||||
'remote transport.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
assert_throws_dom('InvalidStateError',
|
|
||||||
() => localStream.write({ data: new Uint8Array([65]) }));
|
|
||||||
}, 'write() throws InvalidStateError if write() with finish has been called.');
|
|
||||||
|
|
||||||
closed_stream_test(async (t, stream) => {
|
|
||||||
assert_throws_dom('InvalidStateError',
|
|
||||||
() => stream.write({ data: new Uint8Array([65]) }));
|
|
||||||
}, 'write() throws InvalidStateError.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: generateData(10) });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream : remoteStream} = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(10);
|
|
||||||
assert_equals(10, remoteStream.readBufferedAmount);
|
|
||||||
remoteStream.reset();
|
|
||||||
assert_equals(0, remoteStream.readBufferedAmount);
|
|
||||||
}, 'readBufferedAmount set to 0 after local reset().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: generateData(10) });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream : remoteStream} = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(10);
|
|
||||||
assert_equals(10, remoteStream.readBufferedAmount);
|
|
||||||
localStream.reset();
|
|
||||||
const remoteStreamWatcher =
|
|
||||||
new EventWatcher(t, remoteStream, 'statechange');
|
|
||||||
await remoteStreamWatcher.wait_for('statechange');
|
|
||||||
assert_equals(remoteStream.state, 'closed');
|
|
||||||
assert_equals(0, remoteStream.readBufferedAmount);
|
|
||||||
}, 'readBufferedAmount set to 0 after remote reset().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array(10) });
|
|
||||||
localStream.reset();
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 0);
|
|
||||||
}, 'writeBufferedAmount set to 0 after local reset().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher =
|
|
||||||
new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(10);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: true });
|
|
||||||
remoteStream.write({ data: new Uint8Array(10) });
|
|
||||||
assert_equals(remoteStream.writeBufferedAmount, 10);
|
|
||||||
remoteStream.write({ finish: true });
|
|
||||||
assert_equals(remoteStream.writeBufferedAmount, 0);
|
|
||||||
}, 'writeBufferedAmount set to 0 after reading remote finish, followed ' +
|
|
||||||
'by write() with finish.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array(10) });
|
|
||||||
localQuicTransport.stop();
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 0);
|
|
||||||
}, 'writeBufferedAmount set to 0 after local RTCQuicTransport stop().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array(10) });
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
assert_equals(localStream.writeBufferedAmount, 10);
|
|
||||||
}, 'writeBufferedAmount maintained after write() with finish has been called.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
await localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
}, 'waitForWriteBufferedAmountBelow(0) resolves immediately.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
await localStream.waitForWriteBufferedAmountBelow(
|
|
||||||
localStream.maxWriteBufferedAmount);
|
|
||||||
}, 'waitForWriteBufferedAmountBelow(maxWriteBufferedAmount) resolves ' +
|
|
||||||
'immediately.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({
|
|
||||||
data: new Uint8Array(localStream.maxWriteBufferedAmount)
|
|
||||||
});
|
|
||||||
const promise1 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
const promise2 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'Pending waitForWriteBufferedAmountBelow() promises rejected after ' +
|
|
||||||
'write() with finish.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({
|
|
||||||
data: new Uint8Array(localStream.maxWriteBufferedAmount)
|
|
||||||
});
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const promise1 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
const promise2 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'waitForWriteBufferedAmountBelow() promises immediately rejected after ' +
|
|
||||||
'wrote finish.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({
|
|
||||||
data: new Uint8Array(localStream.maxWriteBufferedAmount)
|
|
||||||
});
|
|
||||||
const promise1 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
const promise2 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
localStream.reset();
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'Pending waitForWriteBufferedAmountBelow() promises rejected after ' +
|
|
||||||
'reset().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({
|
|
||||||
data: new Uint8Array(localStream.maxWriteBufferedAmount)
|
|
||||||
});
|
|
||||||
const promise1 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
const promise2 = localStream.waitForWriteBufferedAmountBelow(0);
|
|
||||||
localQuicTransport.stop();
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'Pending waitForWriteBufferedAmountBelow() promises rejected after ' +
|
|
||||||
'RTCQuicTransport stop().');
|
|
||||||
|
|
||||||
closed_stream_test(async (t, stream) => {
|
|
||||||
await promise_rejects_dom(t, 'InvalidStateError',
|
|
||||||
stream.waitForWriteBufferedAmountBelow(0));
|
|
||||||
}, 'waitForWriteBufferedBelow() rejects with InvalidStateError.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
assert_object_equals(
|
|
||||||
localStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: false } );
|
|
||||||
}, 'readInto() on new local stream returns amount 0.');
|
|
||||||
|
|
||||||
closed_stream_test(async (t, stream) => {
|
|
||||||
assert_throws_dom('InvalidStateError', () => stream.readInto(new Uint8Array(1)));
|
|
||||||
}, 'readInto() throws InvalidStateError.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array([ 65 ]) });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(1);
|
|
||||||
assert_equals(remoteStream.readBufferedAmount, 1);
|
|
||||||
const readBuffer = new Uint8Array(3);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(readBuffer),
|
|
||||||
{ amount: 1, finished: false } );
|
|
||||||
assert_array_equals(readBuffer, [ 65, 0, 0 ]);
|
|
||||||
assert_equals(remoteStream.readBufferedAmount, 0);
|
|
||||||
}, 'Read 1 byte.');
|
|
||||||
|
|
||||||
// Returns a Uint8Array of length |amount| with generated data.
|
|
||||||
function generateData(amount) {
|
|
||||||
const data = new Uint8Array(amount);
|
|
||||||
for (let i = 0; i < data.length; i++) {
|
|
||||||
data[i] = i % 256;
|
|
||||||
}
|
|
||||||
return data;
|
|
||||||
}
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
const data = generateData(10);
|
|
||||||
localStream.write({ data: data, finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(data.length + 1);
|
|
||||||
const readBuffer = new Uint8Array(5);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(readBuffer),
|
|
||||||
{ amount: 5, finished: false} );
|
|
||||||
assert_array_equals(
|
|
||||||
readBuffer, data.subarray(0, 5));
|
|
||||||
const finReadBuffer = new Uint8Array(5);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(finReadBuffer),
|
|
||||||
{ amount: 5, finished: true} );
|
|
||||||
assert_array_equals(
|
|
||||||
finReadBuffer, data.subarray(5, data.length));
|
|
||||||
}, 'readInto() reads out finish after reading all data.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(remoteStream.maxReadBufferedAmount);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: true } );
|
|
||||||
}, 'waitForReadable() resolves with write() with finish.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
const writeData = generateData(10);
|
|
||||||
localStream.write({ data: writeData });
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(11);
|
|
||||||
assert_equals(remoteStream.readBufferedAmount, 10);
|
|
||||||
const readBuffer = new Uint8Array(10);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(readBuffer), { amount: 10, finished: true } );
|
|
||||||
assert_array_equals(readBuffer, writeData);
|
|
||||||
}, 'waitForReadable() resolves early if remote finish is received.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
await promise_rejects_js(t, TypeError,
|
|
||||||
localStream.waitForReadable(localStream.maxReadBufferedAmount + 1));
|
|
||||||
}, 'waitForReadable() rejects with TypeError if amount is more than ' +
|
|
||||||
'maxReadBufferedAmount.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ finish: true });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream: remoteStream } = await remoteWatcher.wait_for('quicstream');
|
|
||||||
await remoteStream.waitForReadable(remoteStream.maxReadBufferedAmount);
|
|
||||||
assert_object_equals(
|
|
||||||
remoteStream.readInto(new Uint8Array(10)),
|
|
||||||
{ amount: 0, finished: true } );
|
|
||||||
|
|
||||||
const promise1 = remoteStream.waitForReadable(10);
|
|
||||||
const promise2 = remoteStream.waitForReadable(10);
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'waitForReadable() promises immediately rejected with InvalidStateError ' +
|
|
||||||
'after finish is read out.');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
const promise1 = localStream.waitForReadable(10);
|
|
||||||
const promise2 = localStream.waitForReadable(10);
|
|
||||||
localStream.reset();
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'Pending waitForReadable() promises rejected after reset().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array(1) });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream : remoteStream} = await remoteWatcher.wait_for('quicstream');
|
|
||||||
const promise1 = remoteStream.waitForReadable(10);
|
|
||||||
const promise2 = remoteStream.waitForReadable(10);
|
|
||||||
localStream.reset();
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'Pending waitForReadable() promises rejected after remote reset().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
const promise1 = localStream.waitForReadable(10);
|
|
||||||
const promise2 = localStream.waitForReadable(10);
|
|
||||||
localQuicTransport.stop();
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'Pending waitForReadable() promises rejected after RTCQuicTransport ' +
|
|
||||||
'stop().');
|
|
||||||
|
|
||||||
promise_test(async t => {
|
|
||||||
const [ localQuicTransport, remoteQuicTransport ] =
|
|
||||||
await makeTwoConnectedQuicTransports(t);
|
|
||||||
const localStream = localQuicTransport.createStream();
|
|
||||||
localStream.write({ data: new Uint8Array(1) });
|
|
||||||
const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
|
|
||||||
const { stream : remoteStream} = await remoteWatcher.wait_for('quicstream');
|
|
||||||
const promise1 = remoteStream.waitForReadable(10);
|
|
||||||
const promise2 = remoteStream.waitForReadable(10);
|
|
||||||
localQuicTransport.stop();
|
|
||||||
await Promise.all([
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise1),
|
|
||||||
promise_rejects_dom(t, 'InvalidStateError', promise2)]);
|
|
||||||
}, 'Pending waitForReadable() promises rejected after remote RTCQuicTransport ' +
|
|
||||||
'stop().');
|
|
||||||
|
|
||||||
closed_stream_test(async (t, stream) => {
|
|
||||||
await promise_rejects_dom(t, 'InvalidStateError',
|
|
||||||
stream.waitForReadable(1));
|
|
||||||
}, 'waitForReadable() rejects with InvalidStateError.');
|
|
||||||
|
|
||||||
</script>
|
|
|
@@ -1,69 +0,0 @@
'use strict';

// This file depends on RTCIceTransport-extension-helper.js which should be
// loaded from the main HTML file.
// The following helper functions are called from
// RTCIceTransport-extension-helper.js:
//   makeIceTransport
//   makeGatherAndStartTwoIceTransports

// Construct an RTCQuicTransport instance with the given RTCIceTransport
// instance and the given certificates. The RTCQuicTransport instance will be
// automatically cleaned up when the test finishes.
function makeQuicTransport(t, iceTransport) {
  const quicTransport = new RTCQuicTransport(iceTransport);
  t.add_cleanup(() => quicTransport.stop());
  return quicTransport;
}

// Construct an RTCQuicTransport instance with a new RTCIceTransport instance
// and a single, newly-generated certificate. The RTCQuicTransport and
// RTCIceTransport instances will be automatically cleaned up when the test
// finishes.
function makeStandaloneQuicTransport(t) {
  return makeQuicTransport(t, makeIceTransport(t));
}

// Construct two RTCQuicTransport instances and each call start() with the other
// transport's local parameters.
// Returns a 2-list:
//   [ server RTCQuicTransport,
//     client RTCQuicTransport ]
function makeAndStartTwoQuicTransports(t) {
  const [ localIceTransport, remoteIceTransport ] =
      makeGatherAndStartTwoIceTransports(t);
  const localQuicTransport =
      makeQuicTransport(t, localIceTransport);
  const remoteQuicTransport =
      makeQuicTransport(t, remoteIceTransport);
  const remote_key = remoteQuicTransport.getKey();
  localQuicTransport.listen(remote_key);
  remoteQuicTransport.connect();
  return [ localQuicTransport, remoteQuicTransport ];
}

// Construct two RTCQuicTransport instances and wait for them to connect.
// Returns a 2-list:
//   [ server RTCQuicTransport,
//     client RTCQuicTransport ]
async function makeTwoConnectedQuicTransports(t) {
  // Returns a promise that resolves when the transport fires a 'statechange'
  // event to 'connected'.
  function waitForConnected(transport) {
    return new Promise((resolve, reject) => {
      const eventHandler = t.step_func(() => {
        assert_equals(transport.state, 'connected');
        transport.removeEventListener('statechange', eventHandler, false);
        resolve();
      });
      transport.addEventListener('statechange', eventHandler, false);
    });
  }
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeAndStartTwoQuicTransports(t);
  await Promise.all([
    waitForConnected(localQuicTransport),
    waitForConnected(remoteQuicTransport),
  ]);
  return [ localQuicTransport, remoteQuicTransport ];
}
@ -1,399 +0,0 @@
<!doctype html>
<meta charset=utf-8>
<title>RTCQuicTransport.https.html</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../webrtc/RTCIceTransport-extension-helper.js"></script>
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
<script src="RTCQuicTransport-helper.js"></script>
<script src="../webrtc/dictionary-helper.js"></script>
<script>
'use strict';

// These tests are based on the following specification:
// https://w3c.github.io/webrtc-quic/

// The following helper functions are called from
// RTCIceTransport-extension-helper.js:
// makeIceTransport
// makeAndGatherTwoIceTransports

// The following helper functions are called from RTCQuicTransport-helper.js:
// makeQuicTransport
// makeStandaloneQuicTransport
// makeAndStartTwoQuicTransports
// makeTwoConnectedQuicTransports
// sleep

test(t => {
  const iceTransport = makeIceTransport(t);
  const quicTransport = makeQuicTransport(t, iceTransport);
  assert_equals(quicTransport.transport, iceTransport,
      'Expect transport to be the same as the one passed in the constructor.');
  assert_equals(quicTransport.state, 'new', `Expect state to be 'new'.`);
}, 'RTCQuicTransport initial properties are set.');

test(t => {
  const iceTransport = makeIceTransport(t);
  iceTransport.stop();
  assert_throws_dom('InvalidStateError',
      () => makeQuicTransport(t, iceTransport));
}, 'RTCQuicTransport constructor throws if passed a closed RTCIceTransport.');

test(t => {
  const iceTransport = makeIceTransport(t);
  const firstQuicTransport =
      makeQuicTransport(t, iceTransport);
  assert_throws_dom('InvalidStateError',
      () => makeQuicTransport(t, iceTransport));
}, 'RTCQuicTransport constructor throws if passed an RTCIceTransport that ' +
    'already has an active RTCQuicTransport.');

promise_test(async t => {
  const pc1 = new RTCPeerConnection();
  t.add_cleanup(() => pc1.close());
  const pc2 = new RTCPeerConnection();
  t.add_cleanup(() => pc2.close());

  pc1.createDataChannel('test');
  await exchangeOfferAnswer(pc1, pc2);
  const iceTransport = pc1.sctp.transport.iceTransport;

  assert_throws_dom('InvalidStateError',
      () => makeQuicTransport(t, iceTransport));
}, 'RTCQuicTransport constructor throws if passed an RTCIceTransport that ' +
    'came from an RTCPeerConnection.');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.stop();
  assert_equals(quicTransport.state, 'closed');
}, `stop() changes state to 'closed'.`);

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.transport.stop();
  assert_equals(quicTransport.state, 'closed');
}, `RTCIceTransport.stop() changes RTCQuicTransport.state to 'closed'.`);

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      makeAndStartTwoQuicTransports(t);
  const localWatcher = new EventWatcher(t, localQuicTransport, 'statechange');
  const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'statechange');
  await Promise.all([
    localWatcher.wait_for('statechange').then(() => {
      assert_equals(localQuicTransport.state, 'connected');
    }),
    remoteWatcher.wait_for('statechange').then(() => {
      assert_equals(remoteQuicTransport.state, 'connected');
    }),
  ]);
}, 'Two RTCQuicTransports connect to each other.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  localQuicTransport.stop();
  const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'statechange');
  await remoteWatcher.wait_for('statechange');
  assert_equals(remoteQuicTransport.state, 'closed');
}, `stop() fires a statechange event to 'closed' on the remote transport`);

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.connect();
  assert_equals(quicTransport.state, 'connecting');
}, `connect() changes state to 'connecting'.`);

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.connect();
  assert_throws_dom('InvalidStateError',
      () => quicTransport.connect());
}, 'connect() throws if already called connect().');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.listen(new Uint8Array([12345]));
  assert_throws_dom('InvalidStateError',
      () => quicTransport.connect());
}, 'connect() throws if already called listen().');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.stop();
  assert_throws_dom('InvalidStateError',
      () => quicTransport.connect());
}, 'connect() throws after stop().');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.transport.stop();
  assert_throws_dom('InvalidStateError',
      () => quicTransport.connect());
}, 'connect() throws if called after RTCIceTransport has stopped.');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.listen(new Uint8Array([12345]));
  assert_equals(quicTransport.state, 'connecting');
}, `listen() changes state to 'connecting'.`);

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.connect();
  assert_throws_dom('InvalidStateError',
      () => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws if already called connect().');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.listen(new Uint8Array([12345]));
  assert_throws_dom('InvalidStateError',
      () => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws if already called listen().');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.stop();
  assert_throws_dom('InvalidStateError',
      () => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws after stop().');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.transport.stop();
  assert_throws_dom('InvalidStateError',
      () => quicTransport.listen(new Uint8Array([12345])));
}, 'listen() throws if called after RTCIceTransport has stopped.');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  const key = quicTransport.getKey();
  assert_equals(key.byteLength, 16);
}, 'RTCQuicTransport.getKey() attribute is 16 bytes.');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  const key = new Uint8Array();
  assert_throws_dom('NotSupportedError',
      () => quicTransport.listen(key));
}, 'listen() throws if given an empty key.');

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  const key = quicTransport.getKey();
  let update_key = new Uint8Array(key);
  for (let i = 0; i < update_key.length; i++) {
    update_key[i] = 0;
  }
  const new_key = quicTransport.getKey();
  assert_not_equals(update_key, new Uint8Array(new_key));
}, 'Cannot mutate key retrieved from getKey().');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      makeAndStartTwoQuicTransports(t);
  const stats = await localQuicTransport.getStats();
  assert_number_field(stats, 'timestamp');
  assert_unsigned_int_field(stats, 'bytesSent');
  assert_unsigned_int_field(stats, 'packetsSent');
  assert_unsigned_int_field(stats, 'streamBytesSent');
  assert_unsigned_int_field(stats, 'streamBytesReceived');
  assert_unsigned_int_field(stats, 'numOutgoingStreamsCreated');
  assert_unsigned_int_field(stats, 'numIncomingStreamsCreated');
  assert_unsigned_int_field(stats, 'bytesReceived');
  assert_unsigned_int_field(stats, 'packetsReceived');
  assert_unsigned_int_field(stats, 'packetsProcessed');
  assert_unsigned_int_field(stats, 'bytesRetransmitted');
  assert_unsigned_int_field(stats, 'packetsRetransmitted');
  assert_unsigned_int_field(stats, 'packetsLost');
  assert_unsigned_int_field(stats, 'packetsDropped');
  assert_unsigned_int_field(stats, 'cryptoRetransmitCount');
  assert_unsigned_int_field(stats, 'minRttUs');
  assert_unsigned_int_field(stats, 'smoothedRttUs');
  assert_unsigned_int_field(stats, 'maxPacketSize');
  assert_unsigned_int_field(stats, 'maxReceivedPacketSize');
  assert_unsigned_int_field(stats, 'estimatedBandwidthBps');
  assert_unsigned_int_field(stats, 'packetsReordered');
  assert_unsigned_int_field(stats, 'blockedFramesReceived');
  assert_unsigned_int_field(stats, 'blockedFramesSent');
  assert_unsigned_int_field(stats, 'connectivityProbingPacketsReceived');
}, 'Stats returned by getStats() are present.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const localStream = localQuicTransport.createStream();
  localStream.write({ finish: true });
  const remoteWatcher = new EventWatcher(t, remoteQuicTransport, 'quicstream');
  await remoteWatcher.wait_for('quicstream');
  const localStats = await localQuicTransport.getStats();
  const remoteStats = await remoteQuicTransport.getStats();
  assert_equals(localStats.numOutgoingStreamsCreated, 1);
  assert_equals(localStats.numIncomingStreamsCreated, 0);
  assert_equals(remoteStats.numOutgoingStreamsCreated, 0);
  assert_equals(remoteStats.numIncomingStreamsCreated, 1);
}, 'getStats() returns proper stream counts after creating streams.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      makeAndStartTwoQuicTransports(t);
  const stats1 = await localQuicTransport.getStats();
  await new Promise(resolve => t.step_timeout(resolve, 20));
  const stats2 = await localQuicTransport.getStats();
  assert_greater_than(stats2.timestamp, stats1.timestamp);
}, 'Two separate stats returned by getStats() give different timestamps.');

promise_test(async t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  const promise = quicTransport.getStats();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'getStats() promises immediately rejected with InvalidStateError ' +
    `if called before 'connecting'.`);

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const promise = localQuicTransport.getStats();
  localQuicTransport.stop();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'getStats() promises rejected with InvalidStateError if stop() ' +
    'is called before being fulfilled.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const promise = localQuicTransport.getStats();
  localQuicTransport.transport.stop();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'getStats() promises rejected with InvalidStateError if ' +
    'RTCIceTransport calls stop() before being fulfilled.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  localQuicTransport.transport.stop();
  const promise = localQuicTransport.getStats();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'getStats() promises immediately rejected if called after ' +
    `'closed' state.`);

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  assert_throws_dom('InvalidStateError',
      () => quicTransport.sendDatagram(new Uint8Array([1])));
}, `sendDatagram() throws InvalidStateError if called before 'connected'.`);

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  quicTransport.stop();
  assert_equals(quicTransport.state, 'closed');
  assert_throws_dom('InvalidStateError',
      () => quicTransport.sendDatagram(new Uint8Array([1])));
}, `sendDatagram() throws InvalidStateError if called when 'closed'.`);

test(t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  assert_equals(quicTransport.maxDatagramLength, null);
}, 'maxDatagramLength is null before connected.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  assert_greater_than(localQuicTransport.maxDatagramLength, 0);
}, 'maxDatagramLength larger than 0 after connected.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const bigData = new Uint8Array(localQuicTransport.maxDatagramLength + 1);
  assert_throws_dom('InvalidStateError',
      () => localQuicTransport.sendDatagram(bigData));
}, 'sendDatagram() throws InvalidStateError if called with data larger ' +
    'than maxDatagramLength.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const datagram = new Uint8Array([42]);
  await localQuicTransport.readyToSendDatagram();
  localQuicTransport.sendDatagram(datagram);
  const receiveDatagrams = await remoteQuicTransport.receiveDatagrams();
  assert_equals(receiveDatagrams.length, 1);
  const receiveDatagram = new Uint8Array(receiveDatagrams[0]);
  assert_array_equals(receiveDatagram, datagram);
}, 'sendDatagram() sends a datagram to the remote side.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const datagram = new Uint8Array([42]);
  const datagram2 = new Uint8Array([43]);
  await localQuicTransport.readyToSendDatagram();
  localQuicTransport.sendDatagram(datagram);
  const receiveDatagrams = await remoteQuicTransport.receiveDatagrams();
  assert_equals(receiveDatagrams.length, 1);
  const receiveDatagram = new Uint8Array(receiveDatagrams[0]);
  assert_array_equals(receiveDatagram, datagram);
  await localQuicTransport.readyToSendDatagram();
  localQuicTransport.sendDatagram(datagram2);
  const receiveDatagrams2 = await remoteQuicTransport.receiveDatagrams();
  assert_equals(receiveDatagrams2.length, 1);
  const receiveDatagram2 = new Uint8Array(receiveDatagrams2[0]);
  assert_array_equals(receiveDatagram2, datagram2);
}, 'sendDatagram() sends multiple datagrams to the remote side.');

promise_test(async t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  const promise = quicTransport.readyToSendDatagram();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'readyToSendDatagram() promise immediately rejected if called before ' +
    'connecting.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  localQuicTransport.stop();
  const promise = localQuicTransport.readyToSendDatagram();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'readyToSendDatagram() promise immediately rejected if called after ' +
    `'closed' state.`);

promise_test(async t => {
  const quicTransport = makeStandaloneQuicTransport(t);
  const promise = quicTransport.receiveDatagrams();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'receiveDatagrams() promise immediately rejected if called before ' +
    'connecting.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  localQuicTransport.stop();
  const promise = localQuicTransport.receiveDatagrams();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'receiveDatagrams() promise immediately rejected if called after ' +
    `'closed' state.`);

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const promise = localQuicTransport.receiveDatagrams();
  localQuicTransport.stop();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'receiveDatagrams() promise rejected with InvalidStateError if stop() ' +
    'is called before being fulfilled.');

promise_test(async t => {
  const [ localQuicTransport, remoteQuicTransport ] =
      await makeTwoConnectedQuicTransports(t);
  const promise = localQuicTransport.receiveDatagrams();
  localQuicTransport.transport.stop();
  await promise_rejects_dom(t, 'InvalidStateError', promise);
}, 'receiveDatagrams() promises rejected with InvalidStateError if ' +
    'RTCIceTransport calls stop() before being fulfilled.');

</script>
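// ---------------------------------------------------------------------------
// Editorial sketch (not part of the original test file): the datagram tests
// above always pair readyToSendDatagram() with sendDatagram(); the same
// pattern generalizes to a batch helper, using only the calls exercised in
// these tests.
async function sendDatagrams(quicTransport, datagrams) {
  for (const datagram of datagrams) {
    // Wait until the transport is ready to accept another datagram, then send.
    await quicTransport.readyToSendDatagram();
    quicTransport.sendDatagram(datagram);
  }
}
// ---------------------------------------------------------------------------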