mirror of
https://github.com/servo/servo.git
synced 2025-08-27 08:08:19 +01:00
Update web-platform-tests to revision 2b7dace05fc1869398ee24f84fda4c0e4c0455ae
This commit is contained in:
parent
b23125d590
commit
6c901de216
844 changed files with 19802 additions and 3093 deletions
|
@ -0,0 +1,60 @@
|
|||
// META: global=worker
|
||||
|
||||
'use strict';
|
||||
|
||||
const classes = [
|
||||
{
|
||||
constructor: TextDecoderStream,
|
||||
input: new Uint8Array([65])
|
||||
},
|
||||
{
|
||||
constructor: TextEncoderStream,
|
||||
input: 'A'
|
||||
}
|
||||
];
|
||||
|
||||
const microtasksRun = () => new Promise(resolve => step_timeout(resolve, 0));
|
||||
|
||||
for (const streamClass of classes) {
|
||||
promise_test(async () => {
|
||||
const stream = new streamClass.constructor();
|
||||
const writer = stream.writable.getWriter();
|
||||
const reader = stream.readable.getReader();
|
||||
const events = [];
|
||||
await microtasksRun();
|
||||
const writePromise = writer.write(streamClass.input);
|
||||
writePromise.then(() => events.push('write'));
|
||||
await microtasksRun();
|
||||
events.push('paused');
|
||||
await reader.read();
|
||||
events.push('read');
|
||||
await writePromise;
|
||||
assert_array_equals(events, ['paused', 'read', 'write'],
|
||||
'write should happen after read');
|
||||
}, 'write() should not complete until read relieves backpressure for ' +
|
||||
`${streamClass.constructor.name}`);
|
||||
|
||||
promise_test(async () => {
|
||||
const stream = new streamClass.constructor();
|
||||
const writer = stream.writable.getWriter();
|
||||
const reader = stream.readable.getReader();
|
||||
const events = [];
|
||||
await microtasksRun();
|
||||
const readPromise1 = reader.read();
|
||||
readPromise1.then(() => events.push('read1'));
|
||||
const writePromise1 = writer.write(streamClass.input);
|
||||
const writePromise2 = writer.write(streamClass.input);
|
||||
writePromise1.then(() => events.push('write1'));
|
||||
writePromise2.then(() => events.push('write2'));
|
||||
await microtasksRun();
|
||||
events.push('paused');
|
||||
const readPromise2 = reader.read();
|
||||
readPromise2.then(() => events.push('read2'));
|
||||
await Promise.all([writePromise1, writePromise2,
|
||||
readPromise1, readPromise2]);
|
||||
assert_array_equals(events, ['read1', 'write1', 'paused', 'read2',
|
||||
'write2'],
|
||||
'writes should not happen before read2');
|
||||
}, 'additional writes should wait for backpressure to be relieved for ' +
|
||||
`class ${streamClass.constructor.name}`);
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
// META: global=worker
|
||||
|
||||
'use strict';
|
||||
|
||||
const badChunks = [
|
||||
{
|
||||
name: 'undefined',
|
||||
value: undefined
|
||||
},
|
||||
{
|
||||
name: 'null',
|
||||
value: null
|
||||
},
|
||||
{
|
||||
name: 'numeric',
|
||||
value: 3.14
|
||||
},
|
||||
{
|
||||
name: 'object, not BufferSource',
|
||||
value: {}
|
||||
},
|
||||
{
|
||||
name: 'array',
|
||||
value: [65]
|
||||
},
|
||||
{
|
||||
name: 'detached ArrayBufferView',
|
||||
value: (() => {
|
||||
const u8 = new Uint8Array([65]);
|
||||
const ab = u8.buffer;
|
||||
const mc = new MessageChannel();
|
||||
mc.port1.postMessage(ab, [ab]);
|
||||
return u8;
|
||||
})()
|
||||
},
|
||||
{
|
||||
name: 'detached ArrayBuffer',
|
||||
value: (() => {
|
||||
const u8 = new Uint8Array([65]);
|
||||
const ab = u8.buffer;
|
||||
const mc = new MessageChannel();
|
||||
mc.port1.postMessage(ab, [ab]);
|
||||
return ab;
|
||||
})()
|
||||
},
|
||||
{
|
||||
name: 'SharedArrayBuffer',
|
||||
// Use a getter to postpone construction so that all tests don't fail where
|
||||
// SharedArrayBuffer is not yet implemented.
|
||||
get value() {
|
||||
return new SharedArrayBuffer();
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'shared Uint8Array',
|
||||
get value() {
|
||||
new Uint8Array(new SharedArrayBuffer())
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
for (const chunk of badChunks) {
|
||||
promise_test(async t => {
|
||||
const tds = new TextDecoderStream();
|
||||
const reader = tds.readable.getReader();
|
||||
const writer = tds.writable.getWriter();
|
||||
const writePromise = writer.write(chunk.value);
|
||||
const readPromise = reader.read();
|
||||
await promise_rejects(t, new TypeError(), writePromise, 'write should reject');
|
||||
await promise_rejects(t, new TypeError(), readPromise, 'read should reject');
|
||||
}, `chunk of type ${chunk.name} should error the stream`);
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
// META: global=worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
const cases = [
|
||||
{encoding: 'utf-8', bytes: [0xEF, 0xBB, 0xBF, 0x61, 0x62, 0x63]},
|
||||
{encoding: 'utf-16le', bytes: [0xFF, 0xFE, 0x61, 0x00, 0x62, 0x00, 0x63, 0x00]},
|
||||
{encoding: 'utf-16be', bytes: [0xFE, 0xFF, 0x00, 0x61, 0x00, 0x62, 0x00, 0x63]}
|
||||
];
|
||||
const BOM = '\uFEFF';
|
||||
|
||||
// |inputChunks| is an array of chunks, each represented by an array of
|
||||
// integers. |ignoreBOM| is true or false. The result value is the output of the
|
||||
// pipe, concatenated into a single string.
|
||||
async function pipeAndAssemble(inputChunks, encoding, ignoreBOM) {
|
||||
const chunksAsUint8 = inputChunks.map(values => new Uint8Array(values));
|
||||
const readable = readableStreamFromArray(chunksAsUint8);
|
||||
const outputArray = await readableStreamToArray(readable.pipeThrough(
|
||||
new TextDecoderStream(encoding, {ignoreBOM})));
|
||||
return outputArray.join('');
|
||||
}
|
||||
|
||||
for (const testCase of cases) {
|
||||
for (let splitPoint = 0; splitPoint < 4; ++splitPoint) {
|
||||
promise_test(async () => {
|
||||
const inputChunks = [testCase.bytes.slice(0, splitPoint),
|
||||
testCase.bytes.slice(splitPoint)];
|
||||
const withIgnoreBOM =
|
||||
await pipeAndAssemble(inputChunks, testCase.encoding, true);
|
||||
assert_equals(withIgnoreBOM, BOM + 'abc', 'BOM should be preserved');
|
||||
|
||||
const withoutIgnoreBOM =
|
||||
await pipeAndAssemble(inputChunks, testCase.encoding, false);
|
||||
assert_equals(withoutIgnoreBOM, 'abc', 'BOM should be stripped')
|
||||
}, `ignoreBOM should work for encoding ${testCase.encoding}, split at ` +
|
||||
`character ${splitPoint}`);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,24 @@
|
|||
// META: global=worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
'use strict';
|
||||
|
||||
const inputBytes = [229];
|
||||
|
||||
promise_test(async () => {
|
||||
const input = readableStreamFromArray([new Uint8Array(inputBytes)]);
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_array_equals(array, ['\uFFFD'], 'array should have one element');
|
||||
}, 'incomplete input with error mode "replacement" should end with a ' +
|
||||
'replacement character');
|
||||
|
||||
promise_test(async t => {
|
||||
const input = readableStreamFromArray([new Uint8Array(inputBytes)]);
|
||||
const output = input.pipeThrough(new TextDecoderStream(
|
||||
'utf-8', {fatal: true}));
|
||||
const reader = output.getReader();
|
||||
await promise_rejects(t, new TypeError(), reader.read(),
|
||||
'read should reject');
|
||||
}, 'incomplete input with error mode "fatal" should error the stream');
|
|
@ -0,0 +1,77 @@
|
|||
// META: global=worker
|
||||
|
||||
'use strict';
|
||||
|
||||
// The browser is assumed to use the same implementation as for TextDecoder, so
|
||||
// this file don't replicate the exhaustive checks it has. It is just a smoke
|
||||
// test that non-UTF-8 encodings work at all.
|
||||
|
||||
const encodings = [
|
||||
{
|
||||
name: 'UTF-16BE',
|
||||
value: [108, 52],
|
||||
expected: "\u{6c34}",
|
||||
invalid: [0xD8, 0x00]
|
||||
},
|
||||
{
|
||||
name: 'UTF-16LE',
|
||||
value: [52, 108],
|
||||
expected: "\u{6c34}",
|
||||
invalid: [0x00, 0xD8]
|
||||
},
|
||||
{
|
||||
name: 'Shift_JIS',
|
||||
value: [144, 133],
|
||||
expected: "\u{6c34}",
|
||||
invalid: [255]
|
||||
},
|
||||
{
|
||||
name: 'ISO-8859-14',
|
||||
value: [100, 240, 114],
|
||||
expected: "d\u{0175}r",
|
||||
invalid: undefined // all bytes are treated as valid
|
||||
}
|
||||
];
|
||||
|
||||
for (const encoding of encodings) {
|
||||
promise_test(async () => {
|
||||
const stream = new TextDecoderStream(encoding.name);
|
||||
const reader = stream.readable.getReader();
|
||||
const writer = stream.writable.getWriter();
|
||||
const writePromise = writer.write(new Uint8Array(encoding.value));
|
||||
const {value, done} = await reader.read();
|
||||
assert_false(done, 'readable should not be closed');
|
||||
assert_equals(value, encoding.expected, 'chunk should match expected');
|
||||
await writePromise;
|
||||
}, `TextDecoderStream should be able to decode ${encoding.name}`);
|
||||
|
||||
if (!encoding.invalid)
|
||||
continue;
|
||||
|
||||
promise_test(async t => {
|
||||
const stream = new TextDecoderStream(encoding.name);
|
||||
const reader = stream.readable.getReader();
|
||||
const writer = stream.writable.getWriter();
|
||||
const writePromise = writer.write(new Uint8Array(encoding.invalid));
|
||||
const closePromise = writer.close();
|
||||
const {value, done} = await reader.read();
|
||||
assert_false(done, 'readable should not be closed');
|
||||
assert_equals(value, '\u{FFFD}', 'output should be replacement character');
|
||||
await Promise.all([writePromise, closePromise]);
|
||||
}, `TextDecoderStream should be able to decode invalid sequences in ` +
|
||||
`${encoding.name}`);
|
||||
|
||||
promise_test(async t => {
|
||||
const stream = new TextDecoderStream(encoding.name, {fatal: true});
|
||||
const reader = stream.readable.getReader();
|
||||
const writer = stream.writable.getWriter();
|
||||
const writePromise = writer.write(new Uint8Array(encoding.invalid));
|
||||
const closePromise = writer.close();
|
||||
await promise_rejects(t, new TypeError(), reader.read(),
|
||||
'readable should be errored');
|
||||
await promise_rejects(t, new TypeError(),
|
||||
Promise.all([writePromise, closePromise]),
|
||||
'writable should be errored');
|
||||
}, `TextDecoderStream should be able to reject invalid sequences in ` +
|
||||
`${encoding.name}`);
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
// META: global=worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
'use strict';
|
||||
|
||||
const inputBytes = [73, 32, 240, 159, 146, 153, 32, 115, 116, 114, 101,
|
||||
97, 109, 115];
|
||||
for (const splitPoint of [2, 3, 4, 5]) {
|
||||
promise_test(async () => {
|
||||
const input = readableStreamFromArray(
|
||||
[new Uint8Array(inputBytes.slice(0, splitPoint)),
|
||||
new Uint8Array(inputBytes.slice(splitPoint))]);
|
||||
const expectedOutput = ['I ', '\u{1F499} streams'];
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_array_equals(array, expectedOutput,
|
||||
'the split code point should be in the second chunk ' +
|
||||
'of the output');
|
||||
}, 'a code point split between chunks should not be emitted until all ' +
|
||||
'bytes are available; split point = ' + splitPoint);
|
||||
}
|
||||
|
||||
promise_test(async () => {
|
||||
const splitPoint = 6;
|
||||
const input = readableStreamFromArray(
|
||||
[new Uint8Array(inputBytes.slice(0, splitPoint)),
|
||||
new Uint8Array(inputBytes.slice(splitPoint))]);
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_array_equals(array, ['I \u{1F499}', ' streams'],
|
||||
'the multibyte character should be in the first chunk ' +
|
||||
'of the output');
|
||||
}, 'a code point should be emitted as soon as all bytes are available');
|
||||
|
||||
for (let splitPoint = 1; splitPoint < 7; ++splitPoint) {
|
||||
promise_test(async () => {
|
||||
const input = readableStreamFromArray(
|
||||
[new Uint8Array(inputBytes.slice(0, splitPoint)),
|
||||
new Uint8Array([]),
|
||||
new Uint8Array(inputBytes.slice(splitPoint))]);
|
||||
const concatenatedOutput = 'I \u{1F499} streams';
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_equals(array.length, 2, 'two chunks should be output');
|
||||
assert_equals(array[0].concat(array[1]), concatenatedOutput,
|
||||
'output should be unchanged by the empty chunk');
|
||||
}, 'an empty chunk inside a code point split between chunks should not ' +
|
||||
'change the output; split point = ' + splitPoint);
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
// META: global=worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
'use strict';
|
||||
|
||||
const emptyChunk = new Uint8Array([]);
|
||||
const inputChunk = new Uint8Array([73, 32, 240, 159, 146, 153, 32, 115, 116,
|
||||
114, 101, 97, 109, 115]);
|
||||
const expectedOutputString = 'I \u{1F499} streams';
|
||||
|
||||
promise_test(async () => {
|
||||
const input = readableStreamFromArray([inputChunk]);
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_array_equals(array, [expectedOutputString],
|
||||
'the output should be in one chunk');
|
||||
}, 'decoding one UTF-8 chunk should give one output string');
|
||||
|
||||
promise_test(async () => {
|
||||
const input = readableStreamFromArray([emptyChunk]);
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_array_equals(array, [], 'no chunks should be output');
|
||||
}, 'decoding an empty chunk should give no output chunks');
|
||||
|
||||
promise_test(async () => {
|
||||
const input = readableStreamFromArray([emptyChunk, inputChunk]);
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_array_equals(array, [expectedOutputString],
|
||||
'the output should be in one chunk');
|
||||
}, 'an initial empty chunk should be ignored');
|
||||
|
||||
promise_test(async () => {
|
||||
const input = readableStreamFromArray([inputChunk, emptyChunk]);
|
||||
const output = input.pipeThrough(new TextDecoderStream());
|
||||
const array = await readableStreamToArray(output);
|
||||
assert_array_equals(array, [expectedOutputString],
|
||||
'the output should be in one chunk');
|
||||
}, 'a trailing empty chunk should be ignored');
|
|
@ -0,0 +1,63 @@
|
|||
// META: global=worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
'use strict';
|
||||
|
||||
const error1 = new Error('error1');
|
||||
error1.name = 'error1';
|
||||
|
||||
promise_test(t => {
|
||||
const ts = new TextEncoderStream();
|
||||
const writer = ts.writable.getWriter();
|
||||
const reader = ts.readable.getReader();
|
||||
const writePromise = writer.write({
|
||||
toString() { throw error1; }
|
||||
});
|
||||
const readPromise = reader.read();
|
||||
return Promise.all([
|
||||
promise_rejects(t, error1, readPromise, 'read should reject with error1'),
|
||||
promise_rejects(t, error1, writePromise, 'write should reject with error1'),
|
||||
promise_rejects(t, error1, reader.closed, 'readable should be errored with error1'),
|
||||
promise_rejects(t, error1, writer.closed, 'writable should be errored with error1'),
|
||||
]);
|
||||
}, 'a chunk that cannot be converted to a string should error the streams');
|
||||
|
||||
const oddInputs = [
|
||||
{
|
||||
name: 'undefined',
|
||||
value: undefined,
|
||||
expected: 'undefined'
|
||||
},
|
||||
{
|
||||
name: 'null',
|
||||
value: null,
|
||||
expected: 'null'
|
||||
},
|
||||
{
|
||||
name: 'numeric',
|
||||
value: 3.14,
|
||||
expected: '3.14'
|
||||
},
|
||||
{
|
||||
name: 'object',
|
||||
value: {},
|
||||
expected: '[object Object]'
|
||||
},
|
||||
{
|
||||
name: 'array',
|
||||
value: ['hi'],
|
||||
expected: 'hi'
|
||||
}
|
||||
];
|
||||
|
||||
for (const input of oddInputs) {
|
||||
promise_test(async () => {
|
||||
const outputReadable = readableStreamFromArray([input.value])
|
||||
.pipeThrough(new TextEncoderStream())
|
||||
.pipeThrough(new TextDecoderStream());
|
||||
const output = await readableStreamToArray(outputReadable);
|
||||
assert_equals(output.length, 1, 'output should contain one chunk');
|
||||
assert_equals(output[0], input.expected, 'output should be correct');
|
||||
}, `input of type ${input.name} should be converted correctly to string`);
|
||||
}
|
144
tests/wpt/web-platform-tests/encoding/streams/encode-utf8.any.js
Normal file
144
tests/wpt/web-platform-tests/encoding/streams/encode-utf8.any.js
Normal file
|
@ -0,0 +1,144 @@
|
|||
// META: global=worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
'use strict';
|
||||
const inputString = 'I \u{1F499} streams';
|
||||
const expectedOutputBytes = [0x49, 0x20, 0xf0, 0x9f, 0x92, 0x99, 0x20, 0x73,
|
||||
0x74, 0x72, 0x65, 0x61, 0x6d, 0x73];
|
||||
// This is a character that must be represented in two code units in a string,
|
||||
// ie. it is not in the Basic Multilingual Plane.
|
||||
const astralCharacter = '\u{1F499}'; // BLUE HEART
|
||||
const astralCharacterEncoded = [0xf0, 0x9f, 0x92, 0x99];
|
||||
const leading = astralCharacter[0];
|
||||
const trailing = astralCharacter[1];
|
||||
const replacementEncoded = [0xef, 0xbf, 0xbd];
|
||||
|
||||
// These tests assume that the implementation correctly classifies leading and
|
||||
// trailing surrogates and treats all the code units in each set equivalently.
|
||||
|
||||
const testCases = [
|
||||
{
|
||||
input: [inputString],
|
||||
output: [expectedOutputBytes],
|
||||
description: 'encoding one string of UTF-8 should give one complete chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing],
|
||||
output: [astralCharacterEncoded],
|
||||
description: 'a character split between chunks should be correctly encoded'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing + astralCharacter],
|
||||
output: [astralCharacterEncoded.concat(astralCharacterEncoded)],
|
||||
description: 'a character following one split between chunks should be ' +
|
||||
'correctly encoded'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing + leading, trailing],
|
||||
output: [astralCharacterEncoded, astralCharacterEncoded],
|
||||
description: 'two consecutive astral characters each split down the ' +
|
||||
'middle should be correctly reassembled'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing + leading + leading, trailing],
|
||||
output: [astralCharacterEncoded.concat(replacementEncoded), astralCharacterEncoded],
|
||||
description: 'two consecutive astral characters each split down the ' +
|
||||
'middle with an invalid surrogate in the middle should be correctly ' +
|
||||
'encoded'
|
||||
},
|
||||
{
|
||||
input: [leading],
|
||||
output: [replacementEncoded],
|
||||
description: 'a stream ending in a leading surrogate should emit a ' +
|
||||
'replacement character as a final chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, astralCharacter],
|
||||
output: [replacementEncoded.concat(astralCharacterEncoded)],
|
||||
description: 'an unmatched surrogate at the end of a chunk followed by ' +
|
||||
'an astral character in the next chunk should be replaced with ' +
|
||||
'the replacement character at the start of the next output chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, 'A'],
|
||||
output: [replacementEncoded.concat([65])],
|
||||
description: 'an unmatched surrogate at the end of a chunk followed by ' +
|
||||
'an ascii character in the next chunk should be replaced with ' +
|
||||
'the replacement character at the start of the next output chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, leading, trailing],
|
||||
output: [replacementEncoded, astralCharacterEncoded],
|
||||
description: 'an unmatched surrogate at the end of a chunk followed by ' +
|
||||
'a plane 1 character split into two chunks should result in ' +
|
||||
'the encoded plane 1 character appearing in the last output chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, leading],
|
||||
output: [replacementEncoded, replacementEncoded],
|
||||
description: 'two leading chunks should result in two replacement ' +
|
||||
'characters'
|
||||
},
|
||||
{
|
||||
input: [leading + leading, trailing],
|
||||
output: [replacementEncoded, astralCharacterEncoded],
|
||||
description: 'a non-terminal unpaired leading surrogate should ' +
|
||||
'immediately be replaced'
|
||||
},
|
||||
{
|
||||
input: [trailing, astralCharacter],
|
||||
output: [replacementEncoded, astralCharacterEncoded],
|
||||
description: 'a terminal unpaired trailing surrogate should ' +
|
||||
'immediately be replaced'
|
||||
},
|
||||
{
|
||||
input: [leading, '', trailing],
|
||||
output: [astralCharacterEncoded],
|
||||
description: 'a leading surrogate chunk should be carried past empty chunks'
|
||||
},
|
||||
{
|
||||
input: [leading, ''],
|
||||
output: [replacementEncoded],
|
||||
description: 'a leading surrogate chunk should error when it is clear ' +
|
||||
'it didn\'t form a pair'
|
||||
},
|
||||
{
|
||||
input: [''],
|
||||
output: [],
|
||||
description: 'an empty string should result in no output chunk'
|
||||
},
|
||||
{
|
||||
input: ['', inputString],
|
||||
output: [expectedOutputBytes],
|
||||
description: 'a leading empty chunk should be ignored'
|
||||
},
|
||||
{
|
||||
input: [inputString, ''],
|
||||
output: [expectedOutputBytes],
|
||||
description: 'a trailing empty chunk should be ignored'
|
||||
},
|
||||
{
|
||||
input: ['A'],
|
||||
output: [[65]],
|
||||
description: 'a plain ASCII chunk should be converted'
|
||||
},
|
||||
{
|
||||
input: ['\xff'],
|
||||
output: [[195, 191]],
|
||||
description: 'characters in the ISO-8859-1 range should be encoded correctly'
|
||||
},
|
||||
];
|
||||
|
||||
for (const {input, output, description} of testCases) {
|
||||
promise_test(async () => {
|
||||
const inputStream = readableStreamFromArray(input);
|
||||
const outputStream = inputStream.pipeThrough(new TextEncoderStream());
|
||||
const chunkArray = await readableStreamToArray(outputStream);
|
||||
assert_equals(chunkArray.length, output.length,
|
||||
'number of chunks should match');
|
||||
for (let i = 0; i < output.length; ++i) {
|
||||
assert_array_equals(chunkArray[i], output[i], `chunk ${i} should match`);
|
||||
}
|
||||
}, description);
|
||||
}
|
|
@ -0,0 +1,22 @@
|
|||
// META: global=worker
|
||||
|
||||
// This just tests that the "readable" and "writable" properties pass the brand
|
||||
// checks. All other relevant attributes are covered by the IDL tests.
|
||||
|
||||
'use strict';
|
||||
|
||||
test(() => {
|
||||
const te = new TextEncoderStream();
|
||||
assert_equals(typeof ReadableStream.prototype.getReader.call(te.readable),
|
||||
'object', 'readable property must pass brand check');
|
||||
assert_equals(typeof WritableStream.prototype.getWriter.call(te.writable),
|
||||
'object', 'writable property must pass brand check');
|
||||
}, 'TextEncoderStream readable and writable properties must pass brand checks');
|
||||
|
||||
test(() => {
|
||||
const td = new TextDecoderStream();
|
||||
assert_equals(typeof ReadableStream.prototype.getReader.call(td.readable),
|
||||
'object', 'readable property must pass brand check');
|
||||
assert_equals(typeof WritableStream.prototype.getWriter.call(td.writable),
|
||||
'object', 'writable property must pass brand check');
|
||||
}, 'TextDecoderStream readable and writable properties must pass brand checks');
|
347
tests/wpt/web-platform-tests/encoding/streams/realms.window.js
Normal file
347
tests/wpt/web-platform-tests/encoding/streams/realms.window.js
Normal file
|
@ -0,0 +1,347 @@
|
|||
'use strict';
|
||||
|
||||
// Test that objects created by the TextEncoderStream and TextDecoderStream APIs
|
||||
// are created in the correct realm. The tests work by creating an iframe for
|
||||
// each realm and then posting Javascript to them to be evaluated. Inputs and
|
||||
// outputs are passed around via global variables in each realm's scope.
|
||||
|
||||
// Async setup is required before creating any tests, so require done() to be
|
||||
// called.
|
||||
setup({explicit_done: true});
|
||||
|
||||
function createRealm() {
|
||||
let iframe = document.createElement('iframe');
|
||||
const scriptEndTag = '<' + '/script>';
|
||||
iframe.srcdoc = `<!doctype html>
|
||||
<script>
|
||||
onmessage = event => {
|
||||
if (event.source !== window.parent) {
|
||||
throw new Error('unexpected message with source ' + event.source);
|
||||
}
|
||||
eval(event.data);
|
||||
};
|
||||
${scriptEndTag}`;
|
||||
iframe.style.display = 'none';
|
||||
document.body.appendChild(iframe);
|
||||
let realmPromiseResolve;
|
||||
const realmPromise = new Promise(resolve => {
|
||||
realmPromiseResolve = resolve;
|
||||
});
|
||||
iframe.onload = () => {
|
||||
realmPromiseResolve(iframe.contentWindow);
|
||||
};
|
||||
return realmPromise;
|
||||
}
|
||||
|
||||
async function createRealms() {
|
||||
// All realms are visible on the global object so they can access each other.
|
||||
|
||||
// The realm that the constructor function comes from.
|
||||
window.constructorRealm = await createRealm();
|
||||
|
||||
// The realm in which the constructor object is called.
|
||||
window.constructedRealm = await createRealm();
|
||||
|
||||
// The realm in which reading happens.
|
||||
window.readRealm = await createRealm();
|
||||
|
||||
// The realm in which writing happens.
|
||||
window.writeRealm = await createRealm();
|
||||
|
||||
// The realm that provides the definitions of Readable and Writable methods.
|
||||
window.methodRealm = await createRealm();
|
||||
|
||||
await evalInRealmAndWait(methodRealm, `
|
||||
window.ReadableStreamDefaultReader =
|
||||
new ReadableStream().getReader().constructor;
|
||||
window.WritableStreamDefaultWriter =
|
||||
new WritableStream().getWriter().constructor;
|
||||
`);
|
||||
window.readMethod = methodRealm.ReadableStreamDefaultReader.prototype.read;
|
||||
window.writeMethod = methodRealm.WritableStreamDefaultWriter.prototype.write;
|
||||
}
|
||||
|
||||
// In order for values to be visible between realms, they need to be
|
||||
// global. To prevent interference between tests, variable names are generated
|
||||
// automatically.
|
||||
const id = (() => {
|
||||
let nextId = 0;
|
||||
return () => {
|
||||
return `realmsId${nextId++}`;
|
||||
};
|
||||
})();
|
||||
|
||||
// Eval string "code" in the content of realm "realm". Evaluation happens
|
||||
// asynchronously, meaning it hasn't happened when the function returns.
|
||||
function evalInRealm(realm, code) {
|
||||
realm.postMessage(code, window.origin);
|
||||
}
|
||||
|
||||
// Same as evalInRealm() but returns a Promise which will resolve when the
|
||||
// function has actually.
|
||||
async function evalInRealmAndWait(realm, code) {
|
||||
const resolve = id();
|
||||
const waitOn = new Promise(r => {
|
||||
realm[resolve] = r;
|
||||
});
|
||||
evalInRealm(realm, code);
|
||||
evalInRealm(realm, `${resolve}();`);
|
||||
await waitOn;
|
||||
}
|
||||
|
||||
// The same as evalInRealmAndWait but returns the result of evaluating "code" as
|
||||
// an expression.
|
||||
async function evalInRealmAndReturn(realm, code) {
|
||||
const myId = id();
|
||||
await evalInRealmAndWait(realm, `window.${myId} = ${code};`);
|
||||
return realm[myId];
|
||||
}
|
||||
|
||||
// Constructs an object in constructedRealm and copies it into readRealm and
|
||||
// writeRealm. Returns the id that can be used to access the object in those
|
||||
// realms. |what| can contain constructor arguments.
|
||||
async function constructAndStore(what) {
|
||||
const objId = id();
|
||||
// Call |constructorRealm|'s constructor from inside |constructedRealm|.
|
||||
writeRealm[objId] = await evalInRealmAndReturn(
|
||||
constructedRealm, `new parent.constructorRealm.${what}`);
|
||||
readRealm[objId] = writeRealm[objId];
|
||||
return objId;
|
||||
}
|
||||
|
||||
// Calls read() on the readable side of the TransformStream stored in
|
||||
// readRealm[objId]. Locks the readable side as a side-effect.
|
||||
function readInReadRealm(objId) {
|
||||
return evalInRealmAndReturn(readRealm, `
|
||||
parent.readMethod.call(window.${objId}.readable.getReader())`);
|
||||
}
|
||||
|
||||
// Calls write() on the writable side of the TransformStream stored in
|
||||
// writeRealm[objId], passing |value|. Locks the writable side as a
|
||||
// side-effect.
|
||||
function writeInWriteRealm(objId, value) {
|
||||
const valueId = id();
|
||||
writeRealm[valueId] = value;
|
||||
return evalInRealmAndReturn(writeRealm, `
|
||||
parent.writeMethod.call(window.${objId}.writable.getWriter(),
|
||||
window.${valueId})`);
|
||||
}
|
||||
|
||||
window.onload = () => {
|
||||
createRealms().then(() => {
|
||||
runGenericTests('TextEncoderStream');
|
||||
runTextEncoderStreamTests();
|
||||
runGenericTests('TextDecoderStream');
|
||||
runTextDecoderStreamTests();
|
||||
done();
|
||||
});
|
||||
};
|
||||
|
||||
function runGenericTests(classname) {
|
||||
promise_test(async () => {
|
||||
const obj = await evalInRealmAndReturn(
|
||||
constructedRealm, `new parent.constructorRealm.${classname}()`);
|
||||
assert_equals(obj.constructor, constructorRealm[classname],
|
||||
'obj should be in constructor realm');
|
||||
}, `a ${classname} object should be associated with the realm the ` +
|
||||
'constructor came from');
|
||||
|
||||
promise_test(async () => {
|
||||
const objId = await constructAndStore(classname);
|
||||
const readableGetterId = id();
|
||||
readRealm[readableGetterId] = Object.getOwnPropertyDescriptor(
|
||||
methodRealm[classname].prototype, 'readable').get;
|
||||
const writableGetterId = id();
|
||||
writeRealm[writableGetterId] = Object.getOwnPropertyDescriptor(
|
||||
methodRealm[classname].prototype, 'writable').get;
|
||||
const readable = await evalInRealmAndReturn(
|
||||
readRealm, `${readableGetterId}.call(${objId})`);
|
||||
const writable = await evalInRealmAndReturn(
|
||||
writeRealm, `${writableGetterId}.call(${objId})`);
|
||||
assert_equals(readable.constructor, constructorRealm.ReadableStream,
|
||||
'readable should be in constructor realm');
|
||||
assert_equals(writable.constructor, constructorRealm.WritableStream,
|
||||
'writable should be in constructor realm');
|
||||
}, `${classname}'s readable and writable attributes should come from the ` +
|
||||
'same realm as the constructor definition');
|
||||
}
|
||||
|
||||
// Runs the TextEncoderStream-specific multi-realm tests: output chunks and
// thrown TypeErrors must be created in the realm where the constructor was
// defined, regardless of which realm performs the read or write.
// NOTE(review): depends on helpers defined earlier in this file
// (constructAndStore, readInReadRealm, writeInWriteRealm) and on the realm
// handles constructorRealm / readRealm / writeRealm — confirm they are in
// scope before this is called.
function runTextEncoderStreamTests() {
  promise_test(async () => {
    const objId = await constructAndStore('TextEncoderStream');
    // Write first so backpressure is already relieved when read() happens.
    const writePromise = writeInWriteRealm(objId, 'A');
    const result = await readInReadRealm(objId);
    await writePromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    assert_equals(result.value.constructor, constructorRealm.Uint8Array,
                  'chunk should be in constructor realm');
  }, 'the output chunks when read is called after write should come from the ' +
     'same realm as the constructor of TextEncoderStream');

  promise_test(async () => {
    const objId = await constructAndStore('TextEncoderStream');
    const chunkPromise = readInReadRealm(objId);
    writeInWriteRealm(objId, 'A');
    // Now the read() should resolve.
    const result = await chunkPromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    assert_equals(result.value.constructor, constructorRealm.Uint8Array,
                  'chunk should be in constructor realm');
  }, 'the output chunks when write is called with a pending read should come ' +
     'from the same realm as the constructor of TextEncoderStream');

  // There is not absolute consensus regarding what realm exceptions should be
  // created in. Implementations may vary. The expectations in exception-related
  // tests may change in future once consensus is reached.
  promise_test(async t => {
    const objId = await constructAndStore('TextEncoderStream');
    // Read first to relieve backpressure.
    const readPromise = readInReadRealm(objId);
    // promise_rejects() does not permit directly inspecting the rejection, so
    // it's necessary to write it out long-hand.
    let writeSucceeded = false;
    try {
      // Write an invalid chunk: toString() returning a non-string forces the
      // string conversion inside the encoder to throw.
      await writeInWriteRealm(objId, {
        toString() { return {}; }
      });
      writeSucceeded = true;
    } catch (err) {
      assert_equals(err.constructor, constructorRealm.TypeError,
                    'write TypeError should come from constructor realm');
    }
    assert_false(writeSucceeded, 'write should fail');

    let readSucceeded = false;
    try {
      await readPromise;
      readSucceeded = true;
    } catch (err) {
      assert_equals(err.constructor, constructorRealm.TypeError,
                    'read TypeError should come from constructor realm');
    }

    assert_false(readSucceeded, 'read should fail');
  }, 'TypeError for unconvertable chunk should come from constructor realm ' +
     'of TextEncoderStream');
}
// Runs the TextDecoderStream-specific multi-realm tests: result objects and
// thrown TypeErrors must be created in the realm where the constructor was
// defined, regardless of which realm performs the read, write or close.
// NOTE(review): depends on helpers defined earlier in this file
// (constructAndStore, readInReadRealm, writeInWriteRealm, evalInRealmAndReturn,
// id) and on the realm handles constructorRealm / writeRealm — confirm they
// are in scope before this is called.
function runTextDecoderStreamTests() {
  promise_test(async () => {
    const objId = await constructAndStore('TextDecoderStream');
    // Write first so backpressure is already relieved when read() happens.
    const writePromise = writeInWriteRealm(objId, new Uint8Array([65]));
    const result = await readInReadRealm(objId);
    await writePromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    // A string is not an object, so doesn't have an associated realm. Accessing
    // string properties will create a transient object wrapper belonging to the
    // current realm. So checking the realm of result.value is not useful.
  }, 'the result object when read is called after write should come from the ' +
     'same realm as the constructor of TextDecoderStream');

  promise_test(async () => {
    const objId = await constructAndStore('TextDecoderStream');
    const chunkPromise = readInReadRealm(objId);
    writeInWriteRealm(objId, new Uint8Array([65]));
    // Now the read() should resolve.
    const result = await chunkPromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    // A string is not an object, so doesn't have an associated realm. Accessing
    // string properties will create a transient object wrapper belonging to the
    // current realm. So checking the realm of result.value is not useful.
  }, 'the result object when write is called with a pending ' +
     'read should come from the same realm as the constructor of TextDecoderStream');

  promise_test(async t => {
    const objId = await constructAndStore('TextDecoderStream');
    // Read first to relieve backpressure.
    const readPromise = readInReadRealm(objId);
    // promise_rejects() does not permit directly inspecting the rejection, so
    // it's necessary to write it out long-hand.
    let writeSucceeded = false;
    try {
      // Write an invalid chunk: a plain object is not a BufferSource.
      await writeInWriteRealm(objId, {});
      writeSucceeded = true;
    } catch (err) {
      assert_equals(err.constructor, constructorRealm.TypeError,
                    'write TypeError should come from constructor realm');
    }
    assert_false(writeSucceeded, 'write should fail');

    let readSucceeded = false;
    try {
      await readPromise;
      readSucceeded = true;
    } catch (err) {
      assert_equals(err.constructor, constructorRealm.TypeError,
                    'read TypeError should come from constructor realm');
    }
    assert_false(readSucceeded, 'read should fail');
  }, 'TypeError for chunk with the wrong type should come from constructor ' +
     'realm of TextDecoderStream');

  promise_test(async t => {
    // With {fatal: true} an invalid byte sequence makes decoding error out
    // instead of emitting a replacement character.
    const objId =
        await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
    // Read first to relieve backpressure.
    const readPromise = readInReadRealm(objId);
    // promise_rejects() does not permit directly inspecting the rejection, so
    // it's necessary to write it out long-hand.
    let writeSucceeded = false;
    try {
      // 0xff is never valid in UTF-8.
      await writeInWriteRealm(objId, new Uint8Array([0xff]));
      writeSucceeded = true;
    } catch (err) {
      assert_equals(err.constructor, constructorRealm.TypeError,
                    'write TypeError should come from constructor realm');
    }
    assert_false(writeSucceeded, 'write should fail');

    let readSucceeded = false;
    try {
      await readPromise;
      readSucceeded = true;
    } catch (err) {
      assert_equals(err.constructor, constructorRealm.TypeError,
                    'read TypeError should come from constructor realm');
    }
    assert_false(readSucceeded, 'read should fail');
  }, 'TypeError for invalid chunk should come from constructor realm ' +
     'of TextDecoderStream');

  promise_test(async t => {
    const objId =
        await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
    // Read first to relieve backpressure.
    readInReadRealm(objId);
    // Write an unfinished sequence of bytes (0xf0 starts a 4-byte sequence),
    // then close; the flush step should reject with a TypeError.
    const incompleteBytesId = id();
    writeRealm[incompleteBytesId] = new Uint8Array([0xf0]);
    // promise_rejects() does not permit directly inspecting the rejection, so
    // it's necessary to write it out long-hand.
    let closeSucceeded = false;
    try {
      // Can't use writeInWriteRealm() here because it doesn't make it possible
      // to reuse the writer.
      await evalInRealmAndReturn(writeRealm, `
(() => {
  const writer = window.${objId}.writable.getWriter();
  parent.writeMethod.call(writer, window.${incompleteBytesId});
  return parent.methodRealm.WritableStreamDefaultWriter.prototype
      .close.call(writer);
})();
`);
      closeSucceeded = true;
    } catch (err) {
      assert_equals(err.constructor, constructorRealm.TypeError,
                    'close TypeError should come from constructor realm');
    }
    assert_false(closeSucceeded, 'close should fail');
  }, 'TypeError for incomplete input should come from constructor realm ' +
     'of TextDecoderStream');
}
@ -0,0 +1,12 @@
|
|||
'use strict';

/**
 * Creates a ReadableStream whose queue is pre-filled with every element of
 * `array`, in order, and which is already closed.
 *
 * All chunks are enqueued synchronously in start(), so a consumer can read
 * them back immediately without any further producer activity.
 *
 * @param {Iterable} array - values to enqueue as chunks, in iteration order.
 * @returns {ReadableStream} a closed stream containing the given chunks.
 */
function readableStreamFromArray(array) {
  return new ReadableStream({
    start(controller) {
      // `const` rather than `let`: the binding is never reassigned.
      for (const entry of array) {
        controller.enqueue(entry);
      }
      controller.close();
    }
  });
}
|
@ -0,0 +1,11 @@
|
|||
'use strict';

/**
 * Drains `stream` and collects every chunk into an array.
 *
 * The stream is piped into a WritableStream that pushes each chunk, so the
 * returned promise settles only once the pipe completes; it rejects if the
 * stream errors.
 *
 * @param {ReadableStream} stream - the stream to drain (becomes locked/used).
 * @returns {Promise<Array>} resolves with the chunks in arrival order.
 */
function readableStreamToArray(stream) {
  // `const` rather than `var`: neither binding is reassigned.
  const array = [];
  const writable = new WritableStream({
    write(chunk) {
      array.push(chunk);
    }
  });
  return stream.pipeTo(writable).then(() => array);
}
Loading…
Add table
Add a link
Reference in a new issue