Mirror of https://github.com/servo/servo.git
Synced 2025-08-08 15:05:35 +01:00
Update web-platform-tests to revision b'b728032f59a396243864b0f8584e7211e3632005'
This commit is contained in:
parent ace9b32b1c
commit df68c4e5d1
15632 changed files with 514865 additions and 155000 deletions
@@ -61,4 +61,14 @@ for (const chunk of badChunks) {
     await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
     await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
   }, `chunk of type ${chunk.name} should error the stream for deflate`);
+
+  promise_test(async t => {
+    const cs = new CompressionStream('deflate-raw');
+    const reader = cs.readable.getReader();
+    const writer = cs.writable.getWriter();
+    const writePromise = writer.write(chunk.value);
+    const readPromise = reader.read();
+    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
+    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+  }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
 }
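Note: the badChunks table this loop iterates over is defined above the hunk and is not part of the diff context. In the WPT compression tests such a table pairs a label with an invalid chunk value; a plausible shape, assumed here for illustration rather than taken from this commit:

// Hypothetical sketch of the badChunks fixture referenced above.
const badChunks = [
  { name: 'undefined', value: undefined },
  { name: 'null', value: null },
  { name: 'numeric', value: 3.14 },
  { name: 'object, not BufferSource', value: {} },
  { name: 'array', value: [65] },
];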
@@ -0,0 +1,15 @@
+// META: global=window,worker
+
+'use strict';
+
+test(t => {
+  assert_throws_js(TypeError, () => new CompressionStream('a'), 'constructor should throw');
+}, '"a" should cause the constructor to throw');
+
+test(t => {
+  assert_throws_js(TypeError, () => new CompressionStream(), 'constructor should throw');
+}, 'no input should cause the constructor to throw');
+
+test(t => {
+  assert_throws_js(Error, () => new CompressionStream({ toString() { throw Error(); } }), 'constructor should throw');
+}, 'non-string input should cause the constructor to throw');
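Note: the third test expects a plain Error, not a TypeError, because of argument coercion order: Web IDL stringifies the format argument before validating it, so an exception thrown by a custom toString propagates as-is instead of the TypeError that format validation would raise. A standalone way to observe the same ordering:

let thrown;
try {
  new CompressionStream({ toString() { throw new Error('from toString'); } });
} catch (e) {
  thrown = e;
}
console.log(thrown instanceof TypeError);  // false – it is the Error thrown by toString
console.log(thrown.message);               // "from toString"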
@@ -54,4 +54,10 @@ for (const chunkList of chunkLists) {
     // decompress with pako, and check that we got the same result as our original string
     assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
   }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);
+
+  promise_test(async t => {
+    const compressedData = await compressChunkList(chunkList, 'deflate-raw');
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+  }, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
 }
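Note: the compressChunkList helper is defined earlier in the test file and does not appear in this hunk. A minimal sketch of what such a helper plausibly looks like (names and structure assumed, not taken from the diff); reads are drained concurrently with the writes so the stream's backpressure cannot deadlock the loop:

// Hypothetical sketch – the real helper lives outside this hunk.
async function compressChunkList(chunkList, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  // Queue all writes and the close without awaiting the whole batch,
  // so the read loop below can run concurrently.
  const writeDone = (async () => {
    for (const chunk of chunkList) {
      await writer.write(chunk);
    }
    await writer.close();
  })();
  // Drain the readable side into one contiguous Uint8Array.
  const parts = [];
  const reader = cs.readable.getReader();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    parts.push(value);
  }
  await writeDone;
  const out = new Uint8Array(parts.reduce((n, p) => n + p.byteLength, 0));
  let offset = 0;
  for (const p of parts) {
    out.set(p, offset);
    offset += p.byteLength;
  }
  return out;
}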
@@ -57,4 +57,11 @@ for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
     // decompress with pako, and check that we got the same result as our original string
     assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
   }, `compressing ${numberOfChunks} chunks with gzip should work`);
+
+  promise_test(async t => {
+    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
+    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
+    // decompress with pako, and check that we got the same result as our original string
+    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
+  }, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
 }
@@ -52,3 +52,13 @@ promise_test(async () => {
   const compressedLength = compressedData.length;
   assert_less_than(compressedLength, originalLength, 'output should be smaller');
 }, 'the length of gzipped data should be shorter than that of the original data');
+
+promise_test(async () => {
+  const response = await fetch(LARGE_FILE);
+  const buffer = await response.arrayBuffer();
+  const bufferView = new Uint8Array(buffer);
+  const originalLength = bufferView.length;
+  const compressedData = await compressArrayBuffer(bufferView, 'deflate-raw');
+  const compressedLength = compressedData.length;
+  assert_less_than(compressedLength, originalLength, 'output should be smaller');
+}, 'the length of deflated (with -raw) data should be shorter than that of the original data');
@@ -49,24 +49,37 @@ const badChunks = [
   },
 ];
 
-for (const chunk of badChunks) {
-  promise_test(async t => {
-    const ds = new DecompressionStream('gzip');
+// Test Case Design
+// We need to wait until after we close the writable stream to check if the decoded stream is valid.
+// We can end up in a state where all reads/writes are valid, but upon closing the writable stream an error is detected.
+// (Example: A zlib encoded chunk w/o the checksum).
+
+async function decompress(chunk, format, t)
+{
+  const ds = new DecompressionStream(format);
   const reader = ds.readable.getReader();
   const writer = ds.writable.getWriter();
-  const writePromise = writer.write(chunk.value);
-  const readPromise = reader.read();
-  await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
-  await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+
+  writer.write(chunk.value).then(() => {}, () => {});
+  reader.read().then(() => {}, () => {});
+
+  await promise_rejects_js(t, TypeError, writer.close(), 'writer.close() should reject');
+  await promise_rejects_js(t, TypeError, writer.closed, 'write.closed should reject');
+
+  await promise_rejects_js(t, TypeError, reader.read(), 'reader.read() should reject');
+  await promise_rejects_js(t, TypeError, reader.closed, 'read.closed should reject');
+}
+
+for (const chunk of badChunks) {
+  promise_test(async t => {
+    await decompress(chunk, 'gzip', t);
+  }, `chunk of type ${chunk.name} should error the stream for gzip`);
 
   promise_test(async t => {
-    const ds = new DecompressionStream('deflate');
-    const reader = ds.readable.getReader();
-    const writer = ds.writable.getWriter();
-    const writePromise = writer.write(chunk.value);
-    const readPromise = reader.read();
-    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
-    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
+    await decompress(chunk, 'deflate', t);
   }, `chunk of type ${chunk.name} should error the stream for deflate`);
+
+  promise_test(async t => {
+    await decompress(chunk, 'deflate-raw', t);
+  }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
 }
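Note: the "Test Case Design" comment is why the new helper asserts on writer.close() rather than only on individual writes: some malformed inputs are undetectable until end-of-input. An illustration of that failure mode, built from the deflate test vector that appears later in this commit with its final 4 checksum bytes removed (the surrounding code is a sketch, not part of the diff):

// A zlib stream whose trailing Adler-32 checksum has been cut off.
// Writes and reads can still succeed; only close(), which signals
// end-of-input, can notice that the checksum never arrived.
const truncatedZlib = new Uint8Array([
  120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200,
  47, 45, 41, 40, 45, 1, 0,  // checksum bytes (48, 173, 6, 36) dropped
]);
const ds = new DecompressionStream('deflate');
const writer = ds.writable.getWriter();
const reader = ds.readable.getReader();
writer.write(truncatedZlib).then(() => {}, () => {});
reader.read().then(() => {}, () => {});  // both of these may succeed
await writer.close().then(
  () => console.log('unexpected: close succeeded'),
  e => console.log('close rejected with', e.name));  // TypeError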
@@ -4,10 +4,15 @@
 
 const compressedBytesWithDeflate = [120, 156, 75, 52, 48, 52, 50, 54, 49, 53, 3, 0, 8, 136, 1, 199];
 const compressedBytesWithGzip = [31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 52, 48, 52, 2, 0, 216, 252, 63, 136, 4, 0, 0, 0];
-// Two chunk values below were chosen to make the length of the compressed
+const compressedBytesWithDeflateRaw = [
+  0x00, 0x06, 0x00, 0xf9, 0xff, 0x41, 0x42, 0x43,
+  0x44, 0x45, 0x46, 0x01, 0x00, 0x00, 0xff, 0xff,
+];
+// These chunk values below were chosen to make the length of the compressed
 // output be a multiple of 8 bytes.
 const deflateExpectedChunkValue = new TextEncoder().encode('a0123456');
 const gzipExpectedChunkValue = new TextEncoder().encode('a012');
+const deflateRawExpectedChunkValue = new TextEncoder().encode('ABCDEF');
 
 const bufferSourceChunksForDeflate = [
   {
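Note: the "multiple of 8 bytes" constraint exists because each compressed byte sequence is wrapped in every TypedArray flavor below, and the widest view, Float64Array, requires the buffer length to divide evenly by its 8-byte element size:

const bytes = new Uint8Array(compressedBytesWithDeflateRaw);  // 16 bytes
new Float64Array(bytes.buffer);  // fine: 16 / 8 = 2 elements
// new Float64Array(new ArrayBuffer(17)) would throw a RangeError instead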
@@ -103,6 +108,53 @@ const bufferSourceChunksForGzip = [
   },
 ];
 
+const bufferSourceChunksForDeflateRaw = [
+  {
+    name: 'ArrayBuffer',
+    value: new Uint8Array(compressedBytesWithDeflateRaw).buffer
+  },
+  {
+    name: 'Int8Array',
+    value: new Int8Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Uint8Array',
+    value: new Uint8Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Uint8ClampedArray',
+    value: new Uint8ClampedArray(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Int16Array',
+    value: new Int16Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Uint16Array',
+    value: new Uint16Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Int32Array',
+    value: new Int32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Uint32Array',
+    value: new Uint32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Float32Array',
+    value: new Float32Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'Float64Array',
+    value: new Float64Array(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+  {
+    name: 'DataView',
+    value: new DataView(new Uint8Array(compressedBytesWithDeflateRaw).buffer)
+  },
+];
+
 for (const chunk of bufferSourceChunksForDeflate) {
   promise_test(async t => {
     const ds = new DecompressionStream('deflate');
@@ -126,3 +178,15 @@ for (const chunk of bufferSourceChunksForGzip) {
     assert_array_equals(Array.from(value), gzipExpectedChunkValue, 'value should match');
   }, `chunk of type ${chunk.name} should work for gzip`);
 }
+
+for (const chunk of bufferSourceChunksForDeflateRaw) {
+  promise_test(async t => {
+    const ds = new DecompressionStream('deflate-raw');
+    const reader = ds.readable.getReader();
+    const writer = ds.writable.getWriter();
+    const writePromise = writer.write(chunk.value);
+    writer.close();
+    const { value } = await reader.read();
+    assert_array_equals(Array.from(value), deflateRawExpectedChunkValue, 'value should match');
+  }, `chunk of type ${chunk.name} should work for deflate-raw`);
+}
@@ -4,6 +4,10 @@
 
 const deflateChunkValue = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]);
 const gzipChunkValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]);
+const deflateRawChunkValue = new Uint8Array([
+  0x4b, 0xad, 0x28, 0x48, 0x4d, 0x2e, 0x49, 0x4d, 0x51, 0xc8,
+  0x2f, 0x2d, 0x29, 0x28, 0x2d, 0x01, 0x00,
+]);
 const trueChunkValue = new TextEncoder().encode('expected output');
 
 promise_test(async t => {
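Note the relationship between the fixtures: deflateRawChunkValue is exactly deflateChunkValue with the 2-byte zlib header (120, 156) and the trailing 4-byte Adler-32 checksum (48, 173, 6, 36) stripped, which is precisely what 'deflate-raw' means. A quick check:

const raw = deflateChunkValue.slice(2, -4);
console.log(raw.every((b, i) => b === deflateRawChunkValue[i]));  // true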
@@ -24,3 +28,12 @@ promise_test(async t => {
   const { done, value } = await reader.read();
   assert_array_equals(Array.from(value), trueChunkValue, "value should match");
 }, 'decompressing gzip input should work');
+
+promise_test(async t => {
+  const ds = new DecompressionStream('deflate-raw');
+  const reader = ds.readable.getReader();
+  const writer = ds.writable.getWriter();
+  const writePromise = writer.write(deflateRawChunkValue);
+  const { done, value } = await reader.read();
+  assert_array_equals(Array.from(value), trueChunkValue, "value should match");
+}, 'decompressing deflated (with -raw) input should work');
@@ -4,6 +4,7 @@
 
 const gzipEmptyValue = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
 const deflateEmptyValue = new Uint8Array([120, 156, 3, 0, 0, 0, 0, 1]);
+const deflateRawEmptyValue = new Uint8Array([1, 0, 0, 255, 255]);
 
 promise_test(async t => {
   const ds = new DecompressionStream('gzip');
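For reference, [1, 0, 0, 255, 255] is a valid empty raw-deflate stream: a final stored block with zero payload. The byte-level reading below is standard RFC 1951, not something the diff itself states:

// 0b0000_0001: BFINAL = 1 (last block), BTYPE = 00 (stored), then pad to byte edge
// 0x0000:      LEN  = 0   (no payload bytes)
// 0xFFFF:      NLEN = ~LEN (one's-complement integrity check)
const deflateRawEmptyValue = new Uint8Array([0b0000_0001, 0x00, 0x00, 0xff, 0xff]);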
@@ -28,3 +29,15 @@ promise_test(async t => {
   assert_equals(value, undefined, "value should be undefined");
   await writePromise;
 }, 'decompressing deflate empty input should work');
+
+promise_test(async t => {
+  const ds = new DecompressionStream('deflate-raw');
+  const reader = ds.readable.getReader();
+  const writer = ds.writable.getWriter();
+  const writePromise = writer.write(deflateRawEmptyValue);
+  writer.close();
+  const { value, done } = await reader.read();
+  assert_true(done, "read() should set done");
+  assert_equals(value, undefined, "value should be undefined");
+  await writePromise;
+}, 'decompressing deflate-raw empty input should work');
@@ -4,6 +4,10 @@
 
 const compressedBytesWithDeflate = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]);
 const compressedBytesWithGzip = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]);
+const compressedBytesWithDeflateRaw = new Uint8Array([
+  0x4b, 0xad, 0x28, 0x48, 0x4d, 0x2e, 0x49, 0x4d, 0x51, 0xc8,
+  0x2f, 0x2d, 0x29, 0x28, 0x2d, 0x01, 0x00,
+]);
 const expectedChunkValue = new TextEncoder().encode('expected output');
 
 async function decompressArrayBuffer(input, format, chunkSize) {
@@ -41,4 +45,9 @@ for (let chunkSize = 1; chunkSize < 16; ++chunkSize) {
     const decompressedData = await decompressArrayBuffer(compressedBytesWithGzip, 'gzip', chunkSize);
     assert_array_equals(decompressedData, expectedChunkValue, "value should match");
   }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in gzip`);
+
+  promise_test(async t => {
+    const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflateRaw, 'deflate-raw', chunkSize);
+    assert_array_equals(decompressedData, expectedChunkValue, "value should match");
+  }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in deflate-raw`);
 }
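Note: decompressArrayBuffer is declared at the top of this file (its body is outside the hunk). The point of the chunkSize parameter is to re-slice one compressed buffer into arbitrarily small pieces and confirm that decompression is insensitive to chunk boundaries; a plausible sketch of such a helper, with the slicing step made explicit (details assumed, not taken from the diff):

// Hypothetical sketch – the real helper body is outside this hunk.
async function decompressArrayBuffer(input, format, chunkSize) {
  const ds = new DecompressionStream(format);
  const writer = ds.writable.getWriter();
  // Feed the input in slices of chunkSize bytes to exercise boundary handling.
  const writeDone = (async () => {
    for (let i = 0; i < input.length; i += chunkSize) {
      await writer.write(input.slice(i, i + chunkSize));
    }
    await writer.close();
  })();
  // Concatenate everything the readable side produces, reading concurrently.
  const parts = [];
  const reader = ds.readable.getReader();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    parts.push(value);
  }
  await writeDone;
  const out = new Uint8Array(parts.reduce((n, p) => n + p.byteLength, 0));
  let offset = 0;
  for (const p of parts) {
    out.set(p, offset);
    offset += p.byteLength;
  }
  return out;
}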
@@ -0,0 +1,2 @@
+// META: script=/resources/idlharness-shadowrealm.js
+idl_test_shadowrealm(["compression"], ["streams"]);