Update web-platform-tests to revision 2b80e6d28f3c1ca734384ebded282bf07df80657
parent 32eb858a6a
commit aff72973cf
379 changed files with 13969 additions and 2161 deletions
@ -0,0 +1,62 @@
// META: global=worker

'use strict';

const badChunks = [
  {
    name: 'undefined',
    value: undefined
  },
  {
    name: 'null',
    value: null
  },
  {
    name: 'numeric',
    value: 3.14
  },
  {
    name: 'object, not BufferSource',
    value: {}
  },
  {
    name: 'array',
    value: [65]
  },
  {
    name: 'SharedArrayBuffer',
    // Use a getter to postpone construction so that all tests don't fail where
    // SharedArrayBuffer is not yet implemented.
    get value() {
      return new SharedArrayBuffer();
    }
  },
  {
    name: 'shared Uint8Array',
    get value() {
      return new Uint8Array(new SharedArrayBuffer());
    }
  },
];

for (const chunk of badChunks) {
  promise_test(async t => {
    const cs = new CompressionStream('gzip');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects(t, new TypeError(), writePromise, 'write should reject');
    await promise_rejects(t, new TypeError(), readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for gzip`);

  promise_test(async t => {
    const cs = new CompressionStream('deflate');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects(t, new TypeError(), writePromise, 'write should reject');
    await promise_rejects(t, new TypeError(), readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for deflate`);
}
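For contrast with the bad chunks above, a minimal sketch (not part of this commit) of a write that a conforming CompressionStream should accept. The chunk is a Uint8Array, i.e. a BufferSource, the one chunk family the spec allows; collecting the output by wrapping the readable side in a Response assumes the worker scope exposes the Response constructor.

// Sketch: a BufferSource chunk should be accepted rather than rejected.
promise_test(async t => {
  const cs = new CompressionStream('gzip');
  const writer = cs.writable.getWriter();
  const writePromise = writer.write(new TextEncoder().encode('ok'));
  writer.close();
  // Collect all compressed bytes; a valid write should produce output.
  const output = await new Response(cs.readable).arrayBuffer();
  await writePromise;
  assert_greater_than(output.byteLength, 0, 'compressed output should be non-empty');
}, 'a Uint8Array chunk should be accepted (illustrative sketch)');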
@ -0,0 +1,57 @@
// META: global=worker
// META: script=pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing '' doesn't affect the compressed data.
// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello'

async function compressChunkList(chunkList, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  for (const chunk of chunkList) {
    const chunkByte = new TextEncoder().encode(chunk);
    writer.write(chunkByte);
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

const chunkLists = [
  ['', 'Hello', 'Hello'],
  ['Hello', '', 'Hello'],
  ['Hello', 'Hello', '']
];
const expectedValue = new TextEncoder().encode('HelloHello');

for (const chunkList of chunkLists) {
  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'deflate');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);

  promise_test(async t => {
    const compressedData = await compressChunkList(chunkList, 'gzip');
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);
}
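The read-and-concatenate loop in compressChunkList reappears verbatim in the files below. As a design note, the same gathering step can be written more compactly by handing the readable side to Response; a minimal sketch, where compressChunkListCompact is a hypothetical rewrite and not part of this commit:

// Sketch: a compact alternative to the manual read loop, assuming the
// Response constructor accepts a ReadableStream in this worker scope.
async function compressChunkListCompact(chunkList, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  for (const chunk of chunkList) {
    writer.write(new TextEncoder().encode(chunk));
  }
  writer.close();
  // Response drains the whole readable into a single ArrayBuffer.
  return new Uint8Array(await new Response(cs.readable).arrayBuffer());
}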
@ -0,0 +1,60 @@
// META: global=worker
// META: script=pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing multiple chunks works.

// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
function makeExpectedChunk(input, numberOfChunks) {
  const expectedChunk = input.repeat(numberOfChunks);
  return new TextEncoder().encode(expectedChunk);
}

// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
async function compressMultipleChunks(input, numberOfChunks, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  const chunk = new TextEncoder().encode(input);
  for (let i = 0; i < numberOfChunks; ++i) {
    writer.write(chunk);
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

const hello = 'Hello';

for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with deflate should work`);

  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // decompress with pako, and check that we got the same result as our original string
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with gzip should work`);
}
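These tests verify the output with pako rather than the platform's own decompressor. Where DecompressionStream is implemented (it belongs to the same spec, though support may lag behind CompressionStream), a round trip could serve the same purpose; a minimal sketch with a hypothetical roundTrip helper, not part of this commit:

// Sketch of a pako-free check, assuming DecompressionStream is available.
// roundTrip('Hello', 3, 'gzip') should resolve to 'HelloHelloHello'.
async function roundTrip(input, numberOfChunks, format) {
  const compressed = await compressMultipleChunks(input, numberOfChunks, format);
  const ds = new DecompressionStream(format);
  const writer = ds.writable.getWriter();
  writer.write(compressed);
  writer.close();
  // Gather the decompressed bytes and decode them back to a string.
  const buffer = await new Response(ds.readable).arrayBuffer();
  return new TextDecoder().decode(buffer);
}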
@ -0,0 +1,54 @@
// META: global=worker

'use strict';

// This test asserts that the compressed data is shorter than the original
// data. A large input file is used because, if the input were extremely
// small, the compressed data could be larger than the original.

const LARGE_FILE = '/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm';

async function compressArrayBuffer(input, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  writer.write(input);
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}

promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const originalLength = bufferView.length;
  const compressedData = await compressArrayBuffer(bufferView, 'deflate');
  const compressedLength = compressedData.length;
  assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of deflated data should be shorter than that of the original data');

promise_test(async () => {
  const response = await fetch(LARGE_FILE);
  const buffer = await response.arrayBuffer();
  const bufferView = new Uint8Array(buffer);
  const originalLength = bufferView.length;
  const compressedData = await compressArrayBuffer(bufferView, 'gzip');
  const compressedLength = compressedData.length;
  assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of gzipped data should be shorter than that of the original data');
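The caveat in the comment above can be made concrete: deflate cannot shrink high-entropy input, and gzip framing adds a header and trailer, so a small or incompressible input may grow. A minimal sketch (not part of this commit) using random bytes as a hypothetical worst case:

// Sketch: random bytes typically do not shrink under gzip, which is why
// the tests above fetch a large, compressible file instead.
promise_test(async () => {
  const random = crypto.getRandomValues(new Uint8Array(1024));
  const compressedData = await compressArrayBuffer(random, 'gzip');
  // No relative-size assertion here; assert_less_than would be unreliable
  // for input like this.
  assert_greater_than(compressedData.length, 0, 'output should be non-empty');
}, 'compressing incompressible data still produces output (illustrative sketch)');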