mirror of
https://github.com/servo/servo.git
synced 2025-08-03 20:50:07 +01:00
Update web-platform-tests to revision b464d69274950c7707855c0b29729d58b9a8d492
This commit is contained in:
parent
93c31df551
commit
b505991695
92 changed files with 1750 additions and 119 deletions
|
@ -0,0 +1,55 @@
|
|||
// META: global=window,worker
// META: script=resources/concatenate-stream.js

'use strict';

// Total number of random input bytes to generate and compress (500 KB).
const kInputLength = 500000;
|
||||
|
||||
// Build a kInputLength-byte Uint8Array filled with cryptographically random
// data. crypto.getRandomValues() only fills up to 65536 bytes per call, so
// the buffer is populated one slice at a time.
function createLargeRandomInput() {
  const kChunkSize = 65536;
  const buffer = new ArrayBuffer(kInputLength);
  let offset = 0;
  while (offset < kInputLength) {
    const sliceLength = Math.min(kChunkSize, kInputLength - offset);
    crypto.getRandomValues(new Uint8Array(buffer, offset, sliceLength));
    offset += sliceLength;
  }
  return new Uint8Array(buffer);
}
|
||||
|
||||
// Run |view| (a deflate-compressed BufferSource) through a
// DecompressionStream and return a promise for a Uint8Array of the
// decompressed bytes.
function decompress(view) {
  const decompressor = new DecompressionStream('deflate');
  const writer = decompressor.writable.getWriter();
  // The write()/close() promises are deliberately not awaited; any failure
  // will surface via the readable side consumed by concatenateStream().
  writer.write(view);
  writer.close();
  return concatenateStream(decompressor.readable);
}
|
||||
|
||||
// Regression test: compress a large random input, then detach the input's
// ArrayBuffer while the compression is still in flight, and verify the
// compressed output still round-trips back to the original bytes.
promise_test(async () => {
  const input = createLargeRandomInput();
  // Keep an independent copy for the final comparison, since |input| itself
  // will be detached below.
  const inputCopy = input.slice(0, input.byteLength);
  const cs = new CompressionStream('deflate');
  const writer = cs.writable.getWriter();
  writer.write(input);
  writer.close();
  // Object.prototype.then will be looked up synchronously when the promise
  // returned by read() is resolved.
  Object.defineProperty(Object.prototype, 'then', {
    get() {
      // Cause input to become detached and unreferenced.
      try {
        postMessage(undefined, 'nowhere', [input.buffer]);
      } catch (e) {
        // It's already detached.
      }
    }
  });
  const output = await concatenateStream(cs.readable);
  // Perform the comparison as strings since this is reasonably fast even when
  // JITted JavaScript is running under an emulator.
  assert_equals(
      inputCopy.toString(), (await decompress(output)).toString(),
      'decompressing the output should return the input');
}, 'data should be correctly compressed even if input is detached partway');
|
|
@ -0,0 +1,41 @@
|
|||
// META: global=window,worker
// META: script=resources/concatenate-stream.js

'use strict';

// Number of zero bytes to compress (1 MB); chosen so the decompressed data
// does not fit in a single output chunk.
const kInputLength = 1000000;
|
||||
|
||||
// Produce a deflate-compressed payload whose decompressed form is
// kInputLength zero bytes. The input has to be large enough that it won't
// fit in a single chunk when decompressed.
async function createLargeCompressedInput() {
  const compressor = new CompressionStream('deflate');
  const writer = compressor.writable.getWriter();
  // write()/close() promises are intentionally not awaited; errors propagate
  // through the readable side.
  writer.write(new Uint8Array(kInputLength));
  writer.close();
  return concatenateStream(compressor.readable);
}
|
||||
|
||||
// Regression test: decompress a large payload, then detach the compressed
// input's ArrayBuffer while decompression is still in flight, and verify the
// full decompressed length is still produced.
promise_test(async () => {
  const input = await createLargeCompressedInput();
  const ds = new DecompressionStream('deflate');
  const writer = ds.writable.getWriter();
  writer.write(input);
  writer.close();
  // Object.prototype.then will be looked up synchronously when the promise
  // returned by read() is resolved.
  Object.defineProperty(Object.prototype, 'then', {
    get() {
      // Cause input to become detached and unreferenced.
      try {
        postMessage(undefined, 'nowhere', [input.buffer]);
      } catch (e) {
        // It's already detached.
      }
    }
  });
  const output = await concatenateStream(ds.readable);
  // If output successfully decompressed and gave the right length, we can be
  // reasonably confident that no data corruption happened.
  assert_equals(
      output.byteLength, kInputLength, 'output should be the right length');
}, 'data should be correctly decompressed even if input is detached partway');
|
|
@ -0,0 +1,25 @@
|
|||
'use strict';
|
||||
|
||||
// Drain |readableStream|, whose chunks are BufferSource objects, and return a
// promise for one Uint8Array holding every byte in arrival order.
async function concatenateStream(readableStream) {
  const reader = readableStream.getReader();
  const chunks = [];
  let totalSize = 0;
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    chunks.push(value);
    totalSize += value.byteLength;
  }
  reader.releaseLock();
  // Second pass: copy each chunk into its slot of the result array.
  const result = new Uint8Array(totalSize);
  let writeOffset = 0;
  for (const chunk of chunks) {
    result.set(chunk, writeOffset);
    writeOffset += chunk.byteLength;
  }
  return result;
}
|
Loading…
Add table
Add a link
Reference in a new issue