mirror of
https://github.com/servo/servo.git
synced 2025-08-03 20:50:07 +01:00
Update web-platform-tests to revision b728032f59a396243864b0f8584e7211e3632005
This commit is contained in:
parent
ace9b32b1c
commit
df68c4e5d1
15632 changed files with 514865 additions and 155000 deletions
|
@ -324,3 +324,47 @@ promise_test(async t => {
|
|||
|
||||
assert_equals(outputs, 1, 'outputs');
|
||||
}, 'Test reset during flush');
|
||||
|
||||
// Verify that decodeQueueSize tracks pending decodes correctly: it stays at
// zero before any decode, never exceeds the number of submitted chunks,
// drains to zero after flush(), and is cleared synchronously by reset().
// Dequeue events must only fire while work remains, each reporting a
// strictly smaller queue size than the previous one.
promise_test(async t => {
  const callbacks = {};
  const decoder = createAudioDecoder(t, callbacks);

  // No decodes yet.
  assert_equals(decoder.decodeQueueSize, 0);

  decoder.configure(CONFIG);

  // Configuring alone must not enqueue any decode work.
  assert_equals(decoder.decodeQueueSize, 0);

  // Queue size observed at the previous dequeue event; starts at Infinity so
  // the first event always sees a "decrease".
  let lastDequeueSize = Infinity;
  decoder.ondequeue = () => {
    assert_greater_than(lastDequeueSize, 0, "Dequeue event after queue empty");
    assert_greater_than(lastDequeueSize, decoder.decodeQueueSize,
                        "Dequeue event without decreased queue size");
    lastDequeueSize = decoder.decodeQueueSize;
  };

  for (const chunk of CHUNKS) {
    decoder.decode(chunk);
  }

  // Some decodes may already have completed, but the queue can never go
  // negative or exceed the number of chunks submitted.
  assert_greater_than_equal(decoder.decodeQueueSize, 0);
  assert_less_than_equal(decoder.decodeQueueSize, CHUNKS.length);

  await decoder.flush();
  // We can guarantee that all decodes are processed after a flush.
  assert_equals(decoder.decodeQueueSize, 0);
  // Last dequeue event should fire when the queue is empty.
  assert_equals(lastDequeueSize, 0);

  // Reset this to Infinity to track the decline of queue size for this next
  // batch of decodes.
  lastDequeueSize = Infinity;

  for (const chunk of CHUNKS) {
    decoder.decode(chunk);
  }

  assert_greater_than_equal(decoder.decodeQueueSize, 0);
  decoder.reset();
  // reset() must synchronously drop all queued decode work.
  assert_equals(decoder.decodeQueueSize, 0);
}, 'AudioDecoder decodeQueueSize test');
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue