Update web-platform-tests to revision 44702f2bc8ea98bc32b5b244f2fe63c6ce66d49d

This commit is contained in:
Josh Matthews 2017-11-15 12:15:13 -05:00
parent 85fa6409bb
commit c227604a2c
997 changed files with 45660 additions and 14650 deletions

View file

@ -21,3 +21,198 @@ function trimEmptyElements(array) {
}
return array.subarray(start, end);
}
// Returns true when |a| and |b| differ by less than the fixed tolerance
// (9e-3). Any comparison involving NaN yields false.
function fuzzyCompare(a, b) {
  var delta = a - b;
  if (delta < 0) {
    delta = -delta;
  }
  return delta < 9e-3;
}
/**
 * Compares two sample channels element-wise using fuzzyCompare and reports
 * (via assert_equals) how many samples fell outside the tolerance, along
 * with the worst deviation and the first offending index.
 *
 * @param buf1 test-data channel (array-like of samples)
 * @param buf2 expected-data channel (array-like of samples)
 * @param length optional number of samples to compare; defaults to the
 *               remainder of buf1 past sourceOffset
 * @param sourceOffset optional start index into buf1 (default 0)
 * @param destOffset optional start index into buf2 (default 0)
 * @param skipLengthCheck when true, do not assert that both channels have
 *                        the same length (used when comparing sub-ranges)
 */
function compareChannels(buf1, buf2,
                         /*optional*/ length,
                         /*optional*/ sourceOffset,
                         /*optional*/ destOffset,
                         /*optional*/ skipLengthCheck) {
  if (!skipLengthCheck) {
    assert_equals(buf1.length, buf2.length, "Channels must have the same length");
  }
  sourceOffset = sourceOffset || 0;
  destOffset = destOffset || 0;
  // Loose equality intentionally treats both undefined and null as "not given".
  if (length == undefined) {
    length = buf1.length - sourceOffset;
  }
  var difference = 0;      // number of samples outside the tolerance
  var maxDifference = 0;   // largest absolute deviation observed
  var firstBadIndex = -1;  // first offending index, for the failure message
  for (var i = 0; i < length; ++i) {
    if (!fuzzyCompare(buf1[i + sourceOffset], buf2[i + destOffset])) {
      difference++;
      maxDifference = Math.max(maxDifference, Math.abs(buf1[i + sourceOffset] - buf2[i + destOffset]));
      if (firstBadIndex == -1) {
        firstBadIndex = i;
      }
    }
  }
  assert_equals(difference, 0, "maxDifference: " + maxDifference +
      ", first bad index: " + firstBadIndex + " with test-data offset " +
      sourceOffset + " and expected-data offset " + destOffset +
      "; corresponding values " + buf1[firstBadIndex + sourceOffset] + " and " +
      buf2[firstBadIndex + destOffset] + " --- differences");
}
/**
 * Asserts that two AudioBuffers match: same channel count, length, and
 * sample rate, and (fuzzily) the same samples in every channel.
 *
 * Returns early after the first structural mismatch so a buffer that
 * cannot possibly match does not also produce per-sample failures.
 */
function compareBuffers(got, expected) {
  var structuralChecks = [
    ["numberOfChannels", "Correct number of buffer channels"],
    ["length", "Correct buffer length"],
    ["sampleRate", "Correct sample rate"],
  ];
  for (var check = 0; check < structuralChecks.length; ++check) {
    var prop = structuralChecks[check][0];
    if (got[prop] != expected[prop]) {
      assert_equals(got[prop], expected[prop], structuralChecks[check][1]);
      return;
    }
  }
  for (var channel = 0; channel < got.numberOfChannels; ++channel) {
    compareChannels(got.getChannelData(channel),
                    expected.getChannelData(channel),
                    got.length, 0, 0, true);
  }
}
/**
* This function assumes that the test is a "single page test" [0], and defines a
* single gTest variable with the following properties and methods:
*
* + numberOfChannels: optional property which specifies the number of channels
* in the output. The default value is 2.
* + createGraph: mandatory method which takes a context object and does
* everything needed in order to set up the Web Audio graph.
* This function returns the node to be inspected.
* + createGraphAsync: async version of createGraph. This function takes
* a callback which should be called with an argument
* set to the node to be inspected when the callee is
* ready to proceed with the test. Either this function
* or createGraph must be provided.
* + createExpectedBuffers: optional method which takes a context object and
* returns either one expected buffer or an array of
* them, designating what is expected to be observed
* in the output. If omitted, the output is expected
* to be silence. All buffers must have the same
* length, which must be a bufferSize supported by
* ScriptProcessorNode. This function is guaranteed
* to be called before createGraph.
* + length: property equal to the total number of frames which we are waiting
* to see in the output, mandatory if createExpectedBuffers is not
* provided, in which case it must be a bufferSize supported by
* ScriptProcessorNode (256, 512, 1024, 2048, 4096, 8192, or 16384).
* If createExpectedBuffers is provided then this must be equal to
* the number of expected buffers * the expected buffer length.
*
* + skipOfflineContextTests: optional. when true, skips running tests on an offline
* context by circumventing testOnOfflineContext.
*
* [0]: http://web-platform-tests.org/writing-tests/testharness-api.html#single-page-tests
*/
function runTest(name)
{
// Driver for "single page" Web Audio tests: reads the global gTest
// descriptor (documented in the comment block above), renders its graph on
// a realtime AudioContext and, unless skipped, on two OfflineAudioContexts,
// comparing the observed output with the expected buffers.
// NOTE(review): |name| is not referenced in this body — confirm whether it
// is intentionally unused.
function runTestFunction () {
if (!gTest.numberOfChannels) {
gTest.numberOfChannels = 2; // default
}
// Total frames observed during the normal-context run; reused below as the
// frame length of the OfflineAudioContexts.
// NOTE(review): assumes testOnNormalContext completes before
// testOnOfflineContext constructs its context — confirm, since testLength
// is undefined until the normal-context run has processed its buffers.
var testLength;
// Shared driver: computes the expected buffers for |context|, builds the
// graph, and hands the node-to-inspect to |testOutput|.
function runTestOnContext(context, callback, testOutput) {
if (!gTest.createExpectedBuffers) {
// Assume that the output is silence
var expectedBuffers = getEmptyBuffer(context, gTest.length);
} else {
var expectedBuffers = gTest.createExpectedBuffers(context);
}
// createExpectedBuffers may return a single buffer; normalize to an array.
if (!(expectedBuffers instanceof Array)) {
expectedBuffers = [expectedBuffers];
}
// Every expected buffer must carry the declared channel count, and the
// combined frame count must match gTest.length when both are provided.
var expectedFrames = 0;
for (var i = 0; i < expectedBuffers.length; ++i) {
assert_equals(expectedBuffers[i].numberOfChannels, gTest.numberOfChannels,
"Correct number of channels for expected buffer " + i);
expectedFrames += expectedBuffers[i].length;
}
if (gTest.length && gTest.createExpectedBuffers) {
assert_equals(expectedFrames,
gTest.length, "Correct number of expected frames");
}
if (gTest.createGraphAsync) {
gTest.createGraphAsync(context, function(nodeToInspect) {
testOutput(nodeToInspect, expectedBuffers, callback);
});
} else {
testOutput(gTest.createGraph(context), expectedBuffers, callback);
}
}
// Runs the test on a realtime AudioContext, capturing output with a
// ScriptProcessorNode and comparing it buffer by buffer.
function testOnNormalContext(callback) {
function testOutput(nodeToInspect, expectedBuffers, callback) {
testLength = 0;
var sp = context.createScriptProcessor(expectedBuffers[0].length, gTest.numberOfChannels, 0);
nodeToInspect.connect(sp);
sp.onaudioprocess = function(e) {
var expectedBuffer = expectedBuffers.shift();
testLength += expectedBuffer.length;
compareBuffers(e.inputBuffer, expectedBuffer);
if (expectedBuffers.length == 0) {
// All expected buffers consumed: detach the handler so no further
// comparisons run, then report completion.
sp.onaudioprocess = null;
callback();
}
};
}
var context = new AudioContext();
runTestOnContext(context, callback, testOutput);
}
// Runs the test on an OfflineAudioContext at |sampleRate|, comparing the
// rendered output against the expected buffers once rendering completes.
function testOnOfflineContext(callback, sampleRate) {
function testOutput(nodeToInspect, expectedBuffers, callback) {
nodeToInspect.connect(context.destination);
context.oncomplete = function(e) {
// The rendered output is one long buffer; walk it in expected-buffer
// sized chunks, offsetting into the rendered data via samplesSeen.
var samplesSeen = 0;
while (expectedBuffers.length) {
var expectedBuffer = expectedBuffers.shift();
assert_equals(e.renderedBuffer.numberOfChannels, expectedBuffer.numberOfChannels,
"Correct number of input buffer channels");
for (var i = 0; i < e.renderedBuffer.numberOfChannels; ++i) {
compareChannels(e.renderedBuffer.getChannelData(i),
expectedBuffer.getChannelData(i),
expectedBuffer.length,
samplesSeen,
undefined,
true);
}
samplesSeen += expectedBuffer.length;
}
callback();
};
context.startRendering();
}
var context = new OfflineAudioContext(gTest.numberOfChannels, testLength, sampleRate);
runTestOnContext(context, callback, testOutput);
}
// Normal context first, then (unless skipped) offline contexts at two
// sample rates; done() ends the single page test.
testOnNormalContext(function() {
if (!gTest.skipOfflineContextTests) {
testOnOfflineContext(function() {
testOnOfflineContext(done, 44100);
}, 48000);
} else {
done();
}
});
};
runTestFunction();
}

File diff suppressed because it is too large Load diff

View file

@ -69,7 +69,13 @@ window.Audit = (function() {
String(target) :
String(target.slice(0, options.numberOfArrayElements)) + '...';
targetString = '[' + arrayElements + ']';
} else if (target === null) {
// null is an object, so we need to handle this specially.
targetString = String(target);
} else {
// We're expecting String() to return something like "[object Foo]",
// so we split the string to get the object type "Foo". This is
// pretty fragile.
targetString = '' + String(targetString).split(/[\s\]]/)[1];
}
break;
@ -167,10 +173,8 @@ window.Audit = (function() {
// If there is a second operand (i.e. expected value), we have to build
// the string for it as well.
if (this._expected !== null) {
this._detail =
this._detail.replace(/\$\{expected\}/g, this._expectedDescription);
}
this._detail =
this._detail.replace(/\$\{expected\}/g, this._expectedDescription);
// If there is any property in |_options|, replace the property name
// with the value.

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,50 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script>
promise_test(function() {
// fftSize <= bufferSize so that the time domain data is full of input after
// processing the buffer.
const fftSize = 32;
const bufferSize = 128;
var context = new OfflineAudioContext(1, bufferSize, 48000);
// analyser1 sits in the audible path (connected to the destination);
// analyser2 is a dead-end branch off the same gain node. Both are read
// back after rendering.
var analyser1 = context.createAnalyser();
analyser1.fftSize = fftSize;
analyser1.connect(context.destination);
var analyser2 = context.createAnalyser();
analyser2.fftSize = fftSize;
// The source sample is 1.0 / gain (= 0.5), so after the gain of 2 the
// expected value everywhere is exactly 1.0.
var gain = context.createGain();
gain.gain.value = 2.0;
gain.connect(analyser1);
gain.connect(analyser2);
// Create a DC input to make getFloatTimeDomainData() output consistent at
// any time.
var buffer = context.createBuffer(1, 1, context.sampleRate);
buffer.getChannelData(0)[0] = 1.0 / gain.gain.value;
var source = context.createBufferSource();
source.buffer = buffer;
source.loop = true;
source.connect(gain);
source.start();
return context.startRendering().then(function(buffer) {
// NOTE(review): this assertion checks the rendered buffer, not analyser1
// itself — confirm the "analyser1 output" description is intended.
assert_equals(buffer.getChannelData(0)[0], 1.0, "analyser1 output");
var data = new Float32Array(1);
analyser1.getFloatTimeDomainData(data);
assert_equals(data[0], 1.0, "analyser1 time domain data");
analyser2.getFloatTimeDomainData(data);
assert_equals(data[0], 1.0, "analyser2 time domain data");
});
}, "Test effect of AnalyserNode on GainNode output");
</script>
</head>
</body>
</html>

View file

@ -0,0 +1,42 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Test AnalyserNode when the input is silent</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script>
// Single page test: done() is called explicitly once enough audio has been
// processed.
var ac = new AudioContext();
var analyser = ac.createAnalyser();
var constant = ac.createConstantSource();
// The ScriptProcessorNode (0 output channels) is only used as a periodic
// callback while the silent source plays.
var sp = ac.createScriptProcessor(2048, 1, 0);
// A zero offset makes the constant source emit pure silence.
constant.offset.value = 0.0;
constant.connect(analyser).connect(ac.destination);
constant.connect(sp);
var buf = new Float32Array(analyser.frequencyBinCount);
var iteration_count = 10;
sp.onaudioprocess = function() {
analyser.getFloatFrequencyData(buf);
var correct = true;
for (var i = 0; i < buf.length; i++) {
// &= coerces the accumulator to a number (1/0); !!correct below restores
// a boolean for the assertion.
correct &= buf[i] == -Infinity;
}
assert_true(!!correct, "silent input process -Infinity in decibel bins");
if (!iteration_count--) {
// After ~10 callbacks, tear the graph down and finish the test.
sp.onaudioprocess = null;
constant.stop();
ac.close();
done();
}
};
constant.start();
</script>
</head>
</body>
</html>

View file

@ -0,0 +1,41 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>AnalyserNode output</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script>
// Test descriptor consumed by runTest() in helpers.js: play a 440 Hz sine
// buffer through an AnalyserNode and expect the node to pass the signal
// through unchanged.
var gTest = {
length: 2048,
numberOfChannels: 1,
// Builds the graph and returns the node whose output is inspected.
createGraph: function(context) {
var source = context.createBufferSource();
var analyser = context.createAnalyser();
source.buffer = this.buffer;
source.connect(analyser);
source.start(0);
return analyser;
},
// Called before createGraph (per the runTest contract); the same sine
// buffer serves as both the input and the expected output.
createExpectedBuffers: function(context) {
this.buffer = context.createBuffer(1, 2048, context.sampleRate);
for (var i = 0; i < 2048; ++i) {
this.buffer.getChannelData(0)[i] = Math.sin(
440 * 2 * Math.PI * i / context.sampleRate
);
}
return [this.buffer];
}
};
runTest("AnalyserNode output");
</script>
</head>
</body>
</html>

View file

@ -0,0 +1,49 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Test AnalyserNode when the input is scaled</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script>
// Single page test: feed an oscillator through a GainNode into an
// AnalyserNode and check, for each gain value in |tests|, that the
// analyser's byte time-domain data is scaled accordingly.
var context = new AudioContext();
var gain = context.createGain();
var analyser = context.createAnalyser();
var osc = context.createOscillator();
osc.connect(gain);
gain.connect(analyser);
osc.start();
var array = new Uint8Array(analyser.frequencyBinCount);
function getAnalyserData() {
  gain.gain.setValueAtTime(currentGain, context.currentTime);
  analyser.getByteTimeDomainData(array);
  // Find the largest deviation from 128, the unsigned-byte encoding of
  // silence. (The original compared the raw byte value against the stored
  // deviation — `if (array[i] > max) max = Math.abs(array[i] - 128)` —
  // which could record the wrong maximum.)
  var max = -1;
  for (var i = 0; i < array.length; i++) {
    var deviation = Math.abs(array[i] - 128);
    if (deviation > max) {
      max = deviation;
    }
  }
  // A gain of g should bound the deviation by g * 128; retry on the next
  // animation frame until the scaled data is observed.
  if (max <= currentGain * 128) {
    assert_true(true, "Analyser got scaled data for " + currentGain);
    currentGain = tests.shift();
    if (currentGain == undefined) {
      done();
      return;
    }
  }
  requestAnimationFrame(getAnalyserData);
}
var tests = [1.0, 0.5, 0.0];
var currentGain = tests.shift();
requestAnimationFrame(getAnalyserData);
</script>
</head>
</body>
</html>

View file

@ -0,0 +1,237 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script>
// fftSize values that must be rejected with IndexSizeError: too small,
// not a power of two, or above the maximum of 32768.
var invalidFftSizes = [0, 1, 8, 100, 2049, 4097, 8193, 16385, 32769, 65536];

// Asserts the default channel/FFT attribute values of a fresh AnalyserNode.
function checkDefaults(analyser) {
  // NOTE(review): the Web Audio spec gives AnalyserNode a default
  // channelCount of 2 — confirm the expected value of 1 used here.
  assert_equals(
    analyser.channelCount,
    1,
    "analyser node has 1 input channels by default"
  );
  assert_equals(
    analyser.channelCountMode,
    "max",
    "Correct channelCountMode for the analyser node"
  );
  assert_equals(
    analyser.channelInterpretation,
    "speakers",
    "Correct channelCountInterpretation for the analyser node"
  );
  assert_equals(
    analyser.fftSize,
    2048,
    "Correct default value for fftSize"
  );
  assert_equals(
    analyser.frequencyBinCount,
    1024,
    "Correct default value for frequencyBinCount"
  );
}

// Asserts the default decibel range and smoothing constant.
function checkDecibelAndSmoothingDefaults(analyser) {
  assert_equals(
    analyser.minDecibels,
    -100,
    "Correct default value for minDecibels"
  );
  assert_equals(
    analyser.maxDecibels,
    -30,
    "Correct default value for maxDecibels"
  );
  assert_true(
    Math.abs(analyser.smoothingTimeConstant - 0.8) < 0.001,
    "Correct default value for smoothingTimeConstant"
  );
}

// Exercises the AnalyserNode API via attribute assignment on a node
// created with AudioContext.createAnalyser().
function testNode() {
  var context = new AudioContext();
  // 2048 frames of a 440 Hz sine wave used as the analysed signal.
  var buffer = context.createBuffer(1, 2048, context.sampleRate);
  for (var i = 0; i < 2048; ++i) {
    buffer.getChannelData(0)[i] = Math.sin(
      440 * 2 * Math.PI * i / context.sampleRate
    );
  }
  var destination = context.destination;
  var source = context.createBufferSource();
  var analyser = context.createAnalyser();
  source.buffer = buffer;
  source.connect(analyser);
  analyser.connect(destination);

  checkDefaults(analyser);

  // Invalid fftSize assignments must throw and leave the node usable.
  invalidFftSizes.forEach(function(fftSize) {
    assert_throws("INDEX_SIZE_ERR", function() {
      analyser.fftSize = fftSize;
    });
  });
  analyser.fftSize = 1024;
  assert_equals(
    analyser.frequencyBinCount,
    512,
    "Correct new value for frequencyBinCount"
  );

  checkDecibelAndSmoothingDefaults(analyser);

  // minDecibels must stay strictly below maxDecibels, and vice versa.
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser.minDecibels = -30;
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser.minDecibels = -29;
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser.maxDecibels = -100;
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser.maxDecibels = -101;
  });

  // smoothingTimeConstant is valid only within [0, 1]; the bounds are
  // inclusive, values outside throw.
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser.smoothingTimeConstant = -0.1;
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser.smoothingTimeConstant = 1.1;
  });
  analyser.smoothingTimeConstant = 0;
  analyser.smoothingTimeConstant = 1;
}

// Exercises the same API surface via the AnalyserNode constructor options.
function testConstructor() {
  var context = new AudioContext();
  var analyser = new AnalyserNode(context);

  checkDefaults(analyser);
  checkDecibelAndSmoothingDefaults(analyser);

  invalidFftSizes.forEach(function(fftSize) {
    assert_throws("INDEX_SIZE_ERR", function() {
      analyser = new AnalyserNode(context, { fftSize: fftSize });
    });
  });
  analyser = new AnalyserNode(context, { fftSize: 1024 });
  assert_equals(
    analyser.frequencyBinCount,
    512,
    "Correct new value for frequencyBinCount"
  );

  assert_throws("INDEX_SIZE_ERR", function() {
    analyser = new AnalyserNode(context, { minDecibels: -30 });
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser = new AnalyserNode(context, { minDecibels: -29 });
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser = new AnalyserNode(context, { maxDecibels: -100 });
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser = new AnalyserNode(context, { maxDecibels: -101 });
  });
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser = new AnalyserNode(context, { smoothingTimeConstant: -0.1 });
  });
  // Was -1.1, which duplicated the lower-bound check; 1.1 tests the upper
  // bound of the valid [0, 1] range, matching testNode above.
  assert_throws("INDEX_SIZE_ERR", function() {
    analyser = new AnalyserNode(context, { smoothingTimeConstant: 1.1 });
  });
  analyser = new AnalyserNode(context, { smoothingTimeConstant: 0 });
  analyser = new AnalyserNode(context, { smoothingTimeConstant: 1 });
}

test(testNode, "Test AnalyserNode API");
test(testConstructor, "Test AnalyserNode's ctor API");
</script>
</head>
</body>
</html>

View file

@ -6,7 +6,6 @@
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/idlharness.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<style type="text/css">
#event-target-idl,

View file

@ -6,7 +6,6 @@
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/idlharness.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<style type="text/css">
#event-target-idl,

View file

@ -6,7 +6,6 @@
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/idlharness.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<style type="text/css">
#audio-param-idl

View file

@ -6,9 +6,9 @@
</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../resources/audit-util.js"></script>
<script src="../resources/audit.js"></script>
<script src="../resources/start-stop-exceptions.js"></script>
<script src="../../resources/audit-util.js"></script>
<script src="../../resources/audit.js"></script>
<script src="../../resources/start-stop-exceptions.js"></script>
</head>
<body>
<script id="layout-test-code">

View file

@ -6,9 +6,9 @@
</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../resources/audit-util.js"></script>
<script src="../resources/audit.js"></script>
<script src="../resources/audioparam-testing.js"></script>
<script src="../../resources/audit-util.js"></script>
<script src="../../resources/audit.js"></script>
<script src="../../resources/audioparam-testing.js"></script>
</head>
<body>
<script id="layout-test-code">

View file

@ -6,7 +6,6 @@
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/idlharness.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<style type="text/css">
#event-target-idl,
@ -33,7 +32,7 @@ callback interface EventListener {
interface EventListener {};
</pre>
<pre id="base-audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
<pre id="base-audio-context-idl">callback DecodeErrorCallback = void (DOMException error);
callback DecodeSuccessCallback = void (AudioBuffer decodedData);

View file

@ -6,7 +6,6 @@
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/idlharness.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<style type="text/css">
#event-target-idl,
@ -33,7 +32,7 @@ callback interface EventListener {
interface EventListener {};
</pre>
<pre id="base-audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
<pre id="base-audio-context-idl">callback DecodeErrorCallback = void (DOMException error);
callback DecodeSuccessCallback = void (AudioBuffer decodedData);
interface BaseAudioContext : EventTarget {

View file

@ -14,7 +14,6 @@ Based on a test from the WebKit test suite
<title>GainNode interface</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script src="/webaudio/js/buffer-loader.js"></script>
</head>

View file

@ -16,7 +16,6 @@ Somewhat similar to a test from Mozilla:
<title>MediaElementAudioSource interface test (to scriptProcessor)</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script src="/webaudio/js/buffer-loader.js"></script>
</head>