Update web-platform-tests to revision 0d318188757a9c996e20b82db201fd04de5aa255

This commit is contained in:
James Graham 2015-03-27 09:15:38 +00:00
parent b2a5225831
commit 1a81b18b9f
12321 changed files with 544385 additions and 6 deletions

View file

@ -0,0 +1,107 @@
<!DOCTYPE html>
<html class="a">
<head>
<title>AudioBuffer IDL Test</title>
<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/webidl2/lib/webidl2.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
#event-target-idl,
#audio-context-idl
{ visibility:hidden; height: 0px;}
</style>
</head>
<body class="a">
<pre id="event-target-idl">interface EventTarget {
void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
boolean dispatchEvent(Event event);
};
/*
callback interface EventListener {
void handleEvent(Event event);
};
*/
// Callback interfaces are not supported yet, but that's ok
interface EventListener {};
</pre>
<pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
callback DecodeErrorCallback = void ();
[Constructor]
interface AudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
readonly attribute AudioListener listener;
AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
void decodeAudioData(ArrayBuffer audioData,
DecodeSuccessCallback successCallback,
optional DecodeErrorCallback errorCallback);
// AudioNode creation
AudioBufferSourceNode createBufferSource();
MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
MediaStreamAudioDestinationNode createMediaStreamDestination();
ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
optional unsigned long numberOfInputChannels = 2,
optional unsigned long numberOfOutputChannels = 2);
AnalyserNode createAnalyser();
GainNode createGain();
DelayNode createDelay(optional double maxDelayTime = 1.0);
BiquadFilterNode createBiquadFilter();
WaveShaperNode createWaveShaper();
PannerNode createPanner();
ConvolverNode createConvolver();
ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
DynamicsCompressorNode createDynamicsCompressor();
OscillatorNode createOscillator();
PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
};</pre>
<pre id="audio-buffer-idl">interface AudioBuffer {
readonly attribute float sampleRate;
readonly attribute long length;
// in seconds
readonly attribute double duration;
readonly attribute long numberOfChannels;
Float32Array getChannelData(unsigned long channel);
};</pre>
<div id="log"></div>
<script>
(function() {
  var idl_array = new IdlArray();
  idl_array.add_untested_idls(document.getElementById("event-target-idl").textContent);
  idl_array.add_untested_idls(document.getElementById("audio-context-idl").textContent);
  idl_array.add_idls(document.getElementById("audio-buffer-idl").textContent);

  // For these tests the actual argument values are unimportant.
  // The buffer must be reachable as a *global* named "audio_buffer", because
  // IdlArray.add_objects() resolves its strings on the global object.
  // The original code wrote `numberOfChannels = 1, length = 256, sampleRate = 44100`
  // as fake "named arguments", which silently created three stray implicit
  // globals (including `length`, which collides with window.length); plain
  // positional arguments avoid that.
  window.audio_buffer = (new AudioContext).createBuffer(1, 256, 44100);

  idl_array.add_objects({AudioBuffer: ["audio_buffer"]});
  idl_array.test();
})();
</script>
</body>
</html>

View file

@ -0,0 +1,128 @@
<!DOCTYPE html>
<html class="a">
<head>
<title>AudioDestinationNode IDL Test</title>
<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/webidl2/lib/webidl2.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
#event-target-idl,
#audio-context-idl,
#audio-node-idl
{ visibility:hidden; height: 0px;}
</style>
</head>
<body class="a">
<pre id="event-target-idl">interface EventTarget {
void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
boolean dispatchEvent(Event event);
};
/*
callback interface EventListener {
void handleEvent(Event event);
};
*/
// Callback interfaces are not supported yet, but that's ok
interface EventListener {};
</pre>
<pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
callback DecodeErrorCallback = void ();
[Constructor]
interface AudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
readonly attribute AudioListener listener;
AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
void decodeAudioData(ArrayBuffer audioData,
DecodeSuccessCallback successCallback,
optional DecodeErrorCallback errorCallback);
// AudioNode creation
AudioBufferSourceNode createBufferSource();
MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
MediaStreamAudioDestinationNode createMediaStreamDestination();
ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
optional unsigned long numberOfInputChannels = 2,
optional unsigned long numberOfOutputChannels = 2);
AnalyserNode createAnalyser();
GainNode createGain();
DelayNode createDelay(optional double maxDelayTime = 1.0);
BiquadFilterNode createBiquadFilter();
WaveShaperNode createWaveShaper();
PannerNode createPanner();
ConvolverNode createConvolver();
ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
DynamicsCompressorNode createDynamicsCompressor();
OscillatorNode createOscillator();
PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
};</pre>
<pre id="audio-node-idl">enum ChannelCountMode {
"max",
"clamped-max",
"explicit"
};
enum ChannelInterpretation {
"speakers",
"discrete"
};
interface AudioNode : EventTarget {
void connect(AudioNode destination, optional unsigned long output = 0, optional unsigned long input = 0);
void connect(AudioParam destination, optional unsigned long output = 0);
void disconnect(optional unsigned long output = 0);
readonly attribute AudioContext context;
readonly attribute unsigned long numberOfInputs;
readonly attribute unsigned long numberOfOutputs;
// Channel up-mixing and down-mixing rules for all inputs.
attribute unsigned long channelCount;
attribute ChannelCountMode channelCountMode;
attribute ChannelInterpretation channelInterpretation;
};</pre>
<pre id="audio-destination-node-idl">interface AudioDestinationNode : AudioNode {
readonly attribute unsigned long maxChannelCount;
};</pre>
<div id="log"></div>
<script>
(function() {
  // Helper: pull an IDL fragment out of one of the hidden <pre> elements.
  var fetchIdl = function(id) {
    return document.getElementById(id).textContent;
  };

  var idlArray = new IdlArray();
  idlArray.add_untested_idls(fetchIdl("event-target-idl"));
  idlArray.add_untested_idls(fetchIdl("audio-context-idl"));
  idlArray.add_untested_idls(fetchIdl("audio-node-idl"));
  idlArray.add_idls(fetchIdl("audio-destination-node-idl"));

  // Deliberately a global: add_objects() looks "audio_destination_node" up
  // on the global object.
  window.audio_destination_node = (new AudioContext).destination;
  idlArray.add_objects({AudioDestinationNode: ["audio_destination_node"]});
  idlArray.test();
})();
</script>
</body>
</html>

View file

@ -0,0 +1,152 @@
<!DOCTYPE html>
<html class="a">
<head>
<title>DelayNode IDL Test</title>
<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/webidl2/lib/webidl2.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
#event-target-idl,
#audio-context-idl,
#audio-node-idl,
#audio-param-idl
{ visibility:hidden; height: 0px;}
</style>
</head>
<body class="a">
<pre id="event-target-idl">interface EventTarget {
void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
boolean dispatchEvent(Event event);
};
/*
callback interface EventListener {
void handleEvent(Event event);
};
*/
// Callback interfaces are not supported yet, but that's ok
interface EventListener {};
</pre>
<pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
callback DecodeErrorCallback = void ();
[Constructor]
interface AudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
readonly attribute AudioListener listener;
AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
void decodeAudioData(ArrayBuffer audioData,
DecodeSuccessCallback successCallback,
optional DecodeErrorCallback errorCallback);
// AudioNode creation
AudioBufferSourceNode createBufferSource();
MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
MediaStreamAudioDestinationNode createMediaStreamDestination();
ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
optional unsigned long numberOfInputChannels = 2,
optional unsigned long numberOfOutputChannels = 2);
AnalyserNode createAnalyser();
GainNode createGain();
DelayNode createDelay(optional double maxDelayTime = 1.0);
BiquadFilterNode createBiquadFilter();
WaveShaperNode createWaveShaper();
PannerNode createPanner();
ConvolverNode createConvolver();
ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
DynamicsCompressorNode createDynamicsCompressor();
OscillatorNode createOscillator();
PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
};</pre>
<pre id="audio-node-idl">enum ChannelCountMode {
"max",
"clamped-max",
"explicit"
};
enum ChannelInterpretation {
"speakers",
"discrete"
};
interface AudioNode : EventTarget {
void connect(AudioNode destination, optional unsigned long output = 0, optional unsigned long input = 0);
void connect(AudioParam destination, optional unsigned long output = 0);
void disconnect(optional unsigned long output = 0);
readonly attribute AudioContext context;
readonly attribute unsigned long numberOfInputs;
readonly attribute unsigned long numberOfOutputs;
// Channel up-mixing and down-mixing rules for all inputs.
attribute unsigned long channelCount;
attribute ChannelCountMode channelCountMode;
attribute ChannelInterpretation channelInterpretation;
};</pre>
<pre id="audio-param-idl">interface AudioParam {
attribute float value;
readonly attribute float defaultValue;
// Parameter automation.
void setValueAtTime(float value, double startTime);
void linearRampToValueAtTime(float value, double endTime);
void exponentialRampToValueAtTime(float value, double endTime);
// Exponentially approach the target value with a rate having the given time constant.
void setTargetAtTime(float target, double startTime, double timeConstant);
// Sets an array of arbitrary parameter values starting at time for the given duration.
// The number of values will be scaled to fit into the desired duration.
void setValueCurveAtTime(Float32Array values, double startTime, double duration);
// Cancels all scheduled parameter changes with times greater than or equal to startTime.
void cancelScheduledValues(double startTime);
};</pre>
<pre id="delay-node-idl">interface DelayNode : AudioNode {
readonly attribute AudioParam delayTime;
};</pre>
<div id="log"></div>
<script>
(function() {
  // Register the supporting IDL fragments as untested, then test DelayNode.
  var untestedIds = [
    "event-target-idl",
    "audio-context-idl",
    "audio-node-idl",
    "audio-param-idl"
  ];
  var idlArray = new IdlArray();
  untestedIds.forEach(function(id) {
    idlArray.add_untested_idls(document.getElementById(id).textContent);
  });
  idlArray.add_idls(document.getElementById("delay-node-idl").textContent);

  // Deliberately a global: add_objects() resolves "delay_node" on window.
  window.delay_node = (new AudioContext).createDelay();
  idlArray.add_objects({DelayNode: ["delay_node"]});
  idlArray.test();
})();
</script>
</body>
</html>

View file

@ -0,0 +1,152 @@
<!DOCTYPE html>
<html class="a">
<head>
<title>GainNode IDL Test</title>
<script src="/resources/testharness.js"></script><script src="/resources/testharnessreport.js"></script><script src="/resources/idlharness.js"></script><script src="/resources/webidl2/lib/webidl2.js"></script><script src="/webaudio/js/lodash.js"></script><script src="/webaudio/js/vendor-prefixes.js"></script><script src="/webaudio/js/helpers.js"></script><style type="text/css">
#event-target-idl,
#audio-context-idl,
#audio-node-idl,
#audio-param-idl
{ visibility:hidden; height: 0px;}
</style>
</head>
<body class="a">
<pre id="event-target-idl">interface EventTarget {
void addEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
void removeEventListener(DOMString type, EventListener? callback, optional boolean capture = false);
boolean dispatchEvent(Event event);
};
/*
callback interface EventListener {
void handleEvent(Event event);
};
*/
// Callback interfaces are not supported yet, but that's ok
interface EventListener {};
</pre>
<pre id="audio-context-idl">callback DecodeSuccessCallback = void (AudioBuffer decodedData);
callback DecodeErrorCallback = void ();
[Constructor]
interface AudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
readonly attribute AudioListener listener;
AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
void decodeAudioData(ArrayBuffer audioData,
DecodeSuccessCallback successCallback,
optional DecodeErrorCallback errorCallback);
// AudioNode creation
AudioBufferSourceNode createBufferSource();
MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
MediaStreamAudioDestinationNode createMediaStreamDestination();
ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
optional unsigned long numberOfInputChannels = 2,
optional unsigned long numberOfOutputChannels = 2);
AnalyserNode createAnalyser();
GainNode createGain();
DelayNode createDelay(optional double maxDelayTime = 1.0);
BiquadFilterNode createBiquadFilter();
WaveShaperNode createWaveShaper();
PannerNode createPanner();
ConvolverNode createConvolver();
ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);
ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs = 6);
DynamicsCompressorNode createDynamicsCompressor();
OscillatorNode createOscillator();
PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
};</pre>
<pre id="audio-node-idl">enum ChannelCountMode {
"max",
"clamped-max",
"explicit"
};
enum ChannelInterpretation {
"speakers",
"discrete"
};
interface AudioNode : EventTarget {
void connect(AudioNode destination, optional unsigned long output = 0, optional unsigned long input = 0);
void connect(AudioParam destination, optional unsigned long output = 0);
void disconnect(optional unsigned long output = 0);
readonly attribute AudioContext context;
readonly attribute unsigned long numberOfInputs;
readonly attribute unsigned long numberOfOutputs;
// Channel up-mixing and down-mixing rules for all inputs.
attribute unsigned long channelCount;
attribute ChannelCountMode channelCountMode;
attribute ChannelInterpretation channelInterpretation;
};</pre>
<pre id="audio-param-idl">interface AudioParam {
attribute float value;
readonly attribute float defaultValue;
// Parameter automation.
void setValueAtTime(float value, double startTime);
void linearRampToValueAtTime(float value, double endTime);
void exponentialRampToValueAtTime(float value, double endTime);
// Exponentially approach the target value with a rate having the given time constant.
void setTargetAtTime(float target, double startTime, double timeConstant);
// Sets an array of arbitrary parameter values starting at time for the given duration.
// The number of values will be scaled to fit into the desired duration.
void setValueCurveAtTime(Float32Array values, double startTime, double duration);
// Cancels all scheduled parameter changes with times greater than or equal to startTime.
void cancelScheduledValues(double startTime);
};</pre>
<pre id="gain-node-idl">interface GainNode : AudioNode {
readonly attribute AudioParam gain;
};</pre>
<div id="log"></div>
<script>
(function() {
  var idlArray = new IdlArray();
  var idlTextOf = function(id) {
    return document.getElementById(id).textContent;
  };

  // Supporting interfaces are untested; GainNode itself is under test.
  ["event-target-idl", "audio-context-idl", "audio-node-idl", "audio-param-idl"]
      .forEach(function(id) {
        idlArray.add_untested_idls(idlTextOf(id));
      });
  idlArray.add_idls(idlTextOf("gain-node-idl"));

  // Deliberately a global: add_objects() resolves "gain_node" on window.
  window.gain_node = (new AudioContext).createGain();
  idlArray.add_objects({GainNode: ["gain_node"]});
  idlArray.test();
})();
</script>
</body>
</html>

View file

@ -0,0 +1,121 @@
<!doctype html>
<!--
Tests that GainNode is properly scaling the gain.
We'll render 11 notes, starting at a gain of 1.0, decreasing in gain by 0.1.
The 11th note will be of gain 0.0, so it should be silent (at the end in the rendered output).
Based on a test from the WebKit test suite
(https://github.com/WebKit/webkit/blob/master/LayoutTests/webaudio/gain.html)
-->
<html class="a">
<head>
<title>GainNode interface</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/vendor-prefixes.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script src="/webaudio/js/buffer-loader.js"></script>
</head>
<body class="a">
<div id="log"></div>
<script>
// Render 11 notes through GainNodes with gains 1.0 down to 0.0 and compare
// the offline-rendered output against a pre-rendered reference WAV.
var gainNodeTest = async_test("GainNode");
var sampleRate = 44100.0;           // Hz, for the OfflineAudioContext below
var bufferDurationSeconds = 0.125;  // length of each rendered "note"
var numberOfNotes = 11;             // gains step 1.0, 0.9, ..., 0.0
var noteSpacing = bufferDurationSeconds + 0.020; // leave 20ms of silence between each "note"
var lengthInSeconds = numberOfNotes * noteSpacing;
// Initialised in setup() / the rendering callbacks below.
var context = 0;
// NOTE(review): this global is never assigned; compareExpectedWithActualBuffer
// declares a local of the same name instead.
var expectedBuffer = 0;
var actualBuffer = 0;
var sinWaveBuffer = 0;
// Build a stereo AudioBuffer holding `seconds` worth of a sine tone at
// `hertz` Hz; both channels carry the identical waveform.  Uses the
// file-level `context` and `sampleRate` globals.
function createSinWaveBuffer(seconds, hertz) {
  var buffer = context.createBuffer(2, seconds * sampleRate, sampleRate);
  var left = buffer.getChannelData(0);
  var right = buffer.getChannelData(1);
  for (var frame = 0; frame < buffer.length; ++frame) {
    var sample = Math.sin(hertz * 2.0 * Math.PI * frame / sampleRate);
    left[frame] = sample;
    right[frame] = sample;
  }
  return buffer;
}
// Schedule one "note" (the shared sinWaveBuffer) to start at `time`,
// routed through a dedicated GainNode set to `gain`.
function playNote(time, gain) {
  var note = context.createBufferSource();
  note.buffer = sinWaveBuffer;
  var amp = context.createGain();
  amp.gain.value = gain;
  note.connect(amp);
  amp.connect(context.destination);
  note.start(time);
}
// Rendering-complete handler: stash the rendered buffer, then load the
// pre-rendered reference WAV for comparison.
function loadExpectedBuffer(event) {
  actualBuffer = event.renderedBuffer;
  // Declared with `var`: the original assigned `bufferLoader` without any
  // declaration, leaking an implicit global.
  var bufferLoader = new BufferLoader(
    context,
    ['/webaudio/the-audio-api/the-gainnode-interface/gain-expected.wav'],
    bufferLoadCompleted
  );
  bufferLoader.load();
}

// BufferLoader callback: `buffer` is an array of decoded AudioBuffers.
function bufferLoadCompleted(buffer) {
  compareExpectedWithActualBuffer(buffer);
}
setup(function() {
  // Offline context sized to hold every note plus its trailing silence.
  context = new OfflineAudioContext(2, sampleRate * lengthInSeconds, sampleRate);

  // Shared buffer for one short 880 Hz "note".
  sinWaveBuffer = createSinWaveBuffer(bufferDurationSeconds, 880.0);

  // Schedule 11 notes whose gains step from 1.0 down to 0.0; the final
  // (zero-gain) note should not be perceptible in the rendered output.
  for (var note = 0; note < numberOfNotes; ++note) {
    playNote(note * noteSpacing, 1.0 - note / (numberOfNotes - 1));
  }

  context.oncomplete = loadExpectedBuffer;
  context.startRendering();
}, {timeout: 10000});
// Compare the reference buffer (expected[0]) against the rendered buffer
// channel by channel, then complete the async test.
function compareExpectedWithActualBuffer(expected) {
  var referenceBuffer = expected[0];
  gainNodeTest.step(function() {
    assert_array_approx_equals(referenceBuffer.getChannelData(0),
                               actualBuffer.getChannelData(0),
                               1e-4,
                               "comparing expected and rendered buffers (channel 0)");
  });
  gainNodeTest.step(function() {
    assert_array_approx_equals(referenceBuffer.getChannelData(1),
                               actualBuffer.getChannelData(1),
                               1e-4,
                               "comparing expected and rendered buffers (channel 1)");
  });
  gainNodeTest.done();
}
</script>
</body>
</html>

View file

@ -0,0 +1,123 @@
<!doctype html>
<!--
Tests that a MediaElementAudioSourceNode that is passed through
a script processor passes the stream data.
The script processor saves the input buffers it gets to a temporary
array, and after the playback has stopped, the contents are compared
to those of a loaded AudioBuffer with the same source.
Somewhat similar to a test from Mozilla:
(http://mxr.mozilla.org/mozilla-central/source/content/media/webaudio/test/test_mediaElementAudioSourceNode.html?force=1)
-->
<html class="a">
<head>
<title>MediaElementAudioSource interface test (to scriptProcessor)</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/js/lodash.js"></script>
<script src="/webaudio/js/vendor-prefixes.js"></script>
<script src="/webaudio/js/helpers.js"></script>
<script src="/webaudio/js/buffer-loader.js"></script>
</head>
<body class="a">
<div id="log"></div>
<script>
// Feed an <audio> element through a MediaElementAudioSourceNode into a
// ScriptProcessorNode, record what arrives, and compare it with the same
// file decoded directly via BufferLoader.
var elementSourceTest = async_test("Element Source tests completed");
var src = '/webaudio/resources/sin_440Hz_-6dBFS_1s.wav';
var BUFFER_SIZE = 2048;  // frames delivered per audioprocess event
var context = null;
// Recorded samples are appended here as audioprocess events arrive.
var actualBufferArrayC0 = new Float32Array(0);
var actualBufferArrayC1 = new Float32Array(0);
var audio = null, source = null, processor = null
// "ended" handler continuation: decode a reference copy of the source file
// with BufferLoader, then hand the result to bufferLoadCompleted.
function loadExpectedBuffer(event) {
  // Declared with `var`: the original assigned `bufferLoader` without any
  // declaration, leaking an implicit global.
  var bufferLoader = new BufferLoader(
    context,
    [src],
    bufferLoadCompleted
  );
  bufferLoader.load();
}

// BufferLoader callback: `buffer` is an array of decoded AudioBuffers.
function bufferLoadCompleted(buffer) {
  runTests(buffer);
}
// Return a new Float32Array holding arr1's elements followed by arr2's.
function concatTypedArray(arr1, arr2) {
  var combined = new Float32Array(arr1.length + arr2.length);
  combined.set(arr1, 0);
  combined.set(arr2, arr1.length);
  return combined;
}
// --- Graph setup: <audio> -> MediaElementAudioSourceNode -> ScriptProcessor -> destination ---
context = new AudioContext();

// Create an audio element, and a media element source.
audio = document.createElement('audio');
audio.src = src;
source = context.createMediaElementSource(audio);

// Processor node that copies every input block into the recording arrays.
processor = context.createScriptProcessor(BUFFER_SIZE);
source.connect(processor);
processor.connect(context.destination);

// Keep a named reference to the handler: removeEventListener() requires the
// same function object to unregister.  The original added an anonymous
// listener and later called removeEventListener with only one argument,
// which throws a TypeError (two arguments are required) and could never have
// removed the listener anyway.
function recordAudioProcess(e) {
  actualBufferArrayC0 = concatTypedArray(actualBufferArrayC0, e.inputBuffer.getChannelData(0));
  actualBufferArrayC1 = concatTypedArray(actualBufferArrayC1, e.inputBuffer.getChannelData(1));
}
processor.addEventListener('audioprocess', recordAudioProcess);

// When media playback has ended, wait briefly (audioprocess events may still
// be pending) before loading the expected buffer for comparison.
audio.addEventListener("ended", function(e) {
  window.setTimeout(loadExpectedBuffer, 50);
});
audio.play();

// Compare the recorded samples against the decoded reference file and
// finish the async test.  `expected` is the array of decoded AudioBuffers.
function runTests(expected) {
  source.disconnect();
  processor.disconnect();
  // Firefox seems to process events after disconnect, so detach the
  // recording handler explicitly (now possible because it is named).
  processor.removeEventListener('audioprocess', recordAudioProcess);
  var expectedBuffer = expected[0];
  // Trim the actual elements because we don't have fine-grained control
  // over the start and end time of recording the data.
  var actualTrimmedC0 = trimEmptyElements(actualBufferArrayC0);
  var actualTrimmedC1 = trimEmptyElements(actualBufferArrayC1);
  // Test that there is some data.
  test(function() {
    assert_greater_than(actualTrimmedC0.length, 0,
                        "processed data array (C0) length greater than 0");
    assert_greater_than(actualTrimmedC1.length, 0,
                        "processed data array (C1) length greater than 0");
  }, "Channel 0 processed some data");
  // Test the actual contents of the first and second channel.
  test(function() {
    assert_array_approx_equals(
      actualTrimmedC0,
      trimEmptyElements(expectedBuffer.getChannelData(0)),
      1e-4,
      "comparing expected and rendered buffers (channel 0)");
    assert_array_approx_equals(
      actualTrimmedC1,
      trimEmptyElements(expectedBuffer.getChannelData(1)),
      1e-4,
      "comparing expected and rendered buffers (channel 1)");
  }, "All data processed correctly");
  elementSourceTest.done();
}
</script>
</body>
</html>

View file

@ -0,0 +1,212 @@
<!doctype html>
<html>
<head>
<title>WaveShaperNode interface - Curve tests | WebAudio</title>
<script type="text/javascript" src="../../../resources/testharness.js"></script>
<script type="text/javascript" src="../../../resources/testharnessreport.js"></script>
<script type="text/javascript" src="../../js/vendor-prefixes.js"></script>
</head>
<body>
<div id="log">
</div>
<script type="text/javascript">
var sampleRate = 44100.0;
var tolerance = 0.01;

/*
Each case below exercises one clause of the WaveShaperNode specification:
- The input signal is nominally within the range -1 -> +1; a signal level of
  zero corresponds to the centre value of the curve array, -1 to the first
  element and +1 to the last.
- The implementation must perform linear interpolation between adjacent
  points in the curve.
- Any sample value less than -1 corresponds to the first curve value; any
  value greater than +1 corresponds to the last.
- A null curve (the initial state) passes the input through unmodified.
- A zero-element curve has an unspecified result; like Mozilla's
  test_waveShaperNoCurve.html, we expect the same pass-through behaviour.
- A single-element curve yields a constant output (a W3C audio mailing-list
  post suggested using WaveShaperNode to create constant values).
*/
(function() {
  // Large curve with an exact centre element at index 30000.
  var hugeCurve = [];
  for (var i = 0; i <= 60000; i++) { hugeCurve.push(i / 3.5435); }

  var cases = [
    {
      curve: [2.0, -3.0, 4.0],
      input: [-1.0, 0, 1.0],
      expected: [2.0, -3.0, 4.0],
      name: "Testing that -1, 0 and +1 map correctly to curve (with 1:1 correlation)"
    },
    {
      curve: [2.0, -3.0, 4.0],
      input: [-0.5, +0.5, +0.75],
      expected: [-0.5, +0.5, +2.25],
      name: "Testing interpolation (where inputs don't correlate directly to curve elements)"
    },
    {
      curve: [2.0, -3.0, 4.0],
      input: [-1.5, +1.5],
      expected: [2.0, 4.0],
      name: "Testing out-of-range inputs (should be mapped to the first/last elements of the curve)"
    },
    {
      curve: [2.0, -2.0],
      input: [-1.0, 0, 1.0],
      expected: [2.0, 0.0, -2.0],
      name: "Testing a 2-element curve (does not have a middle element)"
    },
    {
      curve: [1.0, 2.0, 4.0, 7.0],
      input: [-1.0, 0, 1.0],
      expected: [1.0, 3.0, 7.0],
      name: "Testing a 4-element curve (does not have a middle element)"
    },
    {
      curve: hugeCurve,
      input: [-1.0, 0, 1.0],
      expected: [hugeCurve[0], hugeCurve[30000], hugeCurve[60000]],
      name: "Testing a huge curve"
    },
    {
      curve: [1.0],
      input: [-1.0, 0, 1.0, -2.0, 2.0],
      expected: [1.0, 1.0, 1.0, 1.0, 1.0],
      name: "Testing single-element curve (boundary condition)"
    },
    {
      curve: null,
      input: [-1.0, 0, 1.0, 2.0],
      expected: [-1.0, 0.0, 1.0, 2.0],
      name: "Testing null curve (should return input values)"
    },
    {
      curve: [],
      input: [-1.0, 0, 1.0, 2.0],
      expected: [-1.0, 0.0, 1.0, 2.0],
      name: "Testing zero-element curve (unspecified result)"
    }
  ];

  // executeTest is a hoisted function declaration defined later in this
  // script, so it is callable here, just as in the original per-case IIFEs.
  cases.forEach(function(testCase) {
    executeTest(testCase.curve, testCase.input, testCase.expected, testCase.name);
  });
})();
/**
 * Run one WaveShaper test case in an OfflineAudioContext (async test).
 * @param {?Array.<number>} curveData - Values for the WaveShaper curve, or null for no curve.
 * @param {!Array.<number>} inputData - Values for the input stream.
 * @param {!Array.<number>} expectedData - Expected results for each of the corresponding inputs.
 * @param {!string} testName - Name of the test case.
 */
function executeTest(curveData, inputData, expectedData, testName) {
  var stTest = async_test("WaveShaperNode - " + testName);
  // Offline context with one frame per input sample.
  var ac = new OfflineAudioContext(1, inputData.length, sampleRate);
  // Create the WaveShaper and, when a curve was supplied, its curve.
  var waveShaper = ac.createWaveShaper();
  if (curveData != null) {
    var curve = new Float32Array(curveData.length);
    for (var i = 0; i < curveData.length; i++) { curve[i] = curveData[i]; }
    waveShaper.curve = curve;
  }
  waveShaper.connect(ac.destination);
  // Create a buffer containing the input values.  At least 2 frames are
  // allocated — kept from the original test; presumably to avoid
  // implementations rejecting very short buffers (TODO confirm).
  var inputBuffer = ac.createBuffer(1, Math.max(inputData.length, 2), sampleRate);
  var d = inputBuffer.getChannelData(0);
  for (var i = 0; i < inputData.length; i++) { d[i] = inputData[i]; }
  // Play the input buffer through the WaveShaper.
  var src = ac.createBufferSource();
  src.buffer = inputBuffer;
  src.connect(waveShaper);
  src.start();
  // Check the rendered output against the expected values.
  ac.oncomplete = function(ev) {
    var rendered = ev.renderedBuffer.getChannelData(0);
    stTest.step(function() {
      for (var i = 0; i < expectedData.length; i++) {
        var curveText = "null";
        // Check curveData directly; the original tested the hoisted `curve`
        // variable and relied on it leaking out of the if-block above.
        if (curveData != null) {
          if (curveData.length < 20) {
            curveText = curveData.join(",");
          } else {
            curveText = "TooBigToDisplay (" + (curveData.length - 1) + " elements)";
          }
        }
        var comment = "Input=" + inputData[i] + ", Curve=[" + curveText + "] >>> ";
        assert_approx_equals(rendered[i], expectedData[i], tolerance, comment);
      }
    });
    stTest.done();
  };
  ac.startRendering();
}
</script>
</body>
</html>