tests: Vendor blink perf tests (#38654)

Vendors the [blink perf
tests](https://chromium.googlesource.com/chromium/src/+/HEAD/third_party/blink/perf_tests/).
These perf tests are useful for evaluating the performance of Servo.
The license that governs the perf tests is included in the folder.
Running benchmark cases automatically is left to future work.
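
For context, each vendored test page is a self-contained HTML file: it loads
`resources/runner.js` and registers one measurement with the `PerfTestRunner`
harness, which handles warm-up, iteration, and reporting. A minimal sketch of
that pattern (the body of `run()` below is a hypothetical placeholder, not one
of the vendored tests):

```js
// Shape of a typical perf test page's inline script (cf. the WebAudio tests
// in this PR); only the measured work differs from test to test.
PerfTestRunner.measureTime({
  description: 'Example: time a small synchronous workload',
  setup: function() {
    // Prepare any state the measured code needs (hypothetical here).
  },
  run: function() {
    // PerfTestRunner times each invocation of this function.
    for (let i = 0; i < 1000; ++i) Math.sqrt(i);
  },
  warmUpCount: 2,
  iterationCount: 5,
});
```

The WebAudio tests additionally go through the `RunWebAudioPerfTest` helper
(added in this PR as `webaudio-perf-utils.js`), which builds an audio graph,
renders an `OfflineAudioContext` per iteration, and reports the render time in
milliseconds.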

The update.py script is taken from mozjs and slightly adapted, so we can
easily filter the vendored files (and patch them, should that become
necessary in the future).

Testing: This PR just adds the perf_tests, but does not use or modify
them in any way.

---------

Signed-off-by: Jonathan Schwender <schwenderjonathan@gmail.com>
Jonathan Schwender 2025-08-17 11:54:04 +02:00 committed by GitHub
parent 7621332824
commit ee781b71b4
648 changed files with 359694 additions and 0 deletions

@@ -0,0 +1,36 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 AudioBufferSourceNodes.
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
const buffer = createMonoRampBuffer(4800, 48000);
const testNodes =
createNodes(context, 'AudioBufferSourceNode', 100, {buffer});
// All the test nodes fan-in to the destination node.
testNodes.forEach(node => {
node.connect(context.destination);
node.start();
});
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 AudioBufferSourceNodes',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['AudioBufferSourceHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 AudioWorkletNodes (bypassing).
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
async function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
await context.audioWorklet.addModule('resources/bypass-processor.js');
const source = new ConstantSourceNode(context);
// Create 100 AudioWorkletNodes that are serially connected.
const testNodes = [];
for (let i = 0; i < 100; ++i) {
testNodes.push(new AudioWorkletNode(context, 'bypass-processor'));
if (i === 0) continue;
testNodes[i - 1].connect(testNodes[i]);
}
source.connect(testNodes[0]);
testNodes[99].connect(context.destination);
source.start();
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 AudioWorkletNodes (bypassing)',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['AudioWorkletHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 BiquadFilterNodes.
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
const source = new ConstantSourceNode(context);
const testNodes =
createAndConnectNodesInSeries(context, 'BiquadFilterNode', 100);
source.connect(testNodes.head);
testNodes.tail.connect(context.destination);
source.start();
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 BiquadFilterNodes',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['BiquadFilterHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,37 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 DynamicsCompressorNodes (knee).
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
// -24 dB default threshold is about 0.063, use 0.07
const source = new ConstantSourceNode(context, {offset: 0.07});
const testNodes = createNodes(
context, 'DynamicsCompressorNode', 100,
{attack: .003, knee: 30, ratio: 12, release: .25, threshold: -24});
for (let i = 0; i < testNodes.length; ++i) {
source.connect(testNodes[i]);
testNodes[i].connect(context.destination);
}
source.start();
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 DynamicsCompressorNodes',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['DynamicsCompressorHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,37 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 DynamicsCompressorNodes (post-knee).
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
// Use very large offset to ensure we are post-knee
const source = new ConstantSourceNode(context, {offset: 100});
const testNodes = createNodes(
context, 'DynamicsCompressorNode', 100,
{attack: .003, knee: 30, ratio: 12, release: .25, threshold: -24});
for (let i = 0; i < testNodes.length; ++i) {
source.connect(testNodes[i]);
testNodes[i].connect(context.destination);
}
source.start();
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 DynamicsCompressorNodes',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['DynamicsCompressorHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,37 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 DynamicsCompressorNodes (pre-knee).
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
// Use 0 offset to ensure we are pre-knee
const source = new ConstantSourceNode(context, {offset: 0});
const testNodes = createNodes(
context, 'DynamicsCompressorNode', 100,
{attack: .003, knee: 30, ratio: 12, release: .25, threshold: -24});
for (let i = 0; i < testNodes.length; ++i) {
source.connect(testNodes[i]);
testNodes[i].connect(context.destination);
}
source.start();
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 DynamicsCompressorNodes',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['DynamicsCompressorHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 GainNodes.
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
const source = new ConstantSourceNode(context);
const testNodes = createAndConnectNodesInSeries(
context, 'GainNode', 100, {gain: 0.707});
source.connect(testNodes.head);
testNodes.tail.connect(context.destination);
source.start();
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 GainNodes',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['GainHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of 100 PannerNodes (HRTF).
</title>
<script src="../resources/runner.js"></script>
<script src="resources/webaudio-perf-utils.js"></script>
</head>
<body>
<script>
function graphBuilder() {
const context = new OfflineAudioContext(1, 48000, 48000);
const source = new ConstantSourceNode(context);
const testNodes = createAndConnectNodesInSeries(
context, 'PannerNode', 100, {panningModel: 'HRTF'});
source.connect(testNodes.head);
testNodes.tail.connect(context.destination);
source.start();
return context;
}
RunWebAudioPerfTest({
description: 'Test performance of 100 PannerNodes (HRTF)',
graphBuilder: graphBuilder,
tracingOptions: {
targetCategory: 'disabled-by-default-webaudio.audionode',
targetEvents: ['PannerHandler::Process'],
}
});
</script>
</body>
</html>

@@ -0,0 +1,16 @@
/**
* @class BypassProcessor
* @extends AudioWorkletProcessor
*/
class BypassProcessor extends AudioWorkletProcessor {
process(inputs, outputs) {
let input = inputs[0];
let output = outputs[0];
for (let channel = 0; channel < input.length; ++channel)
output[channel].set(input[channel]);
return true;
}
}
registerProcessor('bypass-processor', BypassProcessor);

@@ -0,0 +1,100 @@
// Copyright 2020 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* Set up and perform a test with given test options.
*
* @param {!object} testOptions
* @param {string} testOptions.description test description
* @param {function} testOptions.graphBuilder a test function that returns an
* OfflineAudioContext.
* @param {object} testOptions.tracingOptions tracing options
* @param {string} testOptions.tracingOptions.targetCategory
* target trace category
* @param {Array<string>} testOptions.tracingOptions.targetEvents
* target trace events
*/
function RunWebAudioPerfTest(testOptions) {
let isDone = false;
let startTime;
async function runTest_internal() {
const context = await testOptions.graphBuilder();
PerfTestRunner.addRunTestStartMarker();
startTime = PerfTestRunner.now();
await context.startRendering();
PerfTestRunner.measureValueAsync(PerfTestRunner.now() - startTime);
PerfTestRunner.addRunTestEndMarker();
if (!isDone)
runTest_internal();
}
PerfTestRunner.startMeasureValuesAsync({
unit: 'ms',
description: testOptions.description,
done: () => isDone = true,
run: runTest_internal,
warmUpCount: 2,
iterationCount: 5,
tracingCategories: testOptions.tracingOptions.targetCategory,
traceEventsToMeasure: testOptions.tracingOptions.targetEvents,
});
}
/**
* Creates multiple AudioNodes that are serially connected
*
* @param {BaseAudioContext} context
* @param {string} nodeName AudioNode name in string
* @param {number} numberOfNodes
* @param {AudioNodeOptions} nodeOptions
* @returns {object}
*/
function createAndConnectNodesInSeries(
context, nodeName, numberOfNodes, nodeOptions) {
const testNodes = [];
nodeOptions = nodeOptions || {};
for (let i = 0; i < numberOfNodes; ++i) {
testNodes.push(new window[nodeName](context, nodeOptions));
if (i === 0) continue;
testNodes[i - 1].connect(testNodes[i]);
}
return {
head: testNodes[0],
tail: testNodes[numberOfNodes - 1],
nodes: testNodes,
};
}
/**
* Creates multiple AudioNodes.
*
* @param {BaseAudioContext} context
* @param {string} nodeName AudioNode name in string
* @param {number} numberOfNodes
* @param {AudioNodeOptions} nodeOptions
* @returns {Array<AudioNode>}
*/
function createNodes(context, nodeName, numberOfNodes, nodeOptions) {
const testNodes = [];
nodeOptions = nodeOptions || {};
for (let i = 0; i < numberOfNodes; ++i)
testNodes.push(new window[nodeName](context, nodeOptions));
return testNodes;
}
/**
* Creates an AudioBuffer with up-ramp samples.
*
* @param {number} length number of samples
* @param {number} sampleRate sample rate
* @returns {AudioBuffer}
*/
function createMonoRampBuffer(length, sampleRate) {
let buffer = new AudioBuffer({numberOfChannels: 1, length, sampleRate});
let channelData = buffer.getChannelData(0);
for (let i = 0; i < length; ++i)
channelData[i] = i / length;
return buffer;
}

@@ -0,0 +1,52 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test performance of AudioParamTimeline::InsertEvent.
</title>
<script src="../resources/runner.js"></script>
</head>
<body>
<script>
// Number of events to insert.
let numberOfEvents = 10000;
let sampleRate = 44100;
let gainNode = null;
let timeInterval = .03;
let initialValue = 1;
let startingValueDelta = initialValue / numberOfEvents;
// Convert time (in seconds) to sample frames.
function timeToSampleFrame(time, sampleRate) {
return Math.floor(0.5 + time * sampleRate);
}
function renderLength(numberOfEvents) {
return timeToSampleFrame((numberOfEvents + 1) * timeInterval, sampleRate);
}
PerfTestRunner.measureTime({
description: "Measures performance of 10k InsertEvents using calls to setValueAtTime.",
setup: function () {
let context =
new OfflineAudioContext(2, renderLength(numberOfEvents), sampleRate);
gainNode = context.createGain();
},
run: function() {
let value = initialValue;
for (let k = 0; k < numberOfEvents; ++k) {
let startTime = k * timeInterval;
gainNode.gain.setValueAtTime(value, startTime);
value -= startingValueDelta;
}
},
iterationCount: 5,
warmUpCount: 2,
tracingCategories: 'disabled-by-default-webaudio.audionode',
traceEventsToMeasure: ['AudioParamTimeline::InsertEvent'],
});
</script>
</body>
</html>