Vendors the [blink perf tests](https://chromium.googlesource.com/chromium/src/+/HEAD/third_party/blink/perf_tests/). These perf tests are useful for evaluating the performance of Servo. The license that governs the perf tests is included in the folder. Running the benchmark cases automatically is left to future work. The update.py script is taken from mozjs and slightly adapted, so that we can easily filter the tests (and patch them, should that become necessary in the future). Testing: This PR just adds the perf_tests and does not use or modify them in any way. --------- Signed-off-by: Jonathan Schwender <schwenderjonathan@gmail.com>
function makeSharedBuffer(size) {
  // SharedArrayBuffer constructor is hidden in some origins, but it's still
  // available via WebAssembly.Memory.
  const kPageSize = 65536;
  const sizeInPages = Math.floor((size + kPageSize - 1) / kPageSize);
  const memory = new WebAssembly.Memory(
      {initial: sizeInPages, maximum: sizeInPages, shared: true});
  return memory.buffer;
}
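
// Usage sketch (not part of the vendored helper; the names below are
// illustrative only): makeSharedBuffer() rounds the requested size up to whole
// 64 KiB Wasm pages, so the returned buffer can be slightly larger than asked.
function exampleMakeSharedBuffer() {
  const requested = 100000;                // ~97.7 KiB
  const buf = makeSharedBuffer(requested);
  // 100000 bytes -> 2 pages -> 131072 bytes of shared backing storage.
  console.assert(buf.byteLength === 2 * 65536);
  console.assert(buf.byteLength >= requested);
  return buf;
}
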
function runCopyToTest(frame, desc) {
  let isDone = false;
  let size = frame.allocationSize();
  let buf = new makeSharedBuffer(size);

  function runTest() {
    let startTime = PerfTestRunner.now();
    PerfTestRunner.addRunTestStartMarker();
    frame.copyTo(buf)
        .then(layout => {
          PerfTestRunner.measureValueAsync(PerfTestRunner.now() - startTime);
          PerfTestRunner.addRunTestEndMarker();
          if (!isDone)
            runTest();
        })
        .catch(e => {
          PerfTestRunner.logFatalError('Test error: ' + e);
        });
  }

  PerfTestRunner.startMeasureValuesAsync({
    description: desc,
    unit: 'ms',
    done: _ => {
      isDone = true;
      frame.close();
    },
    run: _ => {
      runTest();
    },
  });
}
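
// Usage sketch (illustrative, not part of the vendored file): runCopyToTest()
// expects a WebCodecs VideoFrame and assumes the Blink PerfTestRunner (from
// resources/runner.js) has already been loaded by the enclosing test page.
function exampleRunCopyToTest() {
  const canvas = new OffscreenCanvas(1280, 720);
  canvas.getContext('2d').fillRect(0, 0, 1280, 720);
  // A canvas-backed frame; real tests may use decoded or captured frames.
  const frame = new VideoFrame(canvas, {timestamp: 0});
  runCopyToTest(frame, 'copyTo() readback of a 1280x720 canvas-backed frame');
}
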
function runBatchCopyToTest(frames, desc) {
  let isDone = false;
  let frames_and_buffers = frames.map(frame => {
    let size = frame.allocationSize();
    let buf = new makeSharedBuffer(size);
    return [frame, buf];
  });

  function runTest() {
    let startTime = PerfTestRunner.now();
    PerfTestRunner.addRunTestStartMarker();
    let readback_promises = frames_and_buffers.map(([frame, buf]) => {
      return frame.copyTo(buf);
    });
    Promise.all(readback_promises)
        .then(layouts => {
          PerfTestRunner.measureValueAsync(PerfTestRunner.now() - startTime);
          PerfTestRunner.addRunTestEndMarker();
          if (!isDone)
            runTest();
        })
        .catch(e => {
          PerfTestRunner.logFatalError('Test error: ' + e);
        });
  }

  PerfTestRunner.startMeasureValuesAsync({
    description: desc,
    unit: 'ms',
    done: _ => {
      isDone = true;
      for (let frame of frames)
        frame.close();
    },
    run: _ => {
      runTest();
    },
  });
}
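
// Usage sketch (illustrative): runBatchCopyToTest() measures one whole batch
// per iteration, issuing every copyTo() before awaiting them via Promise.all().
function exampleRunBatchCopyToTest() {
  const canvas = new OffscreenCanvas(640, 480);
  canvas.getContext('2d').fillRect(0, 0, 640, 480);
  const frames = [];
  for (let i = 0; i < 4; i++) {
    // Distinct timestamps (in microseconds) for otherwise identical frames.
    frames.push(new VideoFrame(canvas, {timestamp: i * 16667}));
  }
  runBatchCopyToTest(frames, 'Batch copyTo() readback of 4 canvas-backed frames');
}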