<!doctype html>
<meta charset=utf-8>
<title>RTCPeerConnection Insertable Streams Simulcast</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src=/resources/testdriver.js></script>
<script src=/resources/testdriver-vendor.js></script>
<script src='../mediacapture-streams/permission-helper.js'></script>
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
<script src="../webrtc/third_party/sdp/sdp.js"></script>
<script src="../webrtc/simulcast/simulcast.js"></script>
<script>
// Test based on wpt/webrtc/simulcast/basic.https.html
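// In addition to the basic simulcast negotiation, this test enables encoded
// insertable streams on both peer connections and pipes the sender's and the
// receiver's encoded frames through TransformStreams that record the distinct
// SSRCs observed, verifying that all three simulcast layers are exposed.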
promise_test(async t => {
  const rids = [0, 1, 2];
  const pc1 = new RTCPeerConnection({encodedInsertableStreams:true});
  t.add_cleanup(() => pc1.close());
  const pc2 = new RTCPeerConnection({encodedInsertableStreams:true});
  t.add_cleanup(() => pc2.close());

  exchangeIceCandidates(pc1, pc2);

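  // Receiver side: for every incoming track, attach an encoded-streams
  // transform that notes each new synchronization source (SSRC) and then
  // forwards the frame unchanged.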
  const metadataToBeLoaded = [];
  let receiverSSRCs = [];
  pc2.ontrack = t.step_func(e => {
    const receiverTransformer = new TransformStream({
      async transform(encodedFrame, controller) {
        let ssrc = encodedFrame.getMetadata().synchronizationSource;
        if (receiverSSRCs.indexOf(ssrc) == -1)
          receiverSSRCs.push(ssrc);
        controller.enqueue(encodedFrame);
      }
    });
    const receiverStreams = e.receiver.createEncodedStreams();
    receiverStreams.readable
      .pipeThrough(receiverTransformer)
      .pipeTo(receiverStreams.writable);

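    // Play back each incoming stream and wait for 'loadedmetadata' as a
    // signal that video is flowing for that layer.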
    const stream = e.streams[0];
    const v = document.createElement('video');
    v.autoplay = true;
    v.srcObject = stream;
    v.id = stream.id;
    metadataToBeLoaded.push(new Promise((resolve) => {
      v.addEventListener('loadedmetadata', () => {
        resolve();
      });
    }));
  });

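  // Sender side: capture camera video and send it with three simulcast
  // encodings, one per rid.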
  await setMediaPermission("granted", ["camera"]);
  const stream = await navigator.mediaDevices.getUserMedia({video: {width: 1280, height: 720}});
  t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
  const transceiver = pc1.addTransceiver(stream.getVideoTracks()[0], {
    streams: [stream],
    sendEncodings: rids.map(rid => ({rid})),
  });
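  // Attach an encoded-streams transform to the sender as well, recording one
  // SSRC per simulcast layer while passing frames through untouched.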
  const senderStreams = transceiver.sender.createEncodedStreams();
  let senderSSRCs = [];
  const senderTransformer = new TransformStream({
    async transform(encodedFrame, controller) {
      if (senderSSRCs.indexOf(encodedFrame.getMetadata().synchronizationSource) == -1)
        senderSSRCs.push(encodedFrame.getMetadata().synchronizationSource);
      controller.enqueue(encodedFrame);
    }
  });
  senderStreams.readable
    .pipeThrough(senderTransformer)
    .pipeTo(senderStreams.writable);

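  // Standard WPT simulcast negotiation: the simulcast.js helpers swap the rid
  // and mid header extensions in the munged SDP so that pc2 sees each
  // simulcast layer as a separate m-section and fires one ontrack per layer.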
  const offer = await pc1.createOffer();
  await pc1.setLocalDescription(offer);
  await pc2.setRemoteDescription({
    type: 'offer',
    sdp: swapRidAndMidExtensionsInSimulcastOffer(offer, rids),
  });
  const answer = await pc2.createAnswer();
  await pc2.setLocalDescription(answer);
  await pc1.setRemoteDescription({
    type: 'answer',
    sdp: swapRidAndMidExtensionsInSimulcastAnswer(answer, pc1.localDescription, rids),
  });
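  // One ontrack event, and therefore one metadata promise, is expected per
  // simulcast layer.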
  assert_equals(metadataToBeLoaded.length, 3);
  await Promise.all(metadataToBeLoaded);
  // Ensure that frames from the 3 simulcast layers are exposed.
  assert_equals(senderSSRCs.length, 3);
  assert_equals(receiverSSRCs.length, 3);
}, 'Basic simulcast setup with three spatial layers');
</script>