Mirror of https://github.com/servo/servo.git, synced 2025-08-10 16:05:43 +01:00

Update web-platform-tests to revision 468d01bbd84da2babf265c6af46947be68713440

This commit is contained in:
parent 35e95f55a1
commit 58e8ee674b

9438 changed files with 266112 additions and 106976 deletions
|
@ -0,0 +1 @@
|
|||
spec: https://w3c.github.io/webrtc-encoded-transform/
|
|
@ -0,0 +1,56 @@
|
|||
<!DOCTYPE html>
|
||||
<meta charset="utf-8">
|
||||
<!-- Based on similar tests in html/infrastructure/safe-passing-of-structured-data/shared-array-buffers/ -->
|
||||
<title>RTCEncodedAudioFrame cannot cross agent clusters, service worker edition</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="../service-workers/service-worker/resources/test-helpers.sub.js"></script>
|
||||
|
||||
<script>
|
||||
"use strict";
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({audio:true});
|
||||
const track = stream.getTracks()[0];
|
||||
t.add_cleanup(() => track.stop());
|
||||
|
||||
const sender = caller.addTrack(track)
|
||||
const streams = sender.createEncodedStreams();
|
||||
const reader = streams.readable.getReader();
|
||||
const writer = streams.writable.getWriter();
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
const result = await reader.read();
|
||||
const scope = "resources/blank.html";
|
||||
let reg = await service_worker_unregister_and_register(t, "resources/serviceworker-failure.js", scope);
|
||||
t.add_cleanup(() => service_worker_unregister(t, scope));
|
||||
await wait_for_state(t, reg.installing, "activated");
|
||||
let iframe = await with_iframe(scope);
|
||||
t.add_cleanup(() => iframe.remove());
|
||||
const sw = iframe.contentWindow.navigator.serviceWorker;
|
||||
let state = "start in window";
|
||||
return new Promise(resolve => {
|
||||
sw.onmessage = t.step_func(e => {
|
||||
if (e.data === "start in worker") {
|
||||
assert_equals(state, "start in window");
|
||||
sw.controller.postMessage(result.value);
|
||||
state = "we are expecting confirmation of an onmessageerror in the worker";
|
||||
} else if (e.data === "onmessageerror was received in worker") {
|
||||
assert_equals(state, "we are expecting confirmation of an onmessageerror in the worker");
|
||||
resolve();
|
||||
} else {
|
||||
assert_unreached("Got an unexpected message from the service worker: " + e.data);
|
||||
}
|
||||
});
|
||||
|
||||
sw.controller.postMessage(state);
|
||||
});
|
||||
});
|
||||
</script>
|
|
@ -0,0 +1,56 @@
|
|||
<!DOCTYPE html>
|
||||
<meta charset="utf-8">
|
||||
<!-- Based on similar tests in html/infrastructure/safe-passing-of-structured-data/shared-array-buffers/ -->
|
||||
<title>RTCEncodedVideoFrame cannot cross agent clusters, service worker edition</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="../service-workers/service-worker/resources/test-helpers.sub.js"></script>
|
||||
|
||||
<script>
|
||||
"use strict";
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video:true});
|
||||
const videoTrack = stream.getVideoTracks()[0];
|
||||
t.add_cleanup(() => videoTrack.stop());
|
||||
|
||||
const videoSender = caller.addTrack(videoTrack)
|
||||
const senderStreams = videoSender.createEncodedStreams();
|
||||
const senderReader = senderStreams.readable.getReader();
|
||||
const senderWriter = senderStreams.writable.getWriter();
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
const result = await senderReader.read();
|
||||
const scope = "resources/blank.html";
|
||||
const reg = await service_worker_unregister_and_register(t, "resources/serviceworker-failure.js", scope)
|
||||
t.add_cleanup(() => service_worker_unregister(t, scope));
|
||||
await wait_for_state(t, reg.installing, "activated");
|
||||
const iframe = await with_iframe(scope);
|
||||
t.add_cleanup(() => iframe.remove());
|
||||
const sw = iframe.contentWindow.navigator.serviceWorker;
|
||||
let state = "start in window";
|
||||
return new Promise(resolve => {
|
||||
sw.onmessage = t.step_func(e => {
|
||||
if (e.data === "start in worker") {
|
||||
assert_equals(state, "start in window");
|
||||
sw.controller.postMessage(result.value);
|
||||
state = "we are expecting confirmation of an onmessageerror in the worker";
|
||||
} else if (e.data === "onmessageerror was received in worker") {
|
||||
assert_equals(state, "we are expecting confirmation of an onmessageerror in the worker");
|
||||
resolve();
|
||||
} else {
|
||||
assert_unreached("Got an unexpected message from the service worker: " + e.data);
|
||||
}
|
||||
});
|
||||
|
||||
sw.controller.postMessage(state);
|
||||
});
|
||||
});
|
||||
</script>
|
|
@ -0,0 +1,196 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RTCPeerConnection Insertable Streams Audio</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="./RTCPeerConnection-insertable-streams.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script>
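// Sends three batches of audio frames through the sender's encoded streams
// (pass-through, data replaced with a fresh buffer, data modified in place)
// and verifies that the receiver observes exactly the recorded frames.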
|
||||
async function testAudioFlow(t, negotiationFunction) {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({audio:true});
|
||||
const audioTrack = stream.getAudioTracks()[0];
|
||||
t.add_cleanup(() => audioTrack.stop());
|
||||
|
||||
const audioSender = caller.addTrack(audioTrack)
|
||||
const senderStreams = audioSender.createEncodedStreams();
|
||||
const senderReader = senderStreams.readable.getReader();
|
||||
const senderWriter = senderStreams.writable.getWriter();
|
||||
|
||||
const frameInfos = [];
|
||||
const numFramesPassthrough = 5;
|
||||
const numFramesReplaceData = 5;
|
||||
const numFramesModifyData = 5;
|
||||
const numFramesToSend = numFramesPassthrough + numFramesReplaceData + numFramesModifyData;
|
||||
|
||||
const ontrackPromise = new Promise(resolve => {
|
||||
callee.ontrack = t.step_func(() => {
|
||||
const audioReceiver = callee.getReceivers().find(r => r.track.kind === 'audio');
|
||||
assert_not_equals(audioReceiver, undefined);
|
||||
|
||||
const receiverStreams =
|
||||
audioReceiver.createEncodedStreams();
|
||||
const receiverReader = receiverStreams.readable.getReader();
|
||||
const receiverWriter = receiverStreams.writable.getWriter();
|
||||
|
||||
const maxFramesToReceive = numFramesToSend;
|
||||
let numVerifiedFrames = 0;
|
||||
for (let i = 0; i < maxFramesToReceive; i++) {
|
||||
receiverReader.read().then(t.step_func(result => {
|
||||
if (frameInfos[numVerifiedFrames] &&
|
||||
areFrameInfosEqual(result.value, frameInfos[numVerifiedFrames])) {
|
||||
numVerifiedFrames++;
|
||||
} else {
|
||||
// Receiving unexpected frames is an indication that
|
||||
// frames are not passed correctly between sender and receiver.
|
||||
assert_unreached("Incorrect frame received");
|
||||
}
|
||||
|
||||
if (numVerifiedFrames == numFramesToSend)
|
||||
resolve();
|
||||
}));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await negotiationFunction(caller, callee);
|
||||
|
||||
// Pass frames as they come from the encoder.
|
||||
for (let i = 0; i < numFramesPassthrough; i++) {
|
||||
const result = await senderReader.read()
|
||||
frameInfos.push({
|
||||
data: result.value.data,
|
||||
timestamp: result.value.timestamp,
|
||||
type: result.value.type,
|
||||
metadata: result.value.getMetadata(),
|
||||
getMetadata() { return this.metadata; }
|
||||
});
|
||||
senderWriter.write(result.value);
|
||||
}
|
||||
|
||||
// Replace frame data with arbitrary buffers.
|
||||
for (let i = 0; i < numFramesReplaceData; i++) {
|
||||
const result = await senderReader.read()
|
||||
|
||||
const buffer = new ArrayBuffer(100);
|
||||
const int8View = new Int8Array(buffer);
|
||||
int8View.fill(i);
|
||||
|
||||
result.value.data = buffer;
|
||||
frameInfos.push({
|
||||
data: result.value.data,
|
||||
timestamp: result.value.timestamp,
|
||||
type: result.value.type,
|
||||
metadata: result.value.getMetadata(),
|
||||
getMetadata() { return this.metadata; }
|
||||
});
|
||||
senderWriter.write(result.value);
|
||||
}
|
||||
|
||||
// Modify frame data.
|
||||
for (let i = 0; i < numFramesModifyData; i++) {
|
||||
const result = await senderReader.read()
|
||||
const int8View = new Int8Array(result.value.data);
|
||||
int8View.fill(i);
|
||||
|
||||
frameInfos.push({
|
||||
data: result.value.data,
|
||||
timestamp: result.value.timestamp,
|
||||
type: result.value.type,
|
||||
metadata: result.value.getMetadata(),
|
||||
getMetadata() { return this.metadata; }
|
||||
});
|
||||
senderWriter.write(result.value);
|
||||
}
|
||||
|
||||
return ontrackPromise;
|
||||
}
|
||||
|
||||
promise_test(async t => {
|
||||
return testAudioFlow(t, exchangeOfferAnswer);
|
||||
}, 'Frames flow correctly using insertable streams');
|
||||
|
||||
promise_test(async t => {
|
||||
return testAudioFlow(t, exchangeOfferAnswerReverse);
|
||||
}, 'Frames flow correctly using insertable streams when receiver starts negotiation');
|
||||
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection();
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
const stream = await navigator.mediaDevices.getUserMedia({audio:true});
|
||||
const audioTrack = stream.getAudioTracks()[0];
|
||||
t.add_cleanup(() => audioTrack.stop());
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
const audioSender = caller.addTrack(audioTrack);
|
||||
assert_throws_dom("InvalidStateError", () => audioSender.createEncodedStreams());
|
||||
}, 'RTCRtpSender.createEncodedStream() throws if not requested in PC configuration');
|
||||
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection();
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
const stream = await navigator.mediaDevices.getUserMedia({audio:true});
|
||||
const audioTrack = stream.getAudioTracks()[0];
|
||||
t.add_cleanup(() => audioTrack.stop());
|
||||
|
||||
const audioSender = caller.addTrack(audioTrack);
|
||||
const ontrackPromise = new Promise(resolve => {
|
||||
callee.ontrack = t.step_func(() => {
|
||||
const audioReceiver = callee.getReceivers().find(r => r.track.kind === 'audio');
|
||||
assert_not_equals(audioReceiver, undefined);
|
||||
assert_throws_dom("InvalidStateError", () => audioReceiver.createEncodedStreams());
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
return ontrackPromise;
|
||||
}, 'RTCRtpReceiver.createEncodedStream() throws if not requested in PC configuration');
|
||||
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({audio:true});
|
||||
const track = stream.getTracks()[0];
|
||||
t.add_cleanup(() => track.stop());
|
||||
|
||||
const sender = caller.addTrack(track)
|
||||
const streams = sender.createEncodedStreams();
|
||||
const transformer = new TransformStream({
|
||||
transform(frame, controller) {
|
||||
// Inserting the same frame twice will result in failure since the frame
|
||||
// will be neutered after the first insertion is processed.
|
||||
controller.enqueue(frame);
|
||||
controller.enqueue(frame);
|
||||
}
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
await promise_rejects_dom(
|
||||
t, 'OperationError',
|
||||
streams.readable.pipeThrough(transformer).pipeTo(streams.writable));
|
||||
}, 'Enqueuing the same frame twice fails');
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,83 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RTCPeerConnection Insertable Streams - Errors</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="./RTCPeerConnection-insertable-streams.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection();
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video:true});
|
||||
const videoTrack = stream.getVideoTracks()[0];
|
||||
t.add_cleanup(() => videoTrack.stop());
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
const videoSender = caller.addTrack(videoTrack);
|
||||
assert_throws_dom("InvalidStateError", () => videoSender.createEncodedStreams());
|
||||
}, 'RTCRtpSender.createEncodedStream() throws if not requested in PC configuration');
|
||||
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection();
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video:true});
|
||||
const videoTrack = stream.getVideoTracks()[0];
|
||||
t.add_cleanup(() => videoTrack.stop());
|
||||
|
||||
const videoSender = caller.addTrack(videoTrack);
|
||||
const ontrackPromise = new Promise(resolve => {
|
||||
callee.ontrack = t.step_func(() => {
|
||||
const videoReceiver = callee.getReceivers().find(r => r.track.kind === 'video');
|
||||
assert_not_equals(videoReceiver, undefined);
|
||||
assert_throws_dom("InvalidStateError", () => videoReceiver.createEncodedStreams());
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
return ontrackPromise;
|
||||
}, 'RTCRtpReceiver.createEncodedStream() throws if not requested in PC configuration');
|
||||
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video:true});
|
||||
const track = stream.getTracks()[0];
|
||||
t.add_cleanup(() => track.stop());
|
||||
|
||||
const sender = caller.addTrack(track)
|
||||
const streams = sender.createEncodedStreams();
|
||||
const transformer = new TransformStream({
|
||||
transform(frame, controller) {
|
||||
// Inserting the same frame twice will result in failure since the frame
|
||||
// will be neutered after the first insertion is processed.
|
||||
controller.enqueue(frame);
|
||||
controller.enqueue(frame);
|
||||
}
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
await promise_rejects_dom(
|
||||
t, 'OperationError',
|
||||
streams.readable.pipeThrough(transformer).pipeTo(streams.writable));
|
||||
}, 'Enqueuing the same frame twice fails');
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,85 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>RTCPeerConnection Insertable Streams Simulcast</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="../webrtc/third_party/sdp/sdp.js"></script>
|
||||
<script src="../webrtc/simulcast/simulcast.js"></script>
|
||||
<script>
|
||||
// Test based on wpt/webrtc/simulcast/basic.https.html
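// Encoded streams are attached on both the sending and receiving side, and the
// distinct synchronization sources observed in the transforms are counted to
// confirm that all three simulcast layers produce frames.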
|
||||
promise_test(async t => {
|
||||
const rids = [0, 1, 2];
|
||||
const pc1 = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => pc1.close());
|
||||
const pc2 = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => pc2.close());
|
||||
|
||||
exchangeIceCandidates(pc1, pc2);
|
||||
|
||||
const metadataToBeLoaded = [];
|
||||
let receiverSSRCs = []
|
||||
pc2.ontrack = t.step_func(e => {
|
||||
const receiverTransformer = new TransformStream({
|
||||
async transform(chunk, controller) {
|
||||
let ssrc = chunk.getMetadata().synchronizationSource;
|
||||
if (receiverSSRCs.indexOf(ssrc) == -1)
|
||||
receiverSSRCs.push(ssrc);
|
||||
controller.enqueue(chunk);
|
||||
}
|
||||
});
|
||||
const receiverStreams = e.receiver.createEncodedStreams();
|
||||
receiverStreams.readable
|
||||
.pipeThrough(receiverTransformer)
|
||||
.pipeTo(receiverStreams.writable);
|
||||
|
||||
const stream = e.streams[0];
|
||||
const v = document.createElement('video');
|
||||
v.autoplay = true;
|
||||
v.srcObject = stream;
|
||||
v.id = stream.id
|
||||
metadataToBeLoaded.push(new Promise((resolve) => {
|
||||
v.addEventListener('loadedmetadata', () => {
|
||||
resolve();
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video: {width: 1280, height: 720}});
|
||||
t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
|
||||
const transceiver = pc1.addTransceiver(stream.getVideoTracks()[0], {
|
||||
streams: [stream],
|
||||
sendEncodings: rids.map(rid => ({rid})),
|
||||
});
|
||||
const senderStreams = transceiver.sender.createEncodedStreams();
|
||||
let senderSSRCs = [];
|
||||
const senderTransformer = new TransformStream({
|
||||
async transform(chunk, controller) {
|
||||
if (senderSSRCs.indexOf(chunk.getMetadata().synchronizationSource) == -1)
|
||||
senderSSRCs.push(chunk.getMetadata().synchronizationSource);
|
||||
controller.enqueue(chunk);
|
||||
}
|
||||
});
|
||||
senderStreams.readable
|
||||
.pipeThrough(senderTransformer)
|
||||
.pipeTo(senderStreams.writable);
|
||||
|
||||
const offer = await pc1.createOffer();
|
||||
await pc1.setLocalDescription(offer);
|
||||
await pc2.setRemoteDescription({
|
||||
type: 'offer',
|
||||
sdp: swapRidAndMidExtensionsInSimulcastOffer(offer, rids),
|
||||
});
|
||||
const answer = await pc2.createAnswer();
|
||||
await pc2.setLocalDescription(answer);
|
||||
await pc1.setRemoteDescription({
|
||||
type: 'answer',
|
||||
sdp: swapRidAndMidExtensionsInSimulcastAnswer(answer, pc1.localDescription, rids),
|
||||
});
|
||||
assert_equals(metadataToBeLoaded.length, 3);
|
||||
await Promise.all(metadataToBeLoaded);
|
||||
// Ensure that frames from the 3 simulcast layers are exposed.
|
||||
assert_equals(senderSSRCs.length, 3);
|
||||
assert_equals(receiverSSRCs.length, 3);
|
||||
}, 'Basic simulcast setup with three spatial layers');
|
||||
</script>
|
|
@ -0,0 +1,76 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RTCPeerConnection Insertable Streams - Video Frames</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="./RTCPeerConnection-insertable-streams.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script>
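// Counts key and delta frames on both the sending and the receiving side and
// checks that at least one frame of each type is observed.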
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video:true});
|
||||
const track = stream.getTracks()[0];
|
||||
t.add_cleanup(() => track.stop());
|
||||
|
||||
const sender = caller.addTrack(track)
|
||||
const senderStreams = sender.createEncodedStreams();
|
||||
const senderReader = senderStreams.readable.getReader();
|
||||
const senderWriter = senderStreams.writable.getWriter();
|
||||
const numFramesToSend = 20;
|
||||
|
||||
const ontrackPromise = new Promise((resolve, reject) => {
|
||||
callee.ontrack = async e => {
|
||||
const receiverStreams = e.receiver.createEncodedStreams();
|
||||
const receiverReader = receiverStreams.readable.getReader();
|
||||
|
||||
let numReceivedKeyFrames = 0;
|
||||
let numReceivedDeltaFrames = 0;
|
||||
for (let i = 0; i < numFramesToSend; i++) {
|
||||
const result = await receiverReader.read();
|
||||
if (result.value.type == 'key')
|
||||
numReceivedKeyFrames++;
|
||||
else if (result.value.type == 'delta')
|
||||
numReceivedDeltaFrames++;
|
||||
|
||||
if (numReceivedKeyFrames > 0 && numReceivedDeltaFrames > 0)
|
||||
resolve();
|
||||
else if (numReceivedKeyFrames + numReceivedDeltaFrames >= numFramesToSend)
|
||||
reject();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
let numSentKeyFrames = 0;
|
||||
let numSentDeltaFrames = 0;
|
||||
// Pass frames as they come from the encoder.
|
||||
for (let i = 0; i < numFramesToSend; i++) {
|
||||
const result = await senderReader.read();
|
||||
verifyNonstandardAdditionalDataIfPresent(result.value);
|
||||
if (result.value.type == 'key') {
|
||||
numSentKeyFrames++;
|
||||
} else {
|
||||
numSentDeltaFrames++;
|
||||
}
|
||||
|
||||
senderWriter.write(result.value);
|
||||
}
|
||||
|
||||
assert_greater_than(numSentKeyFrames, 0);
|
||||
assert_greater_than(numSentDeltaFrames, 0);
|
||||
|
||||
return ontrackPromise;
|
||||
}, 'Key and Delta frames are sent and received');
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,135 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RTCPeerConnection Insertable Streams - Video</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="./RTCPeerConnection-insertable-streams.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script>
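// Same three-phase flow as the audio test, with additional checks on the video
// frame metadata and on the nonstandard additionalData field when present.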
|
||||
async function testVideoFlow(t, negotiationFunction) {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video:true});
|
||||
const videoTrack = stream.getVideoTracks()[0];
|
||||
t.add_cleanup(() => videoTrack.stop());
|
||||
|
||||
const videoSender = caller.addTrack(videoTrack)
|
||||
const senderStreams = videoSender.createEncodedStreams();
|
||||
const senderReader = senderStreams.readable.getReader();
|
||||
const senderWriter = senderStreams.writable.getWriter();
|
||||
|
||||
const frameInfos = [];
|
||||
const numFramesPassthrough = 5;
|
||||
const numFramesReplaceData = 5;
|
||||
const numFramesModifyData = 5;
|
||||
const numFramesToSend = numFramesPassthrough + numFramesReplaceData + numFramesModifyData;
|
||||
|
||||
const ontrackPromise = new Promise(resolve => {
|
||||
callee.ontrack = t.step_func(() => {
|
||||
const videoReceiver = callee.getReceivers().find(r => r.track.kind === 'video');
|
||||
assert_not_equals(videoReceiver, undefined);
|
||||
|
||||
const receiverStreams =
|
||||
videoReceiver.createEncodedStreams();
|
||||
const receiverReader = receiverStreams.readable.getReader();
|
||||
const receiverWriter = receiverStreams.writable.getWriter();
|
||||
|
||||
const maxFramesToReceive = numFramesToSend;
|
||||
let numVerifiedFrames = 0;
|
||||
for (let i = 0; i < maxFramesToReceive; i++) {
|
||||
receiverReader.read().then(t.step_func(result => {
|
||||
verifyNonstandardAdditionalDataIfPresent(result.value);
|
||||
if (frameInfos[numVerifiedFrames] &&
|
||||
areFrameInfosEqual(result.value, frameInfos[numVerifiedFrames])) {
|
||||
numVerifiedFrames++;
|
||||
} else {
|
||||
// Receiving unexpected frames is an indication that
|
||||
// frames are not passed correctly between sender and receiver.
|
||||
assert_unreached("Incorrect frame received");
|
||||
}
|
||||
|
||||
if (numVerifiedFrames == numFramesToSend)
|
||||
resolve();
|
||||
}));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await negotiationFunction(caller, callee);
|
||||
|
||||
// Pass frames as they come from the encoder.
|
||||
for (let i = 0; i < numFramesPassthrough; i++) {
|
||||
const result = await senderReader.read();
|
||||
const metadata = result.value.getMetadata();
|
||||
assert_true(containsVideoMetadata(metadata));
|
||||
verifyNonstandardAdditionalDataIfPresent(result.value);
|
||||
frameInfos.push({
|
||||
timestamp: result.value.timestamp,
|
||||
type: result.value.type,
|
||||
data: result.value.data,
|
||||
metadata: metadata,
|
||||
getMetadata() { return this.metadata; }
|
||||
});
|
||||
senderWriter.write(result.value);
|
||||
}
|
||||
|
||||
// Replace frame data with arbitrary buffers.
|
||||
for (let i = 0; i < numFramesReplaceData; i++) {
|
||||
const result = await senderReader.read();
|
||||
const metadata = result.value.getMetadata();
|
||||
assert_true(containsVideoMetadata(metadata));
|
||||
const buffer = new ArrayBuffer(100);
|
||||
const int8View = new Int8Array(buffer);
|
||||
int8View.fill(i);
|
||||
|
||||
result.value.data = buffer;
|
||||
frameInfos.push({
|
||||
timestamp: result.value.timestamp,
|
||||
type: result.value.type,
|
||||
data: result.value.data,
|
||||
metadata: metadata,
|
||||
getMetadata() { return this.metadata; }
|
||||
});
|
||||
senderWriter.write(result.value);
|
||||
}
|
||||
|
||||
// Modify frame data.
|
||||
for (let i = 0; i < numFramesModifyData; i++) {
|
||||
const result = await senderReader.read();
|
||||
const metadata = result.value.getMetadata();
|
||||
assert_true(containsVideoMetadata(metadata));
|
||||
const int8View = new Int8Array(result.value.data);
|
||||
int8View.fill(i);
|
||||
|
||||
frameInfos.push({
|
||||
timestamp: result.value.timestamp,
|
||||
type: result.value.type,
|
||||
data: result.value.data,
|
||||
metadata: metadata,
|
||||
getMetadata() { return this.metadata; }
|
||||
});
|
||||
senderWriter.write(result.value);
|
||||
}
|
||||
|
||||
return ontrackPromise;
|
||||
}
|
||||
|
||||
promise_test(async t => {
|
||||
return testVideoFlow(t, exchangeOfferAnswer);
|
||||
}, 'Frames flow correctly using insertable streams');
|
||||
|
||||
promise_test(async t => {
|
||||
return testVideoFlow(t, exchangeOfferAnswerReverse);
|
||||
}, 'Frames flow correctly using insertable streams when receiver starts negotiation');
|
||||
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,117 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RTCPeerConnection Insertable Streams - Worker</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
|
||||
<script src="./RTCPeerConnection-insertable-streams.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script>
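// Transfers the sender's encoded readable stream to a dedicated worker. The
// worker first posts the frame's fields as a plain object and then posts the
// frame itself twice; the test checks the received frames against those fields.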
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({audio:true});
|
||||
const track = stream.getTracks()[0];
|
||||
t.add_cleanup(() => track.stop());
|
||||
|
||||
const sender = caller.addTrack(track)
|
||||
const senderStreams = sender.createEncodedStreams();
|
||||
|
||||
const senderWorker = new Worker('RTCPeerConnection-sender-worker-single-frame.js')
|
||||
t.add_cleanup(() => senderWorker.terminate());
|
||||
senderWorker.postMessage(
|
||||
{readableStream: senderStreams.readable},
|
||||
[senderStreams.readable]);
|
||||
|
||||
let expectedFrameData = null;
|
||||
let verifiedFrameData = false;
|
||||
let numVerifiedFrames = 0;
|
||||
const onmessagePromise = new Promise(resolve => {
|
||||
senderWorker.onmessage = t.step_func(message => {
|
||||
if (!(message.data instanceof RTCEncodedAudioFrame)) {
|
||||
// This is the first message sent from the Worker to the test.
|
||||
// It contains an object (not an RTCEncodedAudioFrame) with the same
|
||||
// fields as the RTCEncodedAudioFrame to be sent in follow-up messages.
|
||||
// These serve as expected values to validate that the
|
||||
// RTCEncodedAudioFrame is sent correctly back to the test in the next
|
||||
// message.
|
||||
expectedFrameData = message.data;
|
||||
} else {
|
||||
// This is the frame sent by the Worker after reading it from the
|
||||
// readable stream. The Worker sends it twice after sending the
|
||||
// verification message.
|
||||
assert_equals(message.data.type, expectedFrameData.type);
|
||||
assert_equals(message.data.timestamp, expectedFrameData.timestamp);
|
||||
assert_true(areArrayBuffersEqual(message.data.data, expectedFrameData.data));
|
||||
if (++numVerifiedFrames == 2)
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
return onmessagePromise;
|
||||
}, 'RTCRtpSender readable stream transferred to a Worker and the Worker sends an RTCEncodedAudioFrame back');
|
||||
|
||||
promise_test(async t => {
|
||||
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
|
||||
t.add_cleanup(() => caller.close());
|
||||
const callee = new RTCPeerConnection();
|
||||
t.add_cleanup(() => callee.close());
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video:true});
|
||||
const videoTrack = stream.getVideoTracks()[0];
|
||||
t.add_cleanup(() => videoTrack.stop());
|
||||
|
||||
const videoSender = caller.addTrack(videoTrack)
|
||||
const senderStreams = videoSender.createEncodedStreams();
|
||||
|
||||
const senderWorker = new Worker('RTCPeerConnection-sender-worker-single-frame.js')
|
||||
t.add_cleanup(() => senderWorker.terminate());
|
||||
senderWorker.postMessage(
|
||||
{readableStream: senderStreams.readable},
|
||||
[senderStreams.readable]);
|
||||
|
||||
let expectedFrameData = null;
|
||||
let verifiedFrameData = false;
|
||||
let numVerifiedFrames = 0;
|
||||
const onmessagePromise = new Promise(resolve => {
|
||||
senderWorker.onmessage = t.step_func(message => {
|
||||
if (!(message.data instanceof RTCEncodedVideoFrame)) {
|
||||
// This is the first message sent from the Worker to the test.
|
||||
// It contains an object (not an RTCEncodedVideoFrame) with the same
|
||||
// fields as the RTCEncodedVideoFrame to be sent in follow-up messages.
|
||||
// These serve as expected values to validate that the
|
||||
// RTCEncodedVideoFrame is sent correctly back to the test in the next
|
||||
// message.
|
||||
expectedFrameData = message.data;
|
||||
} else {
|
||||
// This is the frame sent by the Worker after reading it from the
|
||||
// readable stream. The Worker sends it twice after sending the
|
||||
// verification message.
|
||||
assert_equals(message.data.type, expectedFrameData.type);
|
||||
assert_equals(message.data.timestamp, expectedFrameData.timestamp);
|
||||
assert_true(areArrayBuffersEqual(message.data.data, expectedFrameData.data));
|
||||
assert_equals(message.data.getMetadata().synchronizationSource, expectedFrameData.metadata.synchronizationSource);
|
||||
if (++numVerifiedFrames == 2)
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
exchangeIceCandidates(caller, callee);
|
||||
await exchangeOfferAnswer(caller, callee);
|
||||
|
||||
return onmessagePromise;
|
||||
}, 'RTCRtpSender readable stream transferred to a Worker and the Worker sends an RTCEncodedVideoFrame back');
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,241 @@
|
|||
function areArrayBuffersEqual(buffer1, buffer2)
|
||||
{
|
||||
if (buffer1.byteLength !== buffer2.byteLength) {
|
||||
return false;
|
||||
}
|
||||
let array1 = new Int8Array(buffer1);
|
||||
let array2 = new Int8Array(buffer2);
|
||||
for (let i = 0 ; i < buffer1.byteLength ; ++i) {
|
||||
if (array1[i] !== array2[i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function areArraysEqual(a1, a2) {
|
||||
if (a1 === a2)
|
||||
return true;
|
||||
if (a1.length != a2.length)
|
||||
return false;
|
||||
for (let i = 0; i < a1.length; i++) {
|
||||
if (a1[i] != a2[i])
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function areMetadataEqual(metadata1, metadata2, type) {
|
||||
return metadata1.synchronizationSource === metadata2.synchronizationSource &&
|
||||
areArraysEqual(metadata1.contributingSources, metadata2.contributingSources) &&
|
||||
metadata1.frameId === metadata2.frameId &&
|
||||
areArraysEqual(metadata1.dependencies, metadata2.dependencies) &&
|
||||
metadata1.spatialIndex === metadata2.spatialIndex &&
|
||||
metadata1.temporalIndex === metadata2.temporalIndex &&
|
||||
// Width and height are reported only for key frames on the receiver side.
|
||||
type == "key"
|
||||
? metadata1.width === metadata2.width && metadata1.height === metadata2.height
|
||||
: true);
|
||||
}
|
||||
|
||||
function areFrameInfosEqual(frame1, frame2) {
|
||||
return frame1.timestamp === frame2.timestamp &&
|
||||
frame1.type === frame2.type &&
|
||||
areMetadataEqual(frame1.getMetadata(), frame2.getMetadata(), frame1.type) &&
|
||||
areArrayBuffersEqual(frame1.data, frame2.data);
|
||||
}
|
||||
|
||||
function containsVideoMetadata(metadata) {
|
||||
return metadata.synchronizationSource !== undefined &&
|
||||
metadata.width !== undefined &&
|
||||
metadata.height !== undefined &&
|
||||
metadata.spatialIndex !== undefined &&
|
||||
metadata.temporalIndex !== undefined &&
|
||||
metadata.dependencies !== undefined;
|
||||
}
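// Adds the generic-frame-descriptor-00 header extension to the SDP if it is
// not already present, using the first free extension id in [1, 14], or the
// next highest id when a=extmap-allow-mixed is offered.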
|
||||
|
||||
function enableGFD(sdp) {
|
||||
const GFD_V00_EXTENSION =
|
||||
'http://www.webrtc.org/experiments/rtp-hdrext/generic-frame-descriptor-00';
|
||||
if (sdp.indexOf(GFD_V00_EXTENSION) !== -1)
|
||||
return sdp;
|
||||
|
||||
const extensionIds = sdp.trim().split('\n')
|
||||
.map(line => line.trim())
|
||||
.filter(line => line.startsWith('a=extmap:'))
|
||||
.map(line => line.split(' ')[0].substr(9))
|
||||
.map(id => parseInt(id, 10))
|
||||
.sort((a, b) => a - b);
|
||||
for (let newId = 1; newId <= 14; newId++) {
|
||||
if (!extensionIds.includes(newId)) {
|
||||
return sdp += 'a=extmap:' + newId + ' ' + GFD_V00_EXTENSION + '\r\n';
|
||||
}
|
||||
}
|
||||
if (sdp.indexOf('a=extmap-allow-mixed') !== -1) { // Pick the next highest one.
|
||||
const newId = extensionIds[extensionIds.length - 1] + 1;
|
||||
return sdp += 'a=extmap:' + newId + ' ' + GFD_V00_EXTENSION + '\r\n';
|
||||
}
|
||||
throw 'Could not find free extension id to use for ' + GFD_V00_EXTENSION;
|
||||
}
|
||||
|
||||
async function exchangeOfferAnswer(pc1, pc2) {
|
||||
const offer = await pc1.createOffer();
|
||||
// Munge the SDP to enable the GFD extension in order to get correct metadata.
|
||||
const sdpGFD = enableGFD(offer.sdp);
|
||||
await pc1.setLocalDescription({type: offer.type, sdp: sdpGFD});
|
||||
// Munge the SDP to disable bandwidth probing via RTX.
|
||||
// TODO(crbug.com/1066819): remove this hack when we do not receive duplicates from RTX
|
||||
// anymore.
|
||||
const sdpRTX = sdpGFD.replace(new RegExp('rtx', 'g'), 'invalid');
|
||||
await pc2.setRemoteDescription({type: 'offer', sdp: sdpRTX});
|
||||
|
||||
const answer = await pc2.createAnswer();
|
||||
await pc2.setLocalDescription(answer);
|
||||
await pc1.setRemoteDescription(answer);
|
||||
}
|
||||
|
||||
async function exchangeOfferAnswerReverse(pc1, pc2) {
|
||||
const offer = await pc2.createOffer({offerToReceiveAudio: true, offerToReceiveVideo: true});
|
||||
// Munge the SDP to enable the GFD extension in order to get correct metadata.
|
||||
const sdpGFD = enableGFD(offer.sdp);
|
||||
// Munge the SDP to disable bandwidth probing via RTX.
|
||||
// TODO(crbug.com/1066819): remove this hack when we do not receive duplicates from RTX
|
||||
// anymore.
|
||||
const sdpRTX = sdpGFD.replace(new RegExp('rtx', 'g'), 'invalid');
|
||||
await pc1.setRemoteDescription({type: 'offer', sdp: sdpRTX});
|
||||
await pc2.setLocalDescription({type: 'offer', sdp: sdpGFD});
|
||||
|
||||
const answer = await pc1.createAnswer();
|
||||
await pc2.setRemoteDescription(answer);
|
||||
await pc1.setLocalDescription(answer);
|
||||
}
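// Builds a generic frame descriptor from a video frame's metadata. Width and
// height are only filled in for frames without dependencies (key frames).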
|
||||
|
||||
function createFrameDescriptor(videoFrame) {
|
||||
const kMaxSpatialLayers = 8;
|
||||
const kMaxTemporalLayers = 8;
|
||||
const kMaxNumFrameDependencies = 8;
|
||||
|
||||
const metadata = videoFrame.getMetadata();
|
||||
let frameDescriptor = {
|
||||
beginningOfSubFrame: true,
|
||||
endOfSubFrame: false,
|
||||
frameId: metadata.frameId & 0xFFFF,
|
||||
spatialLayers: 1 << metadata.spatialIndex,
|
||||
temporalLayer: metadata.temporalIndex,
|
||||
frameDependenciesDiffs: [],
|
||||
width: 0,
|
||||
height: 0
|
||||
};
|
||||
|
||||
for (const dependency of metadata.dependencies) {
|
||||
frameDescriptor.frameDependenciesDiffs.push(metadata.frameId - dependency);
|
||||
}
|
||||
if (metadata.dependencies.length == 0) {
|
||||
frameDescriptor.width = metadata.width;
|
||||
frameDescriptor.height = metadata.height;
|
||||
}
|
||||
return frameDescriptor;
|
||||
}
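// Returns the size in bytes of the serialized frame descriptor: 1 byte for a
// continuation subframe, otherwise a 4-byte base header plus 1-2 bytes per
// frame dependency and 4 extra bytes for width/height when there are no
// dependencies.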
|
||||
|
||||
function additionalDataSize(descriptor) {
|
||||
if (!descriptor.beginningOfSubFrame) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
let size = 4;
|
||||
for (const fdiff of descriptor.frameDependenciesDiffs) {
|
||||
size += (fdiff >= (1 << 6)) ? 2 : 1;
|
||||
}
|
||||
if (descriptor.beginningOfSubFrame &&
|
||||
descriptor.frameDependenciesDiffs.length == 0 &&
|
||||
descriptor.width > 0 &&
|
||||
descriptor.height > 0) {
|
||||
size += 4;
|
||||
}
|
||||
|
||||
return size;
|
||||
}
|
||||
|
||||
// Compute the buffer reported in the additionalData field using the metadata
|
||||
// provided by a video frame.
|
||||
// Based on the webrtc::RtpDescriptorAuthentication() C++ function at
|
||||
// https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc
|
||||
function computeAdditionalData(videoFrame) {
|
||||
const kMaxSpatialLayers = 8;
|
||||
const kMaxTemporalLayers = 8;
|
||||
const kMaxNumFrameDependencies = 8;
|
||||
|
||||
const metadata = videoFrame.getMetadata();
|
||||
if (metadata.spatialIndex < 0 ||
|
||||
metadata.temporalIndex < 0 ||
|
||||
metadata.spatialIndex >= kMaxSpatialLayers ||
|
||||
metadata.temporalIndex >= kMaxTemporalLayers ||
|
||||
metadata.dependencies.length > kMaxNumFrameDependencies) {
|
||||
return new ArrayBuffer(0);
|
||||
}
|
||||
|
||||
const descriptor = createFrameDescriptor(videoFrame);
|
||||
const size = additionalDataSize(descriptor);
|
||||
const additionalData = new ArrayBuffer(size);
|
||||
const data = new Uint8Array(additionalData);
|
||||
|
||||
const kFlagBeginOfSubframe = 0x80;
|
||||
const kFlagEndOfSubframe = 0x40;
|
||||
const kFlagFirstSubframeV00 = 0x20;
|
||||
const kFlagLastSubframeV00 = 0x10;
|
||||
|
||||
const kFlagDependencies = 0x08;
|
||||
const kFlagMoreDependencies = 0x01;
|
||||
const kFlageXtendedOffset = 0x02;
|
||||
|
||||
let baseHeader =
|
||||
(descriptor.beginningOfSubFrame ? kFlagBeginOfSubframe : 0) |
|
||||
(descriptor.endOfSubFrame ? kFlagEndOfSubframe : 0);
|
||||
baseHeader |= kFlagFirstSubframeV00;
|
||||
baseHeader |= kFlagLastSubframeV00;
|
||||
|
||||
if (!descriptor.beginningOfSubFrame) {
|
||||
data[0] = baseHeader;
|
||||
return additionalData;
|
||||
}
|
||||
|
||||
data[0] =
|
||||
baseHeader |
|
||||
(descriptor.frameDependenciesDiffs.length == 0 ? 0 : kFlagDependencies) |
|
||||
descriptor.temporalLayer;
|
||||
data[1] = descriptor.spatialLayers;
|
||||
data[2] = descriptor.frameId & 0xFF;
|
||||
data[3] = descriptor.frameId >> 8;
|
||||
|
||||
const fdiffs = descriptor.frameDependenciesDiffs;
|
||||
let offset = 4;
|
||||
if (descriptor.beginningOfSubFrame &&
|
||||
fdiffs.length == 0 &&
|
||||
descriptor.width > 0 &&
|
||||
descriptor.height > 0) {
|
||||
data[offset++] = (descriptor.width >> 8);
|
||||
data[offset++] = (descriptor.width & 0xFF);
|
||||
data[offset++] = (descriptor.height >> 8);
|
||||
data[offset++] = (descriptor.height & 0xFF);
|
||||
}
|
||||
for (let i = 0; i < fdiffs.length; i++) {
|
||||
const extended = fdiffs[i] >= (1 << 6);
|
||||
const more = i < fdiffs.length - 1;
|
||||
data[offset++] = ((fdiffs[i] & 0x3f) << 2) |
|
||||
(extended ? kFlageXtendedOffset : 0) |
|
||||
(more ? kFlagMoreDependencies : 0);
|
||||
if (extended) {
|
||||
data[offset++] = fdiffs[i] >> 6;
|
||||
}
|
||||
}
|
||||
return additionalData;
|
||||
}
|
||||
|
||||
function verifyNonstandardAdditionalDataIfPresent(videoFrame) {
|
||||
if (videoFrame.additionalData === undefined)
|
||||
return;
|
||||
|
||||
const computedData = computeAdditionalData(videoFrame);
|
||||
assert_true(areArrayBuffersEqual(videoFrame.additionalData, computedData));
|
||||
}
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
onmessage = async (event) => {
|
||||
const readableStream = event.data.readableStream;
|
||||
const reader = readableStream.getReader();
|
||||
const result = await reader.read();
|
||||
|
||||
// Post an object with individual fields so that the test side has
|
||||
// values to verify the serialization of the RTCEncodedVideoFrame.
|
||||
postMessage({
|
||||
type: result.value.type,
|
||||
timestamp: result.value.timestamp,
|
||||
data: result.value.data,
|
||||
metadata: result.value.getMetadata(),
|
||||
});
|
||||
|
||||
// Send the frame twice to verify that the frame does not change after the
|
||||
// first serialization.
|
||||
postMessage(result.value);
|
||||
postMessage(result.value);
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
// META: script=/resources/WebIDLParser.js
|
||||
// META: script=/resources/idlharness.js
|
||||
// META: script=./RTCPeerConnection-helper.js
|
||||
|
||||
'use strict';
|
||||
|
||||
idl_test(
|
||||
['webrtc-encoded-transform'],
|
||||
['webrtc', 'streams', 'html', 'dom'],
|
||||
async idlArray => {
|
||||
idlArray.add_objects({
|
||||
// TODO: RTCEncodedVideoFrame
|
||||
// TODO: RTCEncodedAudioFrame
|
||||
RTCRtpSender: [`new RTCPeerConnection().addTransceiver('audio').sender`],
|
||||
RTCRtpReceiver: [`new RTCPeerConnection().addTransceiver('audio').receiver`],
|
||||
});
|
||||
}
|
||||
);
|
|
@ -0,0 +1,2 @@
|
|||
<!DOCTYPE html>
|
||||
<title>Empty doc</title>
|
|
@ -0,0 +1,30 @@
|
|||
// Based on similar tests in html/infrastructure/safe-passing-of-structured-data/shared-array-buffers/.
|
||||
"use strict";
|
||||
self.importScripts("/resources/testharness.js");
|
||||
|
||||
let state = "start in worker";
|
||||
|
||||
self.onmessage = e => {
|
||||
if (e.data === "start in window") {
|
||||
assert_equals(state, "start in worker");
|
||||
e.source.postMessage(state);
|
||||
state = "we are expecting a messageerror due to the window sending us an RTCEncodedVideoFrame or RTCEncodedAudioFrame";
|
||||
} else {
|
||||
e.source.postMessage(`worker onmessage was reached when in state "${state}" and data ${e.data}`);
|
||||
}
|
||||
};
|
||||
|
||||
self.onmessageerror = e => {
|
||||
if (state === "we are expecting a messageerror due to the window sending us an RTCEncodedVideoFrame or RTCEncodedAudioFrame") {
|
||||
assert_equals(e.constructor.name, "ExtendableMessageEvent", "type");
|
||||
assert_equals(e.data, null, "data");
|
||||
assert_equals(e.origin, self.origin, "origin");
|
||||
assert_not_equals(e.source, null, "source");
|
||||
assert_equals(e.ports.length, 0, "ports length");
|
||||
|
||||
state = "onmessageerror was received in worker";
|
||||
e.source.postMessage(state);
|
||||
} else {
|
||||
e.source.postMessage(`worker onmessageerror was reached when in state "${state}" and data ${e.data}`);
|
||||
}
|
||||
};
|
|
@ -0,0 +1,32 @@
|
|||
async function createConnections(test, setupLocalConnection, setupRemoteConnection, doNotCloseAutomatically) {
|
||||
const localConnection = new RTCPeerConnection();
|
||||
const remoteConnection = new RTCPeerConnection();
|
||||
|
||||
remoteConnection.onicecandidate = (event) => { localConnection.addIceCandidate(event.candidate); };
|
||||
localConnection.onicecandidate = (event) => { remoteConnection.addIceCandidate(event.candidate); };
|
||||
|
||||
await setupLocalConnection(localConnection);
|
||||
await setupRemoteConnection(remoteConnection);
|
||||
|
||||
const offer = await localConnection.createOffer();
|
||||
await localConnection.setLocalDescription(offer);
|
||||
await remoteConnection.setRemoteDescription(offer);
|
||||
|
||||
const answer = await remoteConnection.createAnswer();
|
||||
await remoteConnection.setLocalDescription(answer);
|
||||
await localConnection.setRemoteDescription(answer);
|
||||
|
||||
if (!doNotCloseAutomatically) {
|
||||
test.add_cleanup(() => {
|
||||
localConnection.close();
|
||||
remoteConnection.close();
|
||||
});
|
||||
}
|
||||
|
||||
return [localConnection, remoteConnection];
|
||||
}
|
||||
|
||||
function waitFor(test, duration)
|
||||
{
|
||||
return new Promise((resolve) => test.step_timeout(resolve, duration));
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
class MockRTCRtpTransformer {
|
||||
constructor(transformer) {
|
||||
this.context = transformer;
|
||||
this.start();
|
||||
}
|
||||
start()
|
||||
{
|
||||
this.reader = this.context.readable.getReader();
|
||||
this.writer = this.context.writable.getWriter();
|
||||
this.process();
|
||||
this.context.options.port.postMessage("started " + this.context.options.mediaType + " " + this.context.options.side);
|
||||
}
|
||||
|
||||
process()
|
||||
{
|
||||
this.reader.read().then(chunk => {
|
||||
if (chunk.done)
|
||||
return;
|
||||
|
||||
this.writer.write(chunk.value);
|
||||
this.process();
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
onrtctransform = (event) => {
|
||||
new MockRTCRtpTransformer(event.transformer);
|
||||
};
|
||||
|
||||
self.postMessage("registered");
|
|
@ -0,0 +1,65 @@
|
|||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<video id="video" autoplay playsInline></video>
|
||||
<script src="routines.js"></script>
|
||||
<script>
|
||||
function waitForMessage(test, port, data)
|
||||
{
|
||||
let gotMessage;
|
||||
const promise = new Promise((resolve, reject) => {
|
||||
gotMessage = resolve;
|
||||
test.step_timeout(() => { reject("did not get " + data) }, 5000);
|
||||
});
|
||||
port.onmessage = event => {
|
||||
if (event.data === data)
|
||||
gotMessage();
|
||||
};
|
||||
return promise;
|
||||
}
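// Attach worker-backed RTCRtpScriptTransforms to an audio sender and receiver,
// wait for both transforms to report that they have started, and then play the
// received stream.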
|
||||
|
||||
promise_test(async (test) => {
|
||||
worker = new Worker("script-audio-transform-worker.js");
|
||||
const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
|
||||
assert_equals(data, "registered");
|
||||
|
||||
const localStream = await navigator.mediaDevices.getUserMedia({audio: true});
|
||||
|
||||
const senderChannel = new MessageChannel;
|
||||
const receiverChannel = new MessageChannel;
|
||||
const senderTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', mediaType:'audio', side:'sender', port:senderChannel.port2}, [senderChannel.port2]);
|
||||
const receiverTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', mediaType:'audio', side:'receiver', port:receiverChannel.port2}, [receiverChannel.port2]);
|
||||
senderTransform.port = senderChannel.port1;
|
||||
receiverTransform.port = receiverChannel.port1;
|
||||
|
||||
promise1 = waitForMessage(test, senderTransform.port, "started audio sender");
|
||||
promise2 = waitForMessage(test, receiverTransform.port, "started audio receiver");
|
||||
|
||||
const stream = await new Promise((resolve, reject) => {
|
||||
createConnections(test, (firstConnection) => {
|
||||
sender = firstConnection.addTrack(localStream.getAudioTracks()[0], localStream);
|
||||
sender.transform = senderTransform;
|
||||
}, (secondConnection) => {
|
||||
secondConnection.ontrack = (trackEvent) => {
|
||||
receiver = trackEvent.receiver;
|
||||
receiver.transform = receiverTransform;
|
||||
resolve(trackEvent.streams[0]);
|
||||
};
|
||||
});
|
||||
test.step_timeout(() => reject("Test timed out"), 5000);
|
||||
});
|
||||
|
||||
await promise1;
|
||||
await promise2;
|
||||
|
||||
video.srcObject = stream;
|
||||
return video.play();
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,39 @@
|
|||
function appendToBuffer(buffer, value) {
|
||||
const result = new ArrayBuffer(buffer.byteLength + 1);
|
||||
const byteResult = new Uint8Array(result);
|
||||
byteResult.set(new Uint8Array(buffer), 0);
|
||||
byteResult[buffer.byteLength] = value;
|
||||
return result;
|
||||
}
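// The sender transforms tag each outgoing frame with a trailing marker byte (1
// for 'sender1', 2 for 'sender2'); the receiver transform strips the marker
// and reports when frames tagged by 'sender2' start arriving.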
|
||||
|
||||
onrtctransform = (event) => {
|
||||
const transformer = event.transformer;
|
||||
|
||||
transformer.reader = transformer.readable.getReader();
|
||||
transformer.writer = transformer.writable.getWriter();
|
||||
|
||||
function process(transformer)
|
||||
{
|
||||
transformer.reader.read().then(chunk => {
|
||||
if (chunk.done)
|
||||
return;
|
||||
if (transformer.options.name === 'sender1')
|
||||
chunk.value.data = appendToBuffer(chunk.value.data, 1);
|
||||
else if (transformer.options.name === 'sender2')
|
||||
chunk.value.data = appendToBuffer(chunk.value.data, 2);
|
||||
else {
|
||||
const value = new Uint8Array(chunk.value.data)[chunk.value.data.byteLength - 1];
|
||||
if (value !== 1 && value !== 2)
|
||||
self.postMessage("unexpected value: " + value);
|
||||
else if (value === 2)
|
||||
self.postMessage("got value 2");
|
||||
chunk.value.data = chunk.value.data.slice(0, chunk.value.data.byteLength - 1);
|
||||
}
|
||||
transformer.writer.write(chunk.value);
|
||||
process(transformer);
|
||||
});
|
||||
}
|
||||
|
||||
process(transformer);
|
||||
};
|
||||
self.postMessage("registered");
|
|
@ -0,0 +1,57 @@
|
|||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<video id="video1" autoplay controls playsinline></video>
|
||||
<script src ="routines.js"></script>
|
||||
<script>
|
||||
async function waitForMessage(worker, data)
|
||||
{
|
||||
while (true) {
|
||||
const received = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
|
||||
if (data === received)
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
promise_test(async (test) => {
|
||||
worker = new Worker('script-change-transform-worker.js');
|
||||
const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
|
||||
assert_equals(data, "registered");
|
||||
|
||||
const localStream = await navigator.mediaDevices.getUserMedia({video: true});
|
||||
|
||||
let sender, receiver;
|
||||
const senderTransform1 = new RTCRtpScriptTransform(worker, {name:'sender1'});
|
||||
const senderTransform2 = new RTCRtpScriptTransform(worker, {name:'sender2'});
|
||||
const receiverTransform = new RTCRtpScriptTransform(worker, {name:'receiver'});
|
||||
|
||||
const stream = await new Promise((resolve, reject) => {
|
||||
createConnections(test, (firstConnection) => {
|
||||
sender = firstConnection.addTrack(localStream.getVideoTracks()[0], localStream);
|
||||
firstConnection.getTransceivers()[0].setCodecPreferences([{mimeType: "video/VP8", clockRate: 90000}]);
|
||||
sender.transform = senderTransform1;
|
||||
}, (secondConnection) => {
|
||||
secondConnection.ontrack = (trackEvent) => {
|
||||
receiver = trackEvent.receiver;
|
||||
receiver.transform = receiverTransform;
|
||||
resolve(trackEvent.streams[0]);
|
||||
};
|
||||
});
|
||||
test.step_timeout(() => reject("Test timed out"), 5000);
|
||||
});
|
||||
|
||||
video1.srcObject = stream;
|
||||
await video1.play();
|
||||
|
||||
const updatePromise = new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
|
||||
sender.transform = senderTransform2;
|
||||
assert_equals(await updatePromise, "got value 2");
|
||||
}, "change sender transform");
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,90 @@
|
|||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<video controls id="video" autoplay></video>
|
||||
<canvas id="canvas" width="640" height="480"></canvas>
|
||||
<script src ="routines.js"></script>
|
||||
<script>
|
||||
function grabFrameData(x, y, w, h)
|
||||
{
|
||||
canvas.width = video.videoWidth;
|
||||
canvas.height = video.videoHeight;
|
||||
|
||||
canvas.getContext('2d').drawImage(video, x, y, w, h, x, y, w, h);
|
||||
return canvas.getContext('2d').getImageData(x, y, w, h).data;
|
||||
}
|
||||
|
||||
function getCircleImageData()
|
||||
{
|
||||
return grabFrameData(450, 100, 150, 100);
|
||||
}
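// Polls the decoded video up to 20 times, 200 ms apart, until the sampled
// region has (or has not) changed relative to the reference data, depending on
// shouldBeUpdated.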
|
||||
|
||||
async function checkVideoIsUpdated(test, shouldBeUpdated, count, referenceData)
|
||||
{
|
||||
if (count === undefined)
|
||||
count = 0;
|
||||
else if (count >= 20)
|
||||
return Promise.reject("checkVideoIsUpdated timed out :" + shouldBeUpdated + " " + count);
|
||||
|
||||
if (referenceData === undefined)
|
||||
referenceData = getCircleImageData();
|
||||
|
||||
await waitFor(test, 200);
|
||||
const newData = getCircleImageData();
|
||||
|
||||
if (shouldBeUpdated === (JSON.stringify(referenceData) !== JSON.stringify(newData)))
|
||||
return;
|
||||
|
||||
await checkVideoIsUpdated(test, shouldBeUpdated, ++count, newData);
|
||||
}
|
||||
|
||||
promise_test(async (test) => {
|
||||
const localStream = await navigator.mediaDevices.getUserMedia({video: true});
|
||||
const senderTransform = new SFrameTransform({ compatibilityMode: "H264" });
|
||||
const receiverTransform = new SFrameTransform({ compatibilityMode: "H264" });
|
||||
await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]).then(key => {
|
||||
senderTransform.setEncryptionKey(key);
|
||||
receiverTransform.setEncryptionKey(key);
|
||||
});
|
||||
|
||||
let sender, receiver;
|
||||
const stream = await new Promise((resolve, reject) => {
|
||||
createConnections(test, (firstConnection) => {
|
||||
pc1 = firstConnection;
|
||||
sender = firstConnection.addTrack(localStream.getVideoTracks()[0], localStream);
|
||||
sender.transform = senderTransform;
|
||||
}, (secondConnection) => {
|
||||
pc2 = secondConnection;
|
||||
secondConnection.ontrack = (trackEvent) => {
|
||||
receiver = trackEvent.receiver;
|
||||
// Do not set the receiver transform yet; the test attaches it later, after a short delay.
|
||||
resolve(trackEvent.streams[0]);
|
||||
};
|
||||
}, {
|
||||
observeOffer : (offer) => {
|
||||
const lines = offer.sdp.split('\r\n');
|
||||
const h264Lines = lines.filter(line => line.indexOf("a=fmtp") === 0 && line.indexOf("42e01f") !== -1);
|
||||
const baselineNumber = h264Lines[0].substring(6).split(' ')[0];
|
||||
offer.sdp = lines.filter(line => {
|
||||
return (line.indexOf('a=fmtp') === -1 && line.indexOf('a=rtcp-fb') === -1 && line.indexOf('a=rtpmap') === -1) || line.indexOf(baselineNumber) !== -1;
|
||||
}).join('\r\n');
|
||||
}
|
||||
});
|
||||
test.step_timeout(() => reject("Test timed out"), 5000);
|
||||
});
|
||||
|
||||
video.srcObject = stream;
|
||||
video.play();
|
||||
|
||||
// We set the receiver transform here so that the decoder probably tried to decode sframe content.
|
||||
test.step_timeout(() => receiver.transform = receiverTransform, 50);
|
||||
await checkVideoIsUpdated(test, true);
|
||||
}, "video exchange with late receiver transform");
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,24 @@
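// Worker loaded as 'script-metadata-transform-worker.js' by the metadata tests below: for
// each attached RTCRtpScriptTransform it posts the transform name, timestamp and
// getMetadata() result of the first frame it sees, then keeps forwarding frames unchanged.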
onrtctransform = (event) => {
    const transformer = event.transformer;

    transformer.reader = transformer.readable.getReader();
    transformer.writer = transformer.writable.getWriter();

    let isFirstFrame = true;
    function process(transformer)
    {
        transformer.reader.read().then(chunk => {
            if (chunk.done)
                return;

            if (isFirstFrame) {
                isFirstFrame = false;
                self.postMessage({ name: transformer.options.name, timestamp: chunk.value.timestamp, metadata: chunk.value.getMetadata() });
            }
            transformer.writer.write(chunk.value);
            process(transformer);
        });
    }
    process(transformer);
};
self.postMessage("registered");
@ -0,0 +1,85 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<video id="video1" autoplay></video>
<script src="routines.js"></script>
<script>
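// gatherMetadata() attaches worker-based RTCRtpScriptTransforms on both the sender and the
// receiver, then resolves with the metadata and timestamp reported for the first sender and
// receiver frames so the tests below can compare them.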
async function waitForMessage(worker, data)
{
    while (true) {
        const received = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
        if (data === received)
            return;
    }
}

async function gatherMetadata(test, audio)
{
    worker = new Worker('script-metadata-transform-worker.js');
    const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
    assert_equals(data, "registered");

    const localStream = await navigator.mediaDevices.getUserMedia({audio: audio, video: !audio});

    let sender, receiver;
    const senderTransform = new RTCRtpScriptTransform(worker, {name:'sender'});
    const receiverTransform = new RTCRtpScriptTransform(worker, {name:'receiver'});

    await new Promise((resolve, reject) => {
        createConnections(test, (firstConnection) => {
            pc1 = firstConnection;
            sender = firstConnection.addTrack(localStream.getTracks()[0], localStream);
            sender.transform = senderTransform;
        }, (secondConnection) => {
            pc2 = secondConnection;
            secondConnection.ontrack = (trackEvent) => {
                receiver = trackEvent.receiver;
                receiver.transform = receiverTransform;
                resolve(trackEvent.streams[0]);
            };
        });
        test.step_timeout(() => reject("Test timed out"), 5000);
    });

    return new Promise((resolve, reject) => {
        let senderMetadata, senderTimestamp;
        worker.onmessage = (event) => {
            if (event.data.name === 'sender') {
                senderMetadata = event.data.metadata;
                senderTimestamp = event.data.timestamp;
            } else if (event.data.name === 'receiver')
                resolve([senderMetadata, senderTimestamp, event.data.metadata, event.data.timestamp]);
        };
        test.step_timeout(() => reject("Metadata test timed out"), 5000);
    });
}

promise_test(async (test) => {
    const [senderMetadata, senderTimestamp, receiverMetadata, receiverTimestamp] = await gatherMetadata(test, true);

    assert_equals(senderTimestamp, receiverTimestamp, "timestamp");
    assert_true(!!senderMetadata.synchronizationSource, "ssrc");
    assert_equals(senderMetadata.synchronizationSource, receiverMetadata.synchronizationSource, "ssrc");
    assert_array_equals(senderMetadata.contributingSources, receiverMetadata.contributingSources, "csrc");
}, "audio exchange with transform");

promise_test(async (test) => {
    // Request video (audio === false) so that video-specific metadata (width/height/indices) is populated.
    const [senderMetadata, senderTimestamp, receiverMetadata, receiverTimestamp] = await gatherMetadata(test, false);

    assert_equals(senderTimestamp, receiverTimestamp, "timestamp");
    assert_true(!!senderMetadata.synchronizationSource, "ssrc");
    assert_equals(senderMetadata.synchronizationSource, receiverMetadata.synchronizationSource, "ssrc");
    assert_array_equals(senderMetadata.contributingSources, receiverMetadata.contributingSources, "csrc");
    assert_equals(senderMetadata.height, receiverMetadata.height, "height");
    assert_equals(senderMetadata.width, receiverMetadata.width, "width");
    assert_equals(senderMetadata.spatialIndex, receiverMetadata.spatialIndex, "spatialIndex");
    assert_equals(senderMetadata.temporalIndex, receiverMetadata.temporalIndex, "temporalIndex");
}, "video exchange with transform");
</script>
</body>
</html>
@ -0,0 +1,25 @@
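// Worker loaded as 'script-transform-worker.js': it echoes messages received on the
// transferred MessagePort, posts "started" when a transform is attached, reports whether
// each incoming frame is an RTCEncodedVideoFrame or an RTCEncodedAudioFrame, and forwards
// every frame unchanged.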
onrtctransform = (event) => {
    const transformer = event.transformer;
    transformer.options.port.onmessage = (event) => transformer.options.port.postMessage(event.data);

    self.postMessage("started");
    transformer.reader = transformer.readable.getReader();
    transformer.writer = transformer.writable.getWriter();

    function process(transformer)
    {
        transformer.reader.read().then(chunk => {
            if (chunk.done)
                return;
            if (chunk.value instanceof RTCEncodedVideoFrame)
                self.postMessage("video chunk");
            else if (chunk.value instanceof RTCEncodedAudioFrame)
                self.postMessage("audio chunk");
            transformer.writer.write(chunk.value);
            process(transformer);
        });
    }

    process(transformer);
};
self.postMessage("registered");
@ -0,0 +1,148 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<video id="video1" autoplay></video>
<video id="video2" autoplay></video>
<script src="routines.js"></script>
<script>
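// These tests exercise RTCRtpScriptTransform backed by 'script-transform-worker.js':
// MessagePort messaging with the transformer, the rule that an already attached transform
// cannot be attached elsewhere, and end-to-end audio/video exchange through the transform.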
async function waitForMessage(worker, data)
{
    while (true) {
        const received = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
        if (data === received)
            return;
    }
}

promise_test(async (test) => {
    worker = new Worker('script-transform-worker.js');
    const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
    assert_equals(data, "registered");

    const channel = new MessageChannel;
    const transform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', port: channel.port2}, [channel.port2]);
    transform.port = channel.port1;
    const promise = new Promise(resolve => transform.port.onmessage = (event) => resolve(event.data));
    transform.port.postMessage("test");
    assert_equals(await promise, "test");
}, "transform messaging");

promise_test(async (test) => {
    worker = new Worker('script-transform-worker.js');
    const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
    assert_equals(data, "registered");

    const pc = new RTCPeerConnection();

    const senderChannel = new MessageChannel;
    const receiverChannel = new MessageChannel;
    const senderTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', port: senderChannel.port2}, [senderChannel.port2]);
    const receiverTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', port: receiverChannel.port2}, [receiverChannel.port2]);
    senderTransform.port = senderChannel.port1;
    receiverTransform.port = receiverChannel.port1;

    const sender1 = pc.addTransceiver('audio').sender;
    const sender2 = pc.addTransceiver('video').sender;
    const receiver1 = pc.getReceivers()[0];
    const receiver2 = pc.getReceivers()[1];

    sender1.transform = senderTransform;
    receiver1.transform = receiverTransform;
    assert_throws_dom("InvalidStateError", () => sender2.transform = senderTransform);
    assert_throws_dom("InvalidStateError", () => receiver2.transform = receiverTransform);

    sender1.transform = senderTransform;
    receiver1.transform = receiverTransform;

    sender1.transform = null;
    receiver1.transform = null;
}, "Cannot reuse attached transforms");

promise_test(async (test) => {
    worker = new Worker('script-transform-worker.js');
    const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
    assert_equals(data, "registered");
    const localStream = await navigator.mediaDevices.getUserMedia({audio: true});

    const senderChannel = new MessageChannel;
    const receiverChannel = new MessageChannel;
    let sender, receiver;
    const senderTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', port: senderChannel.port2}, [senderChannel.port2]);
    const receiverTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', port: receiverChannel.port2}, [receiverChannel.port2]);
    senderTransform.port = senderChannel.port1;
    receiverTransform.port = receiverChannel.port1;

    const startedPromise = new Promise(resolve => worker.onmessage = (event) => resolve(event.data));

    const stream = await new Promise((resolve, reject) => {
        createConnections(test, (firstConnection) => {
            pc1 = firstConnection;
            sender = firstConnection.addTrack(localStream.getAudioTracks()[0], localStream);
            sender.transform = senderTransform;
        }, (secondConnection) => {
            pc2 = secondConnection;
            secondConnection.ontrack = (trackEvent) => {
                receiver = trackEvent.receiver;
                receiver.transform = receiverTransform;
                resolve(trackEvent.streams[0]);
            };
        });
        test.step_timeout(() => reject("Test timed out"), 5000);
    });

    assert_equals(await startedPromise, "started");

    await waitForMessage(worker, "audio chunk");

    video1.srcObject = stream;
    await video1.play();
}, "audio exchange with transform");

promise_test(async (test) => {
    worker = new Worker('script-transform-worker.js');
    const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
    assert_equals(data, "registered");

    const localStream = await navigator.mediaDevices.getUserMedia({video: true});

    const senderChannel = new MessageChannel;
    const receiverChannel = new MessageChannel;
    let sender, receiver;
    const senderTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', port: senderChannel.port2}, [senderChannel.port2]);
    const receiverTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', port: receiverChannel.port2}, [receiverChannel.port2]);
    senderTransform.port = senderChannel.port1;
    receiverTransform.port = receiverChannel.port1;

    const startedPromise = new Promise(resolve => worker.onmessage = (event) => resolve(event.data));

    const stream = await new Promise((resolve, reject) => {
        createConnections(test, (firstConnection) => {
            pc1 = firstConnection;
            sender = firstConnection.addTrack(localStream.getVideoTracks()[0], localStream);
            sender.transform = senderTransform;
        }, (secondConnection) => {
            pc2 = secondConnection;
            secondConnection.ontrack = (trackEvent) => {
                receiver = trackEvent.receiver;
                receiver.transform = receiverTransform;
                resolve(trackEvent.streams[0]);
            };
        });
        test.step_timeout(() => reject("Test timed out"), 5000);
    });

    assert_equals(await startedPromise, "started");

    await waitForMessage(worker, "video chunk");

    video1.srcObject = stream;
    await video1.play();
}, "video exchange with transform");
</script>
</body>
</html>
@ -0,0 +1,22 @@
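// Worker loaded as 'script-write-twice-transform-worker.js': it deliberately writes every
// frame to the writable side twice; the test that loads it checks that video exchange still
// works in that case.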
onrtctransform = (event) => {
    const transformer = event.transformer;

    self.postMessage("started");

    transformer.reader = transformer.readable.getReader();
    transformer.writer = transformer.writable.getWriter();
    function process(transformer)
    {
        transformer.reader.read().then(chunk => {
            if (chunk.done)
                return;

            transformer.writer.write(chunk.value);
            transformer.writer.write(chunk.value);
            process(transformer);
        });
    }

    process(transformer);
};
self.postMessage("registered");
@ -0,0 +1,58 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<video id="video1" autoplay></video>
<video id="video2" autoplay></video>
<script src="routines.js"></script>
<script>
async function waitForMessage(worker, data)
{
    while (true) {
        const received = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
        if (data === received)
            return;
    }
}

promise_test(async (test) => {
    worker = new Worker('script-write-twice-transform-worker.js');
    const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
    assert_equals(data, "registered");

    const localStream = await navigator.mediaDevices.getUserMedia({video: true});

    let sender, receiver;
    const senderTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', side:'sender', role:'encrypt'});
    const receiverTransform = new RTCRtpScriptTransform(worker, {name:'MockRTCRtpTransform', side:'receiver', role:'decrypt'});

    const startedPromise = new Promise(resolve => worker.onmessage = (event) => resolve(event.data));

    const stream = await new Promise((resolve, reject) => {
        createConnections(test, (firstConnection) => {
            pc1 = firstConnection;
            sender = firstConnection.addTrack(localStream.getVideoTracks()[0], localStream);
            sender.transform = senderTransform;
        }, (secondConnection) => {
            pc2 = secondConnection;
            secondConnection.ontrack = (trackEvent) => {
                receiver = trackEvent.receiver;
                receiver.transform = receiverTransform;
                resolve(trackEvent.streams[0]);
            };
        });
        test.step_timeout(() => reject("Test timed out"), 5000);
    });

    assert_equals(await startedPromise, "started");

    video1.srcObject = stream;
    await video1.play();
}, "video exchange with write twice transform");
</script>
</body>
</html>
@ -0,0 +1,65 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<video id="audio" autoplay playsInline></video>
<script src="routines.js"></script>
<script>
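// The first test checks which key identifiers SFrameTransform.setEncryptionKey() accepts:
// small integers and 64-bit BigInt values are accepted, while 2^64 is rejected with a
// RangeError. The second test exchanges audio after registering several keys (with various
// key IDs) on the receiver.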
let sender, receiver;
let key1, key2, key3, key4;

promise_test(async (test) => {
    const key = await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);
    const transform = new SFrameTransform;

    await transform.setEncryptionKey(key);
    await transform.setEncryptionKey(key, 1);

    await transform.setEncryptionKey(key, BigInt('18446744073709551613'));
    await transform.setEncryptionKey(key, BigInt('18446744073709551614'));
    await transform.setEncryptionKey(key, BigInt('18446744073709551615'));
    await transform.setEncryptionKey(key, BigInt('18446744073709551616')).then(assert_unreached, (e) => {
        assert_true(e instanceof RangeError);
        assert_equals(e.message, "Not a 64 bits integer");
    });
}, "Passing various key IDs");

promise_test(async (test) => {
    key1 = await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);
    key2 = await crypto.subtle.importKey("raw", new Uint8Array([144, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);
    key3 = await crypto.subtle.importKey("raw", new Uint8Array([145, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);
    key4 = await crypto.subtle.importKey("raw", new Uint8Array([146, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);

    const localStream = await navigator.mediaDevices.getUserMedia({audio: true});
    const stream = await new Promise((resolve, reject) => {
        const connections = createConnections(test, (firstConnection) => {
            sender = firstConnection.addTrack(localStream.getAudioTracks()[0], localStream);
            let transform = new SFrameTransform;
            transform.setEncryptionKey(key1);
            sender.transform = transform;
        }, (secondConnection) => {
            secondConnection.ontrack = (trackEvent) => {
                let transform = new SFrameTransform;
                transform.setEncryptionKey(key1);
                transform.setEncryptionKey(key2);
                transform.setEncryptionKey(key3, 1000);
                transform.setEncryptionKey(key4, BigInt('18446744073709551615'));
                receiver = trackEvent.receiver;
                receiver.transform = transform;
                resolve(trackEvent.streams[0]);
            };
        });

        test.step_timeout(() => reject("Test timed out"), 5000);
    });

    audio.srcObject = stream;
    await audio.play();
}, "Audio exchange with SFrame setup");
</script>
</body>
</html>
@ -0,0 +1,50 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<script>

async function getEncryptedData(transform)
{
    const chunk = await transform.readable.getReader().read();
    const value = new Uint8Array(chunk.value);
    return [...value];
}

promise_test(async (test) => {
    const key = await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);
    const transform1 = new SFrameTransform;
    const transform2 = new SFrameTransform;
    const transform3 = new SFrameTransform;

    await transform1.setEncryptionKey(key);
    await transform2.setEncryptionKey(key);
    await transform3.setEncryptionKey(key);

    const buffer1 = new ArrayBuffer(10);
    const buffer2 = new ArrayBuffer(11);
    const view1 = new Uint8Array(buffer1);
    const view2 = new Uint8Array(buffer2, 1);
    for (let i = 0; i < buffer1.byteLength; ++i) {
        view1[i] = i;
        view2[i] = i;
    }

    transform1.writable.getWriter().write(buffer1);
    transform2.writable.getWriter().write(view1);
    transform3.writable.getWriter().write(view2);

    const result1 = await getEncryptedData(transform1);
    const result2 = await getEncryptedData(transform2);
    const result3 = await getEncryptedData(transform3);

    assert_array_equals(result1, result2, "result2");
    assert_array_equals(result1, result3, "result3");
}, "Uint8Array as input to SFrameTransform");
</script>
</body>
</html>
@ -0,0 +1,57 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<video id="video1" controls autoplay></video>
<script src="routines.js"></script>
<script>
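// This test encrypts video with an SFrameTransform (H264 compatibility mode) on the sender
// and decrypts it with a worker-side transform ('sframe-transform-worker.js') on the
// receiver, then checks that playback can start.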
async function waitForMessage(worker, data)
{
    while (true) {
        const received = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
        if (data === received)
            return;
    }
}

promise_test(async (test) => {
    worker = new Worker('sframe-transform-worker.js');
    const data = await new Promise(resolve => worker.onmessage = (event) => resolve(event.data));
    assert_equals(data, "registered");
    const localStream = await navigator.mediaDevices.getUserMedia({ video: true });

    let sender, receiver;
    const senderTransform = new SFrameTransform({ compatibilityMode: "H264" });
    const receiverTransform = new RTCRtpScriptTransform(worker, "SFrameRTCRtpTransform");

    const key = await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);
    senderTransform.setEncryptionKey(key);

    const startedPromise = new Promise(resolve => worker.onmessage = (event) => resolve(event.data));

    const stream = await new Promise((resolve, reject) => {
        createConnections(test, (firstConnection) => {
            pc1 = firstConnection;
            sender = firstConnection.addTrack(localStream.getTracks()[0], localStream);
            sender.transform = senderTransform;
        }, (secondConnection) => {
            pc2 = secondConnection;
            secondConnection.ontrack = (trackEvent) => {
                receiver = trackEvent.receiver;
                receiver.transform = receiverTransform;
                resolve(trackEvent.streams[0]);
            };
        });
        test.step_timeout(() => reject("Test timed out"), 5000);
    });

    video1.srcObject = stream;
    await video1.play();
}, "video exchange with SFrame transform in worker");
</script>
</body>
</html>
@ -0,0 +1,19 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<iframe src="." id="frame"></iframe>
<script>
promise_test(async (test) => {
    const frameDOMException = frame.contentWindow.DOMException;
    const transform = new frame.contentWindow.SFrameTransform;
    frame.remove();
    assert_throws_dom("InvalidStateError", frameDOMException, () => transform.readable);
});
</script>
</body>
</html>
@ -0,0 +1,7 @@
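// Worker loaded as 'sframe-transform-worker.js': on rtctransform it pipes the transformer's
// readable through an SFrameTransform configured for decryption with H264 compatibility
// mode and writes the result back to the transformer's writable.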
onrtctransform = (event) => {
    const sframeTransform = new SFrameTransform({ role: "decrypt", authenticationSize: "10", compatibilityMode: "H264" });
    crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]).then(key => sframeTransform.setEncryptionKey(key));
    const transformer = event.transformer;
    transformer.readable.pipeThrough(sframeTransform).pipeTo(transformer.writable);
};
self.postMessage("registered");
@ -0,0 +1,141 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body>
<script>
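// These tests exercise SFrameTransform directly: attach/detach semantics on RTCRtpSender
// and RTCRtpReceiver, the readable/writable attributes and their locking behavior, an
// encrypt/decrypt round trip over raw ArrayBuffers, and error handling for undecryptable or
// non-buffer input.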
promise_test(async (test) => {
    const pc = new RTCPeerConnection();
    const senderTransform = new SFrameTransform();
    const receiverTransform = new SFrameTransform();
    const sender1 = pc.addTransceiver('audio').sender;
    const sender2 = pc.addTransceiver('video').sender;
    const receiver1 = pc.getReceivers()[0];
    const receiver2 = pc.getReceivers()[1];

    sender1.transform = senderTransform;
    receiver1.transform = receiverTransform;
    assert_throws_dom("InvalidStateError", () => sender2.transform = senderTransform);
    assert_throws_dom("InvalidStateError", () => receiver2.transform = receiverTransform);

    sender1.transform = senderTransform;
    receiver1.transform = receiverTransform;

    sender1.transform = null;
    receiver1.transform = null;
}, "Cannot reuse attached transforms");

test(() => {
    const senderTransform = new SFrameTransform();

    assert_true(senderTransform.readable instanceof ReadableStream);
    assert_true(senderTransform.writable instanceof WritableStream);
}, "SFrameTransform exposes readable and writable");

promise_test(async (test) => {
    const pc = new RTCPeerConnection();
    const senderTransform = new SFrameTransform();
    const receiverTransform = new SFrameTransform();
    const sender1 = pc.addTransceiver('audio').sender;
    const sender2 = pc.addTransceiver('video').sender;
    const receiver1 = pc.getReceivers()[0];
    const receiver2 = pc.getReceivers()[1];

    assert_false(senderTransform.readable.locked, "sender readable before");
    assert_false(senderTransform.writable.locked, "sender writable before");
    assert_false(receiverTransform.readable.locked, "receiver readable before");
    assert_false(receiverTransform.writable.locked, "receiver writable before");

    sender1.transform = senderTransform;
    receiver1.transform = receiverTransform;

    assert_true(senderTransform.readable.locked, "sender readable during");
    assert_true(senderTransform.writable.locked, "sender writable during");
    assert_true(receiverTransform.readable.locked, "receiver readable during");
    assert_true(receiverTransform.writable.locked, "receiver writable during");

    sender1.transform = null;
    receiver1.transform = null;

    assert_true(senderTransform.readable.locked, "sender readable after");
    assert_true(senderTransform.writable.locked, "sender writable after");
    assert_true(receiverTransform.readable.locked, "receiver readable after");
    assert_true(receiverTransform.writable.locked, "receiver writable after");
}, "readable/writable are locked when attached and after being attached");

promise_test(async (test) => {
    const key = await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);

    const senderTransform = new SFrameTransform({ role: 'encrypt', authenticationSize: 10 });
    senderTransform.setEncryptionKey(key);

    const receiverTransform = new SFrameTransform({ role: 'decrypt', authenticationSize: 10 });
    receiverTransform.setEncryptionKey(key);

    const writer = senderTransform.writable.getWriter();
    const reader = receiverTransform.readable.getReader();

    senderTransform.readable.pipeTo(receiverTransform.writable);

    const sent = new ArrayBuffer(8);
    const view = new Int8Array(sent);
    for (let cptr = 0; cptr < sent.byteLength; ++cptr)
        view[cptr] = cptr;

    writer.write(sent);
    const received = await reader.read();

    assert_equals(received.value.byteLength, 8);
    const view2 = new Int8Array(received.value);
    for (let cptr = 0; cptr < sent.byteLength; ++cptr)
        assert_equals(view2[cptr], view[cptr]);
}, "SFrame with array buffer - authentication size 10");

promise_test(async (test) => {
    const key = await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);

    const senderTransform = new SFrameTransform({ role: 'encrypt', authenticationSize: 10 });
    const senderWriter = senderTransform.writable.getWriter();
    const senderReader = senderTransform.readable.getReader();

    const receiverTransform = new SFrameTransform({ role: 'decrypt', authenticationSize: 10 });
    const receiverWriter = receiverTransform.writable.getWriter();
    const receiverReader = receiverTransform.readable.getReader();

    senderTransform.setEncryptionKey(key);
    receiverTransform.setEncryptionKey(key);

    const chunk = new ArrayBuffer(8);

    // Decryption should fail, leading to an empty array buffer.
    await receiverWriter.write(chunk);
    let received = await receiverReader.read();
    assert_equals(received.value.byteLength, 0);

    // We write again, but this time with a chunk we can decrypt.
    await senderWriter.write(chunk);
    const encrypted = await senderReader.read();
    await receiverWriter.write(encrypted.value);
    received = await receiverReader.read();
    assert_equals(received.value.byteLength, 8);
}, "SFrame decryption with array buffer that is too small");

promise_test(async (test) => {
    const key = await crypto.subtle.importKey("raw", new Uint8Array([143, 77, 43, 10, 72, 19, 37, 67, 236, 219, 24, 93, 26, 165, 91, 178]), "HKDF", false, ["deriveBits", "deriveKey"]);

    const receiverTransform = new SFrameTransform({ role: 'decrypt', authenticationSize: 10 });
    const receiverWriter = receiverTransform.writable.getWriter();
    receiverTransform.setEncryptionKey(key);

    // Decryption should fail, leading to the transform being errored.
    await promise_rejects_js(test, TypeError, receiverWriter.write({ }));
    await promise_rejects_js(test, TypeError, receiverWriter.closed);
}, "SFrame transform gets errored if trying to process unexpected value types");

</script>
</body>
</html>