Update servo media to include global mute support

This commit is contained in:
Fernando Jiménez Moreno 2019-07-05 11:01:19 +02:00
parent 0dc17af7f0
commit 8e0160fa71
9 changed files with 363 additions and 219 deletions

View file

@@ -23,6 +23,7 @@ use crate::dom::promise::Promise;
use crate::dom::window::Window;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use msg::constellation_msg::BrowsingContextId;
use servo_media::audio::context::{LatencyCategory, ProcessingState, RealTimeAudioContextOptions};
use std::rc::Rc;
@@ -39,10 +40,15 @@ pub struct AudioContext {
impl AudioContext {
#[allow(unrooted_must_root)]
// https://webaudio.github.io/web-audio-api/#AudioContext-constructors
fn new_inherited(options: &AudioContextOptions) -> AudioContext {
fn new_inherited(
options: &AudioContextOptions,
browsing_context_id: BrowsingContextId,
) -> AudioContext {
// Steps 1-3.
let context =
BaseAudioContext::new_inherited(BaseAudioContextOptions::AudioContext(options.into()));
let context = BaseAudioContext::new_inherited(
BaseAudioContextOptions::AudioContext(options.into()),
browsing_context_id,
);
// Step 4.1.
let latency_hint = options.latencyHint;
@@ -64,7 +70,8 @@ impl AudioContext {
#[allow(unrooted_must_root)]
pub fn new(window: &Window, options: &AudioContextOptions) -> DomRoot<AudioContext> {
let context = AudioContext::new_inherited(options);
let browsing_context_id = window.window_proxy().top_level_browsing_context_id().0;
let context = AudioContext::new_inherited(options, browsing_context_id);
let context = reflect_dom_object(Box::new(context), window, AudioContextBinding::Wrap);
context.resume();
context
@@ -128,7 +135,7 @@ impl AudioContextMethods for AudioContext {
let window = DomRoot::downcast::<Window>(self.global()).unwrap();
let task_source = window.task_manager().dom_manipulation_task_source();
let trusted_promise = TrustedPromise::new(promise.clone());
match self.context.audio_context_impl().suspend() {
match self.context.audio_context_impl().lock().unwrap().suspend() {
Ok(_) => {
let base_context = Trusted::new(&self.context);
let context = Trusted::new(self);
@@ -189,7 +196,7 @@ impl AudioContextMethods for AudioContext {
let window = DomRoot::downcast::<Window>(self.global()).unwrap();
let task_source = window.task_manager().dom_manipulation_task_source();
let trusted_promise = TrustedPromise::new(promise.clone());
match self.context.audio_context_impl().close() {
match self.context.audio_context_impl().lock().unwrap().close() {
Ok(_) => {
let base_context = Trusted::new(&self.context);
let context = Trusted::new(self);

View file

@@ -58,7 +58,11 @@ impl AudioNode {
mode: options.mode.into(),
interpretation: options.interpretation.into(),
};
let node_id = context.audio_context_impl().create_node(node_type, ch);
let node_id = context
.audio_context_impl()
.lock()
.unwrap()
.create_node(node_type, ch);
Ok(AudioNode::new_inherited_for_id(
node_id,
context,
@@ -90,6 +94,8 @@ impl AudioNode {
pub fn message(&self, message: AudioNodeMessage) {
self.context
.audio_context_impl()
.lock()
.unwrap()
.message_node(self.node_id, message);
}
@@ -116,10 +122,14 @@ impl AudioNodeMethods for AudioNode {
// servo-media takes care of ignoring duplicated connections.
self.context.audio_context_impl().connect_ports(
self.node_id().output(output),
destination.node_id().input(input),
);
self.context
.audio_context_impl()
.lock()
.unwrap()
.connect_ports(
self.node_id().output(output),
destination.node_id().input(input),
);
Ok(DomRoot::from_ref(destination))
}
@@ -136,10 +146,14 @@ impl AudioNodeMethods for AudioNode {
// servo-media takes care of ignoring duplicated connections.
self.context.audio_context_impl().connect_ports(
self.node_id().output(output),
dest.node_id().param(dest.param_type()),
);
self.context
.audio_context_impl()
.lock()
.unwrap()
.connect_ports(
self.node_id().output(output),
dest.node_id().param(dest.param_type()),
);
Ok(())
}
@@ -148,6 +162,8 @@
fn Disconnect(&self) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_all_from(self.node_id());
Ok(())
}
@@ -156,6 +172,8 @@
fn Disconnect_(&self, out: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output(self.node_id().output(out));
Ok(())
}
@@ -164,6 +182,8 @@
fn Disconnect__(&self, to: &AudioNode) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_between(self.node_id(), to.node_id());
Ok(())
}
@@ -172,6 +192,8 @@
fn Disconnect___(&self, to: &AudioNode, out: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output_between(self.node_id().output(out), to.node_id());
Ok(())
}
@@ -180,6 +202,8 @@
fn Disconnect____(&self, to: &AudioNode, out: u32, inp: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output_between_to(self.node_id().output(out), to.node_id().input(inp));
Ok(())
}
@@ -188,6 +212,8 @@
fn Disconnect_____(&self, param: &AudioParam) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_to(self.node_id(), param.node_id().param(param.param_type()));
Ok(())
}
@@ -196,6 +222,8 @@
fn Disconnect______(&self, param: &AudioParam, out: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output_between_to(
self.node_id().output(out),
param.node_id().param(param.param_type()),

View file

@@ -81,6 +81,8 @@ impl AudioParam {
fn message_node(&self, message: AudioNodeMessage) {
self.context
.audio_context_impl()
.lock()
.unwrap()
.message_node(self.node, message);
}

View file

@@ -49,11 +49,12 @@ use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use js::rust::CustomAutoRooterGuard;
use js::typedarray::ArrayBuffer;
use msg::constellation_msg::BrowsingContextId;
use servo_media::audio::context::{AudioContext, AudioContextOptions, ProcessingState};
use servo_media::audio::context::{OfflineAudioContextOptions, RealTimeAudioContextOptions};
use servo_media::audio::decoder::AudioDecoderCallbacks;
use servo_media::audio::graph::NodeId;
use servo_media::ServoMedia;
use servo_media::{ClientContextId, ServoMedia};
use std::cell::Cell;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, VecDeque};
@@ -79,7 +80,7 @@ struct DecodeResolver {
pub struct BaseAudioContext {
eventtarget: EventTarget,
#[ignore_malloc_size_of = "servo_media"]
audio_context_impl: AudioContext,
audio_context_impl: Arc<Mutex<AudioContext>>,
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination
destination: MutNullableDom<AudioDestinationNode>,
listener: MutNullableDom<AudioListener>,
@@ -104,7 +105,10 @@ pub struct BaseAudioContext {
impl BaseAudioContext {
#[allow(unrooted_must_root)]
pub fn new_inherited(options: BaseAudioContextOptions) -> BaseAudioContext {
pub fn new_inherited(
options: BaseAudioContextOptions,
browsing_context_id: BrowsingContextId,
) -> BaseAudioContext {
let (sample_rate, channel_count) = match options {
BaseAudioContextOptions::AudioContext(ref opt) => (opt.sample_rate, 2),
BaseAudioContextOptions::OfflineAudioContext(ref opt) => {
@@ -112,11 +116,15 @@ impl BaseAudioContext {
},
};
let client_context_id = ClientContextId::build(
browsing_context_id.namespace_id.0,
browsing_context_id.index.0.get(),
);
let context = BaseAudioContext {
eventtarget: EventTarget::new_inherited(),
audio_context_impl: ServoMedia::get()
.unwrap()
.create_audio_context(options.into()),
.create_audio_context(&client_context_id, options.into()),
destination: Default::default(),
listener: Default::default(),
in_flight_resume_promises_queue: Default::default(),
@@ -135,16 +143,16 @@ impl BaseAudioContext {
false
}
pub fn audio_context_impl(&self) -> &AudioContext {
&self.audio_context_impl
pub fn audio_context_impl(&self) -> Arc<Mutex<AudioContext>> {
self.audio_context_impl.clone()
}
pub fn destination_node(&self) -> NodeId {
self.audio_context_impl.dest_node()
self.audio_context_impl.lock().unwrap().dest_node()
}
pub fn listener(&self) -> NodeId {
self.audio_context_impl.listener()
self.audio_context_impl.lock().unwrap().listener()
}
// https://webaudio.github.io/web-audio-api/#allowed-to-start
@@ -205,7 +213,7 @@ impl BaseAudioContext {
/// Control thread processing state
pub fn control_thread_state(&self) -> ProcessingState {
self.audio_context_impl.state()
self.audio_context_impl.lock().unwrap().state()
}
/// Set audio context state
@@ -220,7 +228,7 @@ impl BaseAudioContext {
let this = Trusted::new(self);
// Set the rendering thread state to 'running' and start
// rendering the audio graph.
match self.audio_context_impl.resume() {
match self.audio_context_impl.lock().unwrap().resume() {
Ok(()) => {
self.take_pending_resume_promises(Ok(()));
let _ = task_source.queue(
@@ -264,7 +272,7 @@ impl BaseAudioContextMethods for BaseAudioContext {
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-currenttime
fn CurrentTime(&self) -> Finite<f64> {
let current_time = self.audio_context_impl.current_time();
let current_time = self.audio_context_impl.lock().unwrap().current_time();
Finite::wrap(current_time)
}
@@ -279,7 +287,7 @@ impl BaseAudioContextMethods for BaseAudioContext {
let promise = Promise::new_in_current_compartment(&self.global(), comp);
// Step 2.
if self.audio_context_impl.state() == ProcessingState::Closed {
if self.audio_context_impl.lock().unwrap().state() == ProcessingState::Closed {
promise.reject_error(Error::InvalidState);
return promise;
}
@@ -520,6 +528,8 @@ impl BaseAudioContextMethods for BaseAudioContext {
})
.build();
self.audio_context_impl
.lock()
.unwrap()
.decode_audio_data(audio_data, callbacks);
} else {
// Step 3.

View file

@@ -493,10 +493,10 @@ unsafe_no_jsmanaged_fields!(InteractiveWindow);
unsafe_no_jsmanaged_fields!(CanvasId);
unsafe_no_jsmanaged_fields!(SourceSet);
unsafe_no_jsmanaged_fields!(AudioBuffer);
unsafe_no_jsmanaged_fields!(AudioContext);
unsafe_no_jsmanaged_fields!(Arc<Mutex<AudioContext>>);
unsafe_no_jsmanaged_fields!(NodeId);
unsafe_no_jsmanaged_fields!(AnalysisEngine, DistanceModel, PanningModel, ParamType);
unsafe_no_jsmanaged_fields!(dyn Player);
unsafe_no_jsmanaged_fields!(Arc<Mutex<dyn Player>>);
unsafe_no_jsmanaged_fields!(WebRtcController);
unsafe_no_jsmanaged_fields!(MediaStreamId, MediaStreamType);
unsafe_no_jsmanaged_fields!(Mutex<MediaFrameRenderer>);

View file

@@ -75,7 +75,7 @@ use script_layout_interface::HTMLMediaData;
use servo_config::pref;
use servo_media::player::frame::{Frame, FrameRenderer};
use servo_media::player::{PlaybackState, Player, PlayerError, PlayerEvent, StreamType};
use servo_media::{ServoMedia, SupportsMediaType};
use servo_media::{ClientContextId, ServoMedia, SupportsMediaType};
use servo_url::ServoUrl;
use std::cell::Cell;
use std::collections::VecDeque;
@@ -297,7 +297,7 @@ pub struct HTMLMediaElement {
#[ignore_malloc_size_of = "promises are hard"]
in_flight_play_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
#[ignore_malloc_size_of = "servo_media"]
player: DomRefCell<Option<Box<dyn Player>>>,
player: DomRefCell<Option<Arc<Mutex<dyn Player>>>>,
#[ignore_malloc_size_of = "Arc"]
frame_renderer: Arc<Mutex<MediaFrameRenderer>>,
/// https://html.spec.whatwg.org/multipage/#show-poster-flag
@@ -415,10 +415,10 @@ impl HTMLMediaElement {
fn play_media(&self) {
if let Some(ref player) = *self.player.borrow() {
if let Err(e) = player.set_rate(self.playbackRate.get()) {
if let Err(e) = player.lock().unwrap().set_rate(self.playbackRate.get()) {
warn!("Could not set the playback rate {:?}", e);
}
if let Err(e) = player.play() {
if let Err(e) = player.lock().unwrap().play() {
warn!("Could not play media {:?}", e);
}
}
@@ -485,7 +485,7 @@ impl HTMLMediaElement {
this.upcast::<EventTarget>().fire_event(atom!("pause"));
if let Some(ref player) = *this.player.borrow() {
if let Err(e) = player.pause() {
if let Err(e) = player.lock().unwrap().pause() {
eprintln!("Could not pause player {:?}", e);
}
}
@@ -925,6 +925,8 @@ impl HTMLMediaElement {
.borrow()
.as_ref()
.unwrap()
.lock()
.unwrap()
.set_stream(&track.id(), pos == tracks.len() - 1)
{
self.queue_dedicated_media_source_failure_steps();
@@ -974,7 +976,7 @@ impl HTMLMediaElement {
this.upcast::<EventTarget>().fire_event(atom!("error"));
if let Some(ref player) = *this.player.borrow() {
if let Err(e) = player.stop() {
if let Err(e) = player.lock().unwrap().stop() {
eprintln!("Could not stop player {:?}", e);
}
}
@@ -1230,7 +1232,7 @@ impl HTMLMediaElement {
// Step 11.
if let Some(ref player) = *self.player.borrow() {
if let Err(e) = player.seek(time) {
if let Err(e) = player.lock().unwrap().seek(time) {
eprintln!("Seek error {:?}", e);
}
}
@@ -1302,7 +1304,13 @@ impl HTMLMediaElement {
HTMLMediaElementTypeId::HTMLVideoElement => Some(self.frame_renderer.clone()),
};
let browsing_context_id = window.window_proxy().top_level_browsing_context_id().0;
let client_context_id = ClientContextId::build(
browsing_context_id.namespace_id.0,
browsing_context_id.index.0.get(),
);
let player = ServoMedia::get().unwrap().create_player(
&client_context_id,
stream_type,
action_sender,
renderer,
@@ -1737,9 +1745,14 @@ impl Drop for HTMLMediaElement {
});
if let Some(ref player) = *self.player.borrow() {
if let Err(err) = player.shutdown() {
warn!("Error shutting down player {:?}", err);
}
let browsing_context_id = window.window_proxy().top_level_browsing_context_id().0;
let client_context_id = ClientContextId::build(
browsing_context_id.namespace_id.0,
browsing_context_id.index.0.get(),
);
ServoMedia::get()
.unwrap()
.shutdown_player(&client_context_id, player.clone());
}
}
}
@@ -1797,7 +1810,7 @@ impl HTMLMediaElementMethods for HTMLMediaElement {
}
if let Some(ref player) = *self.player.borrow() {
let _ = player.set_mute(value);
let _ = player.lock().unwrap().set_mute(value);
}
self.muted.set(value);
@@ -2005,7 +2018,7 @@ impl HTMLMediaElementMethods for HTMLMediaElement {
self.queue_ratechange_event();
if self.is_potentially_playing() {
if let Some(ref player) = *self.player.borrow() {
if let Err(e) = player.set_rate(*value) {
if let Err(e) = player.lock().unwrap().set_rate(*value) {
warn!("Could not set the playback rate {:?}", e);
}
}
@@ -2072,7 +2085,7 @@ impl HTMLMediaElementMethods for HTMLMediaElement {
fn Buffered(&self) -> DomRoot<TimeRanges> {
let mut buffered = TimeRangesContainer::new();
if let Some(ref player) = *self.player.borrow() {
if let Ok(ranges) = player.buffered() {
if let Ok(ranges) = player.lock().unwrap().buffered() {
for range in ranges {
let _ = buffered.add(range.start as f64, range.end as f64);
}
@@ -2378,6 +2391,8 @@ impl FetchResponseListener for HTMLMediaElementFetchListener {
.borrow()
.as_ref()
.unwrap()
.lock()
.unwrap()
.set_input_size(content_length)
{
warn!("Could not set player input size {:?}", e);
@@ -2431,7 +2446,15 @@ impl FetchResponseListener for HTMLMediaElementFetchListener {
let payload_len = payload.len() as u64;
// Push input data into the player.
if let Err(e) = elem.player.borrow().as_ref().unwrap().push_data(payload) {
if let Err(e) = elem
.player
.borrow()
.as_ref()
.unwrap()
.lock()
.unwrap()
.push_data(payload)
{
// If we are pushing too much data and we know that we can
// restart the download later from where we left, we cancel
// the current request. Otherwise, we continue the request
@@ -2478,7 +2501,15 @@ impl FetchResponseListener for HTMLMediaElementFetchListener {
if elem.generation_id.get() == self.generation_id {
if let Some(ref current_fetch_context) = *elem.current_fetch_context.borrow() {
if let Some(CancelReason::Error) = current_fetch_context.cancel_reason() {
if let Err(e) = elem.player.borrow().as_ref().unwrap().end_of_stream() {
if let Err(e) = elem
.player
.borrow()
.as_ref()
.unwrap()
.lock()
.unwrap()
.end_of_stream()
{
warn!("Could not signal EOS to player {:?}", e);
}
return;

View file

@@ -23,6 +23,7 @@ use crate::dom::promise::Promise;
use crate::dom::window::Window;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use msg::constellation_msg::BrowsingContextId;
use servo_media::audio::context::OfflineAudioContextOptions as ServoMediaOfflineAudioContextOptions;
use std::cell::Cell;
use std::rc::Rc;
@@ -42,14 +43,21 @@ pub struct OfflineAudioContext {
impl OfflineAudioContext {
#[allow(unrooted_must_root)]
fn new_inherited(channel_count: u32, length: u32, sample_rate: f32) -> OfflineAudioContext {
fn new_inherited(
channel_count: u32,
length: u32,
sample_rate: f32,
browsing_context_id: BrowsingContextId,
) -> OfflineAudioContext {
let options = ServoMediaOfflineAudioContextOptions {
channels: channel_count as u8,
length: length as usize,
sample_rate,
};
let context =
BaseAudioContext::new_inherited(BaseAudioContextOptions::OfflineAudioContext(options));
let context = BaseAudioContext::new_inherited(
BaseAudioContextOptions::OfflineAudioContext(options),
browsing_context_id,
);
OfflineAudioContext {
context,
channel_count,
@@ -74,7 +82,13 @@ impl OfflineAudioContext {
{
return Err(Error::NotSupported);
}
let context = OfflineAudioContext::new_inherited(channel_count, length, sample_rate);
let browsing_context_id = window.window_proxy().top_level_browsing_context_id().0;
let context = OfflineAudioContext::new_inherited(
channel_count,
length,
sample_rate,
browsing_context_id,
);
Ok(reflect_dom_object(
Box::new(context),
window,
@@ -130,6 +144,8 @@ impl OfflineAudioContextMethods for OfflineAudioContext {
let sender = Mutex::new(sender);
self.context
.audio_context_impl()
.lock()
.unwrap()
.set_eos_callback(Box::new(move |buffer| {
processed_audio_
.lock()
@@ -181,7 +197,14 @@ impl OfflineAudioContextMethods for OfflineAudioContext {
})
.unwrap();
if self.context.audio_context_impl().resume().is_err() {
if self
.context
.audio_context_impl()
.lock()
.unwrap()
.resume()
.is_err()
{
promise.reject_error(Error::Type("Could not start offline rendering".to_owned()));
}