AudioContext construction

Fernando Jiménez Moreno 2018-06-22 12:47:19 +02:00
parent 7e04031a95
commit 98741ddf84
2 changed files with 66 additions and 5 deletions


@@ -3,24 +3,30 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::audiodestinationnode::AudioDestinationNode;
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
use dom::bindings::codegen::Bindings::AudioNodeBinding::{ChannelCountMode, ChannelInterpretation};
use dom::bindings::codegen::Bindings::BaseAudioContextBinding::BaseAudioContextMethods;
use dom::bindings::codegen::Bindings::BaseAudioContextBinding::AudioContextState;
use dom::bindings::codegen::Bindings::OscillatorNodeBinding::OscillatorOptions;
use dom::bindings::inheritance::Castable;
use dom::bindings::num::Finite;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use dom::promise::Promise;
use dom::oscillatornode::OscillatorNode;
use dom::promise::Promise;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_media::ServoMedia;
use servo_media::audio::graph::AudioGraph;
use servo_media::audio::graph::{OfflineAudioGraphOptions, RealTimeAudioGraphOptions};
use servo_media::audio::graph_impl::NodeId;
use servo_media::audio::node::AudioNodeType;
use std::cell::Cell;
use std::rc::Rc;
use task_source::TaskSource;
pub enum BaseAudioContextOptions {
AudioContext(RealTimeAudioGraphOptions),
@@ -32,10 +38,17 @@ pub struct BaseAudioContext {
reflector_: Reflector,
#[ignore_malloc_size_of = "servo_media"]
audio_graph: AudioGraph,
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination
destination: Option<DomRoot<AudioDestinationNode>>,
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-samplerate
sample_rate: f32,
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-currenttime
current_time: f64,
state: AudioContextState,
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state
state: Cell<AudioContextState>,
/// https://webaudio.github.io/web-audio-api/#pendingresumepromises
#[ignore_malloc_size_of = "promises are hard"]
pending_resume_promises: DomRefCell<Vec<Rc<Promise>>>,
}
impl BaseAudioContext {
@@ -58,7 +71,8 @@ impl BaseAudioContext {
destination: None,
current_time: 0.,
sample_rate,
state: AudioContextState::Suspended,
state: Cell::new(AudioContextState::Suspended),
pending_resume_promises: Default::default(),
};
let mut options = unsafe { AudioNodeOptions::empty(global.get_cx()) };
@@ -74,6 +88,30 @@ impl BaseAudioContext {
pub fn create_node_engine(&self, node_type: AudioNodeType) -> NodeId {
self.audio_graph.create_node(node_type)
}
// https://webaudio.github.io/web-audio-api/#allowed-to-start
pub fn is_allowed_to_start(&self) -> bool {
self.state.get() == AudioContextState::Suspended
}
pub fn resume(&self) {
let window = DomRoot::downcast::<Window>(self.global()).unwrap();
let task_source = window.dom_manipulation_task_source();
// Set the state attribute to `running`.
let this = Trusted::new(self);
task_source.queue(task!(set_state: move || {
let this = this.root();
this.state.set(AudioContextState::Running);
}), window.upcast()).unwrap();
// Queue a task to fire a simple event named `statechange` at the AudioContext.
task_source.queue_simple_event(
self.upcast(),
atom!("statechange"),
&window,
);
}
}
impl BaseAudioContextMethods for BaseAudioContext {
@@ -89,7 +127,7 @@ impl BaseAudioContextMethods for BaseAudioContext {
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state
fn State(&self) -> AudioContextState {
self.state
self.state.get()
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-resume
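
A note on the pattern introduced above: the commit replaces the plain `state: AudioContextState` field with `state: Cell<AudioContextState>` and adds `pending_resume_promises: DomRefCell<Vec<Rc<Promise>>>`, so that a task queued on the DOM manipulation task source can flip the state and later code can drain the queued promises through a shared `&self` reference. The following is a minimal standalone sketch of that interior-mutability pattern, outside of Servo's DOM machinery; the names (`FakeAudioContext`, boxed callbacks standing in for `Rc<Promise>`) are illustrative and not the types used in the commit.

// Minimal sketch: `Cell` lets resume() change the state through &self,
// and a `RefCell<Vec<...>>` collects pending "resume" callbacks.
// All names here are illustrative, not Servo's actual DOM types.
use std::cell::{Cell, RefCell};

#[derive(Clone, Copy, PartialEq, Debug)]
enum AudioContextState {
    Suspended,
    Running,
    Closed,
}

struct FakeAudioContext {
    state: Cell<AudioContextState>,
    pending_resume_promises: RefCell<Vec<Box<dyn FnOnce()>>>,
}

impl FakeAudioContext {
    fn new() -> Self {
        FakeAudioContext {
            state: Cell::new(AudioContextState::Suspended),
            pending_resume_promises: RefCell::new(Vec::new()),
        }
    }

    // Mirrors the "allowed to start" check: only a suspended context may resume.
    fn is_allowed_to_start(&self) -> bool {
        self.state.get() == AudioContextState::Suspended
    }

    // Takes &self, not &mut self: the state change goes through the Cell,
    // and the queued callbacks are drained through the RefCell.
    fn resume(&self) {
        if !self.is_allowed_to_start() {
            return;
        }
        self.state.set(AudioContextState::Running);
        for resolve in self.pending_resume_promises.borrow_mut().drain(..) {
            resolve();
        }
    }
}

fn main() {
    let ctx = FakeAudioContext::new();
    ctx.pending_resume_promises
        .borrow_mut()
        .push(Box::new(|| println!("resume promise resolved")));
    ctx.resume();
    assert_eq!(ctx.state.get(), AudioContextState::Running);
}

In the real DOM code the state change happens inside a task queued via `task_source.queue(task!(...))`, with a `Trusted<BaseAudioContext>` handle rooted back into a `DomRoot` inside the task, which is why interior mutability (rather than `&mut self`) is needed in the first place.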