AudioContext construction
commit 98741ddf84 (parent 7e04031a95)
2 changed files with 66 additions and 5 deletions
@@ -21,14 +21,36 @@ use std::rc::Rc;

#[dom_struct]
pub struct AudioContext {
    context: BaseAudioContext,
    latency_hint: AudioContextLatencyCategory,
    /// https://webaudio.github.io/web-audio-api/#dom-audiocontext-baselatency
    base_latency: f64,
    /// https://webaudio.github.io/web-audio-api/#dom-audiocontext-outputlatency
    output_latency: f64,
}

impl AudioContext {
    #[allow(unrooted_must_root)]
    // https://webaudio.github.io/web-audio-api/#AudioContext-constructors
    fn new_inherited(global: &GlobalScope, options: &AudioContextOptions) -> AudioContext {
        // Steps 1-3.
        let context = BaseAudioContext::new_inherited(global, BaseAudioContextOptions::AudioContext(options.into()));

        // Step 4.1.
        let latency_hint = options.latencyHint;

        // Step 4.2. The sample rate is set during the creation of the BaseAudioContext.
        // servo-media takes care of setting the default sample rate of the output device
        // and of resampling the audio output if needed.

        // Step 5.
        if context.is_allowed_to_start() {
            // Step 6.
            context.resume();
        }

        AudioContext {
-           context: BaseAudioContext::new_inherited(global, BaseAudioContextOptions::AudioContext(options.into())),
+           context,
            latency_hint,
            base_latency: 0., // TODO
            output_latency: 0., // TODO
        }
@@ -44,6 +66,7 @@ impl AudioContext {
        reflect_dom_object(Box::new(context), global, AudioContextBinding::Wrap)
    }

    // https://webaudio.github.io/web-audio-api/#AudioContext-constructors
    pub fn Constructor(window: &Window,
                       options: &AudioContextOptions) -> Fallible<DomRoot<AudioContext>> {
        let global = window.upcast::<GlobalScope>();
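The first file's changes above also illustrate how Servo models DOM interface inheritance by composition: AudioContext keeps its BaseAudioContext as its first field (context) and delegates to it, while upcast() reaches the base interface. Below is a minimal, self-contained sketch of that layout using made-up stand-in types (BaseContext, DerivedContext); the real wiring is generated by #[dom_struct] and the Castable trait, not written by hand like this.

// Minimal sketch of inheritance-by-composition with simplified stand-in types.
struct BaseContext {
    sample_rate: f32,
    current_time: f64,
}

struct DerivedContext {
    base: BaseContext, // the base interface comes first, as in the diff
    base_latency: f64,
    output_latency: f64,
}

impl DerivedContext {
    // "Upcasting" to the base interface is just borrowing the embedded field.
    fn upcast(&self) -> &BaseContext {
        &self.base
    }
}

fn main() {
    let ctx = DerivedContext {
        base: BaseContext { sample_rate: 44_100.0, current_time: 0.0 },
        base_latency: 0.0,
        output_latency: 0.0,
    };
    // Data (or methods) defined on the base are reachable through the upcast.
    let base = ctx.upcast();
    println!("sample rate: {}, current time: {}", base.sample_rate, base.current_time);
    println!("latencies: {} / {}", ctx.base_latency, ctx.output_latency);
}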
@@ -3,24 +3,30 @@
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use dom::audiodestinationnode::AudioDestinationNode;
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
use dom::bindings::codegen::Bindings::AudioNodeBinding::{ChannelCountMode, ChannelInterpretation};
use dom::bindings::codegen::Bindings::BaseAudioContextBinding::BaseAudioContextMethods;
use dom::bindings::codegen::Bindings::BaseAudioContextBinding::AudioContextState;
use dom::bindings::codegen::Bindings::OscillatorNodeBinding::OscillatorOptions;
use dom::bindings::inheritance::Castable;
use dom::bindings::num::Finite;
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
-use dom::promise::Promise;
use dom::oscillatornode::OscillatorNode;
+use dom::promise::Promise;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_media::ServoMedia;
use servo_media::audio::graph::AudioGraph;
use servo_media::audio::graph::{OfflineAudioGraphOptions, RealTimeAudioGraphOptions};
use servo_media::audio::graph_impl::NodeId;
use servo_media::audio::node::AudioNodeType;
use std::cell::Cell;
use std::rc::Rc;
use task_source::TaskSource;

pub enum BaseAudioContextOptions {
    AudioContext(RealTimeAudioGraphOptions),
@@ -32,10 +38,17 @@ pub struct BaseAudioContext {
    reflector_: Reflector,
    #[ignore_malloc_size_of = "servo_media"]
    audio_graph: AudioGraph,
    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination
    destination: Option<DomRoot<AudioDestinationNode>>,
    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-samplerate
    sample_rate: f32,
    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-currenttime
    current_time: f64,
-   state: AudioContextState,
    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state
+   state: Cell<AudioContextState>,
    /// https://webaudio.github.io/web-audio-api/#pendingresumepromises
    #[ignore_malloc_size_of = "promises are hard"]
    pending_resume_promises: DomRefCell<Vec<Rc<Promise>>>,
}

impl BaseAudioContext {
@@ -58,7 +71,8 @@ impl BaseAudioContext {
            destination: None,
            current_time: 0.,
            sample_rate,
-           state: AudioContextState::Suspended,
+           state: Cell::new(AudioContextState::Suspended),
            pending_resume_promises: Default::default(),
        };

        let mut options = unsafe { AudioNodeOptions::empty(global.get_cx()) };
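The state field moves from a plain AudioContextState to Cell<AudioContextState> because DOM objects are only reachable through shared references, so the state must be mutated through &self. A minimal sketch of that interior-mutability pattern, with simplified stand-in types rather than Servo's generated DOM structs, follows.

// Minimal sketch of interior mutability for the context state.
use std::cell::Cell;

#[derive(Clone, Copy, PartialEq, Debug)]
enum AudioContextState {
    Suspended,
    Running,
}

struct Context {
    state: Cell<AudioContextState>,
}

impl Context {
    // Note &self, not &mut self: Cell supplies the interior mutability.
    fn resume(&self) {
        self.state.set(AudioContextState::Running);
    }

    fn current_state(&self) -> AudioContextState {
        self.state.get()
    }
}

fn main() {
    let ctx = Context { state: Cell::new(AudioContextState::Suspended) };
    ctx.resume();
    assert_eq!(ctx.current_state(), AudioContextState::Running);
}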
@@ -74,6 +88,30 @@ impl BaseAudioContext {
    pub fn create_node_engine(&self, node_type: AudioNodeType) -> NodeId {
        self.audio_graph.create_node(node_type)
    }

    // https://webaudio.github.io/web-audio-api/#allowed-to-start
    pub fn is_allowed_to_start(&self) -> bool {
        self.state.get() == AudioContextState::Suspended
    }

    pub fn resume(&self) {
        let window = DomRoot::downcast::<Window>(self.global()).unwrap();
        let task_source = window.dom_manipulation_task_source();

        // Set the state attribute to `running`.
        let this = Trusted::new(self);
        task_source.queue(task!(set_state: move || {
            let this = this.root();
            this.state.set(AudioContextState::Running);
        }), window.upcast()).unwrap();

        // Queue a task to fire a simple event named `statechange` at the AudioContext.
        task_source.queue_simple_event(
            self.upcast(),
            atom!("statechange"),
            &window,
        );
    }
}

impl BaseAudioContextMethods for BaseAudioContext {
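Note that resume() above does not flip the state synchronously: it queues one task on the window's DOM-manipulation task source to set the state to running, and a second task to fire the statechange event. The sketch below illustrates that queued-closure pattern with a plain VecDeque standing in for the task source; all names are illustrative, not Servo's API.

// Minimal sketch of the "queue a task, flip the state, fire statechange" flow.
use std::cell::Cell;
use std::collections::VecDeque;
use std::rc::Rc;

#[derive(Clone, Copy, PartialEq, Debug)]
enum State {
    Suspended,
    Running,
}

struct Ctx {
    state: Cell<State>,
}

fn main() {
    let ctx = Rc::new(Ctx { state: Cell::new(State::Suspended) });
    let mut task_queue: VecDeque<Box<dyn FnOnce()>> = VecDeque::new();

    // Task 1: set the state attribute to `running`.
    let c = ctx.clone();
    task_queue.push_back(Box::new(move || c.state.set(State::Running)));

    // Task 2: fire a simple event named `statechange` (here: just log it).
    let c = ctx.clone();
    task_queue.push_back(Box::new(move || {
        println!("statechange fired, state = {:?}", c.state.get());
    }));

    // The event loop drains the queue later, in order.
    while let Some(task) = task_queue.pop_front() {
        task();
    }
    assert_eq!(ctx.state.get(), State::Running);
}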
@@ -89,7 +127,7 @@ impl BaseAudioContextMethods for BaseAudioContext {

    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state
    fn State(&self) -> AudioContextState {
-       self.state
+       self.state.get()
    }

    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-resume