script: Move webaudio DOM interfaces to script/dom/audio/ (#38894)

Moves the interfaces defined by the Web Audio spec (27 files) from the
`script/dom/` module to the new `script/dom/audio/` module.

Testing: This is just a refactor and shouldn't require any testing.
Fixes: N/A
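
For reference, in-crate callers update their imports accordingly (illustrative;
the new path is taken from the moved files below, the old one from the module
they previously lived in):

// Before the move:
use crate::dom::analysernode::AnalyserNode;
// After the move:
use crate::dom::audio::analysernode::AnalyserNode;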

Signed-off-by: Ashwin Naren <arihant2math@gmail.com>
31 changed files with 114 additions and 108 deletions


@@ -0,0 +1,246 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use ipc_channel::ipc::{self, IpcReceiver};
use ipc_channel::router::ROUTER;
use js::rust::{CustomAutoRooterGuard, HandleObject};
use js::typedarray::{Float32Array, Uint8Array};
use servo_media::audio::analyser_node::AnalysisEngine;
use servo_media::audio::block::Block;
use servo_media::audio::node::AudioNodeInit;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper};
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::AnalyserNodeBinding::{
AnalyserNodeMethods, AnalyserOptions,
};
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AnalyserNode {
node: AudioNode,
#[ignore_malloc_size_of = "Defined in servo-media"]
#[no_trace]
engine: DomRefCell<AnalysisEngine>,
}
impl AnalyserNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
_: &Window,
context: &BaseAudioContext,
options: &AnalyserOptions,
) -> Fallible<(AnalyserNode, IpcReceiver<Block>)> {
        let node_options = options
            .parent
            .unwrap_or(2, ChannelCountMode::Max, ChannelInterpretation::Speakers);
if options.fftSize > 32768 ||
options.fftSize < 32 ||
(options.fftSize & (options.fftSize - 1) != 0)
{
return Err(Error::IndexSize);
}
if *options.maxDecibels <= *options.minDecibels {
return Err(Error::IndexSize);
}
if *options.smoothingTimeConstant < 0. || *options.smoothingTimeConstant > 1. {
return Err(Error::IndexSize);
}
let (send, rcv) = ipc::channel().unwrap();
let callback = move |block| {
send.send(block).unwrap();
};
let node = AudioNode::new_inherited(
AudioNodeInit::AnalyserNode(Box::new(callback)),
context,
node_options,
1, // inputs
1, // outputs
)?;
let engine = AnalysisEngine::new(
options.fftSize as usize,
*options.smoothingTimeConstant,
*options.minDecibels,
*options.maxDecibels,
);
Ok((
AnalyserNode {
node,
engine: DomRefCell::new(engine),
},
rcv,
))
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &AnalyserOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<AnalyserNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &AnalyserOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<AnalyserNode>> {
let (node, recv) = AnalyserNode::new_inherited(window, context, options)?;
let object = reflect_dom_object_with_proto(Box::new(node), window, proto, can_gc);
let task_source = window
.as_global_scope()
.task_manager()
.dom_manipulation_task_source()
.to_sendable();
let this = Trusted::new(&*object);
ROUTER.add_typed_route(
recv,
Box::new(move |block| {
let this = this.clone();
task_source.queue(task!(append_analysis_block: move || {
let this = this.root();
this.push_block(block.unwrap())
}));
}),
);
Ok(object)
}
pub(crate) fn push_block(&self, block: Block) {
self.engine.borrow_mut().push(block)
}
}
impl AnalyserNodeMethods<crate::DomTypeHolder> for AnalyserNode {
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-analysernode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &AnalyserOptions,
) -> Fallible<DomRoot<AnalyserNode>> {
AnalyserNode::new_with_proto(window, proto, context, options, can_gc)
}
#[allow(unsafe_code)]
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-getfloatfrequencydata>
fn GetFloatFrequencyData(&self, mut array: CustomAutoRooterGuard<Float32Array>) {
// Invariant to maintain: No JS code that may touch the array should
// run whilst we're writing to it
let dest = unsafe { array.as_mut_slice() };
self.engine.borrow_mut().fill_frequency_data(dest);
}
#[allow(unsafe_code)]
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-getbytefrequencydata>
fn GetByteFrequencyData(&self, mut array: CustomAutoRooterGuard<Uint8Array>) {
// Invariant to maintain: No JS code that may touch the array should
// run whilst we're writing to it
let dest = unsafe { array.as_mut_slice() };
self.engine.borrow_mut().fill_byte_frequency_data(dest);
}
#[allow(unsafe_code)]
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-getfloattimedomaindata>
fn GetFloatTimeDomainData(&self, mut array: CustomAutoRooterGuard<Float32Array>) {
// Invariant to maintain: No JS code that may touch the array should
// run whilst we're writing to it
let dest = unsafe { array.as_mut_slice() };
self.engine.borrow().fill_time_domain_data(dest);
}
#[allow(unsafe_code)]
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-getbytetimedomaindata>
fn GetByteTimeDomainData(&self, mut array: CustomAutoRooterGuard<Uint8Array>) {
// Invariant to maintain: No JS code that may touch the array should
// run whilst we're writing to it
let dest = unsafe { array.as_mut_slice() };
self.engine.borrow().fill_byte_time_domain_data(dest);
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-fftsize>
fn SetFftSize(&self, value: u32) -> Fallible<()> {
if !(32..=32768).contains(&value) || (value & (value - 1) != 0) {
return Err(Error::IndexSize);
}
self.engine.borrow_mut().set_fft_size(value as usize);
Ok(())
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-fftsize>
fn FftSize(&self) -> u32 {
self.engine.borrow().get_fft_size() as u32
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-frequencybincount>
fn FrequencyBinCount(&self) -> u32 {
self.FftSize() / 2
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-mindecibels>
fn MinDecibels(&self) -> Finite<f64> {
Finite::wrap(self.engine.borrow().get_min_decibels())
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-mindecibels>
fn SetMinDecibels(&self, value: Finite<f64>) -> Fallible<()> {
if *value >= self.engine.borrow().get_max_decibels() {
return Err(Error::IndexSize);
}
self.engine.borrow_mut().set_min_decibels(*value);
Ok(())
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-maxdecibels>
fn MaxDecibels(&self) -> Finite<f64> {
Finite::wrap(self.engine.borrow().get_max_decibels())
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-maxdecibels>
fn SetMaxDecibels(&self, value: Finite<f64>) -> Fallible<()> {
if *value <= self.engine.borrow().get_min_decibels() {
return Err(Error::IndexSize);
}
self.engine.borrow_mut().set_max_decibels(*value);
Ok(())
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-smoothingtimeconstant>
fn SmoothingTimeConstant(&self) -> Finite<f64> {
Finite::wrap(self.engine.borrow().get_smoothing_constant())
}
/// <https://webaudio.github.io/web-audio-api/#dom-analysernode-smoothingtimeconstant>
fn SetSmoothingTimeConstant(&self, value: Finite<f64>) -> Fallible<()> {
if *value < 0. || *value > 1. {
return Err(Error::IndexSize);
}
self.engine.borrow_mut().set_smoothing_constant(*value);
Ok(())
}
}
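
The fftSize validation above (and in SetFftSize) uses the classic power-of-two
bit trick: for a power of two, value & (value - 1) clears the single set bit.
A standalone sketch of the same check; is_valid_fft_size is a local stand-in,
not part of this patch:

fn is_valid_fft_size(value: u32) -> bool {
    // Valid sizes are powers of two in [32, 32768]; for a power of two
    // (and only then, zero being excluded by the range), value & (value - 1) == 0.
    (32..=32768).contains(&value) && (value & (value - 1)) == 0
}

fn main() {
    assert!(is_valid_fft_size(2048)); // the spec's default fftSize
    assert!(!is_valid_fft_size(3000)); // in range, but not a power of two
    assert!(!is_valid_fft_size(16)); // a power of two, but below the minimum
}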


@@ -0,0 +1,329 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cmp::min;
use dom_struct::dom_struct;
use js::rust::{CustomAutoRooterGuard, HandleObject};
use js::typedarray::{Float32, Float32Array};
use servo_media::audio::buffer_source_node::AudioBuffer as ServoMediaAudioBuffer;
use crate::dom::audio::audionode::MAX_CHANNEL_COUNT;
use crate::dom::bindings::buffer_source::HeapBufferSource;
use crate::dom::bindings::cell::{DomRefCell, Ref};
use crate::dom::bindings::codegen::Bindings::AudioBufferBinding::{
AudioBufferMethods, AudioBufferOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::{Reflector, reflect_dom_object_with_proto};
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use crate::dom::window::Window;
use crate::realms::enter_realm;
use crate::script_runtime::{CanGc, JSContext};
// The spec mandates at least [8000, 96000]; we use [8000, 192000] to match Firefox
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer
pub(crate) const MIN_SAMPLE_RATE: f32 = 8000.;
pub(crate) const MAX_SAMPLE_RATE: f32 = 192000.;
/// The AudioBuffer keeps its data either in js_channels
/// or in shared_channels if js_channels buffers are detached.
///
/// js_channels buffers are (re)attached right before calling GetChannelData
/// and remain attached until its contents are needed by some other API
/// implementation. Follow <https://webaudio.github.io/web-audio-api/#acquire-the-content>
/// to know in which situations js_channels buffers must be detached.
///
#[dom_struct]
pub(crate) struct AudioBuffer {
reflector_: Reflector,
/// Float32Arrays returned by calls to GetChannelData.
#[ignore_malloc_size_of = "mozjs"]
js_channels: DomRefCell<Vec<HeapBufferSource<Float32>>>,
/// Aggregates the data from js_channels.
/// This is `Some` iff the buffers in js_channels are detached.
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
shared_channels: DomRefCell<Option<ServoMediaAudioBuffer>>,
/// <https://webaudio.github.io/web-audio-api/#dom-audiobuffer-samplerate>
sample_rate: f32,
/// <https://webaudio.github.io/web-audio-api/#dom-audiobuffer-length>
length: u32,
/// <https://webaudio.github.io/web-audio-api/#dom-audiobuffer-duration>
duration: f64,
/// <https://webaudio.github.io/web-audio-api/#dom-audiobuffer-numberofchannels>
number_of_channels: u32,
}
impl AudioBuffer {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
number_of_channels: u32,
length: u32,
sample_rate: f32,
) -> AudioBuffer {
let vec = (0..number_of_channels)
.map(|_| HeapBufferSource::default())
.collect();
AudioBuffer {
reflector_: Reflector::new(),
js_channels: DomRefCell::new(vec),
shared_channels: DomRefCell::new(None),
sample_rate,
length,
duration: length as f64 / sample_rate as f64,
number_of_channels,
}
}
pub(crate) fn new(
global: &Window,
number_of_channels: u32,
length: u32,
sample_rate: f32,
initial_data: Option<&[Vec<f32>]>,
can_gc: CanGc,
) -> DomRoot<AudioBuffer> {
Self::new_with_proto(
global,
None,
number_of_channels,
length,
sample_rate,
initial_data,
can_gc,
)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
global: &Window,
proto: Option<HandleObject>,
number_of_channels: u32,
length: u32,
sample_rate: f32,
initial_data: Option<&[Vec<f32>]>,
can_gc: CanGc,
) -> DomRoot<AudioBuffer> {
let buffer = AudioBuffer::new_inherited(number_of_channels, length, sample_rate);
let buffer = reflect_dom_object_with_proto(Box::new(buffer), global, proto, can_gc);
buffer.set_initial_data(initial_data);
buffer
}
    // Initialize the underlying channel data with the initial data provided
    // by the user, or with silence otherwise.
fn set_initial_data(&self, initial_data: Option<&[Vec<f32>]>) {
let mut channels = ServoMediaAudioBuffer::new(
self.number_of_channels as u8,
self.length as usize,
self.sample_rate,
);
for channel in 0..self.number_of_channels {
channels.buffers[channel as usize] = match initial_data {
Some(data) => data[channel as usize].clone(),
None => vec![0.; self.length as usize],
};
}
*self.shared_channels.borrow_mut() = Some(channels);
}
fn restore_js_channel_data(&self, cx: JSContext, can_gc: CanGc) -> bool {
let _ac = enter_realm(self);
for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {
if channel.is_initialized() {
// Already have data in JS array.
continue;
}
if let Some(ref shared_channels) = *self.shared_channels.borrow() {
// Step 4. of
// https://webaudio.github.io/web-audio-api/#acquire-the-content
// "Attach ArrayBuffers containing copies of the data to the AudioBuffer,
// to be returned by the next call to getChannelData()".
if channel
.set_data(cx, &shared_channels.buffers[i], can_gc)
.is_err()
{
return false;
}
}
}
*self.shared_channels.borrow_mut() = None;
true
}
// https://webaudio.github.io/web-audio-api/#acquire-the-content
fn acquire_contents(&self) -> Option<ServoMediaAudioBuffer> {
let mut result = ServoMediaAudioBuffer::new(
self.number_of_channels as u8,
self.length as usize,
self.sample_rate,
);
let cx = GlobalScope::get_cx();
for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {
// Step 1.
if !channel.is_initialized() {
return None;
}
// Step 3.
result.buffers[i] = channel.acquire_data(cx).ok()?;
}
Some(result)
}
pub(crate) fn get_channels(&self) -> Ref<'_, Option<ServoMediaAudioBuffer>> {
if self.shared_channels.borrow().is_none() {
let channels = self.acquire_contents();
if channels.is_some() {
*self.shared_channels.borrow_mut() = channels;
}
}
self.shared_channels.borrow()
}
}
impl AudioBufferMethods<crate::DomTypeHolder> for AudioBuffer {
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-audiobuffer
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
options: &AudioBufferOptions,
) -> Fallible<DomRoot<AudioBuffer>> {
if options.length == 0 ||
options.numberOfChannels == 0 ||
options.numberOfChannels > MAX_CHANNEL_COUNT ||
*options.sampleRate < MIN_SAMPLE_RATE ||
*options.sampleRate > MAX_SAMPLE_RATE
{
return Err(Error::NotSupported);
}
Ok(AudioBuffer::new_with_proto(
window,
proto,
options.numberOfChannels,
options.length,
*options.sampleRate,
None,
can_gc,
))
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-samplerate
fn SampleRate(&self) -> Finite<f32> {
Finite::wrap(self.sample_rate)
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-length
fn Length(&self) -> u32 {
self.length
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-duration
fn Duration(&self) -> Finite<f64> {
Finite::wrap(self.duration)
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-numberofchannels
fn NumberOfChannels(&self) -> u32 {
self.number_of_channels
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-getchanneldata
fn GetChannelData(&self, cx: JSContext, channel: u32, can_gc: CanGc) -> Fallible<Float32Array> {
if channel >= self.number_of_channels {
return Err(Error::IndexSize);
}
if !self.restore_js_channel_data(cx, can_gc) {
return Err(Error::JSFailed);
}
self.js_channels.borrow()[channel as usize]
.get_typed_array()
.map_err(|_| Error::JSFailed)
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-copyfromchannel
#[allow(unsafe_code)]
fn CopyFromChannel(
&self,
mut destination: CustomAutoRooterGuard<Float32Array>,
channel_number: u32,
start_in_channel: u32,
) -> Fallible<()> {
if destination.is_shared() {
return Err(Error::Type("Cannot copy to shared buffer".to_owned()));
}
if channel_number >= self.number_of_channels || start_in_channel >= self.length {
return Err(Error::IndexSize);
}
let bytes_to_copy = min(self.length - start_in_channel, destination.len() as u32) as usize;
let cx = GlobalScope::get_cx();
let channel_number = channel_number as usize;
let offset = start_in_channel as usize;
        let mut dest = vec![0.0_f32; bytes_to_copy];
        // We either copy from js_channels or shared_channels.
        let js_channel = &self.js_channels.borrow()[channel_number];
        if js_channel.is_initialized() {
            if js_channel
                .copy_data_to(cx, &mut dest, offset, offset + bytes_to_copy)
                .is_err()
            {
                return Err(Error::IndexSize);
            }
        } else if let Some(ref shared_channels) = *self.shared_channels.borrow() {
            if let Some(shared_channel) = shared_channels.buffers.get(channel_number) {
                // `dest` is already `bytes_to_copy` long, so overwrite it in
                // place rather than appending (which would double its length).
                dest.copy_from_slice(&shared_channel.as_slice()[offset..offset + bytes_to_copy]);
            }
        }
destination.update(&dest);
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-copytochannel
fn CopyToChannel(
&self,
source: CustomAutoRooterGuard<Float32Array>,
channel_number: u32,
start_in_channel: u32,
can_gc: CanGc,
) -> Fallible<()> {
if source.is_shared() {
return Err(Error::Type("Cannot copy from shared buffer".to_owned()));
}
        // Compare against the channel length so the subtraction below cannot underflow.
        if channel_number >= self.number_of_channels || start_in_channel > self.length {
return Err(Error::IndexSize);
}
let cx = GlobalScope::get_cx();
if !self.restore_js_channel_data(cx, can_gc) {
return Err(Error::JSFailed);
}
let js_channel = &self.js_channels.borrow()[channel_number as usize];
if !js_channel.is_initialized() {
// The array buffer was detached.
return Err(Error::IndexSize);
}
let bytes_to_copy = min(self.length - start_in_channel, source.len() as u32) as usize;
js_channel
.copy_data_from(cx, source, start_in_channel as usize, bytes_to_copy)
.map_err(|_| Error::IndexSize)
}
}
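
Both CopyFromChannel and CopyToChannel above clamp the number of copied frames
to min(self.length - start_in_channel, other.len()). A standalone worked
example of that clamping (frames_to_copy is a local stand-in; it uses
saturating_sub where the methods rely on their earlier bounds checks):

fn frames_to_copy(buffer_len: u32, start_in_channel: u32, other_len: u32) -> usize {
    // Never read or write past the end of the channel, and never exceed the
    // length of the other array.
    buffer_len.saturating_sub(start_in_channel).min(other_len) as usize
}

fn main() {
    // A 100-frame channel read from frame 90 into a 32-element array only
    // yields the 10 remaining frames.
    assert_eq!(frames_to_copy(100, 90, 32), 10);
    // With plenty of channel left, the other array's length is the limit.
    assert_eq!(frames_to_copy(100, 0, 32), 32);
}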


@@ -0,0 +1,292 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use std::f32;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::buffer_source_node::{
AudioBufferSourceNodeMessage, AudioBufferSourceNodeOptions,
};
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioNodeType};
use servo_media::audio::param::ParamType;
use crate::conversions::Convert;
use crate::dom::audio::audiobuffer::AudioBuffer;
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::audioscheduledsourcenode::AudioScheduledSourceNode;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioBufferSourceNodeBinding::{
AudioBufferSourceNodeMethods, AudioBufferSourceOptions,
};
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use crate::dom::bindings::codegen::Bindings::AudioScheduledSourceNodeBinding::AudioScheduledSourceNodeMethods;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AudioBufferSourceNode {
source_node: AudioScheduledSourceNode,
buffer: MutNullableDom<AudioBuffer>,
buffer_set: Cell<bool>,
playback_rate: Dom<AudioParam>,
detune: Dom<AudioParam>,
loop_enabled: Cell<bool>,
loop_start: Cell<f64>,
loop_end: Cell<f64>,
}
impl AudioBufferSourceNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &AudioBufferSourceOptions,
can_gc: CanGc,
) -> Fallible<AudioBufferSourceNode> {
let node_options = Default::default();
let source_node = AudioScheduledSourceNode::new_inherited(
AudioNodeInit::AudioBufferSourceNode(options.convert()),
context,
node_options,
0, /* inputs */
1, /* outputs */
)?;
let node_id = source_node.node().node_id();
let playback_rate = AudioParam::new(
window,
context,
node_id,
AudioNodeType::AudioBufferSourceNode,
ParamType::PlaybackRate,
AutomationRate::K_rate,
*options.playbackRate,
f32::MIN,
f32::MAX,
can_gc,
);
let detune = AudioParam::new(
window,
context,
node_id,
AudioNodeType::AudioBufferSourceNode,
ParamType::Detune,
AutomationRate::K_rate,
*options.detune,
f32::MIN,
f32::MAX,
can_gc,
);
let node = AudioBufferSourceNode {
source_node,
buffer: Default::default(),
buffer_set: Cell::new(false),
playback_rate: Dom::from_ref(&playback_rate),
detune: Dom::from_ref(&detune),
loop_enabled: Cell::new(options.loop_),
loop_start: Cell::new(*options.loopStart),
loop_end: Cell::new(*options.loopEnd),
};
if let Some(Some(ref buffer)) = options.buffer {
node.SetBuffer(Some(buffer))?;
}
Ok(node)
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &AudioBufferSourceOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<AudioBufferSourceNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &AudioBufferSourceOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<AudioBufferSourceNode>> {
let node = AudioBufferSourceNode::new_inherited(window, context, options, can_gc)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl AudioBufferSourceNodeMethods<crate::DomTypeHolder> for AudioBufferSourceNode {
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-audiobuffersourcenode
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &AudioBufferSourceOptions,
) -> Fallible<DomRoot<AudioBufferSourceNode>> {
AudioBufferSourceNode::new_with_proto(window, proto, context, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
fn GetBuffer(&self) -> Fallible<Option<DomRoot<AudioBuffer>>> {
Ok(self.buffer.get())
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
fn SetBuffer(&self, new_buffer: Option<&AudioBuffer>) -> Fallible<()> {
if new_buffer.is_some() {
if self.buffer_set.get() {
// Step 2.
return Err(Error::InvalidState);
}
// Step 3.
self.buffer_set.set(true);
}
// Step 4.
self.buffer.set(new_buffer);
// Step 5.
if self.source_node.has_start() {
if let Some(buffer) = self.buffer.get() {
let buffer = buffer.get_channels();
if buffer.is_some() {
self.source_node
.node()
.message(AudioNodeMessage::AudioBufferSourceNode(
AudioBufferSourceNodeMessage::SetBuffer((*buffer).clone()),
));
}
}
}
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-playbackrate
fn PlaybackRate(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.playback_rate)
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-detune
fn Detune(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.detune)
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loop
fn Loop(&self) -> bool {
self.loop_enabled.get()
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loop
fn SetLoop(&self, should_loop: bool) {
self.loop_enabled.set(should_loop);
let msg = AudioNodeMessage::AudioBufferSourceNode(
AudioBufferSourceNodeMessage::SetLoopEnabled(should_loop),
);
self.source_node.node().message(msg);
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopstart
fn LoopStart(&self) -> Finite<f64> {
Finite::wrap(self.loop_start.get())
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopstart
fn SetLoopStart(&self, loop_start: Finite<f64>) {
self.loop_start.set(*loop_start);
let msg = AudioNodeMessage::AudioBufferSourceNode(
AudioBufferSourceNodeMessage::SetLoopStart(*loop_start),
);
self.source_node.node().message(msg);
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopend
fn LoopEnd(&self) -> Finite<f64> {
Finite::wrap(self.loop_end.get())
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopend
fn SetLoopEnd(&self, loop_end: Finite<f64>) {
self.loop_end.set(*loop_end);
let msg = AudioNodeMessage::AudioBufferSourceNode(
AudioBufferSourceNodeMessage::SetLoopEnd(*loop_end),
);
self.source_node.node().message(msg);
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-start
fn Start(
&self,
when: Finite<f64>,
offset: Option<Finite<f64>>,
duration: Option<Finite<f64>>,
) -> Fallible<()> {
        if let Some(offset) = offset {
            if *offset < 0. {
                return Err(Error::Range("'offset' must be non-negative".to_owned()));
            }
        }
        if let Some(duration) = duration {
            if *duration < 0. {
                return Err(Error::Range(
                    "'duration' must be non-negative".to_owned(),
                ));
            }
        }
if let Some(buffer) = self.buffer.get() {
let buffer = buffer.get_channels();
if buffer.is_some() {
self.source_node
.node()
.message(AudioNodeMessage::AudioBufferSourceNode(
AudioBufferSourceNodeMessage::SetBuffer((*buffer).clone()),
));
}
}
self.source_node
.node()
.message(AudioNodeMessage::AudioBufferSourceNode(
AudioBufferSourceNodeMessage::SetStartParams(
*when,
offset.map(|f| *f),
duration.map(|f| *f),
),
));
self.source_node
.upcast::<AudioScheduledSourceNode>()
.Start(when)
}
}
impl Convert<AudioBufferSourceNodeOptions> for &AudioBufferSourceOptions {
fn convert(self) -> AudioBufferSourceNodeOptions {
AudioBufferSourceNodeOptions {
buffer: self
.buffer
.as_ref()
.and_then(|b| (*b.as_ref()?.get_channels()).clone()),
detune: *self.detune,
loop_enabled: self.loop_,
loop_end: Some(*self.loopEnd),
loop_start: Some(*self.loopStart),
playback_rate: *self.playbackRate,
}
}
}
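
The Convert impl above is the crate's usual way of mapping generated WebIDL
types onto their servo_media counterparts. A minimal standalone mirror of the
pattern (local stand-in types; the real trait lives in crate::conversions):

trait Convert<T> {
    fn convert(self) -> T;
}

// Stand-ins for a generated WebIDL enum and its servo_media counterpart.
enum DomLoopMode { Off, On }
#[derive(Debug, PartialEq)]
enum MediaLoopMode { Disabled, Enabled }

impl Convert<MediaLoopMode> for DomLoopMode {
    fn convert(self) -> MediaLoopMode {
        match self {
            DomLoopMode::Off => MediaLoopMode::Disabled,
            DomLoopMode::On => MediaLoopMode::Enabled,
        }
    }
}

fn main() {
    assert_eq!(DomLoopMode::Off.convert(), MediaLoopMode::Disabled);
    assert_eq!(DomLoopMode::On.convert(), MediaLoopMode::Enabled);
}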


@@ -0,0 +1,320 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::rc::Rc;
use base::id::PipelineId;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::context::{LatencyCategory, ProcessingState, RealTimeAudioContextOptions};
use crate::conversions::Convert;
use crate::dom::audio::baseaudiocontext::{BaseAudioContext, BaseAudioContextOptions};
use crate::dom::audio::mediaelementaudiosourcenode::MediaElementAudioSourceNode;
use crate::dom::audio::mediastreamaudiodestinationnode::MediaStreamAudioDestinationNode;
use crate::dom::audio::mediastreamaudiosourcenode::MediaStreamAudioSourceNode;
use crate::dom::audio::mediastreamtrackaudiosourcenode::MediaStreamTrackAudioSourceNode;
use crate::dom::bindings::codegen::Bindings::AudioContextBinding::{
AudioContextLatencyCategory, AudioContextMethods, AudioContextOptions, AudioTimestamp,
};
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::AudioContextState;
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::BaseAudioContext_Binding::BaseAudioContextMethods;
use crate::dom::bindings::codegen::UnionTypes::AudioContextLatencyCategoryOrDouble;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{DomGlobal, reflect_dom_object_with_proto};
use crate::dom::bindings::root::DomRoot;
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::mediastream::MediaStream;
use crate::dom::mediastreamtrack::MediaStreamTrack;
use crate::dom::promise::Promise;
use crate::dom::window::Window;
use crate::realms::InRealm;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AudioContext {
context: BaseAudioContext,
latency_hint: AudioContextLatencyCategory,
/// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-baselatency>
base_latency: f64,
/// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-outputlatency>
output_latency: f64,
}
impl AudioContext {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
// https://webaudio.github.io/web-audio-api/#AudioContext-constructors
fn new_inherited(
options: &AudioContextOptions,
pipeline_id: PipelineId,
) -> Fallible<AudioContext> {
// Steps 1-3.
let context = BaseAudioContext::new_inherited(
BaseAudioContextOptions::AudioContext(options.convert()),
pipeline_id,
)?;
// Step 4.1.
let latency_hint = match options.latencyHint {
AudioContextLatencyCategoryOrDouble::AudioContextLatencyCategory(category) => category,
AudioContextLatencyCategoryOrDouble::Double(_) => {
AudioContextLatencyCategory::Interactive
}, // TODO
};
// Step 4.2. The sample rate is set during the creation of the BaseAudioContext.
// servo-media takes care of setting the default sample rate of the output device
// and of resampling the audio output if needed.
// Steps 5 and 6 of the construction algorithm will happen in `resume`,
// after reflecting dom object.
Ok(AudioContext {
context,
latency_hint,
base_latency: 0., // TODO
output_latency: 0., // TODO
})
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new(
window: &Window,
proto: Option<HandleObject>,
options: &AudioContextOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<AudioContext>> {
let pipeline_id = window.pipeline_id();
let context = AudioContext::new_inherited(options, pipeline_id)?;
let context = reflect_dom_object_with_proto(Box::new(context), window, proto, can_gc);
context.resume();
Ok(context)
}
fn resume(&self) {
// Step 5.
if self.context.is_allowed_to_start() {
// Step 6.
self.context.resume();
}
}
pub(crate) fn base(&self) -> DomRoot<BaseAudioContext> {
DomRoot::from_ref(&self.context)
}
}
impl AudioContextMethods<crate::DomTypeHolder> for AudioContext {
// https://webaudio.github.io/web-audio-api/#AudioContext-constructors
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
options: &AudioContextOptions,
) -> Fallible<DomRoot<AudioContext>> {
AudioContext::new(window, proto, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-audiocontext-baselatency
fn BaseLatency(&self) -> Finite<f64> {
Finite::wrap(self.base_latency)
}
// https://webaudio.github.io/web-audio-api/#dom-audiocontext-outputlatency
fn OutputLatency(&self) -> Finite<f64> {
Finite::wrap(self.output_latency)
}
    // https://webaudio.github.io/web-audio-api/#dom-audiocontext-getoutputtimestamp
fn GetOutputTimestamp(&self) -> AudioTimestamp {
// TODO
AudioTimestamp {
contextTime: Some(Finite::wrap(0.)),
performanceTime: Some(Finite::wrap(0.)),
}
}
// https://webaudio.github.io/web-audio-api/#dom-audiocontext-suspend
fn Suspend(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
// Step 1.
let promise = Promise::new_in_current_realm(comp, can_gc);
// Step 2.
if self.context.control_thread_state() == ProcessingState::Closed {
promise.reject_error(Error::InvalidState, can_gc);
return promise;
}
// Step 3.
if self.context.State() == AudioContextState::Suspended {
promise.resolve_native(&(), can_gc);
return promise;
}
// Steps 4 and 5.
let trusted_promise = TrustedPromise::new(promise.clone());
match self.context.audio_context_impl().lock().unwrap().suspend() {
Ok(_) => {
let base_context = Trusted::new(&self.context);
let context = Trusted::new(self);
self.global().task_manager().dom_manipulation_task_source().queue(
task!(suspend_ok: move || {
let base_context = base_context.root();
let context = context.root();
let promise = trusted_promise.root();
promise.resolve_native(&(), CanGc::note());
if base_context.State() != AudioContextState::Suspended {
base_context.set_state_attribute(AudioContextState::Suspended);
context.global().task_manager().dom_manipulation_task_source().queue_simple_event(
context.upcast(),
atom!("statechange"),
);
}
})
);
},
Err(_) => {
                // The spec does not define the error case and `suspend` should
                // never fail, but we handle the case here for completeness.
self.global()
.task_manager()
.dom_manipulation_task_source()
.queue(task!(suspend_error: move || {
let promise = trusted_promise.root();
promise.reject_error(Error::Type("Something went wrong".to_owned()), CanGc::note());
}));
},
};
// Step 6.
promise
}
// https://webaudio.github.io/web-audio-api/#dom-audiocontext-close
fn Close(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
// Step 1.
let promise = Promise::new_in_current_realm(comp, can_gc);
// Step 2.
if self.context.control_thread_state() == ProcessingState::Closed {
promise.reject_error(Error::InvalidState, can_gc);
return promise;
}
// Step 3.
if self.context.State() == AudioContextState::Closed {
promise.resolve_native(&(), can_gc);
return promise;
}
// Steps 4 and 5.
let trusted_promise = TrustedPromise::new(promise.clone());
match self.context.audio_context_impl().lock().unwrap().close() {
Ok(_) => {
let base_context = Trusted::new(&self.context);
let context = Trusted::new(self);
self.global().task_manager().dom_manipulation_task_source().queue(
                    task!(close_ok: move || {
let base_context = base_context.root();
let context = context.root();
let promise = trusted_promise.root();
promise.resolve_native(&(), CanGc::note());
if base_context.State() != AudioContextState::Closed {
base_context.set_state_attribute(AudioContextState::Closed);
context.global().task_manager().dom_manipulation_task_source().queue_simple_event(
context.upcast(),
atom!("statechange"),
);
}
})
);
},
Err(_) => {
                // The spec does not define the error case and `close` should
                // never fail, but we handle the case here for completeness.
self.global()
.task_manager()
.dom_manipulation_task_source()
                    .queue(task!(close_error: move || {
let promise = trusted_promise.root();
promise.reject_error(Error::Type("Something went wrong".to_owned()), CanGc::note());
}));
},
};
// Step 6.
promise
}
/// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediaelementsource>
fn CreateMediaElementSource(
&self,
media_element: &HTMLMediaElement,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaElementAudioSourceNode>> {
let global = self.global();
let window = global.as_window();
MediaElementAudioSourceNode::new(window, self, media_element, can_gc)
}
/// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediastreamsource>
fn CreateMediaStreamSource(
&self,
stream: &MediaStream,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamAudioSourceNode>> {
let global = self.global();
let window = global.as_window();
MediaStreamAudioSourceNode::new(window, self, stream, can_gc)
}
/// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediastreamtracksource>
fn CreateMediaStreamTrackSource(
&self,
track: &MediaStreamTrack,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamTrackAudioSourceNode>> {
let global = self.global();
let window = global.as_window();
MediaStreamTrackAudioSourceNode::new(window, self, track, can_gc)
}
/// <https://webaudio.github.io/web-audio-api/#dom-audiocontext-createmediastreamdestination>
fn CreateMediaStreamDestination(
&self,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamAudioDestinationNode>> {
let global = self.global();
let window = global.as_window();
MediaStreamAudioDestinationNode::new(window, self, &AudioNodeOptions::empty(), can_gc)
}
}
impl Convert<LatencyCategory> for AudioContextLatencyCategory {
fn convert(self) -> LatencyCategory {
match self {
AudioContextLatencyCategory::Balanced => LatencyCategory::Balanced,
AudioContextLatencyCategory::Interactive => LatencyCategory::Interactive,
AudioContextLatencyCategory::Playback => LatencyCategory::Playback,
}
}
}
impl Convert<RealTimeAudioContextOptions> for &AudioContextOptions {
fn convert(self) -> RealTimeAudioContextOptions {
RealTimeAudioContextOptions {
sample_rate: *self.sampleRate.unwrap_or(Finite::wrap(44100.)),
latency_hint: match self.latencyHint {
AudioContextLatencyCategoryOrDouble::AudioContextLatencyCategory(category) => {
category.convert()
},
AudioContextLatencyCategoryOrDouble::Double(_) => LatencyCategory::Interactive, // TODO
},
}
}
}
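
Suspend and Close above share one shape: reject if the control thread is
already closed, resolve immediately if the context is already in the target
state, and otherwise queue a task that resolves the promise and fires
statechange. A condensed standalone mirror of that decision (State and
plan_transition are local stand-ins):

#[derive(Clone, Copy, PartialEq)]
enum State {
    Suspended,
    Running,
    Closed,
}

// Ok(true): queue the transition task; Ok(false): resolve immediately;
// Err: reject the promise with an InvalidState error.
fn plan_transition(current: State, target: State) -> Result<bool, &'static str> {
    if current == State::Closed {
        return Err("InvalidState");
    }
    Ok(current != target)
}

fn main() {
    assert_eq!(plan_transition(State::Running, State::Suspended), Ok(true));
    assert_eq!(plan_transition(State::Suspended, State::Suspended), Ok(false));
    assert!(plan_transition(State::Closed, State::Closed).is_err());
}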


@@ -0,0 +1,58 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper, MAX_CHANNEL_COUNT};
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioDestinationNodeBinding::AudioDestinationNodeMethods;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
AudioNodeOptions, ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::reflector::reflect_dom_object;
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AudioDestinationNode {
node: AudioNode,
}
impl AudioDestinationNode {
fn new_inherited(
context: &BaseAudioContext,
options: &AudioNodeOptions,
) -> AudioDestinationNode {
let node_options =
options.unwrap_or(2, ChannelCountMode::Max, ChannelInterpretation::Speakers);
AudioDestinationNode {
node: AudioNode::new_inherited_for_id(
context.destination_node(),
context,
node_options,
                1, // inputs
                1, // outputs
),
}
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new(
global: &GlobalScope,
context: &BaseAudioContext,
options: &AudioNodeOptions,
can_gc: CanGc,
) -> DomRoot<AudioDestinationNode> {
let node = AudioDestinationNode::new_inherited(context, options);
reflect_dom_object(Box::new(node), global, can_gc)
}
}
impl AudioDestinationNodeMethods<crate::DomTypeHolder> for AudioDestinationNode {
// https://webaudio.github.io/web-audio-api/#dom-audiodestinationnode-maxchannelcount
fn MaxChannelCount(&self) -> u32 {
MAX_CHANNEL_COUNT
}
}
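
new_inherited above resolves any AudioNodeOptions fields the page author left
unset to the destination defaults (2, Max, Speakers) via the unwrap_or helper
defined in audionode.rs further down. A standalone mirror of that defaulting
(a plain Option stands in for the WebIDL dictionary field):

#[derive(Default)]
struct Opts {
    channel_count: Option<u32>,
}

fn main() {
    // An unset channelCount falls back to the destination default of 2...
    assert_eq!(Opts::default().channel_count.unwrap_or(2), 2);
    // ...while an explicit author-provided value wins.
    let opts = Opts { channel_count: Some(6) };
    assert_eq!(opts.channel_count.unwrap_or(2), 6);
}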


@@ -0,0 +1,248 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::f32;
use dom_struct::dom_struct;
use servo_media::audio::node::AudioNodeType;
use servo_media::audio::param::{ParamDir, ParamType};
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioListenerBinding::AudioListenerMethods;
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::{
AudioParamMethods, AutomationRate,
};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::{Reflector, reflect_dom_object};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AudioListener {
reflector_: Reflector,
position_x: Dom<AudioParam>,
position_y: Dom<AudioParam>,
position_z: Dom<AudioParam>,
forward_x: Dom<AudioParam>,
forward_y: Dom<AudioParam>,
forward_z: Dom<AudioParam>,
up_x: Dom<AudioParam>,
up_y: Dom<AudioParam>,
up_z: Dom<AudioParam>,
}
impl AudioListener {
fn new_inherited(window: &Window, context: &BaseAudioContext, can_gc: CanGc) -> AudioListener {
let node = context.listener();
let position_x = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Position(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let position_y = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Position(ParamDir::Y),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let position_z = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Position(ParamDir::Z),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let forward_x = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Forward(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let forward_y = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Forward(ParamDir::Y),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let forward_z = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Forward(ParamDir::Z),
AutomationRate::A_rate,
-1., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let up_x = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Up(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let up_y = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Up(ParamDir::Y),
AutomationRate::A_rate,
1., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let up_z = AudioParam::new(
window,
context,
node,
AudioNodeType::AudioListenerNode,
ParamType::Up(ParamDir::Z),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
AudioListener {
reflector_: Reflector::new(),
position_x: Dom::from_ref(&position_x),
position_y: Dom::from_ref(&position_y),
position_z: Dom::from_ref(&position_z),
forward_x: Dom::from_ref(&forward_x),
forward_y: Dom::from_ref(&forward_y),
forward_z: Dom::from_ref(&forward_z),
up_x: Dom::from_ref(&up_x),
up_y: Dom::from_ref(&up_y),
up_z: Dom::from_ref(&up_z),
}
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
can_gc: CanGc,
) -> DomRoot<AudioListener> {
let node = AudioListener::new_inherited(window, context, can_gc);
reflect_dom_object(Box::new(node), window, can_gc)
}
}
#[allow(non_snake_case)]
impl AudioListenerMethods<crate::DomTypeHolder> for AudioListener {
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-positionx
fn PositionX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_x)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-positiony
fn PositionY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_y)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-positionz
fn PositionZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_z)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardx
fn ForwardX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.forward_x)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardy
fn ForwardY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.forward_y)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardz
fn ForwardZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.forward_z)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-upx
fn UpX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.up_x)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-upy
fn UpY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.up_y)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-upz
fn UpZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.up_z)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-setorientation
fn SetOrientation(
&self,
x: Finite<f32>,
y: Finite<f32>,
z: Finite<f32>,
xUp: Finite<f32>,
yUp: Finite<f32>,
zUp: Finite<f32>,
) -> Fallible<DomRoot<AudioListener>> {
self.forward_x.SetValue(x);
self.forward_y.SetValue(y);
self.forward_z.SetValue(z);
self.up_x.SetValue(xUp);
self.up_y.SetValue(yUp);
self.up_z.SetValue(zUp);
Ok(DomRoot::from_ref(self))
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-setposition
fn SetPosition(
&self,
x: Finite<f32>,
y: Finite<f32>,
z: Finite<f32>,
) -> Fallible<DomRoot<AudioListener>> {
self.position_x.SetValue(x);
self.position_y.SetValue(y);
self.position_z.SetValue(z);
Ok(DomRoot::from_ref(self))
}
}
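
The parameter defaults above encode the spec's default listener frame:
position (0, 0, 0), forward (0, 0, -1) and up (0, 1, 0). A standalone check
that the two default direction vectors are perpendicular (the spec only
requires them to be linearly independent, but the defaults are exactly
orthogonal; plain arrays stand in for the AudioParams):

fn main() {
    let forward = [0.0_f32, 0.0, -1.0];
    let up = [0.0_f32, 1.0, 0.0];
    let dot: f32 = forward.iter().zip(up.iter()).map(|(a, b)| a * b).sum();
    assert_eq!(dot, 0.0);
}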


@@ -0,0 +1,430 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use dom_struct::dom_struct;
use script_bindings::codegen::InheritTypes::{
AudioNodeTypeId, AudioScheduledSourceNodeTypeId, EventTargetTypeId,
};
use servo_media::audio::graph::NodeId;
use servo_media::audio::node::{
AudioNodeInit, AudioNodeMessage, ChannelCountMode as ServoMediaChannelCountMode, ChannelInfo,
ChannelInterpretation as ServoMediaChannelInterpretation,
};
use crate::conversions::Convert;
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
AudioNodeMethods, AudioNodeOptions, ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::eventtarget::EventTarget;
// 32 is the minimum required by the spec for createBuffer() and the deprecated
// createScriptProcessor() and matches what is used by Blink and Gecko.
// The limit protects against large memory allocations.
pub(crate) const MAX_CHANNEL_COUNT: u32 = 32;
#[dom_struct]
pub(crate) struct AudioNode {
eventtarget: EventTarget,
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
node_id: NodeId,
context: Dom<BaseAudioContext>,
number_of_inputs: u32,
number_of_outputs: u32,
channel_count: Cell<u32>,
channel_count_mode: Cell<ChannelCountMode>,
channel_interpretation: Cell<ChannelInterpretation>,
}
impl AudioNode {
pub(crate) fn new_inherited(
node_type: AudioNodeInit,
context: &BaseAudioContext,
options: UnwrappedAudioNodeOptions,
number_of_inputs: u32,
number_of_outputs: u32,
) -> Fallible<AudioNode> {
if options.count == 0 || options.count > MAX_CHANNEL_COUNT {
return Err(Error::NotSupported);
}
let ch = ChannelInfo {
count: options.count as u8,
mode: options.mode.convert(),
interpretation: options.interpretation.convert(),
context_channel_count: context.channel_count() as u8,
};
let node_id = context
.audio_context_impl()
.lock()
.unwrap()
.create_node(node_type, ch);
Ok(AudioNode::new_inherited_for_id(
node_id,
context,
options,
number_of_inputs,
number_of_outputs,
))
}
pub(crate) fn new_inherited_for_id(
node_id: NodeId,
context: &BaseAudioContext,
options: UnwrappedAudioNodeOptions,
number_of_inputs: u32,
number_of_outputs: u32,
) -> AudioNode {
AudioNode {
eventtarget: EventTarget::new_inherited(),
node_id,
context: Dom::from_ref(context),
number_of_inputs,
number_of_outputs,
channel_count: Cell::new(options.count),
channel_count_mode: Cell::new(options.mode),
channel_interpretation: Cell::new(options.interpretation),
}
}
pub(crate) fn message(&self, message: AudioNodeMessage) {
self.context
.audio_context_impl()
.lock()
.unwrap()
.message_node(self.node_id, message);
}
pub(crate) fn node_id(&self) -> NodeId {
self.node_id
}
}
impl AudioNodeMethods<crate::DomTypeHolder> for AudioNode {
// https://webaudio.github.io/web-audio-api/#dom-audionode-connect
fn Connect(
&self,
destination: &AudioNode,
output: u32,
input: u32,
) -> Fallible<DomRoot<AudioNode>> {
if self.context != destination.context {
return Err(Error::InvalidAccess);
}
if output >= self.NumberOfOutputs() || input >= destination.NumberOfInputs() {
return Err(Error::IndexSize);
}
// servo-media takes care of ignoring duplicated connections.
self.context
.audio_context_impl()
.lock()
.unwrap()
.connect_ports(
self.node_id().output(output),
destination.node_id().input(input),
);
Ok(DomRoot::from_ref(destination))
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-connect-destinationparam-output
fn Connect_(&self, dest: &AudioParam, output: u32) -> Fallible<()> {
if self.context != dest.context() {
return Err(Error::InvalidAccess);
}
if output >= self.NumberOfOutputs() {
return Err(Error::IndexSize);
}
// servo-media takes care of ignoring duplicated connections.
self.context
.audio_context_impl()
.lock()
.unwrap()
.connect_ports(
self.node_id().output(output),
dest.node_id().param(dest.param_type()),
);
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect
fn Disconnect(&self) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_all_from(self.node_id());
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-output
fn Disconnect_(&self, out: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output(self.node_id().output(out));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode
fn Disconnect__(&self, to: &AudioNode) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_between(self.node_id(), to.node_id());
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output
fn Disconnect___(&self, to: &AudioNode, out: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output_between(self.node_id().output(out), to.node_id());
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output-input
fn Disconnect____(&self, to: &AudioNode, out: u32, inp: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output_between_to(self.node_id().output(out), to.node_id().input(inp));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect
fn Disconnect_____(&self, param: &AudioParam) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_to(self.node_id(), param.node_id().param(param.param_type()));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect
fn Disconnect______(&self, param: &AudioParam, out: u32) -> ErrorResult {
self.context
.audio_context_impl()
.lock()
.unwrap()
.disconnect_output_between_to(
self.node_id().output(out),
param.node_id().param(param.param_type()),
);
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-context
fn Context(&self) -> DomRoot<BaseAudioContext> {
DomRoot::from_ref(&self.context)
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-numberofinputs
fn NumberOfInputs(&self) -> u32 {
self.number_of_inputs
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-numberofoutputs
fn NumberOfOutputs(&self) -> u32 {
self.number_of_outputs
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcount
fn ChannelCount(&self) -> u32 {
self.channel_count.get()
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcount
fn SetChannelCount(&self, value: u32) -> ErrorResult {
match self.upcast::<EventTarget>().type_id() {
EventTargetTypeId::AudioNode(AudioNodeTypeId::AudioDestinationNode) => {
if self.context.is_offline() {
return Err(Error::InvalidState);
} else if !(1..=MAX_CHANNEL_COUNT).contains(&value) {
return Err(Error::IndexSize);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::PannerNode) => {
if value > 2 {
return Err(Error::NotSupported);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::AudioScheduledSourceNode(
AudioScheduledSourceNodeTypeId::StereoPannerNode,
)) => {
if value > 2 {
return Err(Error::NotSupported);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::ChannelMergerNode) => {
return Err(Error::InvalidState);
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::ChannelSplitterNode) => {
return Err(Error::InvalidState);
},
// XXX We do not support any of the other AudioNodes with
// constraints yet. Add more cases here as we add support
// for new AudioNodes.
_ => (),
};
if value == 0 || value > MAX_CHANNEL_COUNT {
return Err(Error::NotSupported);
}
self.channel_count.set(value);
self.message(AudioNodeMessage::SetChannelCount(value as u8));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcountmode
fn ChannelCountMode(&self) -> ChannelCountMode {
self.channel_count_mode.get()
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcountmode
fn SetChannelCountMode(&self, value: ChannelCountMode) -> ErrorResult {
// Channel count mode has no effect for nodes with no inputs.
if self.number_of_inputs == 0 {
return Ok(());
}
match self.upcast::<EventTarget>().type_id() {
EventTargetTypeId::AudioNode(AudioNodeTypeId::AudioDestinationNode) => {
if self.context.is_offline() {
return Err(Error::InvalidState);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::PannerNode) => {
if value == ChannelCountMode::Max {
return Err(Error::NotSupported);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::AudioScheduledSourceNode(
AudioScheduledSourceNodeTypeId::StereoPannerNode,
)) => {
if value == ChannelCountMode::Max {
return Err(Error::NotSupported);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::ChannelMergerNode) => {
return Err(Error::InvalidState);
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::ChannelSplitterNode) => {
return Err(Error::InvalidState);
},
// XXX We do not support any of the other AudioNodes with
// constraints yet. Add more cases here as we add support
// for new AudioNodes.
_ => (),
};
self.channel_count_mode.set(value);
self.message(AudioNodeMessage::SetChannelMode(value.convert()));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelinterpretation
fn ChannelInterpretation(&self) -> ChannelInterpretation {
self.channel_interpretation.get()
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelinterpretation
fn SetChannelInterpretation(&self, value: ChannelInterpretation) -> ErrorResult {
// Channel interpretation mode has no effect for nodes with no inputs.
if self.number_of_inputs == 0 {
return Ok(());
}
if let EventTargetTypeId::AudioNode(AudioNodeTypeId::ChannelSplitterNode) =
self.upcast::<EventTarget>().type_id()
{
return Err(Error::InvalidState);
};
self.channel_interpretation.set(value);
self.message(AudioNodeMessage::SetChannelInterpretation(value.convert()));
Ok(())
}
}
impl Convert<ServoMediaChannelCountMode> for ChannelCountMode {
fn convert(self) -> ServoMediaChannelCountMode {
match self {
ChannelCountMode::Max => ServoMediaChannelCountMode::Max,
ChannelCountMode::Clamped_max => ServoMediaChannelCountMode::ClampedMax,
ChannelCountMode::Explicit => ServoMediaChannelCountMode::Explicit,
}
}
}
impl Convert<ServoMediaChannelInterpretation> for ChannelInterpretation {
fn convert(self) -> ServoMediaChannelInterpretation {
match self {
ChannelInterpretation::Discrete => ServoMediaChannelInterpretation::Discrete,
ChannelInterpretation::Speakers => ServoMediaChannelInterpretation::Speakers,
}
}
}
pub(crate) trait AudioNodeOptionsHelper {
fn unwrap_or(
&self,
count: u32,
mode: ChannelCountMode,
interpretation: ChannelInterpretation,
) -> UnwrappedAudioNodeOptions;
}
impl AudioNodeOptionsHelper for AudioNodeOptions {
fn unwrap_or(
&self,
count: u32,
mode: ChannelCountMode,
interpretation: ChannelInterpretation,
) -> UnwrappedAudioNodeOptions {
UnwrappedAudioNodeOptions {
count: self.channelCount.unwrap_or(count),
mode: self.channelCountMode.unwrap_or(mode),
interpretation: self.channelInterpretation.unwrap_or(interpretation),
}
}
}
/// Each node type has its own set of defaults, so this lets us work with
/// them easily without having to deal with the `Option`s.
pub(crate) struct UnwrappedAudioNodeOptions {
pub(crate) count: u32,
pub(crate) mode: ChannelCountMode,
pub(crate) interpretation: ChannelInterpretation,
}
impl Default for UnwrappedAudioNodeOptions {
fn default() -> Self {
UnwrappedAudioNodeOptions {
count: 2,
mode: ChannelCountMode::Max,
interpretation: ChannelInterpretation::Speakers,
}
}
}
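
SetChannelCount above dispatches on the node's type id to apply per-node
constraints before the generic 1..=MAX_CHANNEL_COUNT bound. A condensed
standalone mirror of those rules (the Kind enum and validate_channel_count
are local stand-ins for the type-id dispatch):

const MAX_CHANNEL_COUNT: u32 = 32;

enum Kind {
    Destination { offline: bool },
    Panner,
    StereoPanner,
    ChannelMerger,
    ChannelSplitter,
    Other,
}

fn validate_channel_count(kind: Kind, value: u32) -> Result<(), &'static str> {
    match kind {
        Kind::Destination { offline: true } => return Err("InvalidState"),
        Kind::Destination { offline: false } => {
            if !(1..=MAX_CHANNEL_COUNT).contains(&value) {
                return Err("IndexSize");
            }
        },
        // Panner and stereo panner nodes are limited to mono or stereo.
        Kind::Panner | Kind::StereoPanner if value > 2 => return Err("NotSupported"),
        // Channel mergers and splitters have a fixed channel count.
        Kind::ChannelMerger | Kind::ChannelSplitter => return Err("InvalidState"),
        _ => {},
    }
    if value == 0 || value > MAX_CHANNEL_COUNT {
        return Err("NotSupported");
    }
    Ok(())
}

fn main() {
    assert!(validate_channel_count(Kind::Other, 2).is_ok());
    assert!(validate_channel_count(Kind::Destination { offline: true }, 2).is_err());
    assert!(validate_channel_count(Kind::Panner, 4).is_err());
    assert!(validate_channel_count(Kind::StereoPanner, 2).is_ok());
    assert!(validate_channel_count(Kind::ChannelMerger, 2).is_err());
}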


@@ -0,0 +1,336 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use std::sync::mpsc;
use dom_struct::dom_struct;
use servo_media::audio::graph::NodeId;
use servo_media::audio::node::{AudioNodeMessage, AudioNodeType};
use servo_media::audio::param::{ParamRate, ParamType, RampKind, UserAutomationEvent};
use crate::conversions::Convert;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::{
AudioParamMethods, AutomationRate,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::{Reflector, reflect_dom_object};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AudioParam {
reflector_: Reflector,
context: Dom<BaseAudioContext>,
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
node: NodeId,
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
node_type: AudioNodeType,
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
param: ParamType,
automation_rate: Cell<AutomationRate>,
default_value: f32,
min_value: f32,
max_value: f32,
}
impl AudioParam {
#[allow(clippy::too_many_arguments)]
pub(crate) fn new_inherited(
context: &BaseAudioContext,
node: NodeId,
node_type: AudioNodeType,
param: ParamType,
automation_rate: AutomationRate,
default_value: f32,
min_value: f32,
max_value: f32,
) -> AudioParam {
AudioParam {
reflector_: Reflector::new(),
context: Dom::from_ref(context),
node,
node_type,
param,
automation_rate: Cell::new(automation_rate),
default_value,
min_value,
max_value,
}
}
#[allow(clippy::too_many_arguments)]
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
node: NodeId,
node_type: AudioNodeType,
param: ParamType,
automation_rate: AutomationRate,
default_value: f32,
min_value: f32,
max_value: f32,
can_gc: CanGc,
) -> DomRoot<AudioParam> {
let audio_param = AudioParam::new_inherited(
context,
node,
node_type,
param,
automation_rate,
default_value,
min_value,
max_value,
);
reflect_dom_object(Box::new(audio_param), window, can_gc)
}
fn message_node(&self, message: AudioNodeMessage) {
self.context
.audio_context_impl()
.lock()
.unwrap()
.message_node(self.node, message);
}
pub(crate) fn context(&self) -> &BaseAudioContext {
&self.context
}
pub(crate) fn node_id(&self) -> NodeId {
self.node
}
pub(crate) fn param_type(&self) -> ParamType {
self.param
}
}
impl AudioParamMethods<crate::DomTypeHolder> for AudioParam {
// https://webaudio.github.io/web-audio-api/#dom-audioparam-automationrate
fn AutomationRate(&self) -> AutomationRate {
self.automation_rate.get()
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-automationrate
fn SetAutomationRate(&self, automation_rate: AutomationRate) -> Fallible<()> {
// > AudioBufferSourceNode
// > The AudioParams playbackRate and detune MUST be "k-rate". An InvalidStateError must be
// > thrown if the rate is changed to "a-rate".
if automation_rate == AutomationRate::A_rate &&
self.node_type == AudioNodeType::AudioBufferSourceNode &&
(self.param == ParamType::Detune || self.param == ParamType::PlaybackRate)
{
return Err(Error::InvalidState);
}
self.automation_rate.set(automation_rate);
self.message_node(AudioNodeMessage::SetParamRate(
self.param,
automation_rate.convert(),
));
Ok(())
}
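    // Illustrative consequence of the check above (hypothetical calls, not
    // part of this commit): on an AudioBufferSourceNode,
    //
    //     detune.SetAutomationRate(AutomationRate::A_rate)   // Err(InvalidState)
    //     gain.SetAutomationRate(AutomationRate::A_rate)     // Ok(()) on a GainNode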
// https://webaudio.github.io/web-audio-api/#dom-audioparam-value
fn Value(&self) -> Finite<f32> {
let (tx, rx) = mpsc::channel();
self.message_node(AudioNodeMessage::GetParamValue(self.param, tx));
Finite::wrap(rx.recv().unwrap())
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-value
fn SetValue(&self, value: Finite<f32>) {
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::SetValue(*value),
));
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-defaultvalue
fn DefaultValue(&self) -> Finite<f32> {
Finite::wrap(self.default_value)
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-minvalue
fn MinValue(&self) -> Finite<f32> {
Finite::wrap(self.min_value)
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-maxvalue
fn MaxValue(&self) -> Finite<f32> {
Finite::wrap(self.max_value)
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-setvalueattime
fn SetValueAtTime(
&self,
value: Finite<f32>,
start_time: Finite<f64>,
) -> Fallible<DomRoot<AudioParam>> {
if *start_time < 0. {
return Err(Error::Range(format!(
"start time {} should not be negative",
*start_time
)));
}
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::SetValueAtTime(*value, *start_time),
));
Ok(DomRoot::from_ref(self))
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-linearramptovalueattime
fn LinearRampToValueAtTime(
&self,
value: Finite<f32>,
end_time: Finite<f64>,
) -> Fallible<DomRoot<AudioParam>> {
if *end_time < 0. {
return Err(Error::Range(format!(
"end time {} should not be negative",
*end_time
)));
}
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::RampToValueAtTime(RampKind::Linear, *value, *end_time),
));
Ok(DomRoot::from_ref(self))
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-exponentialramptovalueattime
fn ExponentialRampToValueAtTime(
&self,
value: Finite<f32>,
end_time: Finite<f64>,
) -> Fallible<DomRoot<AudioParam>> {
if *end_time < 0. {
return Err(Error::Range(format!(
"end time {} should not be negative",
*end_time
)));
}
if *value == 0. {
return Err(Error::Range(format!(
"target value {} should not be 0",
*value
)));
}
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::RampToValueAtTime(RampKind::Exponential, *value, *end_time),
));
Ok(DomRoot::from_ref(self))
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-settargetattime
fn SetTargetAtTime(
&self,
target: Finite<f32>,
start_time: Finite<f64>,
time_constant: Finite<f32>,
) -> Fallible<DomRoot<AudioParam>> {
if *start_time < 0. {
return Err(Error::Range(format!(
"start time {} should not be negative",
*start_time
)));
}
if *time_constant < 0. {
return Err(Error::Range(format!(
"time constant {} should not be negative",
*time_constant
)));
}
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::SetTargetAtTime(*target, *start_time, (*time_constant).into()),
));
Ok(DomRoot::from_ref(self))
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-setvaluecurveattime
fn SetValueCurveAtTime(
&self,
values: Vec<Finite<f32>>,
start_time: Finite<f64>,
end_time: Finite<f64>,
) -> Fallible<DomRoot<AudioParam>> {
if *start_time < 0. {
return Err(Error::Range(format!(
"start time {} should not be negative",
*start_time
)));
}
        if values.len() < 2 {
return Err(Error::InvalidState);
}
if *end_time < 0. {
return Err(Error::Range(format!(
"end time {} should not be negative",
*end_time
)));
}
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::SetValueCurveAtTime(
values.into_iter().map(|v| *v).collect(),
*start_time,
*end_time,
),
));
Ok(DomRoot::from_ref(self))
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-cancelscheduledvalues
fn CancelScheduledValues(&self, cancel_time: Finite<f64>) -> Fallible<DomRoot<AudioParam>> {
if *cancel_time < 0. {
return Err(Error::Range(format!(
"cancel time {} should not be negative",
*cancel_time
)));
}
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::CancelScheduledValues(*cancel_time),
));
Ok(DomRoot::from_ref(self))
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-cancelandholdattime
fn CancelAndHoldAtTime(&self, cancel_time: Finite<f64>) -> Fallible<DomRoot<AudioParam>> {
if *cancel_time < 0. {
return Err(Error::Range(format!(
"cancel time {} should not be negative",
*cancel_time
)));
}
self.message_node(AudioNodeMessage::SetParam(
self.param,
UserAutomationEvent::CancelAndHoldAtTime(*cancel_time),
));
Ok(DomRoot::from_ref(self))
}
}
// https://webaudio.github.io/web-audio-api/#enumdef-automationrate
impl Convert<ParamRate> for AutomationRate {
fn convert(self) -> ParamRate {
match self {
AutomationRate::A_rate => ParamRate::ARate,
AutomationRate::K_rate => ParamRate::KRate,
}
}
}
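// A minimal sketch (hypothetical calls, not part of this commit) of how the
// methods above forward automation to servo-media:
//
//     param.SetValueAtTime(Finite::wrap(0.5), Finite::wrap(1.0))?;
//     // -> UserAutomationEvent::SetValueAtTime(0.5, 1.0) sent to the node
//     param.SetValueAtTime(Finite::wrap(0.5), Finite::wrap(-1.0));
//     // -> Err(Error::Range(..)): negative times are rejected up front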

@@ -0,0 +1,118 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use dom_struct::dom_struct;
use servo_media::audio::node::{
AudioNodeInit, AudioNodeMessage, AudioScheduledSourceNodeMessage, OnEndedCallback,
};
use crate::dom::audio::audionode::{AudioNode, UnwrappedAudioNodeOptions};
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioScheduledSourceNodeBinding::AudioScheduledSourceNodeMethods;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
#[dom_struct]
pub(crate) struct AudioScheduledSourceNode {
node: AudioNode,
has_start: Cell<bool>,
has_stop: Cell<bool>,
}
impl AudioScheduledSourceNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
node_type: AudioNodeInit,
context: &BaseAudioContext,
options: UnwrappedAudioNodeOptions,
number_of_inputs: u32,
number_of_outputs: u32,
) -> Fallible<AudioScheduledSourceNode> {
Ok(AudioScheduledSourceNode {
node: AudioNode::new_inherited(
node_type,
context,
options,
number_of_inputs,
number_of_outputs,
)?,
has_start: Cell::new(false),
has_stop: Cell::new(false),
})
}
pub(crate) fn node(&self) -> &AudioNode {
&self.node
}
pub(crate) fn has_start(&self) -> bool {
self.has_start.get()
}
}
impl AudioScheduledSourceNodeMethods<crate::DomTypeHolder> for AudioScheduledSourceNode {
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-onended
event_handler!(ended, GetOnended, SetOnended);
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-start
fn Start(&self, when: Finite<f64>) -> Fallible<()> {
if *when < 0. {
return Err(Error::Range("'when' must be a positive value".to_owned()));
}
if self.has_start.get() || self.has_stop.get() {
return Err(Error::InvalidState);
}
let this = Trusted::new(self);
let task_source = self
.global()
.task_manager()
.dom_manipulation_task_source()
.to_sendable();
let callback = OnEndedCallback::new(move || {
task_source.queue(task!(ended: move || {
let this = this.root();
this.global().task_manager().dom_manipulation_task_source().queue_simple_event(
this.upcast(),
atom!("ended"),
);
}));
});
self.node()
.message(AudioNodeMessage::AudioScheduledSourceNode(
AudioScheduledSourceNodeMessage::RegisterOnEndedCallback(callback),
));
self.has_start.set(true);
self.node
.message(AudioNodeMessage::AudioScheduledSourceNode(
AudioScheduledSourceNodeMessage::Start(*when),
));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-stop
fn Stop(&self, when: Finite<f64>) -> Fallible<()> {
if *when < 0. {
return Err(Error::Range("'when' must be a positive value".to_owned()));
}
if !self.has_start.get() {
return Err(Error::InvalidState);
}
self.has_stop.set(true);
self.node
.message(AudioNodeMessage::AudioScheduledSourceNode(
AudioScheduledSourceNodeMessage::Stop(*when),
));
Ok(())
}
}
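// Illustrative start/stop state machine enforced above (hypothetical calls):
//
//     source.Start(Finite::wrap(0.0))?;    // ok: first start
//     source.Start(Finite::wrap(1.0));     // Err(InvalidState): already started
//     source.Stop(Finite::wrap(-1.0));     // Err(Range): negative 'when'
//     source.Stop(Finite::wrap(2.0))?;     // ok: stop after a start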

@@ -0,0 +1,126 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use dom_struct::dom_struct;
use crate::dom::audio::audiotracklist::AudioTrackList;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::AudioTrackBinding::AudioTrackMethods;
use crate::dom::bindings::reflector::{Reflector, reflect_dom_object};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AudioTrack {
reflector_: Reflector,
id: DOMString,
kind: DOMString,
label: DOMString,
language: DOMString,
enabled: Cell<bool>,
track_list: DomRefCell<Option<Dom<AudioTrackList>>>,
}
impl AudioTrack {
pub(crate) fn new_inherited(
id: DOMString,
kind: DOMString,
label: DOMString,
language: DOMString,
track_list: Option<&AudioTrackList>,
) -> AudioTrack {
AudioTrack {
reflector_: Reflector::new(),
id,
kind,
label,
language,
enabled: Cell::new(false),
track_list: DomRefCell::new(track_list.map(Dom::from_ref)),
}
}
pub(crate) fn new(
window: &Window,
id: DOMString,
kind: DOMString,
label: DOMString,
language: DOMString,
track_list: Option<&AudioTrackList>,
can_gc: CanGc,
) -> DomRoot<AudioTrack> {
reflect_dom_object(
Box::new(AudioTrack::new_inherited(
id, kind, label, language, track_list,
)),
window,
can_gc,
)
}
pub(crate) fn id(&self) -> DOMString {
self.id.clone()
}
pub(crate) fn kind(&self) -> DOMString {
self.kind.clone()
}
pub(crate) fn enabled(&self) -> bool {
self.enabled.get()
}
pub(crate) fn set_enabled(&self, value: bool) {
self.enabled.set(value);
}
pub(crate) fn add_track_list(&self, track_list: &AudioTrackList) {
*self.track_list.borrow_mut() = Some(Dom::from_ref(track_list));
}
pub(crate) fn remove_track_list(&self) {
*self.track_list.borrow_mut() = None;
}
}
impl AudioTrackMethods<crate::DomTypeHolder> for AudioTrack {
// https://html.spec.whatwg.org/multipage/#dom-audiotrack-id
fn Id(&self) -> DOMString {
self.id()
}
// https://html.spec.whatwg.org/multipage/#dom-audiotrack-kind
fn Kind(&self) -> DOMString {
self.kind()
}
// https://html.spec.whatwg.org/multipage/#dom-audiotrack-label
fn Label(&self) -> DOMString {
self.label.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-audiotrack-language
fn Language(&self) -> DOMString {
self.language.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-audiotrack-enabled
fn Enabled(&self) -> bool {
self.enabled()
}
// https://html.spec.whatwg.org/multipage/#dom-audiotrack-enabled
fn SetEnabled(&self, value: bool) {
if let Some(list) = self.track_list.borrow().as_ref() {
if let Some(idx) = list.find(self) {
list.set_enabled(idx, value);
}
}
self.set_enabled(value);
}
}

@@ -0,0 +1,142 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use crate::dom::audio::audiotrack::AudioTrack;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::AudioTrackListBinding::AudioTrackListMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{DomGlobal, reflect_dom_object};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct AudioTrackList {
eventtarget: EventTarget,
tracks: DomRefCell<Vec<Dom<AudioTrack>>>,
media_element: Option<Dom<HTMLMediaElement>>,
}
impl AudioTrackList {
pub(crate) fn new_inherited(
tracks: &[&AudioTrack],
media_element: Option<&HTMLMediaElement>,
) -> AudioTrackList {
AudioTrackList {
eventtarget: EventTarget::new_inherited(),
tracks: DomRefCell::new(tracks.iter().map(|track| Dom::from_ref(&**track)).collect()),
media_element: media_element.map(Dom::from_ref),
}
}
pub(crate) fn new(
window: &Window,
tracks: &[&AudioTrack],
media_element: Option<&HTMLMediaElement>,
can_gc: CanGc,
) -> DomRoot<AudioTrackList> {
reflect_dom_object(
Box::new(AudioTrackList::new_inherited(tracks, media_element)),
window,
can_gc,
)
}
pub(crate) fn len(&self) -> usize {
self.tracks.borrow().len()
}
pub(crate) fn find(&self, track: &AudioTrack) -> Option<usize> {
self.tracks.borrow().iter().position(|t| &**t == track)
}
pub(crate) fn item(&self, idx: usize) -> Option<DomRoot<AudioTrack>> {
self.tracks
.borrow()
.get(idx)
.map(|track| DomRoot::from_ref(&**track))
}
pub(crate) fn enabled_index(&self) -> Option<usize> {
self.tracks
.borrow()
.iter()
.position(|track| track.enabled())
}
pub(crate) fn set_enabled(&self, idx: usize, value: bool) {
let track = match self.item(idx) {
Some(t) => t,
None => return,
};
        // If the chosen track's enabled status is the same as the new status, return early.
if track.enabled() == value {
return;
}
        // Set the track's enabled status.
track.set_enabled(value);
if let Some(media_element) = self.media_element.as_ref() {
media_element.set_audio_track(idx, value);
}
// Queue a task to fire an event named change.
let global = &self.global();
let this = Trusted::new(self);
let task_source = global.task_manager().media_element_task_source();
task_source.queue(task!(media_track_change: move || {
let this = this.root();
this.upcast::<EventTarget>().fire_event(atom!("change"), CanGc::note());
}));
}
pub(crate) fn add(&self, track: &AudioTrack) {
self.tracks.borrow_mut().push(Dom::from_ref(track));
track.add_track_list(self);
}
pub(crate) fn clear(&self) {
self.tracks
.borrow()
.iter()
.for_each(|t| t.remove_track_list());
self.tracks.borrow_mut().clear();
}
}
impl AudioTrackListMethods<crate::DomTypeHolder> for AudioTrackList {
// https://html.spec.whatwg.org/multipage/#dom-audiotracklist-length
fn Length(&self) -> u32 {
self.len() as u32
}
// https://html.spec.whatwg.org/multipage/#dom-tracklist-item
fn IndexedGetter(&self, idx: u32) -> Option<DomRoot<AudioTrack>> {
self.item(idx as usize)
}
// https://html.spec.whatwg.org/multipage/#dom-audiotracklist-gettrackbyid
fn GetTrackById(&self, id: DOMString) -> Option<DomRoot<AudioTrack>> {
self.tracks
.borrow()
.iter()
.find(|track| track.id() == id)
.map(|track| DomRoot::from_ref(&**track))
}
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onchange
event_handler!(change, GetOnchange, SetOnchange);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onaddtrack
event_handler!(addtrack, GetOnaddtrack, SetOnaddtrack);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onremovetrack
event_handler!(removetrack, GetOnremovetrack, SetOnremovetrack);
}
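// Illustrative behaviour of `set_enabled` above (hypothetical sequence):
//
//     list.set_enabled(0, true);   // updates the track and the media element,
//                                  // then queues a "change" event task
//     list.set_enabled(0, true);   // no-op: status unchanged, no event fired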

@@ -0,0 +1,622 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, VecDeque};
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use base::id::PipelineId;
use dom_struct::dom_struct;
use js::rust::CustomAutoRooterGuard;
use js::typedarray::ArrayBuffer;
use servo_media::audio::context::{
AudioContext, AudioContextOptions, OfflineAudioContextOptions, ProcessingState,
RealTimeAudioContextOptions,
};
use servo_media::audio::decoder::AudioDecoderCallbacks;
use servo_media::audio::graph::NodeId;
use servo_media::{ClientContextId, ServoMedia};
use uuid::Uuid;
use crate::conversions::Convert;
use crate::dom::audio::analysernode::AnalyserNode;
use crate::dom::audio::audiobuffer::AudioBuffer;
use crate::dom::audio::audiobuffersourcenode::AudioBufferSourceNode;
use crate::dom::audio::audiodestinationnode::AudioDestinationNode;
use crate::dom::audio::audiolistener::AudioListener;
use crate::dom::audio::audionode::MAX_CHANNEL_COUNT;
use crate::dom::audio::biquadfilternode::BiquadFilterNode;
use crate::dom::audio::channelmergernode::ChannelMergerNode;
use crate::dom::audio::channelsplitternode::ChannelSplitterNode;
use crate::dom::audio::constantsourcenode::ConstantSourceNode;
use crate::dom::audio::gainnode::GainNode;
use crate::dom::audio::iirfilternode::IIRFilterNode;
use crate::dom::audio::oscillatornode::OscillatorNode;
use crate::dom::audio::pannernode::PannerNode;
use crate::dom::audio::stereopannernode::StereoPannerNode;
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::AnalyserNodeBinding::AnalyserOptions;
use crate::dom::bindings::codegen::Bindings::AudioBufferSourceNodeBinding::AudioBufferSourceOptions;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
AudioNodeOptions, ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::{
AudioContextState, BaseAudioContextMethods, DecodeErrorCallback, DecodeSuccessCallback,
};
use crate::dom::bindings::codegen::Bindings::BiquadFilterNodeBinding::BiquadFilterOptions;
use crate::dom::bindings::codegen::Bindings::ChannelMergerNodeBinding::ChannelMergerOptions;
use crate::dom::bindings::codegen::Bindings::ChannelSplitterNodeBinding::ChannelSplitterOptions;
use crate::dom::bindings::codegen::Bindings::ConstantSourceNodeBinding::ConstantSourceOptions;
use crate::dom::bindings::codegen::Bindings::GainNodeBinding::GainOptions;
use crate::dom::bindings::codegen::Bindings::IIRFilterNodeBinding::IIRFilterOptions;
use crate::dom::bindings::codegen::Bindings::OscillatorNodeBinding::OscillatorOptions;
use crate::dom::bindings::codegen::Bindings::PannerNodeBinding::PannerOptions;
use crate::dom::bindings::codegen::Bindings::StereoPannerNodeBinding::StereoPannerOptions;
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomGlobal;
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::domexception::{DOMErrorName, DOMException};
use crate::dom::eventtarget::EventTarget;
use crate::dom::promise::Promise;
use crate::realms::InRealm;
use crate::script_runtime::CanGc;
#[allow(dead_code)]
pub(crate) enum BaseAudioContextOptions {
AudioContext(RealTimeAudioContextOptions),
OfflineAudioContext(OfflineAudioContextOptions),
}
#[derive(JSTraceable)]
struct DecodeResolver {
pub(crate) promise: Rc<Promise>,
pub(crate) success_callback: Option<Rc<DecodeSuccessCallback>>,
pub(crate) error_callback: Option<Rc<DecodeErrorCallback>>,
}
type BoxedSliceOfPromises = Box<[Rc<Promise>]>;
#[dom_struct]
pub(crate) struct BaseAudioContext {
eventtarget: EventTarget,
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
audio_context_impl: Arc<Mutex<AudioContext>>,
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination>
destination: MutNullableDom<AudioDestinationNode>,
listener: MutNullableDom<AudioListener>,
/// Resume promises which are soon to be fulfilled by a queued task.
#[ignore_malloc_size_of = "promises are hard"]
in_flight_resume_promises_queue: DomRefCell<VecDeque<(BoxedSliceOfPromises, ErrorResult)>>,
/// <https://webaudio.github.io/web-audio-api/#pendingresumepromises>
#[ignore_malloc_size_of = "promises are hard"]
pending_resume_promises: DomRefCell<Vec<Rc<Promise>>>,
#[ignore_malloc_size_of = "promises are hard"]
decode_resolvers: DomRefCell<HashMap<String, DecodeResolver>>,
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-samplerate>
sample_rate: f32,
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state>
/// Although servo-media already keeps track of the control thread state,
/// we keep a state flag here as well. This is so that we can synchronously
/// throw when trying to do things on the context when the context has just
/// been "closed()".
state: Cell<AudioContextState>,
channel_count: u32,
}
impl BaseAudioContext {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
options: BaseAudioContextOptions,
pipeline_id: PipelineId,
) -> Fallible<BaseAudioContext> {
let (sample_rate, channel_count) = match options {
BaseAudioContextOptions::AudioContext(ref opt) => (opt.sample_rate, 2),
BaseAudioContextOptions::OfflineAudioContext(ref opt) => {
(opt.sample_rate, opt.channels)
},
};
let client_context_id =
ClientContextId::build(pipeline_id.namespace_id.0, pipeline_id.index.0.get());
let audio_context_impl = ServoMedia::get()
.create_audio_context(&client_context_id, options.convert())
.map_err(|_| Error::NotSupported)?;
Ok(BaseAudioContext {
eventtarget: EventTarget::new_inherited(),
audio_context_impl,
destination: Default::default(),
listener: Default::default(),
in_flight_resume_promises_queue: Default::default(),
pending_resume_promises: Default::default(),
decode_resolvers: Default::default(),
sample_rate,
state: Cell::new(AudioContextState::Suspended),
channel_count: channel_count.into(),
})
}
/// Tells whether this is an OfflineAudioContext or not.
pub(crate) fn is_offline(&self) -> bool {
false
}
pub(crate) fn audio_context_impl(&self) -> Arc<Mutex<AudioContext>> {
self.audio_context_impl.clone()
}
pub(crate) fn destination_node(&self) -> NodeId {
self.audio_context_impl.lock().unwrap().dest_node()
}
pub(crate) fn listener(&self) -> NodeId {
self.audio_context_impl.lock().unwrap().listener()
}
// https://webaudio.github.io/web-audio-api/#allowed-to-start
pub(crate) fn is_allowed_to_start(&self) -> bool {
self.state.get() == AudioContextState::Suspended
}
fn push_pending_resume_promise(&self, promise: &Rc<Promise>) {
self.pending_resume_promises
.borrow_mut()
.push(promise.clone());
}
/// Takes the pending resume promises.
///
/// The result with which these promises will be fulfilled is passed here
/// and this method returns nothing because we actually just move the
/// current list of pending resume promises to the
/// `in_flight_resume_promises_queue` field.
///
/// Each call to this method must be followed by a call to
/// `fulfill_in_flight_resume_promises`, to actually fulfill the promises
/// which were taken and moved to the in-flight queue.
fn take_pending_resume_promises(&self, result: ErrorResult) {
let pending_resume_promises =
std::mem::take(&mut *self.pending_resume_promises.borrow_mut());
self.in_flight_resume_promises_queue
.borrow_mut()
.push_back((pending_resume_promises.into(), result));
}
/// Fulfills the next in-flight resume promises queue after running a closure.
///
/// See the comment on `take_pending_resume_promises` for why this method
/// does not take a list of promises to fulfill. Callers cannot just pop
/// the front list off of `in_flight_resume_promises_queue` and later fulfill
/// the promises because that would mean putting
/// `#[cfg_attr(crown, allow(crown::unrooted_must_root))]` on even more functions, potentially
/// hiding actual safety bugs.
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn fulfill_in_flight_resume_promises<F>(&self, f: F)
where
F: FnOnce(),
{
let (promises, result) = self
.in_flight_resume_promises_queue
.borrow_mut()
.pop_front()
.expect("there should be at least one list of in flight resume promises");
f();
for promise in &*promises {
match result {
Ok(ref value) => promise.resolve_native(value, CanGc::note()),
Err(ref error) => promise.reject_error(error.clone(), CanGc::note()),
}
}
}
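    // Illustrative pairing of the two methods above (assumed pattern; it
    // mirrors `resume()` below rather than adding new behaviour):
    //
    //     self.take_pending_resume_promises(Ok(()));
    //     // ...queue a task that later runs...
    //     this.fulfill_in_flight_resume_promises(|| { /* apply state change */ });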
/// Control thread processing state
pub(crate) fn control_thread_state(&self) -> ProcessingState {
self.audio_context_impl.lock().unwrap().state()
}
/// Set audio context state
pub(crate) fn set_state_attribute(&self, state: AudioContextState) {
self.state.set(state);
}
pub(crate) fn resume(&self) {
let this = Trusted::new(self);
// Set the rendering thread state to 'running' and start
// rendering the audio graph.
match self.audio_context_impl.lock().unwrap().resume() {
Ok(()) => {
self.take_pending_resume_promises(Ok(()));
self.global().task_manager().dom_manipulation_task_source().queue(
task!(resume_success: move || {
let this = this.root();
this.fulfill_in_flight_resume_promises(|| {
if this.state.get() != AudioContextState::Running {
this.state.set(AudioContextState::Running);
this.global().task_manager().dom_manipulation_task_source().queue_simple_event(
this.upcast(),
atom!("statechange"),
);
}
});
})
);
},
Err(()) => {
                self.take_pending_resume_promises(Err(Error::Type(
                    "Failed to resume the audio context".to_owned(),
                )));
self.global()
.task_manager()
.dom_manipulation_task_source()
.queue(task!(resume_error: move || {
this.root().fulfill_in_flight_resume_promises(|| {})
}));
},
}
}
pub(crate) fn channel_count(&self) -> u32 {
self.channel_count
}
}
impl BaseAudioContextMethods<crate::DomTypeHolder> for BaseAudioContext {
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-samplerate>
fn SampleRate(&self) -> Finite<f32> {
Finite::wrap(self.sample_rate)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-currenttime>
fn CurrentTime(&self) -> Finite<f64> {
let current_time = self.audio_context_impl.lock().unwrap().current_time();
Finite::wrap(current_time)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state>
fn State(&self) -> AudioContextState {
self.state.get()
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-resume>
fn Resume(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
// Step 1.
let promise = Promise::new_in_current_realm(comp, can_gc);
// Step 2.
if self.audio_context_impl.lock().unwrap().state() == ProcessingState::Closed {
promise.reject_error(Error::InvalidState, can_gc);
return promise;
}
// Step 3.
if self.state.get() == AudioContextState::Running {
promise.resolve_native(&(), can_gc);
return promise;
}
self.push_pending_resume_promise(&promise);
// Step 4.
if !self.is_allowed_to_start() {
return promise;
}
// Steps 5 and 6.
self.resume();
// Step 7.
promise
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination>
fn Destination(&self, can_gc: CanGc) -> DomRoot<AudioDestinationNode> {
let global = self.global();
self.destination.or_init(|| {
let mut options = AudioNodeOptions::empty();
options.channelCount = Some(self.channel_count);
options.channelCountMode = Some(ChannelCountMode::Explicit);
options.channelInterpretation = Some(ChannelInterpretation::Speakers);
AudioDestinationNode::new(&global, self, &options, can_gc)
})
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-listener>
fn Listener(&self, can_gc: CanGc) -> DomRoot<AudioListener> {
let global = self.global();
let window = global.as_window();
self.listener
.or_init(|| AudioListener::new(window, self, can_gc))
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-onstatechange
event_handler!(statechange, GetOnstatechange, SetOnstatechange);
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createoscillator>
fn CreateOscillator(&self, can_gc: CanGc) -> Fallible<DomRoot<OscillatorNode>> {
OscillatorNode::new(
self.global().as_window(),
self,
&OscillatorOptions::empty(),
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-creategain>
fn CreateGain(&self, can_gc: CanGc) -> Fallible<DomRoot<GainNode>> {
GainNode::new(
self.global().as_window(),
self,
&GainOptions::empty(),
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createpanner>
fn CreatePanner(&self, can_gc: CanGc) -> Fallible<DomRoot<PannerNode>> {
PannerNode::new(
self.global().as_window(),
self,
&PannerOptions::empty(),
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createanalyser>
fn CreateAnalyser(&self, can_gc: CanGc) -> Fallible<DomRoot<AnalyserNode>> {
AnalyserNode::new(
self.global().as_window(),
self,
&AnalyserOptions::empty(),
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbiquadfilter>
fn CreateBiquadFilter(&self, can_gc: CanGc) -> Fallible<DomRoot<BiquadFilterNode>> {
BiquadFilterNode::new(
self.global().as_window(),
self,
&BiquadFilterOptions::empty(),
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createstereopanner>
fn CreateStereoPanner(&self, can_gc: CanGc) -> Fallible<DomRoot<StereoPannerNode>> {
StereoPannerNode::new(
self.global().as_window(),
self,
&StereoPannerOptions::empty(),
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createconstantsource>
fn CreateConstantSource(&self, can_gc: CanGc) -> Fallible<DomRoot<ConstantSourceNode>> {
ConstantSourceNode::new(
self.global().as_window(),
self,
&ConstantSourceOptions::empty(),
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createchannelmerger>
fn CreateChannelMerger(
&self,
count: u32,
can_gc: CanGc,
) -> Fallible<DomRoot<ChannelMergerNode>> {
let mut opts = ChannelMergerOptions::empty();
opts.numberOfInputs = count;
ChannelMergerNode::new(self.global().as_window(), self, &opts, can_gc)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createchannelsplitter>
fn CreateChannelSplitter(
&self,
count: u32,
can_gc: CanGc,
) -> Fallible<DomRoot<ChannelSplitterNode>> {
let mut opts = ChannelSplitterOptions::empty();
opts.numberOfOutputs = count;
ChannelSplitterNode::new(self.global().as_window(), self, &opts, can_gc)
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer>
fn CreateBuffer(
&self,
number_of_channels: u32,
length: u32,
sample_rate: Finite<f32>,
can_gc: CanGc,
) -> Fallible<DomRoot<AudioBuffer>> {
if number_of_channels == 0 ||
number_of_channels > MAX_CHANNEL_COUNT ||
length == 0 ||
*sample_rate <= 0.
{
return Err(Error::NotSupported);
}
Ok(AudioBuffer::new(
self.global().as_window(),
number_of_channels,
length,
*sample_rate,
None,
can_gc,
))
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffersource
fn CreateBufferSource(&self, can_gc: CanGc) -> Fallible<DomRoot<AudioBufferSourceNode>> {
AudioBufferSourceNode::new(
self.global().as_window(),
self,
&AudioBufferSourceOptions::empty(),
can_gc,
)
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-decodeaudiodata
fn DecodeAudioData(
&self,
audio_data: CustomAutoRooterGuard<ArrayBuffer>,
decode_success_callback: Option<Rc<DecodeSuccessCallback>>,
decode_error_callback: Option<Rc<DecodeErrorCallback>>,
comp: InRealm,
can_gc: CanGc,
) -> Rc<Promise> {
// Step 1.
let promise = Promise::new_in_current_realm(comp, can_gc);
if audio_data.len() > 0 {
// Step 2.
// XXX detach array buffer.
let uuid = Uuid::new_v4().simple().to_string();
let uuid_ = uuid.clone();
self.decode_resolvers.borrow_mut().insert(
uuid.clone(),
DecodeResolver {
promise: promise.clone(),
success_callback: decode_success_callback,
error_callback: decode_error_callback,
},
);
let audio_data = audio_data.to_vec();
let decoded_audio = Arc::new(Mutex::new(Vec::new()));
let decoded_audio_ = decoded_audio.clone();
let decoded_audio__ = decoded_audio.clone();
            // servo-media reports an audio channel position mask in the
            // AudioDecoderCallbacks progress callback; that position may
            // not match the channel's index in the decoded_audio Vec.
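            // Worked example (assumed bitmask semantics): a position mask of
            // 0b0001 maps to channel index 0 and 0b0100 to index 2, via the
            // log2(mask) computation below.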
let channels = Arc::new(Mutex::new(HashMap::new()));
let this = Trusted::new(self);
let this_ = this.clone();
let task_source = self
.global()
.task_manager()
.dom_manipulation_task_source()
.to_sendable();
let task_source_clone = task_source.clone();
let callbacks = AudioDecoderCallbacks::new()
.ready(move |channel_count| {
decoded_audio
.lock()
.unwrap()
.resize(channel_count as usize, Vec::new());
})
.progress(move |buffer, channel_pos_mask| {
let mut decoded_audio = decoded_audio_.lock().unwrap();
let mut channels = channels.lock().unwrap();
let channel = match channels.entry(channel_pos_mask) {
Entry::Occupied(entry) => *entry.get(),
Entry::Vacant(entry) => {
let x = (channel_pos_mask as f32).log2() as usize;
*entry.insert(x)
},
};
decoded_audio[channel].extend_from_slice((*buffer).as_ref());
})
.eos(move || {
task_source.queue(task!(audio_decode_eos: move || {
let this = this.root();
let decoded_audio = decoded_audio__.lock().unwrap();
let length = if !decoded_audio.is_empty() {
decoded_audio[0].len()
} else {
0
};
let buffer = AudioBuffer::new(
this.global().as_window(),
decoded_audio.len() as u32 /* number of channels */,
length as u32,
this.sample_rate,
Some(decoded_audio.as_slice()),
CanGc::note());
let mut resolvers = this.decode_resolvers.borrow_mut();
assert!(resolvers.contains_key(&uuid_));
let resolver = resolvers.remove(&uuid_).unwrap();
if let Some(callback) = resolver.success_callback {
let _ = callback.Call__(&buffer, ExceptionHandling::Report, CanGc::note());
}
resolver.promise.resolve_native(&buffer, CanGc::note());
}));
})
.error(move |error| {
                    task_source_clone.queue(task!(audio_decode_error: move || {
let this = this_.root();
let mut resolvers = this.decode_resolvers.borrow_mut();
assert!(resolvers.contains_key(&uuid));
let resolver = resolvers.remove(&uuid).unwrap();
if let Some(callback) = resolver.error_callback {
let _ = callback.Call__(
&DOMException::new(&this.global(), DOMErrorName::DataCloneError, CanGc::note()),
ExceptionHandling::Report, CanGc::note());
}
let error = format!("Audio decode error {:?}", error);
resolver.promise.reject_error(Error::Type(error), CanGc::note());
}));
})
.build();
self.audio_context_impl
.lock()
.unwrap()
.decode_audio_data(audio_data, callbacks);
} else {
// Step 3.
promise.reject_error(Error::DataClone(None), can_gc);
return promise;
}
// Step 4.
promise
}
/// <https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createiirfilter>
fn CreateIIRFilter(
&self,
feedforward: Vec<Finite<f64>>,
feedback: Vec<Finite<f64>>,
can_gc: CanGc,
) -> Fallible<DomRoot<IIRFilterNode>> {
let opts = IIRFilterOptions {
parent: AudioNodeOptions::empty(),
feedback,
feedforward,
};
IIRFilterNode::new(self.global().as_window(), self, &opts, can_gc)
}
}
impl Convert<AudioContextOptions> for BaseAudioContextOptions {
fn convert(self) -> AudioContextOptions {
match self {
BaseAudioContextOptions::AudioContext(options) => {
AudioContextOptions::RealTimeAudioContext(options)
},
BaseAudioContextOptions::OfflineAudioContext(options) => {
AudioContextOptions::OfflineAudioContext(options)
},
}
}
}
impl Convert<AudioContextState> for ProcessingState {
fn convert(self) -> AudioContextState {
match self {
ProcessingState::Suspended => AudioContextState::Suspended,
ProcessingState::Running => AudioContextState::Running,
ProcessingState::Closed => AudioContextState::Closed,
}
}
}
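// Illustrative use of the conversion above (an assumed call site, not part
// of this commit): syncing the DOM-side state flag with servo-media:
//
//     let state: AudioContextState = context.control_thread_state().convert();
//     context.set_state_attribute(state);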

@@ -0,0 +1,220 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use std::f32;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::biquad_filter_node::{
BiquadFilterNodeMessage, BiquadFilterNodeOptions, FilterType,
};
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioNodeType};
use servo_media::audio::param::ParamType;
use crate::conversions::Convert;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper};
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use crate::dom::bindings::codegen::Bindings::BiquadFilterNodeBinding::{
BiquadFilterNodeMethods, BiquadFilterOptions, BiquadFilterType,
};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct BiquadFilterNode {
node: AudioNode,
gain: Dom<AudioParam>,
frequency: Dom<AudioParam>,
q: Dom<AudioParam>,
detune: Dom<AudioParam>,
filter: Cell<BiquadFilterType>,
}
impl BiquadFilterNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &BiquadFilterOptions,
can_gc: CanGc,
) -> Fallible<BiquadFilterNode> {
let node_options =
options
.parent
.unwrap_or(2, ChannelCountMode::Max, ChannelInterpretation::Speakers);
let filter = Cell::new(options.type_);
let options = options.convert();
let node = AudioNode::new_inherited(
AudioNodeInit::BiquadFilterNode(options),
context,
node_options,
1, // inputs
1, // outputs
)?;
let gain = AudioParam::new(
window,
context,
node.node_id(),
AudioNodeType::BiquadFilterNode,
ParamType::Gain,
AutomationRate::A_rate,
options.gain, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let q = AudioParam::new(
window,
context,
node.node_id(),
AudioNodeType::BiquadFilterNode,
ParamType::Q,
AutomationRate::A_rate,
options.q, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let frequency = AudioParam::new(
window,
context,
node.node_id(),
AudioNodeType::BiquadFilterNode,
ParamType::Frequency,
AutomationRate::A_rate,
options.frequency, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let detune = AudioParam::new(
window,
context,
node.node_id(),
AudioNodeType::BiquadFilterNode,
ParamType::Detune,
AutomationRate::A_rate,
options.detune, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
Ok(BiquadFilterNode {
node,
filter,
gain: Dom::from_ref(&gain),
q: Dom::from_ref(&q),
frequency: Dom::from_ref(&frequency),
detune: Dom::from_ref(&detune),
})
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &BiquadFilterOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<BiquadFilterNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &BiquadFilterOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<BiquadFilterNode>> {
let node = BiquadFilterNode::new_inherited(window, context, options, can_gc)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl BiquadFilterNodeMethods<crate::DomTypeHolder> for BiquadFilterNode {
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-biquadfilternode-context-options
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &BiquadFilterOptions,
) -> Fallible<DomRoot<BiquadFilterNode>> {
BiquadFilterNode::new_with_proto(window, proto, context, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-gain
fn Gain(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.gain)
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-q
fn Q(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.q)
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-detune
fn Detune(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.detune)
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-frequency
fn Frequency(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.frequency)
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-type
fn Type(&self) -> BiquadFilterType {
self.filter.get()
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-type
fn SetType(&self, filter: BiquadFilterType) {
self.filter.set(filter);
self.node.message(AudioNodeMessage::BiquadFilterNode(
BiquadFilterNodeMessage::SetFilterType(filter.convert()),
));
}
}
impl Convert<BiquadFilterNodeOptions> for &BiquadFilterOptions {
fn convert(self) -> BiquadFilterNodeOptions {
BiquadFilterNodeOptions {
gain: *self.gain,
q: *self.Q,
frequency: *self.frequency,
detune: *self.detune,
filter: self.type_.convert(),
}
}
}
impl Convert<FilterType> for BiquadFilterType {
fn convert(self) -> FilterType {
match self {
BiquadFilterType::Lowpass => FilterType::LowPass,
BiquadFilterType::Highpass => FilterType::HighPass,
BiquadFilterType::Bandpass => FilterType::BandPass,
BiquadFilterType::Lowshelf => FilterType::LowShelf,
BiquadFilterType::Highshelf => FilterType::HighShelf,
BiquadFilterType::Peaking => FilterType::Peaking,
BiquadFilterType::Allpass => FilterType::AllPass,
BiquadFilterType::Notch => FilterType::Notch,
}
}
}
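// Illustrative effect of `SetType` above (hypothetical call):
//
//     node.SetType(BiquadFilterType::Lowpass);
//     // -> AudioNodeMessage::BiquadFilterNode(SetFilterType(FilterType::LowPass))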

@@ -0,0 +1,108 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::channel_node::ChannelNodeOptions;
use servo_media::audio::node::AudioNodeInit;
use crate::conversions::Convert;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper, MAX_CHANNEL_COUNT};
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::ChannelMergerNodeBinding::{
ChannelMergerNodeMethods, ChannelMergerOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct ChannelMergerNode {
node: AudioNode,
}
impl ChannelMergerNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
_: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<ChannelMergerNode> {
let node_options = options.parent.unwrap_or(
1,
ChannelCountMode::Explicit,
ChannelInterpretation::Speakers,
);
if node_options.count != 1 || node_options.mode != ChannelCountMode::Explicit {
return Err(Error::InvalidState);
}
if options.numberOfInputs < 1 || options.numberOfInputs > MAX_CHANNEL_COUNT {
return Err(Error::IndexSize);
}
let num_inputs = options.numberOfInputs;
let node = AudioNode::new_inherited(
AudioNodeInit::ChannelMergerNode(options.convert()),
context,
node_options,
num_inputs, // inputs
1, // outputs
)?;
Ok(ChannelMergerNode { node })
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<ChannelMergerNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<ChannelMergerNode>> {
let node = ChannelMergerNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl ChannelMergerNodeMethods<crate::DomTypeHolder> for ChannelMergerNode {
/// <https://webaudio.github.io/web-audio-api/#dom-channelmergernode-channelmergernode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &ChannelMergerOptions,
) -> Fallible<DomRoot<ChannelMergerNode>> {
ChannelMergerNode::new_with_proto(window, proto, context, options, can_gc)
}
}
impl Convert<ChannelNodeOptions> for ChannelMergerOptions {
fn convert(self) -> ChannelNodeOptions {
ChannelNodeOptions {
channels: self.numberOfInputs as u8,
}
}
}
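// Illustrative constraints enforced by `new_inherited` above (hypothetical
// option values):
//
//     opts.numberOfInputs = 0;                  // Err(IndexSize): outside 1..=MAX_CHANNEL_COUNT
//     opts.parent.channelCount = Some(2);       // Err(InvalidState): count must stay 1
//     opts.parent.channelCountMode = Some(Max); // Err(InvalidState): mode must stay Explicit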

@@ -0,0 +1,100 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::node::AudioNodeInit;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper, MAX_CHANNEL_COUNT};
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::ChannelSplitterNodeBinding::{
ChannelSplitterNodeMethods, ChannelSplitterOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct ChannelSplitterNode {
node: AudioNode,
}
impl ChannelSplitterNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
_: &Window,
context: &BaseAudioContext,
options: &ChannelSplitterOptions,
) -> Fallible<ChannelSplitterNode> {
if options.numberOfOutputs < 1 || options.numberOfOutputs > MAX_CHANNEL_COUNT {
return Err(Error::IndexSize);
}
let node_options = options.parent.unwrap_or(
options.numberOfOutputs,
ChannelCountMode::Explicit,
ChannelInterpretation::Discrete,
);
if node_options.count != options.numberOfOutputs ||
node_options.mode != ChannelCountMode::Explicit ||
node_options.interpretation != ChannelInterpretation::Discrete
{
return Err(Error::InvalidState);
}
let node = AudioNode::new_inherited(
AudioNodeInit::ChannelSplitterNode,
context,
node_options,
1, // inputs
options.numberOfOutputs, // outputs
)?;
Ok(ChannelSplitterNode { node })
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &ChannelSplitterOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<ChannelSplitterNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &ChannelSplitterOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<ChannelSplitterNode>> {
let node = ChannelSplitterNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl ChannelSplitterNodeMethods<crate::DomTypeHolder> for ChannelSplitterNode {
/// <https://webaudio.github.io/web-audio-api/#dom-channelsplitternode-channelsplitternode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &ChannelSplitterOptions,
) -> Fallible<DomRoot<ChannelSplitterNode>> {
ChannelSplitterNode::new_with_proto(window, proto, context, options, can_gc)
}
}
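// Illustrative constraints enforced by `new_inherited` above (hypothetical
// option values):
//
//     opts.numberOfOutputs = 0;                            // Err(IndexSize)
//     opts.parent.channelCount = Some(2);                  // Err(InvalidState) unless it equals numberOfOutputs
//     opts.parent.channelInterpretation = Some(Speakers);  // Err(InvalidState): must stay Discrete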

@@ -0,0 +1,121 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::f32;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::constant_source_node::ConstantSourceNodeOptions as ServoMediaConstantSourceOptions;
use servo_media::audio::node::{AudioNodeInit, AudioNodeType};
use servo_media::audio::param::ParamType;
use crate::conversions::Convert;
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::audioscheduledsourcenode::AudioScheduledSourceNode;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use crate::dom::bindings::codegen::Bindings::ConstantSourceNodeBinding::{
ConstantSourceNodeMethods, ConstantSourceOptions,
};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct ConstantSourceNode {
source_node: AudioScheduledSourceNode,
offset: Dom<AudioParam>,
}
impl ConstantSourceNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &ConstantSourceOptions,
can_gc: CanGc,
) -> Fallible<ConstantSourceNode> {
let node_options = Default::default();
let offset = *options.offset;
let source_node = AudioScheduledSourceNode::new_inherited(
AudioNodeInit::ConstantSourceNode(options.convert()),
context,
node_options, /* 2, MAX, Speakers */
0, /* inputs */
1, /* outputs */
)?;
let node_id = source_node.node().node_id();
let offset = AudioParam::new(
window,
context,
node_id,
AudioNodeType::ConstantSourceNode,
ParamType::Offset,
AutomationRate::A_rate,
offset,
f32::MIN,
f32::MAX,
can_gc,
);
Ok(ConstantSourceNode {
source_node,
offset: Dom::from_ref(&offset),
})
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &ConstantSourceOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<ConstantSourceNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &ConstantSourceOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<ConstantSourceNode>> {
let node = ConstantSourceNode::new_inherited(window, context, options, can_gc)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl ConstantSourceNodeMethods<crate::DomTypeHolder> for ConstantSourceNode {
// https://webaudio.github.io/web-audio-api/#dom-constantsourcenode-constantsourcenode
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &ConstantSourceOptions,
) -> Fallible<DomRoot<ConstantSourceNode>> {
ConstantSourceNode::new_with_proto(window, proto, context, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-constantsourcenode-offset
fn Offset(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.offset)
}
}
impl Convert<ServoMediaConstantSourceOptions> for ConstantSourceOptions {
fn convert(self) -> ServoMediaConstantSourceOptions {
ServoMediaConstantSourceOptions {
offset: *self.offset,
}
}
}

@@ -0,0 +1,121 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::f32;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::gain_node::GainNodeOptions;
use servo_media::audio::node::{AudioNodeInit, AudioNodeType};
use servo_media::audio::param::ParamType;
use crate::conversions::Convert;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper};
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use crate::dom::bindings::codegen::Bindings::GainNodeBinding::{GainNodeMethods, GainOptions};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct GainNode {
node: AudioNode,
gain: Dom<AudioParam>,
}
impl GainNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &GainOptions,
can_gc: CanGc,
) -> Fallible<GainNode> {
let node_options =
options
.parent
.unwrap_or(2, ChannelCountMode::Max, ChannelInterpretation::Speakers);
let gain = *options.gain;
let node = AudioNode::new_inherited(
AudioNodeInit::GainNode(options.convert()),
context,
node_options,
1, // inputs
1, // outputs
)?;
let gain = AudioParam::new(
window,
context,
node.node_id(),
AudioNodeType::GainNode,
ParamType::Gain,
AutomationRate::A_rate,
gain, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
Ok(GainNode {
node,
gain: Dom::from_ref(&gain),
})
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &GainOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<GainNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &GainOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<GainNode>> {
let node = GainNode::new_inherited(window, context, options, can_gc)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl GainNodeMethods<crate::DomTypeHolder> for GainNode {
// https://webaudio.github.io/web-audio-api/#dom-gainnode-gainnode
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &GainOptions,
) -> Fallible<DomRoot<GainNode>> {
GainNode::new_with_proto(window, proto, context, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-gainnode-gain
fn Gain(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.gain)
}
}
impl Convert<GainNodeOptions> for GainOptions {
fn convert(self) -> GainNodeOptions {
GainNodeOptions { gain: *self.gain }
}
}
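// Illustrative wiring (assumed usage; `CreateGain` in baseaudiocontext.rs
// above goes through this path):
//
//     let gain_node = GainNode::new(window, context, &GainOptions::empty(), can_gc)?;
//     let param = gain_node.Gain();       // DomRoot<AudioParam>, a-rate
//     param.SetValue(Finite::wrap(0.5));  // -> UserAutomationEvent::SetValue(0.5)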

@@ -0,0 +1,154 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::sync::Arc;
use dom_struct::dom_struct;
use js::gc::CustomAutoRooterGuard;
use js::rust::HandleObject;
use js::typedarray::Float32Array;
use servo_media::audio::iir_filter_node::{IIRFilterNode as IIRFilter, IIRFilterNodeOptions};
use servo_media::audio::node::AudioNodeInit;
use crate::conversions::Convert;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper};
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::IIRFilterNodeBinding::{
IIRFilterNodeMethods, IIRFilterOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::DomRoot;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct IIRFilterNode {
node: AudioNode,
feedforward: Vec<Finite<f64>>,
feedback: Vec<Finite<f64>>,
}
impl IIRFilterNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
_window: &Window,
context: &BaseAudioContext,
options: &IIRFilterOptions,
) -> Fallible<IIRFilterNode> {
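        // Validate the filter definition up front: both coefficient lists
        // must hold between 1 and 20 values (NotSupportedError otherwise),
        // the feedforward list cannot be all zeros, and the first feedback
        // coefficient must be non-zero (InvalidStateError otherwise).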
if !(1..=20).contains(&options.feedforward.len()) ||
!(1..=20).contains(&options.feedback.len())
{
return Err(Error::NotSupported);
}
if options.feedforward.iter().all(|v| **v == 0.0) || *options.feedback[0] == 0.0 {
return Err(Error::InvalidState);
}
let node_options =
options
.parent
.unwrap_or(2, ChannelCountMode::Max, ChannelInterpretation::Speakers);
        let feedforward = options.feedforward.clone();
        let feedback = options.feedback.clone();
let init_options = options.clone().convert();
let node = AudioNode::new_inherited(
AudioNodeInit::IIRFilterNode(init_options),
context,
node_options,
1, // inputs
1, // outputs
)?;
Ok(IIRFilterNode {
node,
feedforward,
feedback,
})
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &IIRFilterOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<IIRFilterNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &IIRFilterOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<IIRFilterNode>> {
let node = IIRFilterNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl IIRFilterNodeMethods<crate::DomTypeHolder> for IIRFilterNode {
/// <https://webaudio.github.io/web-audio-api/#dom-iirfilternode-iirfilternode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &IIRFilterOptions,
) -> Fallible<DomRoot<IIRFilterNode>> {
IIRFilterNode::new_with_proto(window, proto, context, options, can_gc)
}
    /// <https://webaudio.github.io/web-audio-api/#dom-iirfilternode-getfrequencyresponse>
    #[allow(unsafe_code)]
fn GetFrequencyResponse(
&self,
frequency_hz: CustomAutoRooterGuard<Float32Array>,
mut mag_response: CustomAutoRooterGuard<Float32Array>,
mut phase_response: CustomAutoRooterGuard<Float32Array>,
) -> Result<(), Error> {
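        // All three arrays must have the same length, per the spec. The typed
        // arrays are copied out, servo-media computes the response, and the
        // magnitude/phase results are written back in place.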
let len = frequency_hz.len();
if len != mag_response.len() || len != phase_response.len() {
return Err(Error::InvalidAccess);
}
        let feedforward: Vec<f64> = self.feedforward.iter().map(|v| **v).collect();
        let feedback: Vec<f64> = self.feedback.iter().map(|v| **v).collect();
let frequency_hz_vec = frequency_hz.to_vec();
let mut mag_response_vec = mag_response.to_vec();
let mut phase_response_vec = phase_response.to_vec();
IIRFilter::get_frequency_response(
&feedforward,
&feedback,
&frequency_hz_vec,
&mut mag_response_vec,
&mut phase_response_vec,
);
mag_response.update(&mag_response_vec);
phase_response.update(&phase_response_vec);
Ok(())
}
}
impl Convert<IIRFilterNodeOptions> for IIRFilterOptions {
fn convert(self) -> IIRFilterNodeOptions {
        let feedforward: Vec<f64> = self.feedforward.iter().map(|v| **v).collect();
        let feedback: Vec<f64> = self.feedback.iter().map(|v| **v).collect();
IIRFilterNodeOptions {
feedforward: Arc::new(feedforward),
feedback: Arc::new(feedback),
}
}
}

@ -0,0 +1,106 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::sync::mpsc;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::media_element_source_node::MediaElementSourceNodeMessage;
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage};
use crate::dom::audio::audiocontext::AudioContext;
use crate::dom::audio::audionode::AudioNode;
use crate::dom::bindings::codegen::Bindings::MediaElementAudioSourceNodeBinding::{
MediaElementAudioSourceNodeMethods, MediaElementAudioSourceOptions,
};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct MediaElementAudioSourceNode {
node: AudioNode,
media_element: Dom<HTMLMediaElement>,
}
impl MediaElementAudioSourceNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_inherited(
context: &AudioContext,
media_element: &HTMLMediaElement,
can_gc: CanGc,
) -> Fallible<MediaElementAudioSourceNode> {
let node = AudioNode::new_inherited(
AudioNodeInit::MediaElementSourceNode,
&context.base(),
Default::default(),
0,
1,
)?;
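        // Fetch the audio renderer from the servo-media node over a
        // synchronous channel and install it on the media element, so decoded
        // audio is routed into the audio graph.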
let (sender, receiver) = mpsc::channel();
node.message(AudioNodeMessage::MediaElementSourceNode(
MediaElementSourceNodeMessage::GetAudioRenderer(sender),
));
let audio_renderer = receiver.recv().unwrap();
media_element.set_audio_renderer(audio_renderer, can_gc);
let media_element = Dom::from_ref(media_element);
Ok(MediaElementAudioSourceNode {
node,
media_element,
})
}
pub(crate) fn new(
window: &Window,
context: &AudioContext,
media_element: &HTMLMediaElement,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaElementAudioSourceNode>> {
Self::new_with_proto(window, None, context, media_element, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &AudioContext,
media_element: &HTMLMediaElement,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaElementAudioSourceNode>> {
let node = MediaElementAudioSourceNode::new_inherited(context, media_element, can_gc)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl MediaElementAudioSourceNodeMethods<crate::DomTypeHolder> for MediaElementAudioSourceNode {
/// <https://webaudio.github.io/web-audio-api/#dom-mediaelementaudiosourcenode-mediaelementaudiosourcenode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &AudioContext,
options: &MediaElementAudioSourceOptions,
) -> Fallible<DomRoot<MediaElementAudioSourceNode>> {
MediaElementAudioSourceNode::new_with_proto(
window,
proto,
context,
&options.mediaElement,
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-mediaelementaudiosourcenode-mediaelement>
fn MediaElement(&self) -> DomRoot<HTMLMediaElement> {
DomRoot::from_ref(&*self.media_element)
}
}

@ -0,0 +1,104 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::ServoMedia;
use servo_media::audio::node::AudioNodeInit;
use servo_media::streams::MediaStreamType;
use crate::dom::audio::audiocontext::AudioContext;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper};
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
AudioNodeOptions, ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::MediaStreamAudioDestinationNodeBinding::MediaStreamAudioDestinationNodeMethods;
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{DomGlobal, reflect_dom_object_with_proto};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::mediastream::MediaStream;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct MediaStreamAudioDestinationNode {
node: AudioNode,
stream: Dom<MediaStream>,
}
impl MediaStreamAudioDestinationNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
context: &AudioContext,
options: &AudioNodeOptions,
can_gc: CanGc,
) -> Fallible<MediaStreamAudioDestinationNode> {
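        // Create a media stream plus the socket that feeds it; rendered audio
        // is pushed through the socket into the MediaStream exposed by the
        // `stream` attribute.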
let media = ServoMedia::get();
let (socket, id) = media.create_stream_and_socket(MediaStreamType::Audio);
let stream = MediaStream::new_single(&context.global(), id, MediaStreamType::Audio, can_gc);
let node_options = options.unwrap_or(
2,
ChannelCountMode::Explicit,
ChannelInterpretation::Speakers,
);
let node = AudioNode::new_inherited(
AudioNodeInit::MediaStreamDestinationNode(socket),
context.upcast(),
node_options,
1, // inputs
0, // outputs
)?;
Ok(MediaStreamAudioDestinationNode {
node,
stream: Dom::from_ref(&stream),
})
}
pub(crate) fn new(
window: &Window,
context: &AudioContext,
options: &AudioNodeOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamAudioDestinationNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &AudioContext,
options: &AudioNodeOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamAudioDestinationNode>> {
let node = MediaStreamAudioDestinationNode::new_inherited(context, options, can_gc)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl MediaStreamAudioDestinationNodeMethods<crate::DomTypeHolder>
for MediaStreamAudioDestinationNode
{
/// <https://webaudio.github.io/web-audio-api/#dom-mediastreamaudiodestinationnode-mediastreamaudiodestinationnode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &AudioContext,
options: &AudioNodeOptions,
) -> Fallible<DomRoot<MediaStreamAudioDestinationNode>> {
MediaStreamAudioDestinationNode::new_with_proto(window, proto, context, options, can_gc)
}
/// <https://webaudio.github.io/web-audio-api/#dom-mediastreamaudiodestinationnode-stream>
fn Stream(&self) -> DomRoot<MediaStream> {
DomRoot::from_ref(&self.stream)
}
}

@ -0,0 +1,103 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::node::AudioNodeInit;
use servo_media::streams::MediaStreamType;
use crate::dom::audio::audiocontext::AudioContext;
use crate::dom::audio::audionode::AudioNode;
use crate::dom::bindings::codegen::Bindings::MediaStreamAudioSourceNodeBinding::{
MediaStreamAudioSourceNodeMethods, MediaStreamAudioSourceOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::mediastream::MediaStream;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct MediaStreamAudioSourceNode {
node: AudioNode,
stream: Dom<MediaStream>,
}
impl MediaStreamAudioSourceNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
context: &AudioContext,
stream: &MediaStream,
) -> Fallible<MediaStreamAudioSourceNode> {
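        // The node is fed by the stream's first audio track; constructing
        // from a stream without one is an InvalidStateError, per the spec.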
let track = stream
.get_tracks()
.iter()
.find(|t| t.ty() == MediaStreamType::Audio)
.ok_or(Error::InvalidState)?
.id();
let node = AudioNode::new_inherited(
AudioNodeInit::MediaStreamSourceNode(track),
context.upcast(),
Default::default(),
0, // inputs
1, // outputs
)?;
Ok(MediaStreamAudioSourceNode {
node,
stream: Dom::from_ref(stream),
})
}
pub(crate) fn new(
window: &Window,
context: &AudioContext,
stream: &MediaStream,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamAudioSourceNode>> {
Self::new_with_proto(window, None, context, stream, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &AudioContext,
stream: &MediaStream,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamAudioSourceNode>> {
let node = MediaStreamAudioSourceNode::new_inherited(context, stream)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl MediaStreamAudioSourceNodeMethods<crate::DomTypeHolder> for MediaStreamAudioSourceNode {
/// <https://webaudio.github.io/web-audio-api/#dom-mediastreamaudiosourcenode-mediastreamaudiosourcenode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &AudioContext,
options: &MediaStreamAudioSourceOptions,
) -> Fallible<DomRoot<MediaStreamAudioSourceNode>> {
MediaStreamAudioSourceNode::new_with_proto(
window,
proto,
context,
&options.mediaStream,
can_gc,
)
}
/// <https://webaudio.github.io/web-audio-api/#dom-MediaStreamAudioSourceNode-stream>
fn MediaStream(&self) -> DomRoot<MediaStream> {
DomRoot::from_ref(&self.stream)
}
}

@ -0,0 +1,93 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::node::AudioNodeInit;
use crate::dom::audio::audiocontext::AudioContext;
use crate::dom::audio::audionode::AudioNode;
use crate::dom::bindings::codegen::Bindings::MediaStreamTrackAudioSourceNodeBinding::{
MediaStreamTrackAudioSourceNodeMethods, MediaStreamTrackAudioSourceOptions,
};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::mediastreamtrack::MediaStreamTrack;
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct MediaStreamTrackAudioSourceNode {
node: AudioNode,
track: Dom<MediaStreamTrack>,
}
impl MediaStreamTrackAudioSourceNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
context: &AudioContext,
track: &MediaStreamTrack,
) -> Fallible<MediaStreamTrackAudioSourceNode> {
let node = AudioNode::new_inherited(
AudioNodeInit::MediaStreamSourceNode(track.id()),
context.upcast(),
Default::default(),
0, // inputs
1, // outputs
)?;
Ok(MediaStreamTrackAudioSourceNode {
node,
track: Dom::from_ref(track),
})
}
pub(crate) fn new(
window: &Window,
context: &AudioContext,
track: &MediaStreamTrack,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamTrackAudioSourceNode>> {
Self::new_with_proto(window, None, context, track, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &AudioContext,
track: &MediaStreamTrack,
can_gc: CanGc,
) -> Fallible<DomRoot<MediaStreamTrackAudioSourceNode>> {
let node = MediaStreamTrackAudioSourceNode::new_inherited(context, track)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl MediaStreamTrackAudioSourceNodeMethods<crate::DomTypeHolder>
for MediaStreamTrackAudioSourceNode
{
/// <https://webaudio.github.io/web-audio-api/#dom-mediastreamtrackaudiosourcenode-mediastreamtrackaudiosourcenode>
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &AudioContext,
options: &MediaStreamTrackAudioSourceOptions,
) -> Fallible<DomRoot<MediaStreamTrackAudioSourceNode>> {
MediaStreamTrackAudioSourceNode::new_with_proto(
window,
proto,
context,
&options.mediaStreamTrack,
can_gc,
)
}
}

@ -0,0 +1,31 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
pub(crate) mod analysernode;
pub(crate) mod audiobuffer;
pub(crate) mod audiobuffersourcenode;
pub(crate) mod audiocontext;
pub(crate) mod audiodestinationnode;
pub(crate) mod audiolistener;
pub(crate) mod audionode;
pub(crate) mod audioparam;
pub(crate) mod audioscheduledsourcenode;
pub(crate) mod audiotrack;
pub(crate) mod audiotracklist;
pub(crate) mod baseaudiocontext;
pub(crate) mod biquadfilternode;
pub(crate) mod channelmergernode;
pub(crate) mod channelsplitternode;
pub(crate) mod constantsourcenode;
pub(crate) mod gainnode;
pub(crate) mod iirfilternode;
pub(crate) mod mediaelementaudiosourcenode;
pub(crate) mod mediastreamaudiodestinationnode;
pub(crate) mod mediastreamaudiosourcenode;
pub(crate) mod mediastreamtrackaudiosourcenode;
pub(crate) mod offlineaudiocompletionevent;
pub(crate) mod offlineaudiocontext;
pub(crate) mod oscillatornode;
pub(crate) mod pannernode;
pub(crate) mod stereopannernode;

@ -0,0 +1,106 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use js::rust::HandleObject;
use stylo_atoms::Atom;
use crate::dom::audio::audiobuffer::AudioBuffer;
use crate::dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use crate::dom::bindings::codegen::Bindings::OfflineAudioCompletionEventBinding::{
OfflineAudioCompletionEventInit, OfflineAudioCompletionEventMethods,
};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::event::{Event, EventBubbles, EventCancelable};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct OfflineAudioCompletionEvent {
event: Event,
rendered_buffer: Dom<AudioBuffer>,
}
impl OfflineAudioCompletionEvent {
pub(crate) fn new_inherited(rendered_buffer: &AudioBuffer) -> OfflineAudioCompletionEvent {
OfflineAudioCompletionEvent {
event: Event::new_inherited(),
rendered_buffer: Dom::from_ref(rendered_buffer),
}
}
pub(crate) fn new(
window: &Window,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
rendered_buffer: &AudioBuffer,
can_gc: CanGc,
) -> DomRoot<OfflineAudioCompletionEvent> {
Self::new_with_proto(
window,
None,
type_,
bubbles,
cancelable,
rendered_buffer,
can_gc,
)
}
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
rendered_buffer: &AudioBuffer,
can_gc: CanGc,
) -> DomRoot<OfflineAudioCompletionEvent> {
let event = Box::new(OfflineAudioCompletionEvent::new_inherited(rendered_buffer));
let ev = reflect_dom_object_with_proto(event, window, proto, can_gc);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bool::from(bubbles), bool::from(cancelable));
}
ev
}
}
impl OfflineAudioCompletionEventMethods<crate::DomTypeHolder> for OfflineAudioCompletionEvent {
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocompletionevent-offlineaudiocompletionevent
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
type_: DOMString,
init: &OfflineAudioCompletionEventInit,
) -> Fallible<DomRoot<OfflineAudioCompletionEvent>> {
let bubbles = EventBubbles::from(init.parent.bubbles);
let cancelable = EventCancelable::from(init.parent.cancelable);
Ok(OfflineAudioCompletionEvent::new_with_proto(
window,
proto,
Atom::from(type_),
bubbles,
cancelable,
&init.renderedBuffer,
can_gc,
))
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocompletionevent-renderedbuffer
fn RenderedBuffer(&self) -> DomRoot<AudioBuffer> {
DomRoot::from_ref(&*self.rendered_buffer)
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}

@ -0,0 +1,232 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use std::rc::Rc;
use std::sync::{Arc, Mutex, mpsc};
use std::thread::Builder;
use base::id::PipelineId;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::context::OfflineAudioContextOptions as ServoMediaOfflineAudioContextOptions;
use crate::dom::audio::audiobuffer::{AudioBuffer, MAX_SAMPLE_RATE, MIN_SAMPLE_RATE};
use crate::dom::audio::audionode::MAX_CHANNEL_COUNT;
use crate::dom::audio::baseaudiocontext::{BaseAudioContext, BaseAudioContextOptions};
use crate::dom::audio::offlineaudiocompletionevent::OfflineAudioCompletionEvent;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::BaseAudioContextBinding::BaseAudioContext_Binding::BaseAudioContextMethods;
use crate::dom::bindings::codegen::Bindings::OfflineAudioContextBinding::{
OfflineAudioContextMethods, OfflineAudioContextOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{DomGlobal, reflect_dom_object_with_proto};
use crate::dom::bindings::root::DomRoot;
use crate::dom::event::{Event, EventBubbles, EventCancelable};
use crate::dom::promise::Promise;
use crate::dom::window::Window;
use crate::realms::InRealm;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct OfflineAudioContext {
context: BaseAudioContext,
channel_count: u32,
length: u32,
rendering_started: Cell<bool>,
#[ignore_malloc_size_of = "promises are hard"]
pending_rendering_promise: DomRefCell<Option<Rc<Promise>>>,
}
impl OfflineAudioContext {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_inherited(
channel_count: u32,
length: u32,
sample_rate: f32,
pipeline_id: PipelineId,
) -> Fallible<OfflineAudioContext> {
let options = ServoMediaOfflineAudioContextOptions {
channels: channel_count as u8,
length: length as usize,
sample_rate,
};
let context = BaseAudioContext::new_inherited(
BaseAudioContextOptions::OfflineAudioContext(options),
pipeline_id,
)?;
Ok(OfflineAudioContext {
context,
channel_count,
length,
rendering_started: Cell::new(false),
pending_rendering_promise: Default::default(),
})
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new(
window: &Window,
proto: Option<HandleObject>,
channel_count: u32,
length: u32,
sample_rate: f32,
can_gc: CanGc,
) -> Fallible<DomRoot<OfflineAudioContext>> {
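        // Per the spec, reject out-of-range construction parameters with
        // NotSupportedError: zero or too many channels, a zero length, or a
        // sample rate outside the supported range.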
if channel_count > MAX_CHANNEL_COUNT ||
channel_count == 0 ||
length == 0 ||
!(MIN_SAMPLE_RATE..=MAX_SAMPLE_RATE).contains(&sample_rate)
{
return Err(Error::NotSupported);
}
let pipeline_id = window.pipeline_id();
let context =
OfflineAudioContext::new_inherited(channel_count, length, sample_rate, pipeline_id)?;
Ok(reflect_dom_object_with_proto(
Box::new(context),
window,
proto,
can_gc,
))
}
}
impl OfflineAudioContextMethods<crate::DomTypeHolder> for OfflineAudioContext {
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-offlineaudiocontext
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
options: &OfflineAudioContextOptions,
) -> Fallible<DomRoot<OfflineAudioContext>> {
OfflineAudioContext::new(
window,
proto,
options.numberOfChannels,
options.length,
*options.sampleRate,
can_gc,
)
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-offlineaudiocontext-numberofchannels-length-samplerate
fn Constructor_(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
number_of_channels: u32,
length: u32,
sample_rate: Finite<f32>,
) -> Fallible<DomRoot<OfflineAudioContext>> {
OfflineAudioContext::new(
window,
proto,
number_of_channels,
length,
*sample_rate,
can_gc,
)
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-oncomplete
event_handler!(complete, GetOncomplete, SetOncomplete);
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-length
fn Length(&self) -> u32 {
self.length
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-startrendering
fn StartRendering(&self, comp: InRealm, can_gc: CanGc) -> Rc<Promise> {
let promise = Promise::new_in_current_realm(comp, can_gc);
if self.rendering_started.get() {
promise.reject_error(Error::InvalidState, can_gc);
return promise;
}
self.rendering_started.set(true);
*self.pending_rendering_promise.borrow_mut() = Some(promise.clone());
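        // Rendering runs off the main thread: an EOS callback accumulates the
        // rendered blocks and signals a helper thread, which queues a DOM task
        // to build the AudioBuffer, resolve the promise, and fire `complete`.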
let processed_audio = Arc::new(Mutex::new(Vec::new()));
let processed_audio_ = processed_audio.clone();
let (sender, receiver) = mpsc::channel();
let sender = Mutex::new(sender);
self.context
.audio_context_impl()
.lock()
.unwrap()
.set_eos_callback(Box::new(move |buffer| {
processed_audio_
.lock()
.unwrap()
.extend_from_slice((*buffer).as_ref());
let _ = sender.lock().unwrap().send(());
}));
let this = Trusted::new(self);
let task_source = self
.global()
.task_manager()
.dom_manipulation_task_source()
.to_sendable();
Builder::new()
.name("OfflineACResolver".to_owned())
.spawn(move || {
let _ = receiver.recv();
task_source.queue(task!(resolve: move || {
let this = this.root();
let processed_audio = processed_audio.lock().unwrap();
let mut processed_audio: Vec<_> = processed_audio
.chunks(this.length as usize)
.map(|channel| channel.to_vec())
.collect();
                    // rendering can fail, in which case the EOS callback never
                    // fired and no channels were collected; pad with silence
                    if processed_audio.len() != this.channel_count as usize {
                        processed_audio.resize(this.channel_count as usize, Vec::new())
                    }
let buffer = AudioBuffer::new(
this.global().as_window(),
this.channel_count,
this.length,
*this.context.SampleRate(),
Some(processed_audio.as_slice()),
CanGc::note());
(*this.pending_rendering_promise.borrow_mut())
.take()
.unwrap()
.resolve_native(&buffer, CanGc::note());
let global = &this.global();
let window = global.as_window();
let event = OfflineAudioCompletionEvent::new(window,
atom!("complete"),
EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable,
&buffer, CanGc::note());
event.upcast::<Event>().fire(this.upcast(), CanGc::note());
}));
})
.unwrap();
if self
.context
.audio_context_impl()
.lock()
.unwrap()
.resume()
.is_err()
{
promise.reject_error(
Error::Type("Could not start offline rendering".to_owned()),
can_gc,
);
}
promise
}
}

@ -0,0 +1,184 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use std::f32;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioNodeType};
use servo_media::audio::oscillator_node::{
OscillatorNodeMessage, OscillatorNodeOptions as ServoMediaOscillatorOptions,
OscillatorType as ServoMediaOscillatorType,
};
use servo_media::audio::param::ParamType;
use crate::conversions::Convert;
use crate::dom::audio::audionode::AudioNodeOptionsHelper;
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::audioscheduledsourcenode::AudioScheduledSourceNode;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use crate::dom::bindings::codegen::Bindings::OscillatorNodeBinding::{
OscillatorNodeMethods, OscillatorOptions, OscillatorType,
};
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct OscillatorNode {
source_node: AudioScheduledSourceNode,
detune: Dom<AudioParam>,
frequency: Dom<AudioParam>,
oscillator_type: Cell<OscillatorType>,
}
impl OscillatorNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &OscillatorOptions,
) -> Fallible<OscillatorNode> {
let node_options =
options
.parent
.unwrap_or(2, ChannelCountMode::Max, ChannelInterpretation::Speakers);
let source_node = AudioScheduledSourceNode::new_inherited(
AudioNodeInit::OscillatorNode(options.convert()),
context,
node_options,
0, /* inputs */
1, /* outputs */
)?;
let node_id = source_node.node().node_id();
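        // Per the spec, frequency defaults to 440 Hz and detune to 0 cents;
        // both are a-rate parameters.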
let frequency = AudioParam::new(
window,
context,
node_id,
AudioNodeType::OscillatorNode,
ParamType::Frequency,
AutomationRate::A_rate,
            440.,     // default value
            f32::MIN, // min value
            f32::MAX, // max value
CanGc::note(),
);
let detune = AudioParam::new(
window,
context,
node_id,
AudioNodeType::OscillatorNode,
ParamType::Detune,
AutomationRate::A_rate,
            0.,         // default value
            -440. / 2., // min value
            440. / 2.,  // max value
CanGc::note(),
);
Ok(OscillatorNode {
source_node,
oscillator_type: Cell::new(options.type_),
frequency: Dom::from_ref(&frequency),
detune: Dom::from_ref(&detune),
})
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &OscillatorOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<OscillatorNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &OscillatorOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<OscillatorNode>> {
let node = OscillatorNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl OscillatorNodeMethods<crate::DomTypeHolder> for OscillatorNode {
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-oscillatornode
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &OscillatorOptions,
) -> Fallible<DomRoot<OscillatorNode>> {
OscillatorNode::new_with_proto(window, proto, context, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-frequency
fn Frequency(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.frequency)
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-detune
fn Detune(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.detune)
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-type
fn Type(&self) -> OscillatorType {
self.oscillator_type.get()
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-type
fn SetType(&self, type_: OscillatorType) -> ErrorResult {
if type_ == OscillatorType::Custom {
return Err(Error::InvalidState);
}
self.oscillator_type.set(type_);
self.source_node
.node()
.message(AudioNodeMessage::OscillatorNode(
OscillatorNodeMessage::SetOscillatorType(type_.convert()),
));
Ok(())
}
}
impl Convert<ServoMediaOscillatorOptions> for &OscillatorOptions {
fn convert(self) -> ServoMediaOscillatorOptions {
ServoMediaOscillatorOptions {
oscillator_type: self.type_.convert(),
freq: *self.frequency,
detune: *self.detune,
            periodic_wave_options: None, // TODO: wire up PeriodicWave support
}
}
}
impl Convert<ServoMediaOscillatorType> for OscillatorType {
fn convert(self) -> ServoMediaOscillatorType {
match self {
OscillatorType::Sine => ServoMediaOscillatorType::Sine,
OscillatorType::Square => ServoMediaOscillatorType::Square,
OscillatorType::Sawtooth => ServoMediaOscillatorType::Sawtooth,
OscillatorType::Triangle => ServoMediaOscillatorType::Triangle,
OscillatorType::Custom => ServoMediaOscillatorType::Custom,
}
}
}

@ -0,0 +1,421 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use std::cell::Cell;
use std::f32;
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage, AudioNodeType};
use servo_media::audio::panner_node::{
DistanceModel, PannerNodeMessage, PannerNodeOptions, PanningModel,
};
use servo_media::audio::param::{ParamDir, ParamType};
use crate::conversions::Convert;
use crate::dom::audio::audionode::{AudioNode, AudioNodeOptionsHelper};
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::{
AudioParamMethods, AutomationRate,
};
use crate::dom::bindings::codegen::Bindings::PannerNodeBinding::{
DistanceModelType, PannerNodeMethods, PannerOptions, PanningModelType,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct PannerNode {
node: AudioNode,
position_x: Dom<AudioParam>,
position_y: Dom<AudioParam>,
position_z: Dom<AudioParam>,
orientation_x: Dom<AudioParam>,
orientation_y: Dom<AudioParam>,
orientation_z: Dom<AudioParam>,
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
panning_model: Cell<PanningModel>,
#[ignore_malloc_size_of = "servo_media"]
#[no_trace]
distance_model: Cell<DistanceModel>,
ref_distance: Cell<f64>,
max_distance: Cell<f64>,
rolloff_factor: Cell<f64>,
cone_inner_angle: Cell<f64>,
cone_outer_angle: Cell<f64>,
cone_outer_gain: Cell<f64>,
}
impl PannerNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &PannerOptions,
can_gc: CanGc,
) -> Fallible<PannerNode> {
let node_options = options.parent.unwrap_or(
2,
ChannelCountMode::Clamped_max,
ChannelInterpretation::Speakers,
);
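        // PannerNode does not allow the `max` channel-count mode, restricts
        // channelCount to 1 or 2, and range-checks the distance and cone
        // options before the servo-media node is created.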
if node_options.mode == ChannelCountMode::Max {
return Err(Error::NotSupported);
}
if node_options.count > 2 || node_options.count == 0 {
return Err(Error::NotSupported);
}
if *options.maxDistance <= 0. {
return Err(Error::Range("maxDistance should be positive".into()));
}
if *options.refDistance < 0. {
return Err(Error::Range("refDistance should be non-negative".into()));
}
if *options.rolloffFactor < 0. {
return Err(Error::Range("rolloffFactor should be non-negative".into()));
}
if *options.coneOuterGain < 0. || *options.coneOuterGain > 1. {
return Err(Error::InvalidState);
}
let options = options.convert();
let node = AudioNode::new_inherited(
AudioNodeInit::PannerNode(options),
context,
node_options,
1, // inputs
1, // outputs
)?;
let id = node.node_id();
let position_x = AudioParam::new(
window,
context,
id,
AudioNodeType::PannerNode,
ParamType::Position(ParamDir::X),
AutomationRate::A_rate,
options.position_x, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let position_y = AudioParam::new(
window,
context,
id,
AudioNodeType::PannerNode,
ParamType::Position(ParamDir::Y),
AutomationRate::A_rate,
options.position_y, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let position_z = AudioParam::new(
window,
context,
id,
AudioNodeType::PannerNode,
ParamType::Position(ParamDir::Z),
AutomationRate::A_rate,
options.position_z, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let orientation_x = AudioParam::new(
window,
context,
id,
AudioNodeType::PannerNode,
ParamType::Orientation(ParamDir::X),
AutomationRate::A_rate,
options.orientation_x, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let orientation_y = AudioParam::new(
window,
context,
id,
AudioNodeType::PannerNode,
ParamType::Orientation(ParamDir::Y),
AutomationRate::A_rate,
options.orientation_y, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
let orientation_z = AudioParam::new(
window,
context,
id,
AudioNodeType::PannerNode,
ParamType::Orientation(ParamDir::Z),
AutomationRate::A_rate,
options.orientation_z, // default value
f32::MIN, // min value
f32::MAX, // max value
can_gc,
);
Ok(PannerNode {
node,
position_x: Dom::from_ref(&position_x),
position_y: Dom::from_ref(&position_y),
position_z: Dom::from_ref(&position_z),
orientation_x: Dom::from_ref(&orientation_x),
orientation_y: Dom::from_ref(&orientation_y),
orientation_z: Dom::from_ref(&orientation_z),
panning_model: Cell::new(options.panning_model),
distance_model: Cell::new(options.distance_model),
ref_distance: Cell::new(options.ref_distance),
max_distance: Cell::new(options.max_distance),
rolloff_factor: Cell::new(options.rolloff_factor),
cone_inner_angle: Cell::new(options.cone_inner_angle),
cone_outer_angle: Cell::new(options.cone_outer_angle),
cone_outer_gain: Cell::new(options.cone_outer_gain),
})
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &PannerOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<PannerNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &PannerOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<PannerNode>> {
let node = PannerNode::new_inherited(window, context, options, can_gc)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl PannerNodeMethods<crate::DomTypeHolder> for PannerNode {
// https://webaudio.github.io/web-audio-api/#dom-pannernode-pannernode
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &PannerOptions,
) -> Fallible<DomRoot<PannerNode>> {
PannerNode::new_with_proto(window, proto, context, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-positionx
fn PositionX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_x)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-positiony
fn PositionY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_y)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-positionz
fn PositionZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_z)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-orientationx
fn OrientationX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.orientation_x)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-orientationy
fn OrientationY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.orientation_y)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-orientationz
fn OrientationZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.orientation_z)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-distancemodel
fn DistanceModel(&self) -> DistanceModelType {
match self.distance_model.get() {
DistanceModel::Linear => DistanceModelType::Linear,
DistanceModel::Inverse => DistanceModelType::Inverse,
DistanceModel::Exponential => DistanceModelType::Exponential,
}
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-distancemodel
fn SetDistanceModel(&self, model: DistanceModelType) {
self.distance_model.set(model.convert());
let msg = PannerNodeMessage::SetDistanceModel(self.distance_model.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-panningmodel
fn PanningModel(&self) -> PanningModelType {
match self.panning_model.get() {
PanningModel::EqualPower => PanningModelType::Equalpower,
PanningModel::HRTF => PanningModelType::HRTF,
}
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-panningmodel
fn SetPanningModel(&self, model: PanningModelType) {
self.panning_model.set(model.convert());
let msg = PannerNodeMessage::SetPanningModel(self.panning_model.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-refdistance
fn RefDistance(&self) -> Finite<f64> {
Finite::wrap(self.ref_distance.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-refdistance
fn SetRefDistance(&self, val: Finite<f64>) -> Fallible<()> {
if *val < 0. {
return Err(Error::Range("value should be non-negative".into()));
}
self.ref_distance.set(*val);
let msg = PannerNodeMessage::SetRefDistance(self.ref_distance.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-maxdistance
fn MaxDistance(&self) -> Finite<f64> {
Finite::wrap(self.max_distance.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-maxdistance
fn SetMaxDistance(&self, val: Finite<f64>) -> Fallible<()> {
if *val <= 0. {
return Err(Error::Range("value should be positive".into()));
}
self.max_distance.set(*val);
let msg = PannerNodeMessage::SetMaxDistance(self.max_distance.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-rollofffactor
fn RolloffFactor(&self) -> Finite<f64> {
Finite::wrap(self.rolloff_factor.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-rollofffactor
fn SetRolloffFactor(&self, val: Finite<f64>) -> Fallible<()> {
if *val < 0. {
return Err(Error::Range("value should be non-negative".into()));
}
self.rolloff_factor.set(*val);
let msg = PannerNodeMessage::SetRolloff(self.rolloff_factor.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneinnerangle
fn ConeInnerAngle(&self) -> Finite<f64> {
Finite::wrap(self.cone_inner_angle.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneinnerangle
fn SetConeInnerAngle(&self, val: Finite<f64>) {
self.cone_inner_angle.set(*val);
let msg = PannerNodeMessage::SetConeInner(self.cone_inner_angle.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneouterangle
fn ConeOuterAngle(&self) -> Finite<f64> {
Finite::wrap(self.cone_outer_angle.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneouterangle
fn SetConeOuterAngle(&self, val: Finite<f64>) {
self.cone_outer_angle.set(*val);
let msg = PannerNodeMessage::SetConeOuter(self.cone_outer_angle.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneoutergain
fn ConeOuterGain(&self) -> Finite<f64> {
Finite::wrap(self.cone_outer_gain.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneoutergain
fn SetConeOuterGain(&self, val: Finite<f64>) -> Fallible<()> {
if *val < 0. || *val > 1. {
return Err(Error::InvalidState);
}
self.cone_outer_gain.set(*val);
let msg = PannerNodeMessage::SetConeGain(self.cone_outer_gain.get());
self.upcast::<AudioNode>()
.message(AudioNodeMessage::PannerNode(msg));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-setposition
fn SetPosition(&self, x: Finite<f32>, y: Finite<f32>, z: Finite<f32>) {
self.position_x.SetValue(x);
self.position_y.SetValue(y);
self.position_z.SetValue(z);
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-setorientation
fn SetOrientation(&self, x: Finite<f32>, y: Finite<f32>, z: Finite<f32>) {
self.orientation_x.SetValue(x);
self.orientation_y.SetValue(y);
self.orientation_z.SetValue(z);
}
}
impl Convert<PannerNodeOptions> for &PannerOptions {
fn convert(self) -> PannerNodeOptions {
PannerNodeOptions {
panning_model: self.panningModel.convert(),
distance_model: self.distanceModel.convert(),
position_x: *self.positionX,
position_y: *self.positionY,
position_z: *self.positionZ,
orientation_x: *self.orientationX,
orientation_y: *self.orientationY,
orientation_z: *self.orientationZ,
ref_distance: *self.refDistance,
max_distance: *self.maxDistance,
rolloff_factor: *self.rolloffFactor,
cone_inner_angle: *self.coneInnerAngle,
cone_outer_angle: *self.coneOuterAngle,
cone_outer_gain: *self.coneOuterGain,
}
}
}
impl Convert<DistanceModel> for DistanceModelType {
fn convert(self) -> DistanceModel {
match self {
DistanceModelType::Linear => DistanceModel::Linear,
DistanceModelType::Inverse => DistanceModel::Inverse,
DistanceModelType::Exponential => DistanceModel::Exponential,
}
}
}
impl Convert<PanningModel> for PanningModelType {
fn convert(self) -> PanningModel {
match self {
PanningModelType::Equalpower => PanningModel::EqualPower,
PanningModelType::HRTF => PanningModel::HRTF,
}
}
}

@ -0,0 +1,130 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use dom_struct::dom_struct;
use js::rust::HandleObject;
use servo_media::audio::node::{AudioNodeInit, AudioNodeType};
use servo_media::audio::param::ParamType;
use servo_media::audio::stereo_panner::StereoPannerOptions as ServoMediaStereoPannerOptions;
use crate::conversions::Convert;
use crate::dom::audio::audionode::AudioNodeOptionsHelper;
use crate::dom::audio::audioparam::AudioParam;
use crate::dom::audio::audioscheduledsourcenode::AudioScheduledSourceNode;
use crate::dom::audio::baseaudiocontext::BaseAudioContext;
use crate::dom::bindings::codegen::Bindings::AudioNodeBinding::{
ChannelCountMode, ChannelInterpretation,
};
use crate::dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use crate::dom::bindings::codegen::Bindings::StereoPannerNodeBinding::{
StereoPannerNodeMethods, StereoPannerOptions,
};
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::reflector::reflect_dom_object_with_proto;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::window::Window;
use crate::script_runtime::CanGc;
#[dom_struct]
pub(crate) struct StereoPannerNode {
source_node: AudioScheduledSourceNode,
pan: Dom<AudioParam>,
}
impl StereoPannerNode {
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
pub(crate) fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &StereoPannerOptions,
) -> Fallible<StereoPannerNode> {
let node_options = options.parent.unwrap_or(
2,
ChannelCountMode::Clamped_max,
ChannelInterpretation::Speakers,
);
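        // Like PannerNode, the `max` channel-count mode is rejected and
        // channelCount must be 1 or 2.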
if node_options.mode == ChannelCountMode::Max {
return Err(Error::NotSupported);
}
if node_options.count > 2 || node_options.count == 0 {
return Err(Error::NotSupported);
}
let pan = *options.pan;
let source_node = AudioScheduledSourceNode::new_inherited(
AudioNodeInit::StereoPannerNode(options.convert()),
context,
node_options,
1, /* inputs */
1, /* outputs */
)?;
let node_id = source_node.node().node_id();
let pan = AudioParam::new(
window,
context,
node_id,
AudioNodeType::StereoPannerNode,
ParamType::Pan,
AutomationRate::A_rate,
            pan, // default value
            -1., // min value
            1.,  // max value
CanGc::note(),
);
Ok(StereoPannerNode {
source_node,
pan: Dom::from_ref(&pan),
})
}
pub(crate) fn new(
window: &Window,
context: &BaseAudioContext,
options: &StereoPannerOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<StereoPannerNode>> {
Self::new_with_proto(window, None, context, options, can_gc)
}
#[cfg_attr(crown, allow(crown::unrooted_must_root))]
fn new_with_proto(
window: &Window,
proto: Option<HandleObject>,
context: &BaseAudioContext,
options: &StereoPannerOptions,
can_gc: CanGc,
) -> Fallible<DomRoot<StereoPannerNode>> {
let node = StereoPannerNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object_with_proto(
Box::new(node),
window,
proto,
can_gc,
))
}
}
impl StereoPannerNodeMethods<crate::DomTypeHolder> for StereoPannerNode {
// https://webaudio.github.io/web-audio-api/#dom-stereopannernode-stereopannernode
fn Constructor(
window: &Window,
proto: Option<HandleObject>,
can_gc: CanGc,
context: &BaseAudioContext,
options: &StereoPannerOptions,
) -> Fallible<DomRoot<StereoPannerNode>> {
StereoPannerNode::new_with_proto(window, proto, context, options, can_gc)
}
// https://webaudio.github.io/web-audio-api/#dom-stereopannernode-pan
fn Pan(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.pan)
}
}
impl Convert<ServoMediaStereoPannerOptions> for StereoPannerOptions {
fn convert(self) -> ServoMediaStereoPannerOptions {
ServoMediaStereoPannerOptions { pan: *self.pan }
}
}