Auto merge of #21502 - Manishearth:listener, r=ferjm

Add AudioListener/PannerNode DOM interfaces

Seems to work.

I'll need some changes on the servo-media side to support the panner
node getters as well as the older `setPosition()`/etc. APIs. I'll get to
those later.
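
For a sense of the script-facing surface this adds, here's a rough usage sketch (written for this description, not a test from the PR; everything it touches is declared in the IDL changes below):

```ts
const ctx = new AudioContext();

// BaseAudioContext.listener is now a real AudioListener backed by AudioParams.
ctx.listener.positionX.value = 1;
ctx.listener.forwardZ.value = -1; // -1 is the default forwardZ
ctx.listener.upY.value = 1;       //  1 is the default upY

// createPanner() and the PannerNode constructor are exposed as well.
const panner = new PannerNode(ctx, {
  panningModel: "equalpower",
  distanceModel: "inverse",
  positionX: 3,
  refDistance: 1,
});
panner.setPosition(3, 0, 0); // legacy setter; it just writes the position AudioParams

const osc = ctx.createOscillator();
osc.connect(panner);
panner.connect(ctx.destination);
osc.start();
```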

r? @ferjm

commit d827370804 by bors-servo, 2018-08-24 19:13:05 -04:00 (committed by GitHub)
28 changed files with 822 additions and 340 deletions

components/script/dom/audiodestinationnode.rs

@@ -10,7 +10,6 @@ use dom::bindings::reflector::reflect_dom_object
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
use servo_media::audio::node::AudioNodeInit;
#[dom_struct]
pub struct AudioDestinationNode {
@@ -23,9 +22,8 @@ impl AudioDestinationNode {
options: &AudioNodeOptions,
) -> AudioDestinationNode {
AudioDestinationNode {
node: AudioNode::new_inherited(
AudioNodeInit::DestinationNode,
Some(context.destination_node()),
node: AudioNode::new_inherited_for_id(
context.destination_node(),
context,
options,
1,

components/script/dom/audiolistener.rs (new file)

@@ -0,0 +1,190 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::audioparam::AudioParam;
use dom::baseaudiocontext::BaseAudioContext;
use dom::bindings::codegen::Bindings::AudioListenerBinding::{self, AudioListenerMethods};
use dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::root::{Dom, DomRoot};
use dom::window::Window;
use dom_struct::dom_struct;
use servo_media::audio::param::{ParamType, ParamDir};
use std::f32;
#[dom_struct]
pub struct AudioListener {
reflector_: Reflector,
position_x: Dom<AudioParam>,
position_y: Dom<AudioParam>,
position_z: Dom<AudioParam>,
forward_x: Dom<AudioParam>,
forward_y: Dom<AudioParam>,
forward_z: Dom<AudioParam>,
up_x: Dom<AudioParam>,
up_y: Dom<AudioParam>,
up_z: Dom<AudioParam>,
}
impl AudioListener {
fn new_inherited(
window: &Window,
context: &BaseAudioContext,
) -> AudioListener {
let node = context.listener();
let position_x = AudioParam::new(
window,
context,
node,
ParamType::Position(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let position_y = AudioParam::new(
window,
context,
node,
ParamType::Position(ParamDir::Y),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let position_z = AudioParam::new(
window,
context,
node,
ParamType::Position(ParamDir::Z),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let forward_x = AudioParam::new(
window,
context,
node,
ParamType::Forward(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let forward_y = AudioParam::new(
window,
context,
node,
ParamType::Forward(ParamDir::Y),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let forward_z = AudioParam::new(
window,
context,
node,
ParamType::Forward(ParamDir::Z),
AutomationRate::A_rate,
-1., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let up_x = AudioParam::new(
window,
context,
node,
ParamType::Up(ParamDir::X),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let up_y = AudioParam::new(
window,
context,
node,
ParamType::Up(ParamDir::Y),
AutomationRate::A_rate,
1., // default value
f32::MIN, // min value
f32::MAX, // max value
);
let up_z = AudioParam::new(
window,
context,
node,
ParamType::Up(ParamDir::Z),
AutomationRate::A_rate,
0., // default value
f32::MIN, // min value
f32::MAX, // max value
);
AudioListener {
reflector_: Reflector::new(),
position_x: Dom::from_ref(&position_x),
position_y: Dom::from_ref(&position_y),
position_z: Dom::from_ref(&position_z),
forward_x: Dom::from_ref(&forward_x),
forward_y: Dom::from_ref(&forward_y),
forward_z: Dom::from_ref(&forward_z),
up_x: Dom::from_ref(&up_x),
up_y: Dom::from_ref(&up_y),
up_z: Dom::from_ref(&up_z),
}
}
#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
context: &BaseAudioContext,
) -> DomRoot<AudioListener> {
let node = AudioListener::new_inherited(window, context);
reflect_dom_object(Box::new(node), window, AudioListenerBinding::Wrap)
}
}
impl AudioListenerMethods for AudioListener {
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-positionx
fn PositionX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_x)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-positiony
fn PositionY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_y)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-positionz
fn PositionZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_z)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardx
fn ForwardX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.forward_x)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardy
fn ForwardY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.forward_y)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-forwardz
fn ForwardZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.forward_z)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-upx
fn UpX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.up_x)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-upy
fn UpY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.up_y)
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-upz
fn UpZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.up_z)
}
}

components/script/dom/audionode.rs

@@ -36,17 +36,26 @@ pub struct AudioNode {
channel_interpretation: Cell<ChannelInterpretation>,
}
impl AudioNode {
pub fn new_inherited(
node_type: AudioNodeInit,
node_id: Option<NodeId>,
context: &BaseAudioContext,
options: &AudioNodeOptions,
number_of_inputs: u32,
number_of_outputs: u32,
) -> AudioNode {
let node_id =
node_id.unwrap_or_else(|| context.audio_context_impl().create_node(node_type));
let node_id = context.audio_context_impl().create_node(node_type);
AudioNode::new_inherited_for_id(node_id, context, options, number_of_inputs, number_of_outputs)
}
pub fn new_inherited_for_id(
node_id: NodeId,
context: &BaseAudioContext,
options: &AudioNodeOptions,
number_of_inputs: u32,
number_of_outputs: u32,
) -> AudioNode {
AudioNode {
eventtarget: EventTarget::new_inherited(),
node_id,
@@ -204,6 +213,11 @@ impl AudioNodeMethods for AudioNode {
return Err(Error::IndexSize);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::PannerNode) => {
if value > 2 {
return Err(Error::NotSupported)
}
}
// XXX We do not support any of the other AudioNodes with
// constraints yet. Add more cases here as we add support
// for new AudioNodes.
@@ -237,6 +251,11 @@ impl AudioNodeMethods for AudioNode {
return Err(Error::InvalidState);
}
},
EventTargetTypeId::AudioNode(AudioNodeTypeId::PannerNode) => {
if value == ChannelCountMode::Max {
return Err(Error::NotSupported)
}
}
// XXX We do not support any of the other AudioNodes with
// constraints yet. Add more cases here as we add support
// for new AudioNodes.

components/script/dom/audioscheduledsourcenode.rs

@@ -34,7 +34,6 @@ impl AudioScheduledSourceNode {
AudioScheduledSourceNode {
node: AudioNode::new_inherited(
node_type,
None, /* node_id */
context,
options,
number_of_inputs,

components/script/dom/baseaudiocontext.rs

@@ -5,6 +5,7 @@
use dom::audiobuffer::AudioBuffer;
use dom::audiobuffersourcenode::AudioBufferSourceNode;
use dom::audiodestinationnode::AudioDestinationNode;
use dom::audiolistener::AudioListener;
use dom::audionode::MAX_CHANNEL_COUNT;
use dom::bindings::callback::ExceptionHandling;
use dom::bindings::cell::DomRefCell;
@@ -17,6 +18,7 @@ use dom::bindings::codegen::Bindings::BaseAudioContextBinding::DecodeErrorCallba
use dom::bindings::codegen::Bindings::BaseAudioContextBinding::DecodeSuccessCallback;
use dom::bindings::codegen::Bindings::GainNodeBinding::GainOptions;
use dom::bindings::codegen::Bindings::OscillatorNodeBinding::OscillatorOptions;
use dom::bindings::codegen::Bindings::PannerNodeBinding::PannerOptions;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::num::Finite;
@@ -27,6 +29,7 @@ use dom::domexception::{DOMErrorName, DOMException};
use dom::eventtarget::EventTarget;
use dom::gainnode::GainNode;
use dom::oscillatornode::OscillatorNode;
use dom::pannernode::PannerNode;
use dom::promise::Promise;
use dom::window::Window;
use dom_struct::dom_struct;
@@ -63,9 +66,10 @@ struct DecodeResolver {
pub struct BaseAudioContext {
eventtarget: EventTarget,
#[ignore_malloc_size_of = "servo_media"]
audio_context_impl: Rc<AudioContext<Backend>>,
audio_context_impl: AudioContext<Backend>,
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination
destination: MutNullableDom<AudioDestinationNode>,
listener: MutNullableDom<AudioListener>,
/// Resume promises which are soon to be fulfilled by a queued task.
#[ignore_malloc_size_of = "promises are hard"]
in_flight_resume_promises_queue: DomRefCell<VecDeque<(Box<[Rc<Promise>]>, ErrorResult)>>,
@@ -95,12 +99,11 @@ impl BaseAudioContext {
let context = BaseAudioContext {
eventtarget: EventTarget::new_inherited(),
audio_context_impl: Rc::new(
ServoMedia::get()
.unwrap()
.create_audio_context(options.into()),
),
audio_context_impl: ServoMedia::get()
.unwrap()
.create_audio_context(options.into()),
destination: Default::default(),
listener: Default::default(),
in_flight_resume_promises_queue: Default::default(),
pending_resume_promises: Default::default(),
decode_resolvers: Default::default(),
@@ -117,14 +120,18 @@ impl BaseAudioContext {
false
}
pub fn audio_context_impl(&self) -> Rc<AudioContext<Backend>> {
self.audio_context_impl.clone()
pub fn audio_context_impl(&self) -> &AudioContext<Backend> {
&self.audio_context_impl
}
pub fn destination_node(&self) -> NodeId {
self.audio_context_impl.dest_node()
}
pub fn listener(&self) -> NodeId {
self.audio_context_impl.listener()
}
// https://webaudio.github.io/web-audio-api/#allowed-to-start
pub fn is_allowed_to_start(&self) -> bool {
self.state.get() == AudioContextState::Suspended
@@ -297,6 +304,15 @@ impl BaseAudioContextMethods for BaseAudioContext {
})
}
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-listener
fn Listener(&self) -> DomRoot<AudioListener> {
let global = self.global();
let window = global.as_window();
self.listener.or_init(|| {
AudioListener::new(&window, self)
})
}
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-onstatechange
event_handler!(statechange, GetOnstatechange, SetOnstatechange);
@@ -314,6 +330,12 @@ impl BaseAudioContextMethods for BaseAudioContext {
GainNode::new(&self.global().as_window(), &self, &GainOptions::empty())
}
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createpanner
fn CreatePanner(&self) -> Fallible<DomRoot<PannerNode>> {
PannerNode::new(&self.global().as_window(), &self, &PannerOptions::empty())
}
/// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer
fn CreateBuffer(
&self,

components/script/dom/bindings/trace.rs

@@ -92,6 +92,7 @@ use servo_media::Backend;
use servo_media::audio::buffer_source_node::AudioBuffer;
use servo_media::audio::context::AudioContext;
use servo_media::audio::graph::NodeId;
use servo_media::audio::panner_node::{DistanceModel, PanningModel};
use servo_media::audio::param::ParamType;
use servo_url::{ImmutableOrigin, MutableOrigin, ServoUrl};
use smallvec::SmallVec;
@@ -434,7 +435,7 @@ unsafe_no_jsmanaged_fields!(SourceSet);
unsafe_no_jsmanaged_fields!(AudioBuffer);
unsafe_no_jsmanaged_fields!(AudioContext<Backend>);
unsafe_no_jsmanaged_fields!(NodeId);
unsafe_no_jsmanaged_fields!(ParamType);
unsafe_no_jsmanaged_fields!(DistanceModel, PanningModel, ParamType);
unsafe impl<'a> JSTraceable for &'a str {
#[inline]

components/script/dom/gainnode.rs

@@ -38,7 +38,6 @@ impl GainNode {
node_options.channelInterpretation = Some(ChannelInterpretation::Speakers);
let node = AudioNode::new_inherited(
AudioNodeInit::GainNode(gain_options.into()),
None,
context,
&node_options,
1, // inputs

components/script/dom/mod.rs

@@ -220,6 +220,7 @@ pub mod audiobuffer;
pub mod audiobuffersourcenode;
pub mod audiocontext;
pub mod audiodestinationnode;
pub mod audiolistener;
pub mod audionode;
pub mod audioparam;
pub mod audioscheduledsourcenode;
@@ -408,6 +409,7 @@ pub mod pagetransitionevent;
pub mod paintrenderingcontext2d;
pub mod paintsize;
pub mod paintworkletglobalscope;
pub mod pannernode;
pub mod performance;
pub mod performanceentry;
pub mod performancemark;

components/script/dom/pannernode.rs (new file)

@@ -0,0 +1,354 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::audionode::AudioNode;
use dom::audioparam::AudioParam;
use dom::baseaudiocontext::BaseAudioContext;
use dom::bindings::codegen::Bindings::AudioNodeBinding::{ChannelCountMode, ChannelInterpretation};
use dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
use dom::bindings::codegen::Bindings::AudioParamBinding::{AudioParamMethods, AutomationRate};
use dom::bindings::codegen::Bindings::PannerNodeBinding::{self, PannerNodeMethods, PannerOptions};
use dom::bindings::codegen::Bindings::PannerNodeBinding::{DistanceModelType, PanningModelType};
use dom::bindings::error::{Error, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::num::Finite;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::{Dom, DomRoot};
use dom::window::Window;
use dom_struct::dom_struct;
use servo_media::audio::node::{AudioNodeInit, AudioNodeMessage};
use servo_media::audio::panner_node::{DistanceModel, PannerNodeOptions, PanningModel};
use servo_media::audio::panner_node::PannerNodeMessage;
use servo_media::audio::param::{ParamDir, ParamType};
use std::cell::Cell;
use std::f32;
#[dom_struct]
pub struct PannerNode {
node: AudioNode,
position_x: Dom<AudioParam>,
position_y: Dom<AudioParam>,
position_z: Dom<AudioParam>,
orientation_x: Dom<AudioParam>,
orientation_y: Dom<AudioParam>,
orientation_z: Dom<AudioParam>,
#[ignore_malloc_size_of = "servo_media"]
panning_model: Cell<PanningModel>,
#[ignore_malloc_size_of = "servo_media"]
distance_model: Cell<DistanceModel>,
ref_distance: Cell<f64>,
max_distance: Cell<f64>,
rolloff_factor: Cell<f64>,
cone_inner_angle: Cell<f64>,
cone_outer_angle: Cell<f64>,
cone_outer_gain: Cell<f64>,
}
impl PannerNode {
#[allow(unrooted_must_root)]
pub fn new_inherited(
window: &Window,
context: &BaseAudioContext,
options: &PannerOptions,
) -> Fallible<PannerNode> {
let count = options.parent.channelCount.unwrap_or(2);
let mode = options.parent.channelCountMode.unwrap_or(ChannelCountMode::Clamped_max);
if mode == ChannelCountMode::Max {
return Err(Error::NotSupported)
}
if count > 2 {
return Err(Error::NotSupported)
}
let mut node_options = AudioNodeOptions::empty();
node_options.channelCount = Some(count);
node_options.channelCountMode = Some(mode);
node_options.channelInterpretation = Some(ChannelInterpretation::Speakers);
let options = options.into();
let node = AudioNode::new_inherited(
AudioNodeInit::PannerNode(options),
context,
&node_options,
1, // inputs
1, // outputs
);
let id = node.node_id();
let position_x = AudioParam::new(
window,
context,
id,
ParamType::Position(ParamDir::X),
AutomationRate::A_rate,
options.position_x, // default value
f32::MIN, // min value
f32::MAX, // max value
);
let position_y = AudioParam::new(
window,
context,
id,
ParamType::Position(ParamDir::Y),
AutomationRate::A_rate,
options.position_y, // default value
f32::MIN, // min value
f32::MAX, // max value
);
let position_z = AudioParam::new(
window,
context,
id,
ParamType::Position(ParamDir::Z),
AutomationRate::A_rate,
options.position_z, // default value
f32::MIN, // min value
f32::MAX, // max value
);
let orientation_x = AudioParam::new(
window,
context,
id,
ParamType::Orientation(ParamDir::X),
AutomationRate::A_rate,
options.orientation_x, // default value
f32::MIN, // min value
f32::MAX, // max value
);
let orientation_y = AudioParam::new(
window,
context,
id,
ParamType::Orientation(ParamDir::Y),
AutomationRate::A_rate,
options.orientation_y, // default value
f32::MIN, // min value
f32::MAX, // max value
);
let orientation_z = AudioParam::new(
window,
context,
id,
ParamType::Orientation(ParamDir::Z),
AutomationRate::A_rate,
options.orientation_z, // default value
f32::MIN, // min value
f32::MAX, // max value
);
Ok(PannerNode {
node,
position_x: Dom::from_ref(&position_x),
position_y: Dom::from_ref(&position_y),
position_z: Dom::from_ref(&position_z),
orientation_x: Dom::from_ref(&orientation_x),
orientation_y: Dom::from_ref(&orientation_y),
orientation_z: Dom::from_ref(&orientation_z),
panning_model: Cell::new(options.panning_model),
distance_model: Cell::new(options.distance_model),
ref_distance: Cell::new(options.ref_distance),
max_distance: Cell::new(options.max_distance),
rolloff_factor: Cell::new(options.rolloff_factor),
cone_inner_angle: Cell::new(options.cone_inner_angle),
cone_outer_angle: Cell::new(options.cone_outer_angle),
cone_outer_gain: Cell::new(options.cone_outer_gain),
})
}
#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
context: &BaseAudioContext,
options: &PannerOptions,
) -> Fallible<DomRoot<PannerNode>> {
let node = PannerNode::new_inherited(window, context, options)?;
Ok(reflect_dom_object(Box::new(node), window, PannerNodeBinding::Wrap))
}
pub fn Constructor(
window: &Window,
context: &BaseAudioContext,
options: &PannerOptions,
) -> Fallible<DomRoot<PannerNode>> {
PannerNode::new(window, context, options)
}
}
impl PannerNodeMethods for PannerNode {
// https://webaudio.github.io/web-audio-api/#dom-pannernode-positionx
fn PositionX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_x)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-positiony
fn PositionY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_y)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-positionz
fn PositionZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.position_z)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-orientationx
fn OrientationX(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.orientation_x)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-orientationy
fn OrientationY(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.orientation_y)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-orientationz
fn OrientationZ(&self) -> DomRoot<AudioParam> {
DomRoot::from_ref(&self.orientation_z)
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-distancemodel
fn DistanceModel(&self) -> DistanceModelType {
match self.distance_model.get() {
DistanceModel::Linear => DistanceModelType::Linear,
DistanceModel::Inverse => DistanceModelType::Inverse,
DistanceModel::Exponential => DistanceModelType::Exponential,
}
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-distancemodel
fn SetDistanceModel(&self, model: DistanceModelType) {
self.distance_model.set(model.into());
let msg = PannerNodeMessage::SetDistanceModel(self.distance_model.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-panningmodel
fn PanningModel(&self) -> PanningModelType {
match self.panning_model.get() {
PanningModel::EqualPower => PanningModelType::Equalpower,
PanningModel::HRTF => PanningModelType::HRTF,
}
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-panningmodel
fn SetPanningModel(&self, model: PanningModelType) {
self.panning_model.set(model.into());
let msg = PannerNodeMessage::SetPanningModel(self.panning_model.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-refdistance
fn RefDistance(&self) -> Finite<f64> {
Finite::wrap(self.ref_distance.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-refdistance
fn SetRefDistance(&self, val: Finite<f64>) {
self.ref_distance.set(*val);
let msg = PannerNodeMessage::SetRefDistance(self.ref_distance.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-maxdistance
fn MaxDistance(&self) -> Finite<f64> {
Finite::wrap(self.max_distance.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-maxdistance
fn SetMaxDistance(&self, val: Finite<f64>) -> Fallible<()> {
if *val < 0. {
return Err(Error::NotSupported)
}
self.max_distance.set(*val);
let msg = PannerNodeMessage::SetMaxDistance(self.max_distance.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-rollofffactor
fn RolloffFactor(&self) -> Finite<f64> {
Finite::wrap(self.rolloff_factor.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-rollofffactor
fn SetRolloffFactor(&self, val: Finite<f64>) -> Fallible<()> {
if *val < 0. {
return Err(Error::Range("value should be positive".into()))
}
self.rolloff_factor.set(*val);
let msg = PannerNodeMessage::SetRolloff(self.rolloff_factor.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneinnerangle
fn ConeInnerAngle(&self) -> Finite<f64> {
Finite::wrap(self.cone_inner_angle.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneinnerangle
fn SetConeInnerAngle(&self, val: Finite<f64>) {
self.cone_inner_angle.set(*val);
let msg = PannerNodeMessage::SetConeInner(self.cone_inner_angle.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneouterangle
fn ConeOuterAngle(&self) -> Finite<f64> {
Finite::wrap(self.cone_outer_angle.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneouterangle
fn SetConeOuterAngle(&self, val: Finite<f64>) {
self.cone_outer_angle.set(*val);
let msg = PannerNodeMessage::SetConeOuter(self.cone_outer_angle.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneoutergain
fn ConeOuterGain(&self) -> Finite<f64> {
Finite::wrap(self.cone_outer_gain.get())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-coneoutergain
fn SetConeOuterGain(&self, val: Finite<f64>) -> Fallible<()> {
if *val < 0. || *val > 360. {
return Err(Error::InvalidState)
}
self.cone_outer_gain.set(*val);
let msg = PannerNodeMessage::SetConeGain(self.cone_outer_gain.get());
self.upcast::<AudioNode>().message(AudioNodeMessage::PannerNode(msg));
Ok(())
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-setposition
fn SetPosition(&self, x: Finite<f32>, y: Finite<f32>, z: Finite<f32>) {
self.position_x.SetValue(x);
self.position_y.SetValue(y);
self.position_z.SetValue(z);
}
// https://webaudio.github.io/web-audio-api/#dom-pannernode-setorientation
fn SetOrientation(&self, x: Finite<f32>, y: Finite<f32>, z: Finite<f32>) {
self.orientation_x.SetValue(x);
self.orientation_y.SetValue(y);
self.orientation_z.SetValue(z);
}
}
impl<'a> From<&'a PannerOptions> for PannerNodeOptions {
fn from(options: &'a PannerOptions) -> Self {
Self {
panning_model: options.panningModel.into(),
distance_model: options.distanceModel.into(),
position_x: *options.positionX,
position_y: *options.positionY,
position_z: *options.positionZ,
orientation_x: *options.orientationX,
orientation_y: *options.orientationY,
orientation_z: *options.orientationZ,
ref_distance: *options.refDistance,
max_distance: *options.maxDistance,
rolloff_factor: *options.rolloffFactor,
cone_inner_angle: *options.coneInnerAngle,
cone_outer_angle: *options.coneOuterAngle,
cone_outer_gain: *options.coneOuterGain,
}
}
}
impl From<DistanceModelType> for DistanceModel {
fn from(model: DistanceModelType) -> Self {
match model {
DistanceModelType::Linear => DistanceModel::Linear,
DistanceModelType::Inverse => DistanceModel::Inverse,
DistanceModelType::Exponential => DistanceModel::Exponential,
}
}
}
impl From<PanningModelType> for PanningModel {
fn from(model: PanningModelType) -> Self {
match model {
PanningModelType::Equalpower => PanningModel::EqualPower,
PanningModelType::HRTF => PanningModel::HRTF,
}
}
}

components/script/dom/webidls/AudioListener.webidl (new file)

@@ -0,0 +1,22 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* The origin of this IDL file is
* https://webaudio.github.io/web-audio-api/#audiolistener
*/
[Exposed=Window]
interface AudioListener {
readonly attribute AudioParam positionX;
readonly attribute AudioParam positionY;
readonly attribute AudioParam positionZ;
readonly attribute AudioParam forwardX;
readonly attribute AudioParam forwardY;
readonly attribute AudioParam forwardZ;
readonly attribute AudioParam upX;
readonly attribute AudioParam upY;
readonly attribute AudioParam upZ;
// void setPosition (float x, float y, float z);
// void setOrientation (float x, float y, float z, float xUp, float yUp, float zUp);
};

components/script/dom/webidls/BaseAudioContext.webidl

@@ -20,7 +20,7 @@ interface BaseAudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
// readonly attribute AudioListener listener;
readonly attribute AudioListener listener;
readonly attribute AudioContextState state;
Promise<void> resume();
attribute EventHandler onstatechange;
@@ -42,7 +42,7 @@ interface BaseAudioContext : EventTarget {
// IIRFilterNode createIIRFilter(sequence<double> feedforward,
// sequence<double> feedback);
// WaveShaperNode createWaveShaper();
// PannerNode createPanner();
[Throws] PannerNode createPanner();
// StereoPannerNode createStereoPanner();
// ConvolverNode createConvolver();
// ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs = 6);

components/script/dom/webidls/PannerNode.webidl (new file)

@@ -0,0 +1,56 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* The origin of this IDL file is
* https://webaudio.github.io/web-audio-api/#pannernode
*/
dictionary PannerOptions : AudioNodeOptions {
PanningModelType panningModel = "equalpower";
DistanceModelType distanceModel = "inverse";
float positionX = 0;
float positionY = 0;
float positionZ = 0;
float orientationX = 1;
float orientationY = 0;
float orientationZ = 0;
double refDistance = 1;
double maxDistance = 10000;
double rolloffFactor = 1;
double coneInnerAngle = 360;
double coneOuterAngle = 360;
double coneOuterGain = 0;
};
enum DistanceModelType {
"linear",
"inverse",
"exponential"
};
enum PanningModelType {
"equalpower",
"HRTF"
};
[Exposed=Window,
Constructor (BaseAudioContext context, optional PannerOptions options)]
interface PannerNode : AudioNode {
attribute PanningModelType panningModel;
readonly attribute AudioParam positionX;
readonly attribute AudioParam positionY;
readonly attribute AudioParam positionZ;
readonly attribute AudioParam orientationX;
readonly attribute AudioParam orientationY;
readonly attribute AudioParam orientationZ;
attribute DistanceModelType distanceModel;
attribute double refDistance;
[SetterThrows] attribute double maxDistance;
[SetterThrows] attribute double rolloffFactor;
attribute double coneInnerAngle;
attribute double coneOuterAngle;
[SetterThrows] attribute double coneOuterGain;
void setPosition (float x, float y, float z);
void setOrientation (float x, float y, float z);
};