createBufferSource and buffer setter on buffer source node

Fernando Jiménez Moreno 2018-07-05 11:43:31 +02:00
parent 25a74a75ea
commit 356d7fd7a6
9 changed files with 152 additions and 62 deletions

components/script/dom/audiobuffer.rs

@@ -14,8 +14,8 @@ use dom_struct::dom_struct;
 use js::jsapi::{Heap, JSContext, JSObject, JS_StealArrayBufferContents};
 use js::rust::CustomAutoRooterGuard;
 use js::typedarray::{CreateWith, Float32Array};
+use servo_media::audio::buffer_source_node::AudioBuffer as ServoMediaAudioBuffer;
 use std::ptr::{self, NonNull};
-use std::slice;
 use std::sync::{Arc, Mutex};

 type JSAudioChannel = Heap<*mut JSObject>;

@@ -25,7 +25,7 @@ pub struct AudioBuffer {
     reflector_: Reflector,
     js_channels: DomRefCell<Vec<JSAudioChannel>>,
     #[ignore_malloc_size_of = "Arc"]
-    shared_channels: Arc<Mutex<Vec<Vec<f32>>>>,
+    shared_channels: Arc<Mutex<ServoMediaAudioBuffer>>,
     sample_rate: f32,
     length: u32,
     duration: f64,

@@ -53,11 +53,12 @@ impl AudioBuffer {
         AudioBuffer {
             reflector_: Reflector::new(),
             js_channels: DomRefCell::new(js_channels),
-            shared_channels: Arc::new(Mutex::new(vec![vec![0.; length as usize]; number_of_channels as usize])),
+            shared_channels: Arc::new(Mutex::new(
+                ServoMediaAudioBuffer::new(number_of_channels as u8, length as usize))),
             sample_rate: sample_rate,
             length: length,
             duration: length as f64 / sample_rate as f64,
             number_of_channels: number_of_channels,
         }
     }

@@ -88,7 +89,7 @@ impl AudioBuffer {
             // Move the channel data from shared_channels to js_channels.
             rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());
-            let shared_channel = (*self.shared_channels.lock().unwrap()).remove(i);
+            let shared_channel = (*self.shared_channels.lock().unwrap()).buffers.remove(i);
             if unsafe {
                 Float32Array::create(cx, CreateWith::Slice(&shared_channel), array.handle_mut())
             }.is_err() {

@@ -102,7 +103,7 @@ impl AudioBuffer {
     /// https://webaudio.github.io/web-audio-api/#acquire-the-content
     #[allow(unsafe_code)]
-    pub fn acquire_contents(&self) -> Option<Arc<Mutex<Vec<Vec<f32>>>>> {
+    pub fn acquire_contents(&self) -> Option<Arc<Mutex<ServoMediaAudioBuffer>>> {
         let cx = self.global().get_cx();
         for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {
             // Step 1.

@@ -112,17 +113,25 @@ impl AudioBuffer {
             // Step 2.
             let channel_data = unsafe {
-                slice::from_raw_parts(
-                    JS_StealArrayBufferContents(cx, channel.handle()) as *mut f32,
-                    self.length as usize
-                ).to_vec()
+                typedarray!(in(cx) let array: Float32Array = channel.get());
+                if let Ok(array) = array {
+                    // XXX TypedArrays API does not expose a way to steal the buffer's
+                    // content.
+                    let data = array.to_vec();
+                    let _ = JS_StealArrayBufferContents(cx, channel.handle());
+                    data
+                } else {
+                    return None;
+                }
             };

             channel.set(ptr::null_mut());

-            // Step 3 and part of 4 (which will complete turning shared_channels
-            // data into js_channels ArrayBuffers in restore_js_channel_data).
-            (*self.shared_channels.lock().unwrap())[i] = channel_data;
+            // Step 3.
+            (*self.shared_channels.lock().unwrap()).buffers[i] = channel_data;
+
+            // Step 4 will complete turning shared_channels
+            // data into js_channels ArrayBuffers in restore_js_channel_data.
         }

         self.js_channels.borrow_mut().clear();

@@ -167,16 +176,18 @@ impl AudioBufferMethods for AudioBuffer {
     }

     fn CopyFromChannel(&self,
-                       destination: CustomAutoRooterGuard<Float32Array>,
-                       channel_number: u32,
-                       start_in_channel: u32) -> Fallible<()> {
+                       _destination: CustomAutoRooterGuard<Float32Array>,
+                       _channel_number: u32,
+                       _start_in_channel: u32) -> Fallible<()> {
+        // XXX
         Ok(())
     }

     fn CopyToChannel(&self,
-                     source: CustomAutoRooterGuard<Float32Array>,
-                     channel_number: u32,
-                     start_in_channel: u32) -> Fallible<()> {
+                     _source: CustomAutoRooterGuard<Float32Array>,
+                     _channel_number: u32,
+                     _start_in_channel: u32) -> Fallible<()> {
+        // XXX
        Ok(())
     }
 }
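
The interesting part of this file is the acquire-the-contents dance: channel data is stolen from the JS-visible Float32Arrays and moved into storage shared with the audio thread. Stripped of the SpiderMonkey types, the ownership transfer looks roughly like the sketch below (AudioBufferModel and SharedAudioBuffer are illustrative stand-ins, not Servo types):

    use std::sync::{Arc, Mutex};

    // Stand-in for servo-media's AudioBuffer: one Vec<f32> per channel.
    struct SharedAudioBuffer {
        buffers: Vec<Vec<f32>>,
    }

    struct AudioBufferModel {
        // Channel data currently owned by the JS side (None = detached).
        js_channels: Vec<Option<Vec<f32>>>,
        // Channel data shared with the audio rendering thread.
        shared_channels: Arc<Mutex<SharedAudioBuffer>>,
    }

    impl AudioBufferModel {
        // Models acquire_contents: move every channel out of the JS side
        // into shared storage, leave the JS arrays detached, and hand the
        // caller a cheap handle to the shared storage.
        fn acquire_contents(&mut self) -> Option<Arc<Mutex<SharedAudioBuffer>>> {
            for (i, channel) in self.js_channels.iter_mut().enumerate() {
                // Steps 1-2: steal (detach) the JS buffer contents.
                let data = channel.take()?;
                // Step 3: store them where the audio thread can see them.
                self.shared_channels.lock().unwrap().buffers[i] = data;
            }
            self.js_channels.clear();
            Some(Arc::clone(&self.shared_channels))
        }
    }

    fn main() {
        let mut buffer = AudioBufferModel {
            js_channels: vec![Some(vec![0.; 4]), Some(vec![0.; 4])],
            shared_channels: Arc::new(Mutex::new(SharedAudioBuffer {
                buffers: vec![Vec::new(), Vec::new()],
            })),
        };
        let shared = buffer.acquire_contents().unwrap();
        assert_eq!(shared.lock().unwrap().buffers[0].len(), 4);
        assert!(buffer.js_channels.is_empty());
    }

Keeping the shared storage behind Arc<Mutex<...>> means the DOM side can hand the audio thread a clone of the handle instead of copying every channel a second time.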

components/script/dom/audiobuffersourcenode.rs

@@ -12,10 +12,12 @@ use dom::bindings::codegen::Bindings::AudioBufferSourceNodeBinding::AudioBufferS
 use dom::bindings::codegen::Bindings::AudioParamBinding::AutomationRate;
 use dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
 use dom::bindings::codegen::Bindings::AudioNodeBinding::{ChannelCountMode, ChannelInterpretation};
-use dom::bindings::error::Fallible;
+use dom::bindings::codegen::Bindings::AudioScheduledSourceNodeBinding::AudioScheduledSourceNodeMethods;
+use dom::bindings::error::{Error, Fallible};
+use dom::bindings::inheritance::Castable;
 use dom::bindings::num::Finite;
 use dom::bindings::reflector::reflect_dom_object;
-use dom::bindings::root::DomRoot;
+use dom::bindings::root::{DomRoot, MutNullableDom};
 use dom::window::Window;
 use dom_struct::dom_struct;
 use servo_media::audio::buffer_source_node::AudioBufferSourceNodeMessage;

@@ -33,8 +35,8 @@ audio_param_impl!(Detune, AudioBufferSourceNode, AudioBufferSourceNodeMessage, S
 #[dom_struct]
 pub struct AudioBufferSourceNode {
-    node: AudioScheduledSourceNode,
-    // buffer: Option<DomRoot<AudioBuffer>>,
+    source_node: AudioScheduledSourceNode,
+    buffer: MutNullableDom<AudioBuffer>,
     playback_rate: DomRoot<AudioParam>,
     detune: DomRoot<AudioParam>,
     loop_enabled: Cell<bool>,

@@ -54,28 +56,29 @@ impl AudioBufferSourceNode {
         node_options.channelCount = Some(2);
         node_options.channelCountMode = Some(ChannelCountMode::Max);
         node_options.channelInterpretation = Some(ChannelInterpretation::Speakers);
-        let node = AudioScheduledSourceNode::new_inherited(
+        let source_node = AudioScheduledSourceNode::new_inherited(
             AudioNodeType::AudioBufferSourceNode(options.into()),
             context,
             &node_options,
             0 /* inputs */,
             1 /* outputs */,
         );
-        let playback_rate = PlaybackRate::new(context.audio_context_impl(), node.node_id());
+        let node_id = source_node.node().node_id();
+        let playback_rate = PlaybackRate::new(context.audio_context_impl(), node_id);
         let playback_rate = AudioParam::new(&window,
                                             Box::new(playback_rate),
                                             AutomationRate::K_rate,
                                             *options.playbackRate,
                                             f32::MIN, f32::MAX);
-        let detune = Detune::new(context.audio_context_impl(), node.node_id());
+        let detune = Detune::new(context.audio_context_impl(), node_id);
         let detune = AudioParam::new(&window,
                                      Box::new(detune),
                                      AutomationRate::K_rate,
                                      *options.detune,
                                      f32::MIN, f32::MAX);
         AudioBufferSourceNode {
-            node,
-            // buffer: options.buffer,
+            source_node,
+            buffer: Default::default(),
             playback_rate,
             detune,
             loop_enabled: Cell::new(options.loop_),

@@ -104,6 +107,31 @@ impl AudioBufferSourceNode {
 }

 impl AudioBufferSourceNodeMethods for AudioBufferSourceNode {
+    /// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
+    fn GetBuffer(&self) -> Fallible<Option<DomRoot<AudioBuffer>>> {
+        Ok(self.buffer.get())
+    }
+
+    /// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
+    fn SetBuffer(&self, new_buffer: Option<&AudioBuffer>) -> Fallible<()> {
+        if new_buffer.is_some() && self.buffer.get().is_some() {
+            return Err(Error::InvalidState);
+        }
+
+        self.buffer.set(new_buffer);
+
+        if self.source_node.started() {
+            if let Some(buffer) = self.buffer.get() {
+                let buffer = buffer.acquire_contents();
+                self.source_node.node().message(
+                    AudioNodeMessage::AudioBufferSourceNode(
+                        AudioBufferSourceNodeMessage::SetBuffer(buffer)));
+            }
+        }
+
+        Ok(())
+    }
+
     fn PlaybackRate(&self) -> DomRoot<AudioParam> {
         DomRoot::from_ref(&self.playback_rate)
     }

@@ -136,8 +164,17 @@ impl AudioBufferSourceNodeMethods for AudioBufferSourceNode {
         self.loop_end.set(*loop_end)
     }

-    fn Start(&self, when: Finite<f64>, offset: Option<Finite<f64>>, duration: Option<Finite<f64>>) {
-        // XXX
+    fn Start(&self,
+             when: Finite<f64>,
+             _offset: Option<Finite<f64>>,
+             _duration: Option<Finite<f64>>) -> Fallible<()> {
+        if let Some(buffer) = self.buffer.get() {
+            let buffer = buffer.acquire_contents();
+            self.source_node.node().message(
+                AudioNodeMessage::AudioBufferSourceNode(
+                    AudioBufferSourceNodeMessage::SetBuffer(buffer)));
+        }
+        self.source_node.upcast::<AudioScheduledSourceNode>().Start(when)
     }
 }
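
SetBuffer encodes two spec rules: a non-null buffer may only be assigned while no buffer is set, and an assignment after start() must forward the acquired contents to the rendering thread. A minimal model of just those state rules, with plain types standing in for the DOM and media types:

    // Illustrative error type; Servo returns Error::InvalidState here.
    #[derive(Debug, PartialEq)]
    enum AudioError {
        InvalidState,
    }

    #[derive(Default)]
    struct BufferSourceModel {
        buffer: Option<Vec<f32>>,
        started: bool,
    }

    impl BufferSourceModel {
        fn set_buffer(&mut self, new_buffer: Option<Vec<f32>>) -> Result<(), AudioError> {
            // A non-null buffer can only be assigned while none is set;
            // assigning null is always allowed.
            if new_buffer.is_some() && self.buffer.is_some() {
                return Err(AudioError::InvalidState);
            }
            self.buffer = new_buffer;
            if self.started {
                // Playback already started: this is where the acquired
                // contents would be forwarded to the audio thread
                // (the SetBuffer message in the real code).
            }
            Ok(())
        }
    }

    fn main() {
        let mut node = BufferSourceModel::default();
        assert!(node.set_buffer(Some(vec![0.; 8])).is_ok());
        assert_eq!(node.set_buffer(Some(vec![1.; 8])),
                   Err(AudioError::InvalidState));
        assert!(node.set_buffer(None).is_ok()); // clearing is fine
    }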

components/script/dom/audioscheduledsourcenode.rs

@@ -5,14 +5,17 @@ use dom::audionode::AudioNode;
 use dom::baseaudiocontext::BaseAudioContext;
 use dom::bindings::codegen::Bindings::AudioScheduledSourceNodeBinding::AudioScheduledSourceNodeMethods;
 use dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
+use dom::bindings::error::{Error, Fallible};
 use dom::bindings::num::Finite;
 use dom_struct::dom_struct;
-use servo_media::audio::graph::NodeId;
 use servo_media::audio::node::{AudioNodeMessage, AudioNodeType, AudioScheduledSourceNodeMessage};
+use std::cell::Cell;

 #[dom_struct]
 pub struct AudioScheduledSourceNode {
     node: AudioNode,
+    started: Cell<bool>,
+    stopped: Cell<bool>,
 }

 impl AudioScheduledSourceNode {

@@ -24,11 +27,17 @@ impl AudioScheduledSourceNode {
         AudioScheduledSourceNode {
             node: AudioNode::new_inherited(node_type, None /* node_id */,
                                            context, options, number_of_inputs, number_of_outputs),
+            started: Cell::new(false),
+            stopped: Cell::new(false),
         }
     }

-    pub fn node_id(&self) -> NodeId {
-        self.node.node_id()
+    pub fn node(&self) -> &AudioNode {
+        &self.node
+    }
+
+    pub fn started(&self) -> bool {
+        self.started.get()
     }
 }

@@ -37,16 +46,26 @@ impl AudioScheduledSourceNodeMethods for AudioScheduledSourceNode {
     event_handler!(ended, GetOnended, SetOnended);

     // https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-start
-    fn Start(&self, when: Finite<f64>) {
+    fn Start(&self, when: Finite<f64>) -> Fallible<()> {
+        if self.started.get() || self.stopped.get() {
+            return Err(Error::InvalidState);
+        }
+        self.started.set(true);
         self.node.message(
             AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Start(*when))
         );
+        Ok(())
     }

     // https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-stop
-    fn Stop(&self, when: Finite<f64>) {
+    fn Stop(&self, when: Finite<f64>) -> Fallible<()> {
+        if !self.started.get() {
+            return Err(Error::InvalidState);
+        }
+        self.stopped.set(true);
         self.node.message(
             AudioNodeMessage::AudioScheduledSourceNode(AudioScheduledSourceNodeMessage::Stop(*when))
         );
+        Ok(())
     }
 }
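
The two new Cell<bool> flags make start/stop a small state machine: start() is valid at most once and never after stop(), while stop() is valid only after a successful start(). A self-contained sketch of the same rules, with the message plumbing omitted:

    use std::cell::Cell;

    #[derive(Debug, PartialEq)]
    enum AudioError {
        InvalidState,
    }

    #[derive(Default)]
    struct ScheduledSourceModel {
        started: Cell<bool>,
        stopped: Cell<bool>,
    }

    impl ScheduledSourceModel {
        // start() is valid at most once, and never after stop().
        fn start(&self) -> Result<(), AudioError> {
            if self.started.get() || self.stopped.get() {
                return Err(AudioError::InvalidState);
            }
            self.started.set(true);
            Ok(())
        }

        // stop() is only valid after a successful start().
        fn stop(&self) -> Result<(), AudioError> {
            if !self.started.get() {
                return Err(AudioError::InvalidState);
            }
            self.stopped.set(true);
            Ok(())
        }
    }

    fn main() {
        let node = ScheduledSourceModel::default();
        assert_eq!(node.stop(), Err(AudioError::InvalidState)); // stop before start
        assert!(node.start().is_ok());
        assert_eq!(node.start(), Err(AudioError::InvalidState)); // double start
        assert!(node.stop().is_ok());
    }

Cell works here because the DOM methods only take &self; interior mutability keeps the flags updatable without threading &mut through the bindings.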

components/script/dom/baseaudiocontext.rs

@@ -3,15 +3,18 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

 use dom::audiobuffer::AudioBuffer;
+use dom::audiobuffersourcenode::AudioBufferSourceNode;
 use dom::audiodestinationnode::AudioDestinationNode;
+use dom::audionode::MAX_CHANNEL_COUNT;
 use dom::bindings::cell::DomRefCell;
+use dom::bindings::codegen::Bindings::AudioBufferSourceNodeBinding::AudioBufferSourceOptions;
 use dom::bindings::codegen::Bindings::AudioNodeBinding::AudioNodeOptions;
 use dom::bindings::codegen::Bindings::AudioNodeBinding::{ChannelCountMode, ChannelInterpretation};
 use dom::bindings::codegen::Bindings::BaseAudioContextBinding::BaseAudioContextMethods;
 use dom::bindings::codegen::Bindings::BaseAudioContextBinding::AudioContextState;
 use dom::bindings::codegen::Bindings::GainNodeBinding::GainOptions;
 use dom::bindings::codegen::Bindings::OscillatorNodeBinding::OscillatorOptions;
-use dom::bindings::error::{Error, ErrorResult};
+use dom::bindings::error::{Error, ErrorResult, Fallible};
 use dom::bindings::inheritance::Castable;
 use dom::bindings::num::Finite;
 use dom::bindings::refcounted::Trusted;

@@ -207,23 +210,23 @@ impl BaseAudioContext {
 }

 impl BaseAudioContextMethods for BaseAudioContext {
-    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-samplerate
+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-samplerate
     fn SampleRate(&self) -> Finite<f32> {
         Finite::wrap(self.sample_rate)
     }

-    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-currenttime
+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-currenttime
     fn CurrentTime(&self) -> Finite<f64> {
         let current_time = self.audio_context_impl.current_time();
         Finite::wrap(current_time)
     }

-    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state
+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-state
     fn State(&self) -> AudioContextState {
         self.state.get()
     }

-    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-resume
+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-resume
     #[allow(unrooted_must_root)]
     fn Resume(&self) -> Rc<Promise> {
         // Step 1.

@@ -255,14 +258,15 @@ impl BaseAudioContextMethods for BaseAudioContext {
         promise
     }

-    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination
+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-destination
     fn Destination(&self) -> DomRoot<AudioDestinationNode> {
         DomRoot::from_ref(self.destination.as_ref().unwrap())
     }

-    // https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-onstatechange
+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-onstatechange
     event_handler!(statechange, GetOnstatechange, SetOnstatechange);

+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createoscillator
     #[allow(unsafe_code)]
     fn CreateOscillator(&self) -> DomRoot<OscillatorNode> {
         let global = self.global();

@@ -271,6 +275,7 @@ impl BaseAudioContextMethods for BaseAudioContext {
         OscillatorNode::new(&window, &self, &options)
     }

+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-creategain
     #[allow(unsafe_code)]
     fn CreateGain(&self) -> DomRoot<GainNode> {
         let global = self.global();

@@ -279,12 +284,27 @@ impl BaseAudioContextMethods for BaseAudioContext {
         GainNode::new(&window, &self, &options)
     }

+    /// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer
     fn CreateBuffer(&self,
                     number_of_channels: u32,
                     length: u32,
-                    sample_rate: Finite<f32>) -> DomRoot<AudioBuffer> {
+                    sample_rate: Finite<f32>) -> Fallible<DomRoot<AudioBuffer>> {
+        if number_of_channels <= 0 ||
+           number_of_channels > MAX_CHANNEL_COUNT ||
+           length <= 0 ||
+           *sample_rate <= 0. {
+            return Err(Error::NotSupported);
+        }
         let global = self.global();
-        AudioBuffer::new(&global.as_window(), number_of_channels, length, *sample_rate)
+        Ok(AudioBuffer::new(&global.as_window(), number_of_channels, length, *sample_rate))
+    }
+
+    #[allow(unsafe_code)]
+    fn CreateBufferSource(&self) -> DomRoot<AudioBufferSourceNode> {
+        let global = self.global();
+        // XXX Can we do this implementing Default?
+        let options = unsafe { AudioBufferSourceOptions::empty(global.get_cx()) };
+        AudioBufferSourceNode::new(&global.as_window(), &self, &options)
     }
 }
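
CreateBuffer now rejects out-of-range arguments with NotSupported before allocating anything. Since number_of_channels and length are u32, the diff's <= 0 comparisons can only ever catch zero; an equivalent standalone check looks like the sketch below. MAX_CHANNEL_COUNT is assumed to be 32 here (the minimum the Web Audio spec requires implementations to support); the real constant lives in dom::audionode.

    const MAX_CHANNEL_COUNT: u32 = 32; // assumption; see dom::audionode

    #[derive(Debug, PartialEq)]
    enum AudioError {
        NotSupported,
    }

    fn validate_buffer_args(number_of_channels: u32,
                            length: u32,
                            sample_rate: f32) -> Result<(), AudioError> {
        // Channel count and length must be non-zero, the channel count
        // must not exceed the implementation limit, and the sample rate
        // must be strictly positive.
        if number_of_channels == 0 ||
           number_of_channels > MAX_CHANNEL_COUNT ||
           length == 0 ||
           sample_rate <= 0. {
            return Err(AudioError::NotSupported);
        }
        Ok(())
    }

    fn main() {
        assert!(validate_buffer_args(2, 44100, 44100.).is_ok());
        assert_eq!(validate_buffer_args(0, 44100, 44100.), Err(AudioError::NotSupported));
        assert_eq!(validate_buffer_args(33, 44100, 44100.), Err(AudioError::NotSupported));
        assert_eq!(validate_buffer_args(2, 44100, 0.), Err(AudioError::NotSupported));
    }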

components/script/dom/bindings/trace.rs

@@ -80,6 +80,7 @@ use offscreen_gl_context::GLLimits;
 use parking_lot::RwLock;
 use profile_traits::mem::ProfilerChan as MemProfilerChan;
 use profile_traits::time::ProfilerChan as TimeProfilerChan;
+use servo_media::audio::buffer_source_node::AudioBuffer;
 use servo_media::audio::context::AudioContext;
 use servo_media::audio::graph::NodeId;
 use script_layout_interface::OpaqueStyleAndLayoutData;

@@ -432,6 +433,7 @@ unsafe_no_jsmanaged_fields!(InteractiveWindow);
 unsafe_no_jsmanaged_fields!(CanvasId);
 unsafe_no_jsmanaged_fields!(SourceSet);
 unsafe_no_jsmanaged_fields!(AudioGraph);
+unsafe_no_jsmanaged_fields!(AudioBuffer);
 unsafe_no_jsmanaged_fields!(AudioContext);
 unsafe_no_jsmanaged_fields!(NodeId);

components/script/dom/oscillatornode.rs

@@ -30,7 +30,7 @@ audio_param_impl!(Detune, OscillatorNode, OscillatorNodeMessage, SetDetune);
 #[dom_struct]
 pub struct OscillatorNode {
-    node: AudioScheduledSourceNode,
+    source_node: AudioScheduledSourceNode,
     oscillator_type: OscillatorType,
     frequency: DomRoot<AudioParam>,
     detune: DomRoot<AudioParam>,

@@ -48,26 +48,27 @@ impl OscillatorNode {
         node_options.channelCount = Some(2);
         node_options.channelCountMode = Some(ChannelCountMode::Max);
         node_options.channelInterpretation = Some(ChannelInterpretation::Speakers);
-        let node = AudioScheduledSourceNode::new_inherited(
+        let source_node = AudioScheduledSourceNode::new_inherited(
             AudioNodeType::OscillatorNode(oscillator_options.into()),
             context,
             &node_options,
             0, /* inputs */
             1, /* outputs */
         );
-        let frequency = Frequency::new(context.audio_context_impl(), node.node_id());
+        let node_id = source_node.node().node_id();
+        let frequency = Frequency::new(context.audio_context_impl(), node_id);
         let frequency = AudioParam::new(window,
                                         Box::new(frequency),
                                         AutomationRate::A_rate,
                                         440., f32::MIN, f32::MAX);
-        let detune = Detune::new(context.audio_context_impl(), node.node_id());
+        let detune = Detune::new(context.audio_context_impl(), node_id);
         let detune = AudioParam::new(window,
                                      Box::new(detune),
                                      AutomationRate::A_rate,
                                      0., -440. / 2., 440. / 2.);
         OscillatorNode {
-            node,
+            source_node,
             oscillator_type: oscillator_options.type_,
             frequency,
             detune,

components/script/dom/webidls/AudioBufferSourceNode.webidl

@@ -18,13 +18,13 @@ dictionary AudioBufferSourceOptions {
 [Exposed=Window,
  Constructor (BaseAudioContext context, optional AudioBufferSourceOptions options)]
 interface AudioBufferSourceNode : AudioScheduledSourceNode {
-    // attribute AudioBuffer? buffer;
+    [Throws] attribute AudioBuffer? buffer;
     readonly attribute AudioParam playbackRate;
     readonly attribute AudioParam detune;
     attribute boolean loop;
     attribute double loopStart;
     attribute double loopEnd;
-    void start(optional double when = 0,
+    [Throws] void start(optional double when = 0,
                optional double offset,
                optional double duration);
 };

components/script/dom/webidls/AudioScheduledSourceNode.webidl

@@ -9,6 +9,6 @@
 [Exposed=Window]
 interface AudioScheduledSourceNode : AudioNode {
     attribute EventHandler onended;
-    void start(optional double when = 0);
-    void stop(optional double when = 0);
+    [Throws] void start(optional double when = 0);
+    [Throws] void stop(optional double when = 0);
 };

components/script/dom/webidls/BaseAudioContext.webidl

@@ -24,13 +24,13 @@ interface BaseAudioContext : EventTarget {
     readonly attribute AudioContextState state;
     Promise<void> resume();
     attribute EventHandler onstatechange;
-    AudioBuffer createBuffer(unsigned long numberOfChannels,
+    [Throws] AudioBuffer createBuffer(unsigned long numberOfChannels,
                              unsigned long length,
                              float sampleRate);
     // Promise<AudioBuffer> decodeAudioData(ArrayBuffer audioData,
     //                                      optional DecodeSuccessCallback successCallback,
     //                                      optional DecodeErrorCallback errorCallback);
-    // AudioBufferSourceNode createBufferSource();
+    AudioBufferSourceNode createBufferSource();
     // ConstantSourceNode createConstantSource();
     // ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
     //                                           optional unsigned long numberOfInputChannels = 2,
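
The [Throws] annotations added across these WebIDL files are what let the Rust implementations return errors at all: Servo's bindings codegen maps a [Throws] operation or attribute to a method returning Fallible<T>, an alias for Result<T, Error>, instead of a bare T. A compilable sketch of the mapping (DomRoot elided, AudioBuffer a stand-in struct):

    // Only the Error variants this commit uses.
    #[derive(Debug, PartialEq)]
    enum Error {
        InvalidState,
        NotSupported,
    }

    // In Servo, dom::bindings::error defines Fallible<T> = Result<T, Error>.
    type Fallible<T> = Result<T, Error>;

    #[derive(Debug)]
    struct AudioBuffer;

    // WebIDL: AudioBufferSourceNode createBufferSource();
    //   -> infallible: fn CreateBufferSource(&self) -> DomRoot<AudioBufferSourceNode>
    // WebIDL: [Throws] AudioBuffer createBuffer(...);
    //   -> fallible:   fn CreateBuffer(...) -> Fallible<DomRoot<AudioBuffer>>
    fn create_buffer(length: u32) -> Fallible<AudioBuffer> {
        if length == 0 {
            return Err(Error::NotSupported);
        }
        Ok(AudioBuffer)
    }

    fn main() {
        assert!(create_buffer(4).is_ok());
        assert_eq!(create_buffer(0).unwrap_err(), Error::NotSupported);
    }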