Added AudioTrack, AudioTrackList, VideoTrack, VideoTrackList, and TrackEvent interfaces

sreeise 2019-01-04 07:55:38 -05:00
parent 4d8d54fc00
commit cac4aa56f7
19 changed files with 974 additions and 138 deletions
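For context, the sketch below is a minimal, standalone model of the track-list behaviour the HTMLMediaElement changes below rely on. It is not the AudioTrackList DOM type added by this commit (that implementation is in the other changed files, not shown here); only the method names (add, len, enabled_index, set_enabled, clear) mirror calls made in the diff, while the struct fields and the main() driver are illustrative assumptions. The VideoTrackList side is analogous, with selected_index and set_selected in place of enabled_index and set_enabled.

// Standalone sketch only: NOT the Servo DOM types. Method names mirror the
// calls the diff makes on AudioTrackList; fields and main() are illustrative.

#[derive(Default)]
struct AudioTrack {
    kind: String,  // "main" for the first track, empty otherwise (assumed field)
    enabled: bool, // whether this track is currently enabled (assumed field)
}

#[derive(Default)]
struct AudioTrackList {
    tracks: Vec<AudioTrack>,
}

impl AudioTrackList {
    fn add(&mut self, track: AudioTrack) {
        self.tracks.push(track);
    }

    fn len(&self) -> usize {
        self.tracks.len()
    }

    // Index of the first enabled track, if any.
    fn enabled_index(&self) -> Option<usize> {
        self.tracks.iter().position(|t| t.enabled)
    }

    fn set_enabled(&mut self, index: usize, enabled: bool) {
        if let Some(track) = self.tracks.get_mut(index) {
            track.enabled = enabled;
        }
    }

    fn clear(&mut self) {
        self.tracks.clear();
    }
}

fn main() {
    let mut list = AudioTrackList::default();
    // Mirrors the metadata-processing loop in the diff: the first track gets
    // the kind "main", and a track is enabled only if none is enabled yet.
    for i in 0..2 {
        let kind = if i == 0 { "main" } else { "" }.to_string();
        list.add(AudioTrack { kind, enabled: false });
        if list.enabled_index().is_none() {
            let last = list.len() - 1;
            list.set_enabled(last, true);
        }
    }
    assert_eq!(list.len(), 2);
    assert_eq!(list.enabled_index(), Some(0));
}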


@@ -4,6 +4,8 @@
use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::audiotrack::AudioTrack;
use crate::dom::audiotracklist::AudioTrackList;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use crate::dom::bindings::codegen::Bindings::HTMLMediaElementBinding::CanPlayTypeResult;
@@ -15,6 +17,7 @@ use crate::dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethod
use crate::dom::bindings::codegen::Bindings::TextTrackBinding::{TextTrackKind, TextTrackMode};
use crate::dom::bindings::codegen::InheritTypes::{ElementTypeId, HTMLElementTypeId};
use crate::dom::bindings::codegen::InheritTypes::{HTMLMediaElementTypeId, NodeTypeId};
use crate::dom::bindings::codegen::UnionTypes::VideoTrackOrAudioTrackOrTextTrack;
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::num::Finite;
@@ -25,6 +28,7 @@ use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::blob::Blob;
use crate::dom::document::Document;
use crate::dom::element::{AttributeMutation, Element};
use crate::dom::event::Event;
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlelement::HTMLElement;
@@ -37,6 +41,9 @@ use crate::dom::promise::Promise;
use crate::dom::texttrack::TextTrack;
use crate::dom::texttracklist::TextTrackList;
use crate::dom::timeranges::{TimeRanges, TimeRangesContainer};
use crate::dom::trackevent::TrackEvent;
use crate::dom::videotrack::VideoTrack;
use crate::dom::videotracklist::VideoTrackList;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::FetchCanceller;
use crate::microtask::{Microtask, MicrotaskRunnable};
@@ -206,6 +213,10 @@ pub struct HTMLMediaElement {
/// https://html.spec.whatwg.org/multipage/#dom-media-played
#[ignore_malloc_size_of = "Rc"]
played: DomRefCell<TimeRangesContainer>,
/// https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
audio_tracks_list: MutNullableDom<AudioTrackList>,
/// https://html.spec.whatwg.org/multipage/#dom-media-videotracks
video_tracks_list: MutNullableDom<VideoTrackList>,
/// https://html.spec.whatwg.org/multipage/#dom-media-texttracks
text_tracks_list: MutNullableDom<TextTrackList>,
/// Time of last timeupdate notification.
@@ -268,6 +279,8 @@ impl HTMLMediaElement {
seeking: Cell::new(false),
resource_url: DomRefCell::new(None),
played: DomRefCell::new(TimeRangesContainer::new()),
audio_tracks_list: Default::default(),
video_tracks_list: Default::default(),
text_tracks_list: Default::default(),
next_timeupdate_event: Cell::new(time::get_time() + Duration::milliseconds(250)),
current_fetch_context: DomRefCell::new(None),
@@ -803,7 +816,8 @@ impl HTMLMediaElement {
)));
// Step 2.
// FIXME(nox): Forget the media-resource-specific tracks.
this.AudioTracks().clear();
this.VideoTracks().clear();
// Step 3.
this.network_state.set(NetworkState::NoSource);
@@ -904,7 +918,8 @@ impl HTMLMediaElement {
// FIXME(nox): Detach MediaSource media provider object.
// Step 6.4.
// FIXME(nox): Forget the media-resource-specific tracks.
self.AudioTracks().clear();
self.VideoTracks().clear();
// Step 6.5.
if self.ready_state.get() != ReadyState::HaveNothing {
@@ -1227,6 +1242,92 @@ impl HTMLMediaElement {
},
PlayerEvent::MetadataUpdated(ref metadata) => {
// https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
// => If the media resource is found to have an audio track
if !metadata.audio_tracks.is_empty() {
for (i, _track) in metadata.audio_tracks.iter().enumerate() {
// Step 1.
let kind = match i {
0 => DOMString::from("main"),
_ => DOMString::new(),
};
let window = window_from_node(self);
let audio_track = AudioTrack::new(
&window,
DOMString::new(),
kind,
DOMString::new(),
DOMString::new(),
);
// Steps 2 and 3.
self.AudioTracks().add(&audio_track);
// Step 4.
// https://www.w3.org/TR/media-frags/#media-fragment-syntax
// https://github.com/servo/servo/issues/22366
// Steps 5 and 6.
if self.AudioTracks().enabled_index().is_none() {
self.AudioTracks()
.set_enabled(self.AudioTracks().len() - 1, true);
}
// Step 7.
let event = TrackEvent::new(
&self.global(),
atom!("addtrack"),
false,
false,
&Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(audio_track)),
);
event.upcast::<Event>().fire(self.upcast::<EventTarget>());
}
}
// => If the media resource is found to have a video track
if !metadata.video_tracks.is_empty() {
for (i, _track) in metadata.video_tracks.iter().enumerate() {
// Step 1.
let kind = match i {
0 => DOMString::from("main"),
_ => DOMString::new(),
};
let window = window_from_node(self);
let video_track = VideoTrack::new(
&window,
DOMString::new(),
kind,
DOMString::new(),
DOMString::new(),
);
// Steps 2 and 3.
self.VideoTracks().add(&video_track);
// Step 4.
// https://www.w3.org/TR/media-frags/#media-fragment-syntax
// https://github.com/servo/servo/issues/22366
// Steps 5 and 6.
if self.VideoTracks().selected_index().is_none() {
self.VideoTracks()
.set_selected(self.VideoTracks().len() - 1, true);
}
// Step 7.
let event = TrackEvent::new(
&self.global(),
atom!("addtrack"),
false,
false,
&Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(video_track)),
);
event.upcast::<Event>().fire(self.upcast::<EventTarget>());
}
}
// => "Once enough of the media data has been fetched to determine the duration..."
// Step 1.
// servo-media owns the media timeline.
@@ -1287,7 +1388,7 @@ impl HTMLMediaElement {
// https://www.w3.org/TR/media-frags/#media-fragment-syntax
// https://github.com/servo/media/issues/156
// XXX Steps 12 and 13 require audio and video tracks support.
// Steps 12 and 13 are already handled by the earlier media track processing.
},
PlayerEvent::NeedData => {
// The player needs more data.
@@ -1702,6 +1803,20 @@ impl HTMLMediaElementMethods for HTMLMediaElement {
TimeRanges::new(self.global().as_window(), buffered)
}
// https://html.spec.whatwg.org/multipage/#dom-media-audiotracks
fn AudioTracks(&self) -> DomRoot<AudioTrackList> {
let window = window_from_node(self);
self.audio_tracks_list
.or_init(|| AudioTrackList::new(&window, &[]))
}
// https://html.spec.whatwg.org/multipage/#dom-media-videotracks
fn VideoTracks(&self) -> DomRoot<VideoTrackList> {
let window = window_from_node(self);
self.video_tracks_list
.or_init(|| VideoTrackList::new(&window, &[]))
}
// https://html.spec.whatwg.org/multipage/#dom-media-texttracks
fn TextTracks(&self) -> DomRoot<TextTrackList> {
let window = window_from_node(self);