Run the async HTML Tokenizer on a new thread

This commit is contained in:
Nikhil Shagrithaya 2017-06-24 13:27:09 +05:30
parent 3629efba75
commit 8ebd70d402
5 changed files with 469 additions and 223 deletions

8
Cargo.lock generated
View file

@ -1671,7 +1671,7 @@ dependencies = [
"rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "string_cache 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tendril 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "tendril 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -2607,6 +2607,8 @@ dependencies = [
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cssparser 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)", "cssparser 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize_derive 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)", "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3199,7 +3201,7 @@ dependencies = [
[[package]] [[package]]
name = "tendril" name = "tendril"
version = "0.3.0" version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3873,7 +3875,7 @@ dependencies = [
"checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047" "checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047"
"checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791" "checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791"
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6" "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
"checksum tendril 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "01576be96a211e017bf90b1603b1272baf9fe93a1bf9b4845257c4ba09c9b25f" "checksum tendril 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1b72f8e2f5b73b65c315b1a70c730f24b9d7a25f39e98de8acbe2bb795caea"
"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989" "checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a" "checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773" "checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"

View file

@ -7,9 +7,8 @@
use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods; use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods; use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable; use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableJS, Root}; use dom::bindings::js::{JS, Root};
use dom::bindings::str::DOMString; use dom::bindings::str::DOMString;
use dom::bindings::trace::JSTraceable;
use dom::comment::Comment; use dom::comment::Comment;
use dom::document::Document; use dom::document::Document;
use dom::documenttype::DocumentType; use dom::documenttype::DocumentType;
@ -20,25 +19,167 @@ use dom::htmltemplateelement::HTMLTemplateElement;
use dom::node::Node; use dom::node::Node;
use dom::processinginstruction::ProcessingInstruction; use dom::processinginstruction::ProcessingInstruction;
use dom::virtualmethods::vtable_for; use dom::virtualmethods::vtable_for;
use html5ever::{Attribute, LocalName, QualName, ExpandedName}; use html5ever::{Attribute as HtmlAttribute, ExpandedName, LocalName, QualName};
use html5ever::buffer_queue::BufferQueue; use html5ever::buffer_queue::BufferQueue;
use html5ever::tendril::StrTendril; use html5ever::tendril::{SendTendril, StrTendril, Tendril};
use html5ever::tendril::fmt::UTF8;
use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult}; use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult};
use html5ever::tree_builder::{NodeOrText, TreeSink, NextParserState, QuirksMode, ElementFlags}; use html5ever::tree_builder::{ElementFlags, NodeOrText as HtmlNodeOrText, NextParserState, QuirksMode, TreeSink};
use html5ever::tree_builder::{Tracer as HtmlTracer, TreeBuilder, TreeBuilderOpts}; use html5ever::tree_builder::{TreeBuilder, TreeBuilderOpts};
use js::jsapi::JSTracer;
use servo_url::ServoUrl; use servo_url::ServoUrl;
use std::ascii::AsciiExt; use std::ascii::AsciiExt;
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::Cell; use std::cell::Cell;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::vec_deque::VecDeque;
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;
use style::context::QuirksMode as ServoQuirksMode; use style::context::QuirksMode as ServoQuirksMode;
/// Identifier used to refer to a DOM node across the thread boundary.
type ParseNodeId = usize;

/// Lightweight, sendable handle to a node being built by the parser.
/// The real `JS<Node>` lives on the main thread, keyed by `id` in the
/// Tokenizer's node map.
#[derive(Clone, HeapSizeOf, JSTraceable)]
pub struct ParseNode {
    // Key into the main-thread Tokenizer's id -> JS<Node> map.
    id: ParseNodeId,
    // Qualified name, filled in when an element is created (None otherwise).
    qual_name: Option<QualName>,
}
/// Sendable analogue of html5ever's `NodeOrText`: a `ParseNode` handle
/// instead of a real DOM node, and an owned `String` instead of a tendril,
/// so it can cross the parser-thread/main-thread channel.
#[derive(HeapSizeOf, JSTraceable)]
enum NodeOrText {
    Node(ParseNode),
    Text(String),
}
/// Sendable counterpart of html5ever's `Attribute` (the value is an owned
/// `String` rather than a `StrTendril`).
#[derive(HeapSizeOf, JSTraceable)]
struct Attribute {
    name: QualName,
    value: String,
}
/// A single tree-construction action, generated by the `Sink` on the parser
/// thread and executed against the real DOM on the main thread (see
/// `Tokenizer::process_operation`).
#[derive(HeapSizeOf, JSTraceable)]
enum ParseOperation {
    // Map `contents` to the content fragment of the <template> `target`.
    GetTemplateContents { target: ParseNodeId, contents: ParseNodeId },
    CreateElement {
        node: ParseNodeId,
        name: QualName,
        attrs: Vec<Attribute>,
        // Input line the element started on, for ParserCreated elements.
        current_line: u64
    },
    CreateComment { text: String, node: ParseNodeId },
    // Insert `node` immediately before `sibling`, under `sibling`'s parent.
    AppendBeforeSibling { sibling: ParseNodeId, node: NodeOrText },
    // Append `node` as the last child of `parent`.
    Append { parent: ParseNodeId, node: NodeOrText },
    AppendDoctypeToDocument {
        name: String,
        public_id: String,
        system_id: String
    },
    AddAttrsIfMissing { target: ParseNodeId, attrs: Vec<Attribute> },
    RemoveFromParent { target: ParseNodeId },
    MarkScriptAlreadyStarted { node: ParseNodeId },
    // Move every child of `parent` under `new_parent`.
    ReparentChildren { parent: ParseNodeId, new_parent: ParseNodeId },
    // Associate a form-associatable element with its owner <form>.
    AssociateWithForm { target: ParseNodeId, form: ParseNodeId },
    CreatePI {
        node: ParseNodeId,
        target: String,
        data: String
    },
    // Run the virtual-method `pop` hook for `node`.
    Pop { node: ParseNodeId },
    SetQuirksMode {
        #[ignore_heap_size_of = "Defined in style"]
        mode: ServoQuirksMode
    },
}
/// Messages received by the main-thread Tokenizer, sent by either the
/// HtmlTokenizer or the Sink on the parser thread.
#[derive(HeapSizeOf)]
enum ToTokenizerMsg {
    // From HtmlTokenizer
    // Feeding finished without hitting a script; carries the unconsumed input.
    TokenizerResultDone {
        #[ignore_heap_size_of = "Defined in html5ever"]
        updated_input: VecDeque<SendTendril<UTF8>>
    },
    // Feeding stopped at a script element; carries the script's handle and
    // the unconsumed input.
    TokenizerResultScript {
        script: ParseNode,
        #[ignore_heap_size_of = "Defined in html5ever"]
        updated_input: VecDeque<SendTendril<UTF8>>
    },
    End, // Sent to Tokenizer to signify HtmlTokenizer's end method has returned

    // From Sink
    // Execute a tree-construction action on the real DOM.
    ProcessOperation(ParseOperation),
    // Query: are the two nodes in the same home subtree? Answered with
    // ToSinkMsg::IsSameTree.
    IsSameTree(ParseNodeId, ParseNodeId),
    // Query: does the node have a parent? Answered with ToSinkMsg::HasParentNode.
    HasParentNode(ParseNodeId),
}
/// Messages from the main-thread Tokenizer to the HtmlTokenizer running on
/// the parser thread.
#[derive(HeapSizeOf)]
enum ToHtmlTokenizerMsg {
    // Tokenize this input; results come back as TokenizerResult* messages.
    Feed {
        #[ignore_heap_size_of = "Defined in html5ever"]
        input: VecDeque<SendTendril<UTF8>>
    },
    // Finish tokenizing; the parser thread replies ToTokenizerMsg::End and exits.
    End,
    SetPlainTextState,
}
// Responses to the queries asked by the Sink to the Tokenizer, using the
// message types ToTokenizerMsg::IsSameTree and ToTokenizerMsg::HasParentNode.
#[derive(HeapSizeOf)]
enum ToSinkMsg {
    IsSameTree(bool),
    HasParentNode(bool),
}
/// Rebuild a `BufferQueue` from the sendable tendrils received over a
/// channel, preserving their order.
fn create_buffer_queue(mut buffers: VecDeque<SendTendril<UTF8>>) -> BufferQueue {
    let mut queue = BufferQueue::new();
    for sendable in buffers.drain(..) {
        queue.push_back(StrTendril::from(sendable));
    }
    queue
}
// The async HTML Tokenizer consists of two separate types working together: the Tokenizer
// (defined below), which lives on the main thread, and the HtmlTokenizer, defined in html5ever, which
// lives on the parser thread.
// Steps:
// 1. A call to Tokenizer::new will spin up a new parser thread, creating an HtmlTokenizer instance,
// which starts listening for messages from Tokenizer.
// 2. Upon receiving an input from ServoParser, the Tokenizer forwards it to HtmlTokenizer, where it starts
// creating the necessary tree actions based on the input.
// 3. HtmlTokenizer sends these tree actions to the Tokenizer as soon as it creates them. The Tokenizer
// then executes the received actions.
//
// _____________ _______________
// | | ToHtmlTokenizerMsg | |
// | |------------------------>| |
// | | | |
// | | ToTokenizerMsg | HtmlTokenizer |
// | |<------------------------| |
// | Tokenizer | | |
// | | ToTokenizerMsg | ________ |
// | |<------------------------|---| | |
// | | | | Sink | |
// | | ToSinkMsg | | | |
// | |-------------------------|-->|________| |
// |_____________| |_______________|
//
#[derive(HeapSizeOf, JSTraceable)] #[derive(HeapSizeOf, JSTraceable)]
#[must_root] #[must_root]
pub struct Tokenizer { pub struct Tokenizer {
#[ignore_heap_size_of = "Defined in html5ever"] document: JS<Document>,
inner: HtmlTokenizer<TreeBuilder<ParseNode, Sink>>, #[ignore_heap_size_of = "Defined in std"]
receiver: Receiver<ToTokenizerMsg>,
#[ignore_heap_size_of = "Defined in std"]
html_tokenizer_sender: Sender<ToHtmlTokenizerMsg>,
#[ignore_heap_size_of = "Defined in std"]
sink_sender: Sender<ToSinkMsg>,
nodes: HashMap<ParseNodeId, JS<Node>>,
url: ServoUrl,
} }
impl Tokenizer { impl Tokenizer {
@ -47,171 +188,125 @@ impl Tokenizer {
url: ServoUrl, url: ServoUrl,
fragment_context: Option<super::FragmentContext>) fragment_context: Option<super::FragmentContext>)
-> Self { -> Self {
let mut sink = Sink::new(url, document); // Messages from the Tokenizer (main thread) to HtmlTokenizer (parser thread)
let (to_html_tokenizer_sender, html_tokenizer_receiver) = channel();
// Messages from the Tokenizer (main thread) to Sink (parser thread)
let (to_sink_sender, sink_receiver) = channel();
// Messages from HtmlTokenizer and Sink (parser thread) to Tokenizer (main thread)
let (to_tokenizer_sender, tokenizer_receiver) = channel();
let options = TreeBuilderOpts { let mut tokenizer = Tokenizer {
ignore_missing_rules: true, document: JS::from_ref(document),
.. Default::default() receiver: tokenizer_receiver,
html_tokenizer_sender: to_html_tokenizer_sender,
sink_sender: to_sink_sender,
nodes: HashMap::new(),
url: url
}; };
tokenizer.insert_node(0, JS::from_ref(document.upcast()));
let inner = if let Some(fc) = fragment_context { let mut sink = Sink::new(to_tokenizer_sender.clone(), sink_receiver);
let ctxt_parse_node = sink.new_parse_node(); let mut ctxt_parse_node = None;
sink.nodes.insert(ctxt_parse_node.id, JS::from_ref(fc.context_elem)); let mut form_parse_node = None;
let mut fragment_context_is_some = false;
if let Some(fc) = fragment_context {
let node = sink.new_parse_node();
tokenizer.insert_node(node.id, JS::from_ref(fc.context_elem));
ctxt_parse_node = Some(node);
let form_parse_node = fc.form_elem.map(|form_elem| { form_parse_node = fc.form_elem.map(|form_elem| {
let node = sink.new_parse_node(); let node = sink.new_parse_node();
sink.nodes.insert(node.id, JS::from_ref(form_elem)); tokenizer.insert_node(node.id, JS::from_ref(form_elem));
node node
}); });
let tb = TreeBuilder::new_for_fragment( fragment_context_is_some = true;
sink,
ctxt_parse_node,
form_parse_node,
options);
let tok_options = TokenizerOpts {
initial_state: Some(tb.tokenizer_state_for_context_elem()),
.. Default::default()
};
HtmlTokenizer::new(tb, tok_options)
} else {
HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
}; };
Tokenizer { // Create new thread for HtmlTokenizer. This is where parser actions
inner: inner, // will be generated from the input provided. These parser actions are then passed
} // onto the main thread to be executed.
thread::Builder::new().name(String::from("HTML Parser")).spawn(move || {
run(sink,
fragment_context_is_some,
ctxt_parse_node,
form_parse_node,
to_tokenizer_sender,
html_tokenizer_receiver);
}).expect("HTML Parser thread spawning failed");
tokenizer
} }
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), Root<HTMLScriptElement>> { pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), Root<HTMLScriptElement>> {
match self.inner.feed(input) { let mut send_tendrils = VecDeque::new();
TokenizerResult::Done => Ok(()), while let Some(str) = input.pop_front() {
TokenizerResult::Script(script) => { send_tendrils.push_back(SendTendril::from(str));
let nodes = &self.inner.sink.sink.nodes; }
let script = nodes.get(&script.id).unwrap();
Err(Root::from_ref(script.downcast().unwrap())) // Send message to parser thread, asking it to start reading from the input.
}, // Parser operation messages will be sent to main thread as they are evaluated.
self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::Feed { input: send_tendrils }).unwrap();
loop {
match self.receiver.recv().expect("Unexpected channel panic in main thread.") {
ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
ToTokenizerMsg::IsSameTree(ref x_id, ref y_id) => {
let x = self.get_node(x_id);
let y = self.get_node(y_id);
let x = x.downcast::<Element>().expect("Element node expected");
let y = y.downcast::<Element>().expect("Element node expected");
self.sink_sender.send(ToSinkMsg::IsSameTree(x.is_in_same_home_subtree(y))).unwrap();
},
ToTokenizerMsg::HasParentNode(ref id) => {
let res = self.get_node(id).GetParentNode().is_some();
self.sink_sender.send(ToSinkMsg::HasParentNode(res)).unwrap();
},
ToTokenizerMsg::TokenizerResultDone { updated_input } => {
let buffer_queue = create_buffer_queue(updated_input);
*input = buffer_queue;
return Ok(());
},
ToTokenizerMsg::TokenizerResultScript { script, updated_input } => {
let buffer_queue = create_buffer_queue(updated_input);
*input = buffer_queue;
let script = self.get_node(&script.id);
return Err(Root::from_ref(script.downcast().unwrap()));
}
ToTokenizerMsg::End => unreachable!(),
};
} }
} }
pub fn end(&mut self) { pub fn end(&mut self) {
self.inner.end(); self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::End).unwrap();
loop {
match self.receiver.recv().expect("Unexpected channel panic in main thread.") {
ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
ToTokenizerMsg::IsSameTree(ref x_id, ref y_id) => {
let x = self.get_node(x_id);
let y = self.get_node(y_id);
let x = x.downcast::<Element>().expect("Element node expected");
let y = y.downcast::<Element>().expect("Element node expected");
self.sink_sender.send(ToSinkMsg::IsSameTree(x.is_in_same_home_subtree(y))).unwrap();
},
ToTokenizerMsg::HasParentNode(ref id) => {
let res = self.get_node(id).GetParentNode().is_some();
self.sink_sender.send(ToSinkMsg::HasParentNode(res)).unwrap();
},
ToTokenizerMsg::End => return,
_ => unreachable!(),
};
}
} }
pub fn url(&self) -> &ServoUrl { pub fn url(&self) -> &ServoUrl {
&self.inner.sink.sink.base_url &self.url
} }
pub fn set_plaintext_state(&mut self) { pub fn set_plaintext_state(&mut self) {
self.inner.set_plaintext_state(); self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::SetPlainTextState).unwrap();
}
}
#[allow(unsafe_code)]
unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<ParseNode, Sink>> {
unsafe fn trace(&self, trc: *mut JSTracer) {
struct Tracer(*mut JSTracer);
let tracer = Tracer(trc);
impl HtmlTracer for Tracer {
type Handle = ParseNode;
#[allow(unrooted_must_root)]
fn trace_handle(&self, node: &ParseNode) {
unsafe { node.trace(self.0); }
}
}
let tree_builder = &self.sink;
tree_builder.trace_handles(&tracer);
tree_builder.sink.trace(trc);
}
}
type ParseNodeId = usize;
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub struct ParseNode {
id: ParseNodeId,
qual_name: Option<QualName>,
}
#[derive(JSTraceable, HeapSizeOf)]
struct ParseNodeData {
contents: Option<ParseNode>,
is_integration_point: bool,
}
impl Default for ParseNodeData {
fn default() -> ParseNodeData {
ParseNodeData {
contents: None,
is_integration_point: false,
}
}
}
enum ParseOperation {
GetTemplateContents(ParseNodeId, ParseNodeId),
CreateElement(ParseNodeId, QualName, Vec<Attribute>),
CreateComment(StrTendril, ParseNodeId),
// sibling, node to be inserted
AppendBeforeSibling(ParseNodeId, NodeOrText<ParseNode>),
// parent, node to be inserted
Append(ParseNodeId, NodeOrText<ParseNode>),
AppendDoctypeToDocument(StrTendril, StrTendril, StrTendril),
AddAttrsIfMissing(ParseNodeId, Vec<Attribute>),
RemoveFromParent(ParseNodeId),
MarkScriptAlreadyStarted(ParseNodeId),
ReparentChildren(ParseNodeId, ParseNodeId),
AssociateWithForm(ParseNodeId, ParseNodeId),
CreatePI(ParseNodeId, StrTendril, StrTendril),
Pop(ParseNodeId),
}
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct Sink {
base_url: ServoUrl,
document: JS<Document>,
current_line: u64,
script: MutNullableJS<HTMLScriptElement>,
parse_node_data: HashMap<ParseNodeId, ParseNodeData>,
next_parse_node_id: Cell<ParseNodeId>,
nodes: HashMap<ParseNodeId, JS<Node>>,
document_node: ParseNode,
}
impl Sink {
fn new(base_url: ServoUrl, document: &Document) -> Sink {
let mut sink = Sink {
base_url: base_url,
document: JS::from_ref(document),
current_line: 1,
script: Default::default(),
parse_node_data: HashMap::new(),
next_parse_node_id: Cell::new(1),
nodes: HashMap::new(),
document_node: ParseNode {
id: 0,
qual_name: None,
}
};
let data = ParseNodeData::default();
sink.insert_parse_node_data(0, data);
sink.insert_node(0, JS::from_ref(document.upcast()));
sink
}
fn new_parse_node(&mut self) -> ParseNode {
let id = self.next_parse_node_id.get();
let data = ParseNodeData::default();
self.insert_parse_node_data(id, data);
self.next_parse_node_id.set(id + 1);
ParseNode {
id: id,
qual_name: None,
}
} }
fn insert_node(&mut self, id: ParseNodeId, node: JS<Node>) { fn insert_node(&mut self, id: ParseNodeId, node: JS<Node>) {
@ -222,29 +317,17 @@ impl Sink {
self.nodes.get(id).expect("Node not found!") self.nodes.get(id).expect("Node not found!")
} }
fn insert_parse_node_data(&mut self, id: ParseNodeId, data: ParseNodeData) {
assert!(self.parse_node_data.insert(id, data).is_none());
}
fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> &'a ParseNodeData {
self.parse_node_data.get(id).expect("Parse Node data not found!")
}
fn get_parse_node_data_mut<'a>(&'a mut self, id: &'a ParseNodeId) -> &'a mut ParseNodeData {
self.parse_node_data.get_mut(id).expect("Parse Node data not found!")
}
fn process_operation(&mut self, op: ParseOperation) { fn process_operation(&mut self, op: ParseOperation) {
let document = Root::from_ref(&**self.get_node(&0)); let document = Root::from_ref(&**self.get_node(&0));
let document = document.downcast::<Document>().expect("Document node should be downcasted!"); let document = document.downcast::<Document>().expect("Document node should be downcasted!");
match op { match op {
ParseOperation::GetTemplateContents(target, contents) => { ParseOperation::GetTemplateContents { target, contents } => {
let target = Root::from_ref(&**self.get_node(&target)); let target = Root::from_ref(&**self.get_node(&target));
let template = target.downcast::<HTMLTemplateElement>().expect( let template = target.downcast::<HTMLTemplateElement>().expect(
"Tried to extract contents from non-template element while parsing"); "Tried to extract contents from non-template element while parsing");
self.insert_node(contents, JS::from_ref(template.Content().upcast())); self.insert_node(contents, JS::from_ref(template.Content().upcast()));
} }
ParseOperation::CreateElement(id, name, attrs) => { ParseOperation::CreateElement { node, name, attrs, current_line } => {
let is = attrs.iter() let is = attrs.iter()
.find(|attr| attr.name.local.eq_str_ignore_ascii_case("is")) .find(|attr| attr.name.local.eq_str_ignore_ascii_case("is"))
.map(|attr| LocalName::from(&*attr.value)); .map(|attr| LocalName::from(&*attr.value));
@ -252,68 +335,72 @@ impl Sink {
let elem = Element::create(name, let elem = Element::create(name,
is, is,
&*self.document, &*self.document,
ElementCreator::ParserCreated(self.current_line), ElementCreator::ParserCreated(current_line),
CustomElementCreationMode::Synchronous); CustomElementCreationMode::Synchronous);
for attr in attrs { for attr in attrs {
elem.set_attribute_from_parser(attr.name, DOMString::from(String::from(attr.value)), None); elem.set_attribute_from_parser(attr.name, DOMString::from(attr.value), None);
} }
self.insert_node(id, JS::from_ref(elem.upcast())); self.insert_node(node, JS::from_ref(elem.upcast()));
} }
ParseOperation::CreateComment(text, id) => { ParseOperation::CreateComment { text, node } => {
let comment = Comment::new(DOMString::from(String::from(text)), document); let comment = Comment::new(DOMString::from(text), document);
self.insert_node(id, JS::from_ref(&comment.upcast())); self.insert_node(node, JS::from_ref(&comment.upcast()));
} }
ParseOperation::AppendBeforeSibling(sibling, node) => { ParseOperation::AppendBeforeSibling { sibling, node } => {
let node = match node { let node = match node {
NodeOrText::AppendNode(n) => NodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))), NodeOrText::Node(n) => HtmlNodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))),
NodeOrText::AppendText(text) => NodeOrText::AppendText(text) NodeOrText::Text(text) => HtmlNodeOrText::AppendText(
Tendril::from(text)
)
}; };
let sibling = &**self.get_node(&sibling); let sibling = &**self.get_node(&sibling);
let parent = &*sibling.GetParentNode().expect("append_before_sibling called on node without parent"); let parent = &*sibling.GetParentNode().expect("append_before_sibling called on node without parent");
super::insert(parent, Some(sibling), node); super::insert(parent, Some(sibling), node);
} }
ParseOperation::Append(parent, node) => { ParseOperation::Append { parent, node } => {
let node = match node { let node = match node {
NodeOrText::AppendNode(n) => NodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))), NodeOrText::Node(n) => HtmlNodeOrText::AppendNode(JS::from_ref(&**self.get_node(&n.id))),
NodeOrText::AppendText(text) => NodeOrText::AppendText(text) NodeOrText::Text(text) => HtmlNodeOrText::AppendText(
Tendril::from(text)
)
}; };
let parent = &**self.get_node(&parent); let parent = &**self.get_node(&parent);
super::insert(parent, None, node); super::insert(parent, None, node);
} }
ParseOperation::AppendDoctypeToDocument(name, public_id, system_id) => { ParseOperation::AppendDoctypeToDocument { name, public_id, system_id } => {
let doctype = DocumentType::new( let doctype = DocumentType::new(
DOMString::from(String::from(name)), Some(DOMString::from(String::from(public_id))), DOMString::from(String::from(name)), Some(DOMString::from(public_id)),
Some(DOMString::from(String::from(system_id))), document); Some(DOMString::from(system_id)), document);
document.upcast::<Node>().AppendChild(doctype.upcast()).expect("Appending failed"); document.upcast::<Node>().AppendChild(doctype.upcast()).expect("Appending failed");
} }
ParseOperation::AddAttrsIfMissing(target_id, attrs) => { ParseOperation::AddAttrsIfMissing { target, attrs } => {
let elem = self.get_node(&target_id).downcast::<Element>() let elem = self.get_node(&target).downcast::<Element>()
.expect("tried to set attrs on non-Element in HTML parsing"); .expect("tried to set attrs on non-Element in HTML parsing");
for attr in attrs { for attr in attrs {
elem.set_attribute_from_parser(attr.name, DOMString::from(String::from(attr.value)), None); elem.set_attribute_from_parser(attr.name, DOMString::from(attr.value), None);
} }
} }
ParseOperation::RemoveFromParent(target) => { ParseOperation::RemoveFromParent { target } => {
if let Some(ref parent) = self.get_node(&target).GetParentNode() { if let Some(ref parent) = self.get_node(&target).GetParentNode() {
parent.RemoveChild(&**self.get_node(&target)).unwrap(); parent.RemoveChild(&**self.get_node(&target)).unwrap();
} }
} }
ParseOperation::MarkScriptAlreadyStarted(node) => { ParseOperation::MarkScriptAlreadyStarted { node } => {
let script = self.get_node(&node).downcast::<HTMLScriptElement>(); let script = self.get_node(&node).downcast::<HTMLScriptElement>();
script.map(|script| script.set_already_started(true)); script.map(|script| script.set_already_started(true));
} }
ParseOperation::ReparentChildren(parent, new_parent) => { ParseOperation::ReparentChildren { parent, new_parent } => {
let parent = self.get_node(&parent); let parent = self.get_node(&parent);
let new_parent = self.get_node(&new_parent); let new_parent = self.get_node(&new_parent);
while let Some(child) = parent.GetFirstChild() { while let Some(child) = parent.GetFirstChild() {
new_parent.AppendChild(&child).unwrap(); new_parent.AppendChild(&child).unwrap();
} }
} }
ParseOperation::AssociateWithForm(target, form) => { ParseOperation::AssociateWithForm { target, form } => {
let form = self.get_node(&form); let form = self.get_node(&form);
let form = Root::downcast::<HTMLFormElement>(Root::from_ref(&**form)) let form = Root::downcast::<HTMLFormElement>(Root::from_ref(&**form))
.expect("Owner must be a form element"); .expect("Owner must be a form element");
@ -329,20 +416,141 @@ impl Sink {
assert!(node.NodeName() == "KEYGEN", "Unknown form-associatable element"); assert!(node.NodeName() == "KEYGEN", "Unknown form-associatable element");
} }
} }
ParseOperation::Pop(node) => { ParseOperation::Pop { node } => {
vtable_for(self.get_node(&node)).pop(); vtable_for(self.get_node(&node)).pop();
} }
ParseOperation::CreatePI(node, target, data) => { ParseOperation::CreatePI { node, target, data } => {
let pi = ProcessingInstruction::new( let pi = ProcessingInstruction::new(
DOMString::from(String::from(target)), DOMString::from(target),
DOMString::from(String::from(data)), DOMString::from(data),
document); document);
self.insert_node(node, JS::from_ref(pi.upcast())); self.insert_node(node, JS::from_ref(pi.upcast()));
} }
ParseOperation::SetQuirksMode { mode } => {
document.set_quirks_mode(mode);
}
} }
} }
} }
/// Entry point of the parser thread. Builds the html5ever tokenizer/tree
/// builder around the given `Sink`, then services messages from the
/// main-thread Tokenizer until `End` is received.
///
/// `ctxt_parse_node` must be `Some` when `fragment_context_is_some` is true
/// (fragment parsing); `form_parse_node` is the optional form owner for
/// fragment parsing.
fn run(sink: Sink,
       fragment_context_is_some: bool,
       ctxt_parse_node: Option<ParseNode>,
       form_parse_node: Option<ParseNode>,
       sender: Sender<ToTokenizerMsg>,
       receiver: Receiver<ToHtmlTokenizerMsg>) {
    let options = TreeBuilderOpts {
        ignore_missing_rules: true,
        .. Default::default()
    };

    // Fragment parsing needs a tree builder seeded with the context element,
    // and the tokenizer started in the state that element dictates.
    let mut html_tokenizer = if fragment_context_is_some {
        let tb = TreeBuilder::new_for_fragment(
            sink,
            ctxt_parse_node.unwrap(),
            form_parse_node,
            options);

        let tok_options = TokenizerOpts {
            initial_state: Some(tb.tokenizer_state_for_context_elem()),
            .. Default::default()
        };

        HtmlTokenizer::new(tb, tok_options)
    } else {
        HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
    };

    loop {
        match receiver.recv().expect("Unexpected channel panic in html parser thread") {
            ToHtmlTokenizerMsg::Feed { input } => {
                let mut input = create_buffer_queue(input);
                let res = html_tokenizer.feed(&mut input);

                // Gather changes to 'input' and place them in 'updated_input',
                // which will be sent to the main thread to update feed method's 'input'
                let mut updated_input = VecDeque::new();
                while let Some(st) = input.pop_front() {
                    updated_input.push_back(SendTendril::from(st));
                }

                let res = match res {
                    TokenizerResult::Done => ToTokenizerMsg::TokenizerResultDone { updated_input },
                    TokenizerResult::Script(script) => ToTokenizerMsg::TokenizerResultScript { script, updated_input }
                };
                sender.send(res).unwrap();
            },
            ToHtmlTokenizerMsg::End => {
                // Flush the tokenizer, acknowledge, and shut this thread down.
                html_tokenizer.end();
                sender.send(ToTokenizerMsg::End).unwrap();
                break;
            },
            ToHtmlTokenizerMsg::SetPlainTextState => html_tokenizer.set_plaintext_state()
        };
    }
}
/// Per-node bookkeeping kept only on the parser thread.
#[derive(JSTraceable, HeapSizeOf, Default)]
struct ParseNodeData {
    // For <template> elements: handle of the template-contents fragment.
    contents: Option<ParseNode>,
    // Whether the element counts as an integration point; computed from its
    // attributes when the element is created.
    is_integration_point: bool,
}
/// The `TreeSink` implementation that lives on the parser thread. Rather
/// than touching the DOM directly, it ships `ParseOperation`s to the main
/// thread and answers tree queries by round-tripping over its channels.
pub struct Sink {
    // Current input line number, recorded into CreateElement operations.
    current_line: u64,
    parse_node_data: HashMap<ParseNodeId, ParseNodeData>,
    // Next id to hand out; id 0 is reserved for the document node.
    next_parse_node_id: Cell<ParseNodeId>,
    document_node: ParseNode,
    // Outgoing: operations and queries to the main-thread Tokenizer.
    sender: Sender<ToTokenizerMsg>,
    // Incoming: answers to IsSameTree / HasParentNode queries.
    receiver: Receiver<ToSinkMsg>,
}
impl Sink {
    /// Build a Sink wired to the main-thread Tokenizer through the given
    /// channel endpoints. Parse-node id 0 is pre-registered for the document.
    fn new(sender: Sender<ToTokenizerMsg>, receiver: Receiver<ToSinkMsg>) -> Sink {
        let document_node = ParseNode {
            id: 0,
            qual_name: None,
        };
        let mut sink = Sink {
            current_line: 1,
            parse_node_data: HashMap::new(),
            next_parse_node_id: Cell::new(1),
            document_node: document_node,
            sender: sender,
            receiver: receiver,
        };
        sink.insert_parse_node_data(0, ParseNodeData::default());
        sink
    }

    /// Allocate a fresh parse node with the next available id and register
    /// default bookkeeping data for it.
    fn new_parse_node(&mut self) -> ParseNode {
        let id = self.next_parse_node_id.get();
        self.insert_parse_node_data(id, ParseNodeData::default());
        self.next_parse_node_id.set(id + 1);
        ParseNode {
            id: id,
            qual_name: None,
        }
    }

    /// Forward a tree-construction operation to the main thread.
    fn send_op(&self, op: ParseOperation) {
        self.sender.send(ToTokenizerMsg::ProcessOperation(op)).unwrap();
    }

    // Register data for a node id; each id may be registered exactly once.
    fn insert_parse_node_data(&mut self, id: ParseNodeId, data: ParseNodeData) {
        let previous = self.parse_node_data.insert(id, data);
        assert!(previous.is_none());
    }

    fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> &'a ParseNodeData {
        self.parse_node_data.get(id).expect("Parse Node data not found!")
    }

    fn get_parse_node_data_mut<'a>(&'a mut self, id: &'a ParseNodeId) -> &'a mut ParseNodeData {
        self.parse_node_data.get_mut(id).expect("Parse Node data not found!")
    }
}
#[allow(unrooted_must_root)] #[allow(unrooted_must_root)]
impl TreeSink for Sink { impl TreeSink for Sink {
type Output = Self; type Output = Self;
@ -363,7 +571,7 @@ impl TreeSink for Sink {
let mut data = self.get_parse_node_data_mut(&target.id); let mut data = self.get_parse_node_data_mut(&target.id);
data.contents = Some(node.clone()); data.contents = Some(node.clone());
} }
self.process_operation(ParseOperation::GetTemplateContents(target.id, node.id)); self.send_op(ParseOperation::GetTemplateContents { target: target.id, contents: node.id });
node node
} }
@ -376,21 +584,20 @@ impl TreeSink for Sink {
} }
fn same_tree(&self, x: &Self::Handle, y: &Self::Handle) -> bool { fn same_tree(&self, x: &Self::Handle, y: &Self::Handle) -> bool {
let x = self.get_node(&x.id); self.sender.send(ToTokenizerMsg::IsSameTree(x.id, y.id)).unwrap();
let y = self.get_node(&y.id); match self.receiver.recv().expect("Unexpected channel panic in html parser thread.") {
ToSinkMsg::IsSameTree(result) => result,
let x = x.downcast::<Element>().expect("Element node expected"); _ => unreachable!(),
let y = y.downcast::<Element>().expect("Element node expected"); }
x.is_in_same_home_subtree(y)
} }
fn create_element(&mut self, name: QualName, attrs: Vec<Attribute>, _flags: ElementFlags) fn create_element(&mut self, name: QualName, html_attrs: Vec<HtmlAttribute>, _flags: ElementFlags)
-> Self::Handle { -> Self::Handle {
let mut node = self.new_parse_node(); let mut node = self.new_parse_node();
node.qual_name = Some(name.clone()); node.qual_name = Some(name.clone());
{ {
let mut node_data = self.get_parse_node_data_mut(&node.id); let mut node_data = self.get_parse_node_data_mut(&node.id);
node_data.is_integration_point = attrs.iter() node_data.is_integration_point = html_attrs.iter()
.any(|attr| { .any(|attr| {
let attr_value = &String::from(attr.value.clone()); let attr_value = &String::from(attr.value.clone());
(attr.name.local == local_name!("encoding") && attr.name.ns == ns!()) && (attr.name.local == local_name!("encoding") && attr.name.ns == ns!()) &&
@ -398,34 +605,57 @@ impl TreeSink for Sink {
attr_value.eq_ignore_ascii_case("application/xhtml+xml")) attr_value.eq_ignore_ascii_case("application/xhtml+xml"))
}); });
} }
self.process_operation(ParseOperation::CreateElement(node.id, name, attrs)); let attrs = html_attrs.into_iter()
.map(|attr| Attribute { name: attr.name, value: String::from(attr.value) }).collect();
self.send_op(ParseOperation::CreateElement {
node: node.id,
name,
attrs,
current_line: self.current_line
});
node node
} }
fn create_comment(&mut self, text: StrTendril) -> Self::Handle { fn create_comment(&mut self, text: StrTendril) -> Self::Handle {
let node = self.new_parse_node(); let node = self.new_parse_node();
self.process_operation(ParseOperation::CreateComment(text, node.id)); self.send_op(ParseOperation::CreateComment { text: String::from(text), node: node.id });
node node
} }
fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> ParseNode { fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> ParseNode {
let node = self.new_parse_node(); let node = self.new_parse_node();
self.process_operation(ParseOperation::CreatePI(node.id, target, data)); self.send_op(ParseOperation::CreatePI {
node: node.id,
target: String::from(target),
data: String::from(data)
});
node node
} }
fn has_parent_node(&self, node: &Self::Handle) -> bool { fn has_parent_node(&self, node: &Self::Handle) -> bool {
self.get_node(&node.id).GetParentNode().is_some() self.sender.send(ToTokenizerMsg::HasParentNode(node.id)).unwrap();
match self.receiver.recv().expect("Unexpected channel panic in html parser thread.") {
ToSinkMsg::HasParentNode(result) => result,
_ => unreachable!(),
}
} }
fn associate_with_form(&mut self, target: &Self::Handle, form: &Self::Handle) { fn associate_with_form(&mut self, target: &Self::Handle, form: &Self::Handle) {
self.process_operation(ParseOperation::AssociateWithForm(target.id, form.id)); self.send_op(ParseOperation::AssociateWithForm {
target: target.id,
form: form.id
});
} }
fn append_before_sibling(&mut self, fn append_before_sibling(&mut self,
sibling: &Self::Handle, sibling: &Self::Handle,
new_node: NodeOrText<Self::Handle>) { new_node: HtmlNodeOrText<Self::Handle>) {
self.process_operation(ParseOperation::AppendBeforeSibling(sibling.id, new_node)); let new_node = match new_node {
HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text))
};
self.send_op(ParseOperation::AppendBeforeSibling { sibling: sibling.id, node: new_node });
} }
fn parse_error(&mut self, msg: Cow<'static, str>) { fn parse_error(&mut self, msg: Cow<'static, str>) {
@ -438,28 +668,38 @@ impl TreeSink for Sink {
QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks, QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks,
QuirksMode::NoQuirks => ServoQuirksMode::NoQuirks, QuirksMode::NoQuirks => ServoQuirksMode::NoQuirks,
}; };
self.document.set_quirks_mode(mode); self.send_op(ParseOperation::SetQuirksMode { mode });
} }
fn append(&mut self, parent: &Self::Handle, child: NodeOrText<Self::Handle>) { fn append(&mut self, parent: &Self::Handle, child: HtmlNodeOrText<Self::Handle>) {
self.process_operation(ParseOperation::Append(parent.id, child)); let child = match child {
HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text))
};
self.send_op(ParseOperation::Append { parent: parent.id, node: child });
} }
fn append_doctype_to_document(&mut self, name: StrTendril, public_id: StrTendril, fn append_doctype_to_document(&mut self, name: StrTendril, public_id: StrTendril,
system_id: StrTendril) { system_id: StrTendril) {
self.process_operation(ParseOperation::AppendDoctypeToDocument(name, public_id, system_id)); self.send_op(ParseOperation::AppendDoctypeToDocument {
name: String::from(name),
public_id: String::from(public_id),
system_id: String::from(system_id)
});
} }
fn add_attrs_if_missing(&mut self, target: &Self::Handle, attrs: Vec<Attribute>) { fn add_attrs_if_missing(&mut self, target: &Self::Handle, html_attrs: Vec<HtmlAttribute>) {
self.process_operation(ParseOperation::AddAttrsIfMissing(target.id, attrs)); let attrs = html_attrs.into_iter()
.map(|attr| Attribute { name: attr.name, value: String::from(attr.value) }).collect();
self.send_op(ParseOperation::AddAttrsIfMissing { target: target.id, attrs });
} }
fn remove_from_parent(&mut self, target: &Self::Handle) { fn remove_from_parent(&mut self, target: &Self::Handle) {
self.process_operation(ParseOperation::RemoveFromParent(target.id)); self.send_op(ParseOperation::RemoveFromParent { target: target.id });
} }
fn mark_script_already_started(&mut self, node: &Self::Handle) { fn mark_script_already_started(&mut self, node: &Self::Handle) {
self.process_operation(ParseOperation::MarkScriptAlreadyStarted(node.id)); self.send_op(ParseOperation::MarkScriptAlreadyStarted { node: node.id });
} }
fn complete_script(&mut self, _: &Self::Handle) -> NextParserState { fn complete_script(&mut self, _: &Self::Handle) -> NextParserState {
@ -467,7 +707,7 @@ impl TreeSink for Sink {
} }
fn reparent_children(&mut self, parent: &Self::Handle, new_parent: &Self::Handle) { fn reparent_children(&mut self, parent: &Self::Handle, new_parent: &Self::Handle) {
self.process_operation(ParseOperation::ReparentChildren(parent.id, new_parent.id)); self.send_op(ParseOperation::ReparentChildren { parent: parent.id, new_parent: new_parent.id });
} }
/// https://html.spec.whatwg.org/multipage/#html-integration-point /// https://html.spec.whatwg.org/multipage/#html-integration-point
@ -482,6 +722,6 @@ impl TreeSink for Sink {
} }
fn pop(&mut self, node: &Self::Handle) { fn pop(&mut self, node: &Self::Handle) {
self.process_operation(ParseOperation::Pop(node.id)); self.send_op(ParseOperation::Pop { node: node.id });
} }
} }

View file

@ -26,6 +26,8 @@ unstable = []
bitflags = "0.7" bitflags = "0.7"
matches = "0.1" matches = "0.1"
cssparser = "0.18" cssparser = "0.18"
heapsize = "0.4"
heapsize_derive = "0.1"
log = "0.3" log = "0.3"
fnv = "1.0" fnv = "1.0"
phf = "0.7.18" phf = "0.7.18"

View file

@ -48,7 +48,7 @@ pub enum VisitedHandlingMode {
/// Which quirks mode is this document in. /// Which quirks mode is this document in.
/// ///
/// See: https://quirks.spec.whatwg.org/ /// See: https://quirks.spec.whatwg.org/
#[derive(PartialEq, Eq, Copy, Clone, Hash, Debug)] #[derive(PartialEq, Eq, Copy, Clone, Hash, Debug, HeapSizeOf)]
pub enum QuirksMode { pub enum QuirksMode {
/// Quirks mode. /// Quirks mode.
Quirks, Quirks,

View file

@ -7,9 +7,11 @@
#[macro_use] extern crate bitflags; #[macro_use] extern crate bitflags;
#[macro_use] extern crate cssparser; #[macro_use] extern crate cssparser;
#[macro_use] extern crate heapsize_derive;
#[macro_use] extern crate log; #[macro_use] extern crate log;
#[macro_use] extern crate matches; #[macro_use] extern crate matches;
extern crate fnv; extern crate fnv;
extern crate heapsize;
extern crate phf; extern crate phf;
extern crate precomputed_hash; extern crate precomputed_hash;
#[cfg(test)] #[macro_use] extern crate size_of_test; #[cfg(test)] #[macro_use] extern crate size_of_test;