Mirror of https://github.com/servo/servo.git, synced 2025-08-05 21:50:18 +01:00
Format script component
This commit is contained in:
commit c37a345dc9 (parent 2ca7a13473)
357 changed files with 25485 additions and 18076 deletions
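The hunks below are dominated by one mechanical style pass: multi-line argument and field lists gain trailing commas, long signatures move to one parameter per line with `) -> Type {` on its own closing line, and long method chains break onto one call per line. As a hedged illustration of the pattern (the `connect`/`Connection` names are invented for this sketch; nothing in the diff defines them):

struct Connection;

// Before the formatting pass: parameters visually aligned under the
// opening paren, return type hanging on its own indented line:
//
//     fn connect(host: &str,
//                port: u16,
//                secure: bool)
//                -> Connection { ... }

// After: one parameter per line, trailing comma, `) -> Type {` closing.
fn connect(
    _host: &str,
    _port: u16,
    _secure: bool,
) -> Connection {
    Connection
}

fn main() {
    let _conn = connect("example.org", 443, true);
}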
@@ -58,49 +58,77 @@ struct Attribute {
 #[derive(JSTraceable, MallocSizeOf)]
 enum ParseOperation {
-    GetTemplateContents { target: ParseNodeId, contents: ParseNodeId },
+    GetTemplateContents {
+        target: ParseNodeId,
+        contents: ParseNodeId,
+    },

     CreateElement {
         node: ParseNodeId,
         name: QualName,
         attrs: Vec<Attribute>,
-        current_line: u64
+        current_line: u64,
     },

-    CreateComment { text: String, node: ParseNodeId },
-    AppendBeforeSibling { sibling: ParseNodeId, node: NodeOrText },
-    AppendBasedOnParentNode { element: ParseNodeId, prev_element: ParseNodeId, node: NodeOrText },
-    Append { parent: ParseNodeId, node: NodeOrText },
+    CreateComment {
+        text: String,
+        node: ParseNodeId,
+    },
+    AppendBeforeSibling {
+        sibling: ParseNodeId,
+        node: NodeOrText,
+    },
+    AppendBasedOnParentNode {
+        element: ParseNodeId,
+        prev_element: ParseNodeId,
+        node: NodeOrText,
+    },
+    Append {
+        parent: ParseNodeId,
+        node: NodeOrText,
+    },

     AppendDoctypeToDocument {
         name: String,
         public_id: String,
-        system_id: String
+        system_id: String,
     },

-    AddAttrsIfMissing { target: ParseNodeId, attrs: Vec<Attribute> },
-    RemoveFromParent { target: ParseNodeId },
-    MarkScriptAlreadyStarted { node: ParseNodeId },
-    ReparentChildren { parent: ParseNodeId, new_parent: ParseNodeId },
+    AddAttrsIfMissing {
+        target: ParseNodeId,
+        attrs: Vec<Attribute>,
+    },
+    RemoveFromParent {
+        target: ParseNodeId,
+    },
+    MarkScriptAlreadyStarted {
+        node: ParseNodeId,
+    },
+    ReparentChildren {
+        parent: ParseNodeId,
+        new_parent: ParseNodeId,
+    },

     AssociateWithForm {
         target: ParseNodeId,
         form: ParseNodeId,
         element: ParseNodeId,
-        prev_element: Option<ParseNodeId>
+        prev_element: Option<ParseNodeId>,
     },

     CreatePI {
         node: ParseNodeId,
         target: String,
-        data: String
+        data: String,
     },

-    Pop { node: ParseNodeId },
+    Pop {
+        node: ParseNodeId,
+    },

     SetQuirksMode {
         #[ignore_malloc_size_of = "Defined in style"]
-        mode: ServoQuirksMode
+        mode: ServoQuirksMode,
     },
 }

@@ -109,12 +137,12 @@ enum ToTokenizerMsg {
     // From HtmlTokenizer
     TokenizerResultDone {
         #[ignore_malloc_size_of = "Defined in html5ever"]
-        updated_input: VecDeque<SendTendril<UTF8>>
+        updated_input: VecDeque<SendTendril<UTF8>>,
     },
     TokenizerResultScript {
         script: ParseNode,
         #[ignore_malloc_size_of = "Defined in html5ever"]
-        updated_input: VecDeque<SendTendril<UTF8>>
+        updated_input: VecDeque<SendTendril<UTF8>>,
     },
     End, // Sent to Tokenizer to signify HtmlTokenizer's end method has returned

@@ -126,7 +154,7 @@ enum ToTokenizerMsg {
 enum ToHtmlTokenizerMsg {
     Feed {
         #[ignore_malloc_size_of = "Defined in html5ever"]
-        input: VecDeque<SendTendril<UTF8>>
+        input: VecDeque<SendTendril<UTF8>>,
     },
     End,
     SetPlainTextState,

@@ -180,10 +208,10 @@ pub struct Tokenizer {
 impl Tokenizer {
     pub fn new(
-        document: &Document,
-        url: ServoUrl,
-        fragment_context: Option<super::FragmentContext>)
-        -> Self {
+        document: &Document,
+        url: ServoUrl,
+        fragment_context: Option<super::FragmentContext>,
+    ) -> Self {
         // Messages from the Tokenizer (main thread) to HtmlTokenizer (parser thread)
         let (to_html_tokenizer_sender, html_tokenizer_receiver) = channel();
         // Messages from HtmlTokenizer and Sink (parser thread) to Tokenizer (main thread)

@@ -194,7 +222,7 @@ impl Tokenizer {
             receiver: tokenizer_receiver,
             html_tokenizer_sender: to_html_tokenizer_sender,
             nodes: HashMap::new(),
-            url: url
+            url: url,
         };
         tokenizer.insert_node(0, Dom::from_ref(document.upcast()));
@@ -218,14 +246,18 @@ impl Tokenizer {
         // Create new thread for HtmlTokenizer. This is where parser actions
         // will be generated from the input provided. These parser actions are then passed
         // onto the main thread to be executed.
-        thread::Builder::new().name(String::from("HTML Parser")).spawn(move || {
-            run(sink,
-                fragment_context_is_some,
-                ctxt_parse_node,
-                form_parse_node,
-                to_tokenizer_sender,
-                html_tokenizer_receiver);
-        }).expect("HTML Parser thread spawning failed");
+        thread::Builder::new()
+            .name(String::from("HTML Parser"))
+            .spawn(move || {
+                run(
+                    sink,
+                    fragment_context_is_some,
+                    ctxt_parse_node,
+                    form_parse_node,
+                    to_tokenizer_sender,
+                    html_tokenizer_receiver,
+                );
+            }).expect("HTML Parser thread spawning failed");

         tokenizer
     }
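The comments in the hunk above describe the architecture this whole file is built around: a `Tokenizer` handle on the main thread spawns an "HTML Parser" thread running the `HtmlTokenizer`, with one channel in each direction (`ToHtmlTokenizerMsg` in, `ToTokenizerMsg` out). A minimal, self-contained sketch of that ownership and message pattern, with simplified message enums standing in for Servo's real ones:

use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;

// Simplified stand-ins for ToHtmlTokenizerMsg / ToTokenizerMsg.
enum ToParser {
    Feed(String),
    End,
}

enum ToMain {
    Operation(String), // parser actions to be executed on the main thread
    Done,
}

fn main() {
    // Main thread -> parser thread, and parser thread -> main thread.
    let (to_parser, parser_rx): (Sender<ToParser>, Receiver<ToParser>) = channel();
    let (to_main, main_rx): (Sender<ToMain>, Receiver<ToMain>) = channel();

    let handle = thread::Builder::new()
        .name(String::from("HTML Parser"))
        .spawn(move || {
            // Parser thread: turn input into operations for the main thread.
            while let Ok(msg) = parser_rx.recv() {
                match msg {
                    ToParser::Feed(chunk) => {
                        to_main.send(ToMain::Operation(chunk)).unwrap();
                    },
                    ToParser::End => {
                        to_main.send(ToMain::Done).unwrap();
                        break;
                    },
                }
            }
        }).expect("HTML Parser thread spawning failed");

    to_parser.send(ToParser::Feed(String::from("<p>hi</p>"))).unwrap();
    to_parser.send(ToParser::End).unwrap();

    // Main thread: drain operations until the parser signals completion.
    while let Ok(msg) = main_rx.recv() {
        match msg {
            ToMain::Operation(op) => println!("apply: {}", op),
            ToMain::Done => break,
        }
    }
    handle.join().unwrap();
}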
@@ -238,31 +270,47 @@ impl Tokenizer {
         // Send message to parser thread, asking it to start reading from the input.
         // Parser operation messages will be sent to main thread as they are evaluated.
-        self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::Feed { input: send_tendrils }).unwrap();
+        self.html_tokenizer_sender
+            .send(ToHtmlTokenizerMsg::Feed {
+                input: send_tendrils,
+            }).unwrap();

         loop {
-            match self.receiver.recv().expect("Unexpected channel panic in main thread.") {
+            match self
+                .receiver
+                .recv()
+                .expect("Unexpected channel panic in main thread.")
+            {
                 ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
                 ToTokenizerMsg::TokenizerResultDone { updated_input } => {
                     let buffer_queue = create_buffer_queue(updated_input);
                     *input = buffer_queue;
                     return Ok(());
                 },
-                ToTokenizerMsg::TokenizerResultScript { script, updated_input } => {
+                ToTokenizerMsg::TokenizerResultScript {
+                    script,
+                    updated_input,
+                } => {
                     let buffer_queue = create_buffer_queue(updated_input);
                     *input = buffer_queue;
                     let script = self.get_node(&script.id);
                     return Err(DomRoot::from_ref(script.downcast().unwrap()));
-                }
+                },
                 ToTokenizerMsg::End => unreachable!(),
             };
         }
     }

     pub fn end(&mut self) {
-        self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::End).unwrap();
+        self.html_tokenizer_sender
+            .send(ToHtmlTokenizerMsg::End)
+            .unwrap();
         loop {
-            match self.receiver.recv().expect("Unexpected channel panic in main thread.") {
+            match self
+                .receiver
+                .recv()
+                .expect("Unexpected channel panic in main thread.")
+            {
                 ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
                 ToTokenizerMsg::End => return,
                 _ => unreachable!(),

@@ -275,7 +323,9 @@ impl Tokenizer {
     }

     pub fn set_plaintext_state(&mut self) {
-        self.html_tokenizer_sender.send(ToHtmlTokenizerMsg::SetPlainTextState).unwrap();
+        self.html_tokenizer_sender
+            .send(ToHtmlTokenizerMsg::SetPlainTextState)
+            .unwrap();
     }

     fn insert_node(&mut self, id: ParseNodeId, node: Dom<Node>) {

@@ -286,26 +336,27 @@ impl Tokenizer {
         self.nodes.get(id).expect("Node not found!")
     }

     fn append_before_sibling(&mut self, sibling: ParseNodeId, node: NodeOrText) {
         let node = match node {
-            NodeOrText::Node(n) => HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id))),
-            NodeOrText::Text(text) => HtmlNodeOrText::AppendText(
-                Tendril::from(text)
-            )
+            NodeOrText::Node(n) => {
+                HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id)))
+            },
+            NodeOrText::Text(text) => HtmlNodeOrText::AppendText(Tendril::from(text)),
         };
         let sibling = &**self.get_node(&sibling);
-        let parent = &*sibling.GetParentNode().expect("append_before_sibling called on node without parent");
+        let parent = &*sibling
+            .GetParentNode()
+            .expect("append_before_sibling called on node without parent");

         super::insert(parent, Some(sibling), node);
     }

     fn append(&mut self, parent: ParseNodeId, node: NodeOrText) {
         let node = match node {
-            NodeOrText::Node(n) => HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id))),
-            NodeOrText::Text(text) => HtmlNodeOrText::AppendText(
-                Tendril::from(text)
-            )
+            NodeOrText::Node(n) => {
+                HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id)))
+            },
+            NodeOrText::Text(text) => HtmlNodeOrText::AppendText(Tendril::from(text)),
         };

         let parent = &**self.get_node(&parent);

@@ -327,15 +378,23 @@ impl Tokenizer {
     fn process_operation(&mut self, op: ParseOperation) {
         let document = DomRoot::from_ref(&**self.get_node(&0));
-        let document = document.downcast::<Document>().expect("Document node should be downcasted!");
+        let document = document
+            .downcast::<Document>()
+            .expect("Document node should be downcasted!");
         match op {
             ParseOperation::GetTemplateContents { target, contents } => {
                 let target = DomRoot::from_ref(&**self.get_node(&target));
-                let template = target.downcast::<HTMLTemplateElement>().expect(
-                    "Tried to extract contents from non-template element while parsing");
+                let template = target
+                    .downcast::<HTMLTemplateElement>()
+                    .expect("Tried to extract contents from non-template element while parsing");
                 self.insert_node(contents, Dom::from_ref(template.Content().upcast()));
-            }
-            ParseOperation::CreateElement { node, name, attrs, current_line } => {
+            },
+            ParseOperation::CreateElement {
+                node,
+                name,
+                attrs,
+                current_line,
+            } => {
                 let attrs = attrs
                     .into_iter()
                     .map(|attr| ElementAttribute::new(attr.name, DOMString::from(attr.value)))

@@ -345,60 +404,85 @@ impl Tokenizer {
                     attrs,
                     &*self.document,
                     ElementCreator::ParserCreated(current_line),
-                    ParsingAlgorithm::Normal
+                    ParsingAlgorithm::Normal,
                 );
                 self.insert_node(node, Dom::from_ref(element.upcast()));
-            }
+            },
             ParseOperation::CreateComment { text, node } => {
                 let comment = Comment::new(DOMString::from(text), document);
                 self.insert_node(node, Dom::from_ref(&comment.upcast()));
-            }
+            },
             ParseOperation::AppendBeforeSibling { sibling, node } => {
                 self.append_before_sibling(sibling, node);
-            }
+            },
             ParseOperation::Append { parent, node } => {
                 self.append(parent, node);
-            }
-            ParseOperation::AppendBasedOnParentNode { element, prev_element, node } => {
+            },
+            ParseOperation::AppendBasedOnParentNode {
+                element,
+                prev_element,
+                node,
+            } => {
                 if self.has_parent_node(element) {
                     self.append_before_sibling(element, node);
                 } else {
                     self.append(prev_element, node);
                 }
-            }
-            ParseOperation::AppendDoctypeToDocument { name, public_id, system_id } => {
+            },
+            ParseOperation::AppendDoctypeToDocument {
+                name,
+                public_id,
+                system_id,
+            } => {
                 let doctype = DocumentType::new(
-                    DOMString::from(String::from(name)), Some(DOMString::from(public_id)),
-                    Some(DOMString::from(system_id)), document);
+                    DOMString::from(String::from(name)),
+                    Some(DOMString::from(public_id)),
+                    Some(DOMString::from(system_id)),
+                    document,
+                );

-                document.upcast::<Node>().AppendChild(doctype.upcast()).expect("Appending failed");
-            }
+                document
+                    .upcast::<Node>()
+                    .AppendChild(doctype.upcast())
+                    .expect("Appending failed");
+            },
             ParseOperation::AddAttrsIfMissing { target, attrs } => {
-                let elem = self.get_node(&target).downcast::<Element>()
+                let elem = self
+                    .get_node(&target)
+                    .downcast::<Element>()
                     .expect("tried to set attrs on non-Element in HTML parsing");
                 for attr in attrs {
                     elem.set_attribute_from_parser(attr.name, DOMString::from(attr.value), None);
                 }
-            }
+            },
             ParseOperation::RemoveFromParent { target } => {
                 if let Some(ref parent) = self.get_node(&target).GetParentNode() {
                     parent.RemoveChild(&**self.get_node(&target)).unwrap();
                 }
-            }
+            },
             ParseOperation::MarkScriptAlreadyStarted { node } => {
                 let script = self.get_node(&node).downcast::<HTMLScriptElement>();
                 script.map(|script| script.set_already_started(true));
-            }
+            },
             ParseOperation::ReparentChildren { parent, new_parent } => {
                 let parent = self.get_node(&parent);
                 let new_parent = self.get_node(&new_parent);
                 while let Some(child) = parent.GetFirstChild() {
                     new_parent.AppendChild(&child).unwrap();
                 }
-            }
-            ParseOperation::AssociateWithForm { target, form, element, prev_element } => {
+            },
+            ParseOperation::AssociateWithForm {
+                target,
+                form,
+                element,
+                prev_element,
+            } => {
                 let tree_node = prev_element.map_or(element, |prev| {
-                    if self.has_parent_node(element) { element } else { prev }
+                    if self.has_parent_node(element) {
+                        element
+                    } else {
+                        prev
+                    }
                 });

                 if !self.same_tree(tree_node, form) {

@@ -416,47 +500,51 @@ impl Tokenizer {
                     control.set_form_owner_from_parser(&form);
                 } else {
                     // TODO remove this code when keygen is implemented.
-                    assert_eq!(node.NodeName(), "KEYGEN", "Unknown form-associatable element");
+                    assert_eq!(
+                        node.NodeName(),
+                        "KEYGEN",
+                        "Unknown form-associatable element"
+                    );
                 }
-            }
+            },
             ParseOperation::Pop { node } => {
                 vtable_for(self.get_node(&node)).pop();
-            }
+            },
             ParseOperation::CreatePI { node, target, data } => {
                 let pi = ProcessingInstruction::new(
                     DOMString::from(target),
                     DOMString::from(data),
-                    document);
+                    document,
+                );
                 self.insert_node(node, Dom::from_ref(pi.upcast()));
-            }
+            },
             ParseOperation::SetQuirksMode { mode } => {
                 document.set_quirks_mode(mode);
-            }
+            },
         }
     }
 }

-fn run(sink: Sink,
-       fragment_context_is_some: bool,
-       ctxt_parse_node: Option<ParseNode>,
-       form_parse_node: Option<ParseNode>,
-       sender: Sender<ToTokenizerMsg>,
-       receiver: Receiver<ToHtmlTokenizerMsg>) {
+fn run(
+    sink: Sink,
+    fragment_context_is_some: bool,
+    ctxt_parse_node: Option<ParseNode>,
+    form_parse_node: Option<ParseNode>,
+    sender: Sender<ToTokenizerMsg>,
+    receiver: Receiver<ToHtmlTokenizerMsg>,
+) {
     let options = TreeBuilderOpts {
         ignore_missing_rules: true,
-        .. Default::default()
+        ..Default::default()
     };

     let mut html_tokenizer = if fragment_context_is_some {
-        let tb = TreeBuilder::new_for_fragment(
-            sink,
-            ctxt_parse_node.unwrap(),
-            form_parse_node,
-            options);
+        let tb =
+            TreeBuilder::new_for_fragment(sink, ctxt_parse_node.unwrap(), form_parse_node, options);

         let tok_options = TokenizerOpts {
             initial_state: Some(tb.tokenizer_state_for_context_elem()),
-            .. Default::default()
+            ..Default::default()
         };

         HtmlTokenizer::new(tb, tok_options)

@@ -465,7 +553,10 @@ fn run(sink: Sink,
     };

     loop {
-        match receiver.recv().expect("Unexpected channel panic in html parser thread") {
+        match receiver
+            .recv()
+            .expect("Unexpected channel panic in html parser thread")
+        {
             ToHtmlTokenizerMsg::Feed { input } => {
                 let mut input = create_buffer_queue(input);
                 let res = html_tokenizer.feed(&mut input);

@@ -479,7 +570,10 @@ fn run(sink: Sink,
                 let res = match res {
                     TokenizerResult::Done => ToTokenizerMsg::TokenizerResultDone { updated_input },
-                    TokenizerResult::Script(script) => ToTokenizerMsg::TokenizerResultScript { script, updated_input }
+                    TokenizerResult::Script(script) => ToTokenizerMsg::TokenizerResultScript {
+                        script,
+                        updated_input,
+                    },
                 };
                 sender.send(res).unwrap();
             },

@@ -488,7 +582,7 @@ fn run(sink: Sink,
                 sender.send(ToTokenizerMsg::End).unwrap();
                 break;
             },
-            ToHtmlTokenizerMsg::SetPlainTextState => html_tokenizer.set_plaintext_state()
+            ToHtmlTokenizerMsg::SetPlainTextState => html_tokenizer.set_plaintext_state(),
         };
     }
 }

@@ -517,7 +611,7 @@ impl Sink {
                 id: 0,
                 qual_name: None,
             },
-            sender: sender
+            sender: sender,
         };
         let data = ParseNodeData::default();
         sink.insert_parse_node_data(0, data);

@@ -536,7 +630,9 @@ impl Sink {
     }

     fn send_op(&self, op: ParseOperation) {
-        self.sender.send(ToTokenizerMsg::ProcessOperation(op)).unwrap();
+        self.sender
+            .send(ToTokenizerMsg::ProcessOperation(op))
+            .unwrap();
     }

     fn insert_parse_node_data(&mut self, id: ParseNodeId, data: ParseNodeData) {

@@ -544,18 +640,24 @@ impl Sink {
     }

     fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> &'a ParseNodeData {
-        self.parse_node_data.get(id).expect("Parse Node data not found!")
+        self.parse_node_data
+            .get(id)
+            .expect("Parse Node data not found!")
     }

     fn get_parse_node_data_mut<'a>(&'a mut self, id: &'a ParseNodeId) -> &'a mut ParseNodeData {
-        self.parse_node_data.get_mut(id).expect("Parse Node data not found!")
+        self.parse_node_data
+            .get_mut(id)
+            .expect("Parse Node data not found!")
     }
 }

 #[allow(unrooted_must_root)]
 impl TreeSink for Sink {
     type Output = Self;
-    fn finish(self) -> Self { self }
+    fn finish(self) -> Self {
+        self
+    }

     type Handle = ParseNode;

@@ -572,7 +674,10 @@ impl TreeSink for Sink {
         let data = self.get_parse_node_data_mut(&target.id);
         data.contents = Some(node.clone());
         }
-        self.send_op(ParseOperation::GetTemplateContents { target: target.id, contents: node.id });
+        self.send_op(ParseOperation::GetTemplateContents {
+            target: target.id,
+            contents: node.id,
+        });
         node
     }

@@ -581,38 +686,52 @@ impl TreeSink for Sink {
     }

     fn elem_name<'a>(&self, target: &'a Self::Handle) -> ExpandedName<'a> {
-        target.qual_name.as_ref().expect("Expected qual name of node!").expanded()
+        target
+            .qual_name
+            .as_ref()
+            .expect("Expected qual name of node!")
+            .expanded()
     }

-    fn create_element(&mut self, name: QualName, html_attrs: Vec<HtmlAttribute>, _flags: ElementFlags)
-        -> Self::Handle {
+    fn create_element(
+        &mut self,
+        name: QualName,
+        html_attrs: Vec<HtmlAttribute>,
+        _flags: ElementFlags,
+    ) -> Self::Handle {
         let mut node = self.new_parse_node();
         node.qual_name = Some(name.clone());
         {
             let node_data = self.get_parse_node_data_mut(&node.id);
-            node_data.is_integration_point = html_attrs.iter()
-                .any(|attr| {
+            node_data.is_integration_point = html_attrs.iter().any(|attr| {
                 let attr_value = &String::from(attr.value.clone());
                 (attr.name.local == local_name!("encoding") && attr.name.ns == ns!()) &&
-                (attr_value.eq_ignore_ascii_case("text/html") ||
-                attr_value.eq_ignore_ascii_case("application/xhtml+xml"))
+                    (attr_value.eq_ignore_ascii_case("text/html") ||
+                        attr_value.eq_ignore_ascii_case("application/xhtml+xml"))
             });
         }
-        let attrs = html_attrs.into_iter()
-            .map(|attr| Attribute { name: attr.name, value: String::from(attr.value) }).collect();
+        let attrs = html_attrs
+            .into_iter()
+            .map(|attr| Attribute {
+                name: attr.name,
+                value: String::from(attr.value),
+            }).collect();

         self.send_op(ParseOperation::CreateElement {
             node: node.id,
             name,
             attrs,
-            current_line: self.current_line
+            current_line: self.current_line,
         });
         node
     }

     fn create_comment(&mut self, text: StrTendril) -> Self::Handle {
         let node = self.new_parse_node();
-        self.send_op(ParseOperation::CreateComment { text: String::from(text), node: node.id });
+        self.send_op(ParseOperation::CreateComment {
+            text: String::from(text),
+            node: node.id,
+        });
         node
     }

@@ -621,7 +740,7 @@ impl TreeSink for Sink {
         self.send_op(ParseOperation::CreatePI {
             node: node.id,
             target: String::from(target),
-            data: String::from(data)
+            data: String::from(data),
         });
         node
     }

@@ -641,14 +760,19 @@ impl TreeSink for Sink {
         });
     }

-    fn append_before_sibling(&mut self,
-                             sibling: &Self::Handle,
-                             new_node: HtmlNodeOrText<Self::Handle>) {
+    fn append_before_sibling(
+        &mut self,
+        sibling: &Self::Handle,
+        new_node: HtmlNodeOrText<Self::Handle>,
+    ) {
         let new_node = match new_node {
             HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
-            HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text))
+            HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text)),
         };
-        self.send_op(ParseOperation::AppendBeforeSibling { sibling: sibling.id, node: new_node });
+        self.send_op(ParseOperation::AppendBeforeSibling {
+            sibling: sibling.id,
+            node: new_node,
+        });
     }

     fn append_based_on_parent_node(

@@ -659,12 +783,12 @@ impl TreeSink for Sink {
     ) {
         let child = match child {
             HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
-            HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text))
+            HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text)),
         };
         self.send_op(ParseOperation::AppendBasedOnParentNode {
             element: elem.id,
             prev_element: prev_elem.id,
-            node: child
+            node: child,
         });
     }

@@ -684,24 +808,38 @@ impl TreeSink for Sink {
     fn append(&mut self, parent: &Self::Handle, child: HtmlNodeOrText<Self::Handle>) {
         let child = match child {
             HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
-            HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text))
+            HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text)),
         };
-        self.send_op(ParseOperation::Append { parent: parent.id, node: child });
+        self.send_op(ParseOperation::Append {
+            parent: parent.id,
+            node: child,
+        });
     }

-    fn append_doctype_to_document(&mut self, name: StrTendril, public_id: StrTendril,
-                                  system_id: StrTendril) {
+    fn append_doctype_to_document(
+        &mut self,
+        name: StrTendril,
+        public_id: StrTendril,
+        system_id: StrTendril,
+    ) {
         self.send_op(ParseOperation::AppendDoctypeToDocument {
             name: String::from(name),
             public_id: String::from(public_id),
-            system_id: String::from(system_id)
+            system_id: String::from(system_id),
         });
     }

     fn add_attrs_if_missing(&mut self, target: &Self::Handle, html_attrs: Vec<HtmlAttribute>) {
-        let attrs = html_attrs.into_iter()
-            .map(|attr| Attribute { name: attr.name, value: String::from(attr.value) }).collect();
-        self.send_op(ParseOperation::AddAttrsIfMissing { target: target.id, attrs });
+        let attrs = html_attrs
+            .into_iter()
+            .map(|attr| Attribute {
+                name: attr.name,
+                value: String::from(attr.value),
+            }).collect();
+        self.send_op(ParseOperation::AddAttrsIfMissing {
+            target: target.id,
+            attrs,
+        });
     }

     fn remove_from_parent(&mut self, target: &Self::Handle) {

@@ -717,7 +855,10 @@ impl TreeSink for Sink {
     }

     fn reparent_children(&mut self, parent: &Self::Handle, new_parent: &Self::Handle) {
-        self.send_op(ParseOperation::ReparentChildren { parent: parent.id, new_parent: new_parent.id });
+        self.send_op(ParseOperation::ReparentChildren {
+            parent: parent.id,
+            new_parent: new_parent.id,
+        });
     }

     /// <https://html.spec.whatwg.org/multipage/#html-integration-point>
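A theme running through the hunks above: the parser thread never touches the DOM. It emits `ParseOperation` values that name nodes by `ParseNodeId`, and the main thread's `process_operation` replays them against the real nodes it keeps in an id-to-node table. A stripped-down sketch of that replay pattern — the `Op` and `Dom` types here are invented for the sketch; only the id-table idea and the `process_operation`/`ParseNodeId` names come from the diff:

use std::collections::HashMap;

type ParseNodeId = usize;

// Simplified stand-in for ParseOperation: nodes are named by id only.
enum Op {
    CreateElement { node: ParseNodeId, name: String },
    Append { parent: ParseNodeId, node: ParseNodeId },
}

// Stand-in "DOM": id -> (tag name, child ids).
struct Dom {
    nodes: HashMap<ParseNodeId, (String, Vec<ParseNodeId>)>,
}

impl Dom {
    fn process_operation(&mut self, op: Op) {
        match op {
            Op::CreateElement { node, name } => {
                self.nodes.insert(node, (name, Vec::new()));
            },
            Op::Append { parent, node } => {
                self.nodes
                    .get_mut(&parent)
                    .expect("Node not found!")
                    .1
                    .push(node);
            },
        }
    }
}

fn main() {
    // Id 0 is pre-registered as the document, as in the diff's
    // `tokenizer.insert_node(0, ...)`.
    let mut dom = Dom { nodes: HashMap::new() };
    dom.nodes.insert(0, (String::from("#document"), Vec::new()));

    for op in vec![
        Op::CreateElement { node: 1, name: String::from("html") },
        Op::Append { parent: 0, node: 1 },
    ] {
        dom.process_operation(op);
    }
    assert_eq!(dom.nodes[&0].1, vec![1]);
}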
@@ -37,11 +37,11 @@ pub struct Tokenizer {
 impl Tokenizer {
     pub fn new(
-        document: &Document,
-        url: ServoUrl,
-        fragment_context: Option<super::FragmentContext>,
-        parsing_algorithm: ParsingAlgorithm)
-        -> Self {
+        document: &Document,
+        url: ServoUrl,
+        fragment_context: Option<super::FragmentContext>,
+        parsing_algorithm: ParsingAlgorithm,
+    ) -> Self {
         let sink = Sink {
             base_url: url,
             document: Dom::from_ref(document),

@@ -52,7 +52,7 @@ impl Tokenizer {
         let options = TreeBuilderOpts {
             ignore_missing_rules: true,
-            .. Default::default()
+            ..Default::default()
         };

         let inner = if let Some(fc) = fragment_context {

@@ -60,11 +60,12 @@ impl Tokenizer {
                 sink,
                 Dom::from_ref(fc.context_elem),
                 fc.form_elem.map(|n| Dom::from_ref(n)),
-                options);
+                options,
+            );

             let tok_options = TokenizerOpts {
                 initial_state: Some(tb.tokenizer_state_for_context_elem()),
-                .. Default::default()
+                ..Default::default()
             };

             HtmlTokenizer::new(tb, tok_options)

@@ -72,9 +73,7 @@ impl Tokenizer {
             HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default())
         };

-        Tokenizer {
-            inner: inner,
-        }
+        Tokenizer { inner: inner }
     }

     pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {

@@ -107,7 +106,9 @@ unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>> {
     type Handle = Dom<Node>;
     #[allow(unrooted_must_root)]
     fn trace_handle(&self, node: &Dom<Node>) {
-        unsafe { node.trace(self.0); }
+        unsafe {
+            node.trace(self.0);
+        }
     }
 }

@@ -118,14 +119,15 @@ unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<Dom<Node>, Sink>> {
 }

 fn start_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
-    let name = QualName::new(None, node.namespace().clone(),
-                             node.local_name().clone());
-    let attrs = node.attrs().iter().map(|attr| {
-        let qname = QualName::new(None, attr.namespace().clone(),
-                                  attr.local_name().clone());
-        let value = attr.value().clone();
-        (qname, value)
-    }).collect::<Vec<_>>();
+    let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
+    let attrs = node
+        .attrs()
+        .iter()
+        .map(|attr| {
+            let qname = QualName::new(None, attr.namespace().clone(), attr.local_name().clone());
+            let value = attr.value().clone();
+            (qname, value)
+        }).collect::<Vec<_>>();
     let attr_refs = attrs.iter().map(|&(ref qname, ref value)| {
         let ar: AttrRef = (&qname, &**value);
         ar

@@ -135,12 +137,10 @@ fn start_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Resul
 }

 fn end_element<S: Serializer>(node: &Element, serializer: &mut S) -> io::Result<()> {
-    let name = QualName::new(None, node.namespace().clone(),
-                             node.local_name().clone());
+    let name = QualName::new(None, node.namespace().clone(), node.local_name().clone());
     serializer.end_elem(name)
 }

 enum SerializationCommand {
     OpenElement(DomRoot<Element>),
     CloseElement(DomRoot<Element>),

@@ -151,7 +151,7 @@ struct SerializationIterator {
     stack: Vec<SerializationCommand>,
 }

-fn rev_children_iter(n: &Node) -> impl Iterator<Item=DomRoot<Node>>{
+fn rev_children_iter(n: &Node) -> impl Iterator<Item = DomRoot<Node>> {
     match n.downcast::<HTMLTemplateElement>() {
         Some(t) => t.Content().upcast::<Node>().rev_children(),
         None => n.rev_children(),

@@ -160,9 +160,7 @@ fn rev_children_iter(n: &Node) -> impl Iterator<Item=DomRoot<Node>>{
 impl SerializationIterator {
     fn new(node: &Node, skip_first: bool) -> SerializationIterator {
-        let mut ret = SerializationIterator {
-            stack: vec![],
-        };
+        let mut ret = SerializationIterator { stack: vec![] };
         if skip_first {
             for c in rev_children_iter(node) {
                 ret.push_node(&*c);

@@ -175,8 +173,12 @@ impl SerializationIterator {
     fn push_node(&mut self, n: &Node) {
         match n.downcast::<Element>() {
-            Some(e) => self.stack.push(SerializationCommand::OpenElement(DomRoot::from_ref(e))),
-            None => self.stack.push(SerializationCommand::SerializeNonelement(DomRoot::from_ref(n))),
+            Some(e) => self
+                .stack
+                .push(SerializationCommand::OpenElement(DomRoot::from_ref(e))),
+            None => self.stack.push(SerializationCommand::SerializeNonelement(
+                DomRoot::from_ref(n),
+            )),
         }
     }
 }

@@ -188,7 +190,8 @@ impl Iterator for SerializationIterator {
         let res = self.stack.pop();

         if let Some(SerializationCommand::OpenElement(ref e)) = res {
-            self.stack.push(SerializationCommand::CloseElement(e.clone()));
+            self.stack
+                .push(SerializationCommand::CloseElement(e.clone()));
             for c in rev_children_iter(&*e.upcast::<Node>()) {
                 self.push_node(&c);
             }

@@ -199,52 +202,52 @@ impl Iterator for SerializationIterator {
 }

 impl<'a> Serialize for &'a Node {
-    fn serialize<S: Serializer>(&self, serializer: &mut S,
-                                traversal_scope: TraversalScope) -> io::Result<()> {
+    fn serialize<S: Serializer>(
+        &self,
+        serializer: &mut S,
+        traversal_scope: TraversalScope,
+    ) -> io::Result<()> {
         let node = *self;

         let iter = SerializationIterator::new(node, traversal_scope != IncludeNode);

         for cmd in iter {
             match cmd {
                 SerializationCommand::OpenElement(n) => {
                     start_element(&n, serializer)?;
-                }
+                },

                 SerializationCommand::CloseElement(n) => {
                     end_element(&&n, serializer)?;
-                }
+                },

-                SerializationCommand::SerializeNonelement(n) => {
-                    match n.type_id() {
-                        NodeTypeId::DocumentType => {
-                            let doctype = n.downcast::<DocumentType>().unwrap();
-                            serializer.write_doctype(&doctype.name())?;
-                        },
+                SerializationCommand::SerializeNonelement(n) => match n.type_id() {
+                    NodeTypeId::DocumentType => {
+                        let doctype = n.downcast::<DocumentType>().unwrap();
+                        serializer.write_doctype(&doctype.name())?;
+                    },

-                        NodeTypeId::CharacterData(CharacterDataTypeId::Text) => {
-                            let cdata = n.downcast::<CharacterData>().unwrap();
-                            serializer.write_text(&cdata.data())?;
-                        },
+                    NodeTypeId::CharacterData(CharacterDataTypeId::Text) => {
+                        let cdata = n.downcast::<CharacterData>().unwrap();
+                        serializer.write_text(&cdata.data())?;
+                    },

-                        NodeTypeId::CharacterData(CharacterDataTypeId::Comment) => {
-                            let cdata = n.downcast::<CharacterData>().unwrap();
-                            serializer.write_comment(&cdata.data())?;
-                        },
+                    NodeTypeId::CharacterData(CharacterDataTypeId::Comment) => {
+                        let cdata = n.downcast::<CharacterData>().unwrap();
+                        serializer.write_comment(&cdata.data())?;
+                    },

-                        NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) => {
-                            let pi = n.downcast::<ProcessingInstruction>().unwrap();
-                            let data = pi.upcast::<CharacterData>().data();
-                            serializer.write_processing_instruction(&pi.target(), &data)?;
-                        },
+                    NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) => {
+                        let pi = n.downcast::<ProcessingInstruction>().unwrap();
+                        let data = pi.upcast::<CharacterData>().data();
+                        serializer.write_processing_instruction(&pi.target(), &data)?;
+                    },

-                        NodeTypeId::DocumentFragment => {}
+                    NodeTypeId::DocumentFragment => {},

-                        NodeTypeId::Document(_) => panic!("Can't serialize Document node itself"),
-                        NodeTypeId::Element(_) => panic!("Element shouldn't appear here"),
-                    }
-                }
+                    NodeTypeId::Document(_) => panic!("Can't serialize Document node itself"),
+                    NodeTypeId::Element(_) => panic!("Element shouldn't appear here"),
+                },
             }
         }
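The serialization hunks above reformat `SerializationIterator`, which avoids recursion by keeping an explicit stack of open/close commands: popping an open command pushes the matching close command first, then the children in reverse so they come back off in document order. A minimal sketch of the same traversal over a toy tree (the `Node` and `Command` types here are stand-ins, not Servo's):

// Toy tree node: a tag name plus children.
struct Node {
    name: String,
    children: Vec<Node>,
}

enum Command<'a> {
    Open(&'a Node),
    Close(&'a Node),
}

// Explicit-stack traversal, like the diff's SerializationIterator:
// popping an Open pushes the matching Close, then the children in
// reverse so they pop (and serialize) in document order.
fn serialize(root: &Node) -> String {
    let mut out = String::new();
    let mut stack = vec![Command::Open(root)];
    while let Some(cmd) = stack.pop() {
        match cmd {
            Command::Open(n) => {
                out.push_str(&format!("<{}>", n.name));
                stack.push(Command::Close(n));
                for c in n.children.iter().rev() {
                    stack.push(Command::Open(c));
                }
            },
            Command::Close(n) => out.push_str(&format!("</{}>", n.name)),
        }
    }
    out
}

fn main() {
    let tree = Node {
        name: String::from("body"),
        children: vec![
            Node { name: String::from("p"), children: vec![] },
            Node { name: String::from("div"), children: vec![] },
        ],
    };
    assert_eq!(serialize(&tree), "<body><p></p><div></div></body>");
}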
@@ -104,7 +104,7 @@ enum LastChunkState {
 pub struct ElementAttribute {
     name: QualName,
-    value: DOMString
+    value: DOMString,
 }

 #[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]

@@ -117,7 +117,7 @@ impl ElementAttribute {
     pub fn new(name: QualName, value: DOMString) -> ElementAttribute {
         ElementAttribute {
             name: name,
-            value: value
+            value: value,
         }
     }
 }

@@ -128,49 +128,70 @@ impl ServoParser {
     }

     pub fn parse_html_document(document: &Document, input: DOMString, url: ServoUrl) {
-        let parser = if PREFS.get("dom.servoparser.async_html_tokenizer.enabled").as_boolean().unwrap() {
-            ServoParser::new(document,
-                             Tokenizer::AsyncHtml(self::async_html::Tokenizer::new(document, url, None)),
-                             LastChunkState::NotReceived,
-                             ParserKind::Normal)
+        let parser = if PREFS
+            .get("dom.servoparser.async_html_tokenizer.enabled")
+            .as_boolean()
+            .unwrap()
+        {
+            ServoParser::new(
+                document,
+                Tokenizer::AsyncHtml(self::async_html::Tokenizer::new(document, url, None)),
+                LastChunkState::NotReceived,
+                ParserKind::Normal,
+            )
         } else {
-            ServoParser::new(document,
-                             Tokenizer::Html(self::html::Tokenizer::new(document, url, None, ParsingAlgorithm::Normal)),
-                             LastChunkState::NotReceived,
-                             ParserKind::Normal)
+            ServoParser::new(
+                document,
+                Tokenizer::Html(self::html::Tokenizer::new(
+                    document,
+                    url,
+                    None,
+                    ParsingAlgorithm::Normal,
+                )),
+                LastChunkState::NotReceived,
+                ParserKind::Normal,
+            )
         };
         parser.parse_string_chunk(String::from(input));
     }

     // https://html.spec.whatwg.org/multipage/#parsing-html-fragments
-    pub fn parse_html_fragment(context: &Element, input: DOMString) -> impl Iterator<Item=DomRoot<Node>> {
+    pub fn parse_html_fragment(
+        context: &Element,
+        input: DOMString,
+    ) -> impl Iterator<Item = DomRoot<Node>> {
         let context_node = context.upcast::<Node>();
         let context_document = context_node.owner_doc();
         let window = context_document.window();
         let url = context_document.url();

         // Step 1.
-        let loader = DocumentLoader::new_with_threads(context_document.loader().resource_threads().clone(),
-                                                      Some(url.clone()));
-        let document = Document::new(window,
-                                     HasBrowsingContext::No,
-                                     Some(url.clone()),
-                                     context_document.origin().clone(),
-                                     IsHTMLDocument::HTMLDocument,
-                                     None,
-                                     None,
-                                     DocumentActivity::Inactive,
-                                     DocumentSource::FromParser,
-                                     loader,
-                                     None,
-                                     None,
-                                     Default::default());
+        let loader = DocumentLoader::new_with_threads(
+            context_document.loader().resource_threads().clone(),
+            Some(url.clone()),
+        );
+        let document = Document::new(
+            window,
+            HasBrowsingContext::No,
+            Some(url.clone()),
+            context_document.origin().clone(),
+            IsHTMLDocument::HTMLDocument,
+            None,
+            None,
+            DocumentActivity::Inactive,
+            DocumentSource::FromParser,
+            loader,
+            None,
+            None,
+            Default::default(),
+        );

         // Step 2.
         document.set_quirks_mode(context_document.quirks_mode());

         // Step 11.
-        let form = context_node.inclusive_ancestors()
+        let form = context_node
+            .inclusive_ancestors()
             .find(|element| element.is::<HTMLFormElement>());

         let fragment_context = FragmentContext {

@@ -178,13 +199,17 @@ impl ServoParser {
         form_elem: form.r(),
     };

-        let parser = ServoParser::new(&document,
-                                      Tokenizer::Html(self::html::Tokenizer::new(&document,
-                                                                                 url,
-                                                                                 Some(fragment_context),
-                                                                                 ParsingAlgorithm::Fragment)),
-                                      LastChunkState::Received,
-                                      ParserKind::Normal);
+        let parser = ServoParser::new(
+            &document,
+            Tokenizer::Html(self::html::Tokenizer::new(
+                &document,
+                url,
+                Some(fragment_context),
+                ParsingAlgorithm::Fragment,
+            )),
+            LastChunkState::Received,
+            ParserKind::Normal,
+        );
         parser.parse_string_chunk(String::from(input));

         // Step 14.

@@ -214,10 +239,12 @@ impl ServoParser {
     }

     pub fn parse_xml_document(document: &Document, input: DOMString, url: ServoUrl) {
-        let parser = ServoParser::new(document,
-                                      Tokenizer::Xml(self::xml::Tokenizer::new(document, url)),
-                                      LastChunkState::NotReceived,
-                                      ParserKind::Normal);
+        let parser = ServoParser::new(
+            document,
+            Tokenizer::Xml(self::xml::Tokenizer::new(document, url)),
+            LastChunkState::NotReceived,
+            ParserKind::Normal,
+        );
         parser.parse_string_chunk(String::from(input));
     }

@@ -243,12 +270,18 @@ impl ServoParser {
     /// ^
     /// insertion point
     /// ```
-    pub fn resume_with_pending_parsing_blocking_script(&self, script: &HTMLScriptElement, result: ScriptResult) {
+    pub fn resume_with_pending_parsing_blocking_script(
+        &self,
+        script: &HTMLScriptElement,
+        result: ScriptResult,
+    ) {
         assert!(self.suspended.get());
         self.suspended.set(false);

-        mem::swap(&mut *self.script_input.borrow_mut(),
-                  &mut *self.network_input.borrow_mut());
+        mem::swap(
+            &mut *self.script_input.borrow_mut(),
+            &mut *self.network_input.borrow_mut(),
+        );
         while let Some(chunk) = self.script_input.borrow_mut().pop_front() {
             self.network_input.borrow_mut().push_back(chunk);
         }

@@ -278,7 +311,9 @@ impl ServoParser {
             // parser is suspended, we just append everything to the
             // script input and abort these steps.
             for chunk in text {
-                self.script_input.borrow_mut().push_back(String::from(chunk).into());
+                self.script_input
+                    .borrow_mut()
+                    .push_back(String::from(chunk).into());
             }
             return;
         }

@@ -334,14 +369,16 @@ impl ServoParser {
         *self.network_input.borrow_mut() = BufferQueue::new();

         // Step 2.
-        self.document.set_ready_state(DocumentReadyState::Interactive);
+        self.document
+            .set_ready_state(DocumentReadyState::Interactive);

         // Step 3.
         self.tokenizer.borrow_mut().end();
         self.document.set_current_parser(None);

         // Step 4.
-        self.document.set_ready_state(DocumentReadyState::Interactive);
+        self.document
+            .set_ready_state(DocumentReadyState::Interactive);
     }

     // https://html.spec.whatwg.org/multipage/#active-parser

@@ -350,11 +387,12 @@ impl ServoParser {
     }

     #[allow(unrooted_must_root)]
-    fn new_inherited(document: &Document,
-                     tokenizer: Tokenizer,
-                     last_chunk_state: LastChunkState,
-                     kind: ParserKind)
-                     -> Self {
+    fn new_inherited(
+        document: &Document,
+        tokenizer: Tokenizer,
+        last_chunk_state: LastChunkState,
+        kind: ParserKind,
+    ) -> Self {
         ServoParser {
             reflector: Reflector::new(),
             document: Dom::from_ref(document),

@@ -371,14 +409,22 @@ impl ServoParser {
     }

     #[allow(unrooted_must_root)]
-    fn new(document: &Document,
-           tokenizer: Tokenizer,
-           last_chunk_state: LastChunkState,
-           kind: ParserKind)
-           -> DomRoot<Self> {
-        reflect_dom_object(Box::new(ServoParser::new_inherited(document, tokenizer, last_chunk_state, kind)),
-                           document.window(),
-                           ServoParserBinding::Wrap)
+    fn new(
+        document: &Document,
+        tokenizer: Tokenizer,
+        last_chunk_state: LastChunkState,
+        kind: ParserKind,
+    ) -> DomRoot<Self> {
+        reflect_dom_object(
+            Box::new(ServoParser::new_inherited(
+                document,
+                tokenizer,
+                last_chunk_state,
+                kind,
+            )),
+            document.window(),
+            ServoParserBinding::Wrap,
+        )
     }

     fn push_bytes_input_chunk(&self, chunk: Vec<u8>) {

@@ -391,11 +437,9 @@ impl ServoParser {
             match result {
                 Err(()) => {
                     *incomplete_utf8 = Some(incomplete);
-                    return
-                }
-                Ok(remaining) => {
-                    chunk = remaining
-                }
+                    return;
+                },
+                Ok(remaining) => chunk = remaining,
             }
         }

@@ -413,10 +457,16 @@ impl ServoParser {
             incremental: TimerMetadataReflowType::FirstReflow,
         };
         let profiler_category = self.tokenizer.borrow().profiler_category();
-        profile(profiler_category,
-                Some(metadata),
-                self.document.window().upcast::<GlobalScope>().time_profiler_chan().clone(),
-                || self.do_parse_sync())
+        profile(
+            profiler_category,
+            Some(metadata),
+            self.document
+                .window()
+                .upcast::<GlobalScope>()
+                .time_profiler_chan()
+                .clone(),
+            || self.do_parse_sync(),
+        )
     }

     fn do_parse_sync(&self) {

@@ -427,7 +477,9 @@ impl ServoParser {
         if self.last_chunk_received.get() {
             if let Some(_) = self.incomplete_utf8.borrow_mut().take() {
-                self.network_input.borrow_mut().push_back(StrTendril::from("\u{FFFD}"))
+                self.network_input
+                    .borrow_mut()
+                    .push_back(StrTendril::from("\u{FFFD}"))
             }
         }
         self.tokenize(|tokenizer| tokenizer.feed(&mut *self.network_input.borrow_mut()));

@@ -460,7 +512,8 @@ impl ServoParser {
     }

     fn tokenize<F>(&self, mut feed: F)
-        where F: FnMut(&mut Tokenizer) -> Result<(), DomRoot<HTMLScriptElement>>,
+    where
+        F: FnMut(&mut Tokenizer) -> Result<(), DomRoot<HTMLScriptElement>>,
     {
         loop {
             assert!(!self.suspended.get());

@@ -497,7 +550,8 @@ impl ServoParser {
         assert!(self.incomplete_utf8.borrow().is_none());

         // Step 1.
-        self.document.set_ready_state(DocumentReadyState::Interactive);
+        self.document
+            .set_ready_state(DocumentReadyState::Interactive);

         // Step 2.
         self.tokenizer.borrow_mut().end();

@@ -510,13 +564,15 @@ impl ServoParser {
 }

 struct FragmentParsingResult<I>
-    where I: Iterator<Item=DomRoot<Node>>
+where
+    I: Iterator<Item = DomRoot<Node>>,
 {
     inner: I,
 }

 impl<I> Iterator for FragmentParsingResult<I>
-    where I: Iterator<Item=DomRoot<Node>>
+where
+    I: Iterator<Item = DomRoot<Node>>,
 {
     type Item = DomRoot<Node>;

@@ -621,12 +677,10 @@ impl FetchResponseListener for ParserContext {
         let mut ssl_error = None;
         let mut network_error = None;
         let metadata = match meta_result {
-            Ok(meta) => {
-                Some(match meta {
-                    FetchMetadata::Unfiltered(m) => m,
-                    FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
-                })
-            },
+            Ok(meta) => Some(match meta {
+                FetchMetadata::Unfiltered(m) => m,
+                FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
+            }),
             Err(NetworkError::SslValidation(url, reason)) => {
                 ssl_error = Some(reason);
                 let mut meta = Metadata::default(url);

@@ -643,7 +697,10 @@ impl FetchResponseListener for ParserContext {
             },
             Err(_) => None,
         };
-        let content_type = metadata.clone().and_then(|meta| meta.content_type).map(Serde::into_inner);
+        let content_type = metadata
+            .clone()
+            .and_then(|meta| meta.content_type)
+            .map(Serde::into_inner);
         let parser = match ScriptThread::page_headers_available(&self.id, metadata) {
             Some(parser) => parser,
             None => return,

@@ -795,14 +852,16 @@ impl Sink {
     }

     fn has_parent_node(&self, node: &Dom<Node>) -> bool {
-       node.GetParentNode().is_some()
+        node.GetParentNode().is_some()
     }
 }

-#[allow(unrooted_must_root)]  // FIXME: really?
+#[allow(unrooted_must_root)] // FIXME: really?
 impl TreeSink for Sink {
     type Output = Self;
-    fn finish(self) -> Self { self }
+    fn finish(self) -> Self {
+        self
+    }

     type Handle = Dom<Node>;

@@ -811,7 +870,8 @@ impl TreeSink for Sink {
     }

     fn get_template_contents(&mut self, target: &Dom<Node>) -> Dom<Node> {
-        let template = target.downcast::<HTMLTemplateElement>()
+        let template = target
+            .downcast::<HTMLTemplateElement>()
             .expect("tried to get template contents of non-HTMLTemplateElement in HTML parsing");
         Dom::from_ref(template.Content().upcast())
     }

@@ -821,7 +881,8 @@ impl TreeSink for Sink {
     }

     fn elem_name<'a>(&self, target: &'a Dom<Node>) -> ExpandedName<'a> {
-        let elem = target.downcast::<Element>()
+        let elem = target
+            .downcast::<Element>()
             .expect("tried to get name of non-Element in HTML parsing");
         ExpandedName {
             ns: elem.namespace(),

@@ -829,8 +890,12 @@ impl TreeSink for Sink {
         }
     }

-    fn create_element(&mut self, name: QualName, attrs: Vec<Attribute>, _flags: ElementFlags)
-                      -> Dom<Node> {
+    fn create_element(
+        &mut self,
+        name: QualName,
+        attrs: Vec<Attribute>,
+        _flags: ElementFlags,
+    ) -> Dom<Node> {
         let attrs = attrs
             .into_iter()
             .map(|attr| ElementAttribute::new(attr.name, DOMString::from(String::from(attr.value))))

@@ -853,15 +918,26 @@ impl TreeSink for Sink {
     fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Dom<Node> {
         let doc = &*self.document;
         let pi = ProcessingInstruction::new(
-            DOMString::from(String::from(target)), DOMString::from(String::from(data)),
-            doc);
+            DOMString::from(String::from(target)),
+            DOMString::from(String::from(data)),
+            doc,
+        );
         Dom::from_ref(pi.upcast())
     }

-    fn associate_with_form(&mut self, target: &Dom<Node>, form: &Dom<Node>, nodes: (&Dom<Node>, Option<&Dom<Node>>)) {
+    fn associate_with_form(
+        &mut self,
+        target: &Dom<Node>,
+        form: &Dom<Node>,
+        nodes: (&Dom<Node>, Option<&Dom<Node>>),
+    ) {
         let (element, prev_element) = nodes;
         let tree_node = prev_element.map_or(element, |prev| {
-            if self.has_parent_node(element) { element } else { prev }
+            if self.has_parent_node(element) {
+                element
+            } else {
+                prev
+            }
         });
         if !self.same_tree(tree_node, form) {
             return;

@@ -878,14 +954,17 @@ impl TreeSink for Sink {
             control.set_form_owner_from_parser(&form);
         } else {
             // TODO remove this code when keygen is implemented.
-            assert_eq!(node.NodeName(), "KEYGEN", "Unknown form-associatable element");
+            assert_eq!(
+                node.NodeName(),
+                "KEYGEN",
+                "Unknown form-associatable element"
+            );
         }
     }

-    fn append_before_sibling(&mut self,
-                             sibling: &Dom<Node>,
-                             new_node: NodeOrText<Dom<Node>>) {
-        let parent = sibling.GetParentNode()
+    fn append_before_sibling(&mut self, sibling: &Dom<Node>, new_node: NodeOrText<Dom<Node>>) {
+        let parent = sibling
+            .GetParentNode()
             .expect("append_before_sibling called on node without parent");

         insert(&parent, Some(&*sibling), new_node);

@@ -921,20 +1000,34 @@ impl TreeSink for Sink {
         }
     }

-    fn append_doctype_to_document(&mut self, name: StrTendril, public_id: StrTendril,
-                                  system_id: StrTendril) {
+    fn append_doctype_to_document(
+        &mut self,
+        name: StrTendril,
+        public_id: StrTendril,
+        system_id: StrTendril,
+    ) {
         let doc = &*self.document;
         let doctype = DocumentType::new(
-            DOMString::from(String::from(name)), Some(DOMString::from(String::from(public_id))),
-            Some(DOMString::from(String::from(system_id))), doc);
-        doc.upcast::<Node>().AppendChild(doctype.upcast()).expect("Appending failed");
+            DOMString::from(String::from(name)),
+            Some(DOMString::from(String::from(public_id))),
+            Some(DOMString::from(String::from(system_id))),
+            doc,
+        );
+        doc.upcast::<Node>()
+            .AppendChild(doctype.upcast())
+            .expect("Appending failed");
     }

     fn add_attrs_if_missing(&mut self, target: &Dom<Node>, attrs: Vec<Attribute>) {
-        let elem = target.downcast::<Element>()
+        let elem = target
+            .downcast::<Element>()
             .expect("tried to set attrs on non-Element in HTML parsing");
         for attr in attrs {
-            elem.set_attribute_from_parser(attr.name, DOMString::from(String::from(attr.value)), None);
+            elem.set_attribute_from_parser(
+                attr.name,
+                DOMString::from(String::from(attr.value)),
+                None,
+            );
         }
     }

@@ -968,10 +1061,11 @@ impl TreeSink for Sink {
     /// Specifically, the <annotation-xml> cases.
     fn is_mathml_annotation_xml_integration_point(&self, handle: &Dom<Node>) -> bool {
         let elem = handle.downcast::<Element>().unwrap();
-        elem.get_attribute(&ns!(), &local_name!("encoding")).map_or(false, |attr| {
-            attr.value().eq_ignore_ascii_case("text/html")
-                || attr.value().eq_ignore_ascii_case("application/xhtml+xml")
-        })
+        elem.get_attribute(&ns!(), &local_name!("encoding"))
+            .map_or(false, |attr| {
+                attr.value().eq_ignore_ascii_case("text/html") ||
+                    attr.value().eq_ignore_ascii_case("application/xhtml+xml")
+            })
     }

     fn set_current_line(&mut self, line_number: u64) {

@@ -993,7 +1087,8 @@ fn create_element_for_token(
     parsing_algorithm: ParsingAlgorithm,
 ) -> DomRoot<Element> {
     // Step 3.
-    let is = attrs.iter()
+    let is = attrs
+        .iter()
         .find(|attr| attr.name.local.eq_str_ignore_ascii_case("is"))
         .map(|attr| LocalName::from(&*attr.value));

@@ -1001,7 +1096,8 @@ fn create_element_for_token(
     let definition = document.lookup_custom_element_definition(&name.ns, &name.local, is.as_ref());

     // Step 5.
-    let will_execute_script = definition.is_some() && parsing_algorithm != ParsingAlgorithm::Fragment;
+    let will_execute_script =
+        definition.is_some() && parsing_algorithm != ParsingAlgorithm::Fragment;

     // Step 6.
     if will_execute_script {

@@ -1009,7 +1105,10 @@ fn create_element_for_token(
         document.increment_throw_on_dynamic_markup_insertion_counter();
         // Step 6.2
         if is_execution_stack_empty() {
-            document.window().upcast::<GlobalScope>().perform_a_microtask_checkpoint();
+            document
+                .window()
+                .upcast::<GlobalScope>()
+                .perform_a_microtask_checkpoint();
         }
         // Step 6.3
         ScriptThread::push_new_element_queue()
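One branch reformatted above pushes `"\u{FFFD}"` into the network input when the last chunk arrives while an incomplete UTF-8 sequence is still buffered: once the stream ends, the missing continuation bytes can never arrive, so the dangling prefix decodes to the replacement character. A small illustration of why that branch exists, using std's lossy decoding in place of Servo's incremental decoder:

fn main() {
    // A network chunk can end mid-codepoint: here "é" (0xC3 0xA9) is
    // split and the stream ends right after the lead byte 0xC3.
    let chunk: &[u8] = &[b'h', b'i', 0xC3];

    // At end of input no continuation byte is coming, so the dangling
    // lead byte becomes U+FFFD, mirroring the diff's
    // `push_back(StrTendril::from("\u{FFFD}"))` on last-chunk handling.
    let decoded = String::from_utf8_lossy(chunk);
    assert_eq!(decoded, "hi\u{FFFD}");
}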
@@ -36,9 +36,7 @@ impl Tokenizer {
         let tb = XmlTreeBuilder::new(sink, Default::default());
         let tok = XmlTokenizer::new(tb, Default::default());

-        Tokenizer {
-            inner: tok,
-        }
+        Tokenizer { inner: tok }
     }

     pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {

@@ -77,7 +75,9 @@ unsafe impl JSTraceable for XmlTokenizer<XmlTreeBuilder<Dom<Node>, Sink>> {
     type Handle = Dom<Node>;
     #[allow(unrooted_must_root)]
     fn trace_handle(&self, node: &Dom<Node>) {
-        unsafe { node.trace(self.0); }
+        unsafe {
+            node.trace(self.0);
+        }
     }
 }