Mirror of https://github.com/servo/servo.git, synced 2025-06-06 16:45:39 +00:00
Support HTML parser reentrancy (#32820)
* Update parser interface for reentrancy.
* Remove assertions around invoking scripts with active parser.
* Add regression test.
* Run test with normal and async html parser.

Signed-off-by: Josh Matthews <josh@joshmatthews.net>
parent d44c0f7e5d
commit 4df7a1af25
13 changed files with 248 additions and 211 deletions
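The core of the change is mechanical: tokenizer and tree-sink methods that took &mut self now take &self, with interior mutability (Cell for Copy fields, RefCell for maps) replacing exclusive borrows, so a script executed mid-parse can re-enter the parser without needing a second, conflicting mutable borrow. A minimal sketch of the pattern, with simplified field types rather than Servo's actual definitions:

    use std::cell::{Cell, RefCell};
    use std::collections::HashMap;

    // Before: set_current_line(&mut self, ...) required a unique borrow of
    // the whole sink; after: &self everywhere, mutation goes through cells.
    struct Sink {
        current_line: Cell<u64>,                  // Copy data: Cell
        node_data: RefCell<HashMap<u32, String>>, // non-Copy data: RefCell
    }

    impl Sink {
        fn set_current_line(&self, line: u64) {
            self.current_line.set(line);
        }

        fn insert_node_data(&self, id: u32, data: String) {
            // The mutable borrow lasts only for this statement, not for the
            // whole parse, so reentrant calls on the same sink stay legal.
            assert!(self.node_data.borrow_mut().insert(id, data).is_none());
        }
    }

    fn main() {
        let sink = Sink {
            current_line: Cell::new(1),
            node_data: RefCell::new(HashMap::new()),
        };
        sink.set_current_line(42);
        sink.insert_node_data(0, "document".to_string());
        assert_eq!(sink.current_line.get(), 42);
    }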
Cargo.lock (generated, 40 changed lines)

@@ -1300,7 +1300,7 @@ dependencies = [
 [[package]]
 name = "derive_common"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "darling",
  "proc-macro2",
@@ -1468,7 +1468,7 @@ dependencies = [
 [[package]]
 name = "dom"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "bitflags 2.6.0",
 ]
@@ -2852,9 +2852,9 @@ dependencies = [
 
 [[package]]
 name = "html5ever"
-version = "0.27.0"
+version = "0.28.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4"
+checksum = "0ff6858c1f7e2a470c5403091866fa95b36fe0dbac5d771f932c15e5ff1ee501"
 dependencies = [
  "log",
  "mac",
@@ -4073,7 +4073,7 @@ dependencies = [
 [[package]]
 name = "malloc_size_of"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "accountable-refcell",
  "app_units",
@@ -4123,9 +4123,9 @@ dependencies = [
 
 [[package]]
 name = "markup5ever"
-version = "0.12.1"
+version = "0.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45"
+checksum = "d581ff8be69d08a2efa23a959d81aa22b739073f749f067348bd4f4ba4b69195"
 dependencies = [
  "log",
  "phf 0.11.2",
@@ -5756,7 +5756,7 @@ checksum = "0495e4577c672de8254beb68d01a9b62d0e8a13c099edecdbedccce3223cd29f"
 [[package]]
 name = "selectors"
 version = "0.24.0"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "bitflags 2.6.0",
  "cssparser",
@@ -6070,7 +6070,7 @@ dependencies = [
 [[package]]
 name = "servo_arc"
 version = "0.2.0"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "serde",
  "stable_deref_trait",
@@ -6079,7 +6079,7 @@ dependencies = [
 [[package]]
 name = "servo_atoms"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "string_cache",
  "string_cache_codegen",
@@ -6277,7 +6277,7 @@ checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
 [[package]]
 name = "size_of_test"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "static_assertions",
 ]
@@ -6418,7 +6418,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
 [[package]]
 name = "static_prefs"
 version = "0.1.0"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 
 [[package]]
 name = "strck"
@@ -6471,7 +6471,7 @@ dependencies = [
 [[package]]
 name = "style"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "app_units",
  "arrayvec",
@@ -6530,7 +6530,7 @@ dependencies = [
 [[package]]
 name = "style_config"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "lazy_static",
 ]
@@ -6538,7 +6538,7 @@ dependencies = [
 [[package]]
 name = "style_derive"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "darling",
  "derive_common",
@@ -6569,7 +6569,7 @@ dependencies = [
 [[package]]
 name = "style_traits"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "app_units",
  "bitflags 2.6.0",
@@ -6918,7 +6918,7 @@ dependencies = [
 [[package]]
 name = "to_shmem"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "cssparser",
  "servo_arc",
@@ -6931,7 +6931,7 @@ dependencies = [
 [[package]]
 name = "to_shmem_derive"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "darling",
  "derive_common",
@@ -8431,9 +8431,9 @@ checksum = "539a77ee7c0de333dcc6da69b177380a0b81e0dacfa4f7344c465a36871ee601"
 
 [[package]]
 name = "xml5ever"
-version = "0.18.1"
+version = "0.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bbb26405d8e919bc1547a5aa9abc95cbfa438f04844f5fdd9dc7596b748bf69"
+checksum = "d7b906d34d867d216b2d79fb0e9470aaa7f4948ea86b44c27846efedd596076c"
 dependencies = [
  "log",
  "mac",
@@ -58,7 +58,7 @@ gstreamer-gl-sys = "0.22"
 gstreamer-sys = "0.22"
 gstreamer-video = "0.22"
 headers = "0.3"
-html5ever = "0.27"
+html5ever = "0.28"
 http = "0.2"
 hyper = "0.14"
 hyper-rustls = { version = "0.24", default-features = false, features = ["acceptor", "http1", "http2", "logging", "tls12", "webpki-tokio"] }
@@ -142,7 +142,7 @@ wgpu-core = { git = "https://github.com/gfx-rs/wgpu", rev = "781b54a8b9cee1a2cb22bda565662edec52eb70e" }
 wgpu-types = { git = "https://github.com/gfx-rs/wgpu", rev = "781b54a8b9cee1a2cb22bda565662edec52eb70e" }
 windows-sys = "0.59"
 xi-unicode = "0.1.0"
-xml5ever = "0.18"
+xml5ever = "0.19"
 
 [profile.release]
 opt-level = 3
@@ -2670,18 +2670,6 @@ impl Document {
         self.current_parser.get()
     }
 
-    pub fn can_invoke_script(&self) -> bool {
-        match self.get_current_parser() {
-            Some(parser) => {
-                // It is safe to run script if the parser is not actively parsing,
-                // or if it is impossible to interact with the token stream.
-                parser.parser_is_not_active() ||
-                    self.throw_on_dynamic_markup_insertion_counter.get() > 0
-            },
-            None => true,
-        }
-    }
-
     /// Iterate over all iframes in the document.
     pub fn iter_iframes(&self) -> impl Iterator<Item = DomRoot<HTMLIFrameElement>> {
         self.upcast::<Node>()
@@ -395,11 +395,6 @@ impl EventTarget {
     }
 
     pub fn dispatch_event(&self, event: &Event) -> EventStatus {
-        if let Some(window) = self.global().downcast::<Window>() {
-            if window.has_document() {
-                assert!(window.Document().can_invoke_script());
-            }
-        };
         event.dispatch(self, false)
     }
 
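Both removed blocks guarded against script running while the parser held its state mutably; once the interfaces below take &self, that situation is representable and the guards become obsolete. The hazard the old design had to rule out is the classic RefCell reentrancy panic: a document.write() executed from a parser-invoked script would need a second borrow of a tokenizer already mutably borrowed by the active feed(). A standalone sketch of that failure mode, using hypothetical names rather than Servo code:

    use std::cell::RefCell;

    struct Tokenizer;

    impl Tokenizer {
        fn feed(&mut self, reenter: impl FnOnce()) {
            reenter(); // simulates a script running mid-parse
        }
    }

    fn main() {
        let tokenizer = RefCell::new(Tokenizer);
        let mut outer = tokenizer.borrow_mut();
        outer.feed(|| {
            // A reentrant call like document.write() needs a second borrow;
            // this panics at runtime with a BorrowMutError.
            let _inner = tokenizer.borrow_mut();
        });
    }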
@@ -5,7 +5,7 @@
 #![allow(crown::unrooted_must_root)]
 
 use std::borrow::Cow;
-use std::cell::Cell;
+use std::cell::{Cell, Ref, RefCell, RefMut};
 use std::collections::vec_deque::VecDeque;
 use std::collections::HashMap;
 use std::thread;
@@ -171,7 +171,7 @@ enum ToHtmlTokenizerMsg {
 }
 
 fn create_buffer_queue(mut buffers: VecDeque<SendTendril<UTF8>>) -> BufferQueue {
-    let mut buffer_queue = BufferQueue::default();
+    let buffer_queue = BufferQueue::default();
     while let Some(st) = buffers.pop_front() {
         buffer_queue.push_back(StrTendril::from(st));
     }
@@ -214,7 +214,7 @@ pub struct Tokenizer {
     #[no_trace]
     html_tokenizer_sender: Sender<ToHtmlTokenizerMsg>,
     #[ignore_malloc_size_of = "Defined in std"]
-    nodes: HashMap<ParseNodeId, Dom<Node>>,
+    nodes: RefCell<HashMap<ParseNodeId, Dom<Node>>>,
     #[no_trace]
     url: ServoUrl,
     parsing_algorithm: ParsingAlgorithm,
@@ -236,17 +236,17 @@ impl Tokenizer {
             None => ParsingAlgorithm::Normal,
         };
 
-        let mut tokenizer = Tokenizer {
+        let tokenizer = Tokenizer {
             document: Dom::from_ref(document),
             receiver: tokenizer_receiver,
             html_tokenizer_sender: to_html_tokenizer_sender,
-            nodes: HashMap::new(),
+            nodes: RefCell::new(HashMap::new()),
             url,
             parsing_algorithm: algorithm,
         };
         tokenizer.insert_node(0, Dom::from_ref(document.upcast()));
 
-        let mut sink = Sink::new(to_tokenizer_sender.clone());
+        let sink = Sink::new(to_tokenizer_sender.clone());
         let mut ctxt_parse_node = None;
         let mut form_parse_node = None;
         let mut fragment_context_is_some = false;
@@ -283,7 +283,7 @@ impl Tokenizer {
         tokenizer
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
+    pub fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         let mut send_tendrils = VecDeque::new();
         while let Some(str) = input.pop_front() {
             send_tendrils.push_back(SendTendril::from(str));
@@ -306,7 +306,7 @@ impl Tokenizer {
                 ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
                 ToTokenizerMsg::TokenizerResultDone { updated_input } => {
                     let buffer_queue = create_buffer_queue(updated_input);
-                    *input = buffer_queue;
+                    input.replace_with(buffer_queue);
                     return TokenizerResult::Done;
                 },
                 ToTokenizerMsg::TokenizerResultScript {
@@ -314,7 +314,7 @@ impl Tokenizer {
                     updated_input,
                 } => {
                     let buffer_queue = create_buffer_queue(updated_input);
-                    *input = buffer_queue;
+                    input.replace_with(buffer_queue);
                     let script = self.get_node(&script.id);
                     return TokenizerResult::Script(DomRoot::from_ref(script.downcast().unwrap()));
                 },
@@ -323,7 +323,7 @@ impl Tokenizer {
         }
     }
 
-    pub fn end(&mut self) {
+    pub fn end(&self) {
         self.html_tokenizer_sender
             .send(ToHtmlTokenizerMsg::End)
             .unwrap();
@@ -344,21 +344,23 @@ impl Tokenizer {
         &self.url
     }
 
-    pub fn set_plaintext_state(&mut self) {
+    pub fn set_plaintext_state(&self) {
         self.html_tokenizer_sender
             .send(ToHtmlTokenizerMsg::SetPlainTextState)
             .unwrap();
     }
 
-    fn insert_node(&mut self, id: ParseNodeId, node: Dom<Node>) {
-        assert!(self.nodes.insert(id, node).is_none());
+    fn insert_node(&self, id: ParseNodeId, node: Dom<Node>) {
+        assert!(self.nodes.borrow_mut().insert(id, node).is_none());
     }
 
-    fn get_node<'a>(&'a self, id: &ParseNodeId) -> &'a Dom<Node> {
-        self.nodes.get(id).expect("Node not found!")
+    fn get_node<'a>(&'a self, id: &ParseNodeId) -> Ref<'a, Dom<Node>> {
+        Ref::map(self.nodes.borrow(), |nodes| {
+            nodes.get(id).expect("Node not found!")
+        })
     }
 
-    fn append_before_sibling(&mut self, sibling: ParseNodeId, node: NodeOrText) {
+    fn append_before_sibling(&self, sibling: ParseNodeId, node: NodeOrText) {
         let node = match node {
             NodeOrText::Node(n) => {
                 HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id)))
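get_node can no longer hand out a plain &Dom<Node> once the map lives in a RefCell, so it returns a Ref guard instead; Ref::map (and the matching RefMut::map used below for parse-node data) narrows a borrow of the whole map down to one entry while the borrow flag stays held. A self-contained example of the same std APIs, with toy types:

    use std::cell::{Ref, RefCell, RefMut};
    use std::collections::HashMap;

    fn main() {
        let nodes: RefCell<HashMap<u32, String>> = RefCell::new(HashMap::new());
        nodes.borrow_mut().insert(0, "document".to_string());

        // Borrow the whole map, then narrow the guard to a single value.
        let node = Ref::map(nodes.borrow(), |map| {
            map.get(&0).expect("Node not found!")
        });
        assert_eq!(&*node, "document");
        drop(node); // release the shared borrow before borrowing mutably

        let mut node_mut = RefMut::map(nodes.borrow_mut(), |map| {
            map.get_mut(&0).expect("Node not found!")
        });
        node_mut.push_str(" (root)");
    }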
@@ -373,7 +375,7 @@ impl Tokenizer {
         super::insert(parent, Some(sibling), node, self.parsing_algorithm);
     }
 
-    fn append(&mut self, parent: ParseNodeId, node: NodeOrText) {
+    fn append(&self, parent: ParseNodeId, node: NodeOrText) {
         let node = match node {
             NodeOrText::Node(n) => {
                 HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id)))
@@ -398,7 +400,7 @@ impl Tokenizer {
         x.is_in_same_home_subtree(y)
     }
 
-    fn process_operation(&mut self, op: ParseOperation) {
+    fn process_operation(&self, op: ParseOperation) {
         let document = DomRoot::from_ref(&**self.get_node(&0));
         let document = document
             .downcast::<Document>()
@@ -469,8 +471,8 @@ impl Tokenizer {
                     .expect("Appending failed");
             },
             ParseOperation::AddAttrsIfMissing { target, attrs } => {
-                let elem = self
-                    .get_node(&target)
+                let node = self.get_node(&target);
+                let elem = node
                     .downcast::<Element>()
                     .expect("tried to set attrs on non-Element in HTML parsing");
                 for attr in attrs {
@@ -479,11 +481,12 @@ impl Tokenizer {
             },
             ParseOperation::RemoveFromParent { target } => {
                 if let Some(ref parent) = self.get_node(&target).GetParentNode() {
-                    parent.RemoveChild(self.get_node(&target)).unwrap();
+                    parent.RemoveChild(&self.get_node(&target)).unwrap();
                 }
             },
             ParseOperation::MarkScriptAlreadyStarted { node } => {
-                let script = self.get_node(&node).downcast::<HTMLScriptElement>();
+                let node = self.get_node(&node);
+                let script = node.downcast::<HTMLScriptElement>();
                 if let Some(script) = script {
                     script.set_already_started(true)
                 }
@@ -525,7 +528,7 @@ impl Tokenizer {
                 }
             },
             ParseOperation::Pop { node } => {
-                vtable_for(self.get_node(&node)).pop();
+                vtable_for(&self.get_node(&node)).pop();
             },
             ParseOperation::CreatePI { node, target, data } => {
                 let pi = ProcessingInstruction::new(
@@ -555,7 +558,7 @@ fn run(
         ..Default::default()
     };
 
-    let mut html_tokenizer = if fragment_context_is_some {
+    let html_tokenizer = if fragment_context_is_some {
        let tb =
            TreeBuilder::new_for_fragment(sink, ctxt_parse_node.unwrap(), form_parse_node, options);
 
@@ -575,8 +578,8 @@ fn run(
            .expect("Unexpected channel panic in html parser thread")
        {
            ToHtmlTokenizerMsg::Feed { input } => {
-                let mut input = create_buffer_queue(input);
-                let res = html_tokenizer.feed(&mut input);
+                let input = create_buffer_queue(input);
+                let res = html_tokenizer.feed(&input);
 
                // Gather changes to 'input' and place them in 'updated_input',
                // which will be sent to the main thread to update feed method's 'input'
@@ -611,8 +614,8 @@ struct ParseNodeData {
 }
 
 pub struct Sink {
-    current_line: u64,
-    parse_node_data: HashMap<ParseNodeId, ParseNodeData>,
+    current_line: Cell<u64>,
+    parse_node_data: RefCell<HashMap<ParseNodeId, ParseNodeData>>,
     next_parse_node_id: Cell<ParseNodeId>,
     document_node: ParseNode,
     sender: Sender<ToTokenizerMsg>,
@@ -620,9 +623,9 @@ pub struct Sink {
 
 impl Sink {
     fn new(sender: Sender<ToTokenizerMsg>) -> Sink {
-        let mut sink = Sink {
-            current_line: 1,
-            parse_node_data: HashMap::new(),
+        let sink = Sink {
+            current_line: Cell::new(1),
+            parse_node_data: RefCell::new(HashMap::new()),
             next_parse_node_id: Cell::new(1),
             document_node: ParseNode {
                 id: 0,
@@ -635,7 +638,7 @@ impl Sink {
         sink
     }
 
-    fn new_parse_node(&mut self) -> ParseNode {
+    fn new_parse_node(&self) -> ParseNode {
         let id = self.next_parse_node_id.get();
         let data = ParseNodeData::default();
         self.insert_parse_node_data(id, data);
@@ -652,20 +655,20 @@ impl Sink {
             .unwrap();
     }
 
-    fn insert_parse_node_data(&mut self, id: ParseNodeId, data: ParseNodeData) {
-        assert!(self.parse_node_data.insert(id, data).is_none());
+    fn insert_parse_node_data(&self, id: ParseNodeId, data: ParseNodeData) {
+        assert!(self.parse_node_data.borrow_mut().insert(id, data).is_none());
     }
 
-    fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> &'a ParseNodeData {
-        self.parse_node_data
-            .get(id)
-            .expect("Parse Node data not found!")
+    fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> Ref<'a, ParseNodeData> {
+        Ref::map(self.parse_node_data.borrow(), |data| {
+            data.get(id).expect("Parse Node data not found!")
+        })
     }
 
-    fn get_parse_node_data_mut<'a>(&'a mut self, id: &'a ParseNodeId) -> &'a mut ParseNodeData {
-        self.parse_node_data
-            .get_mut(id)
-            .expect("Parse Node data not found!")
+    fn get_parse_node_data_mut<'a>(&'a self, id: &'a ParseNodeId) -> RefMut<'a, ParseNodeData> {
+        RefMut::map(self.parse_node_data.borrow_mut(), |data| {
+            data.get_mut(id).expect("Parse Node data not found!")
+        })
     }
 }
 
@@ -678,17 +681,17 @@ impl TreeSink for Sink {
 
     type Handle = ParseNode;
 
-    fn get_document(&mut self) -> Self::Handle {
+    fn get_document(&self) -> Self::Handle {
         self.document_node.clone()
     }
 
-    fn get_template_contents(&mut self, target: &Self::Handle) -> Self::Handle {
+    fn get_template_contents(&self, target: &Self::Handle) -> Self::Handle {
         if let Some(ref contents) = self.get_parse_node_data(&target.id).contents {
             return contents.clone();
         }
         let node = self.new_parse_node();
         {
-            let data = self.get_parse_node_data_mut(&target.id);
+            let mut data = self.get_parse_node_data_mut(&target.id);
             data.contents = Some(node.clone());
         }
         self.send_op(ParseOperation::GetTemplateContents {
@@ -711,7 +714,7 @@ impl TreeSink for Sink {
     }
 
     fn create_element(
-        &mut self,
+        &self,
         name: QualName,
         html_attrs: Vec<HtmlAttribute>,
         _flags: ElementFlags,
@@ -719,7 +722,7 @@ impl TreeSink for Sink {
         let mut node = self.new_parse_node();
         node.qual_name = Some(name.clone());
         {
-            let node_data = self.get_parse_node_data_mut(&node.id);
+            let mut node_data = self.get_parse_node_data_mut(&node.id);
             node_data.is_integration_point = html_attrs.iter().any(|attr| {
                 let attr_value = &String::from(attr.value.clone());
                 (attr.name.local == local_name!("encoding") && attr.name.ns == ns!()) &&
@@ -739,12 +742,12 @@ impl TreeSink for Sink {
             node: node.id,
             name,
             attrs,
-            current_line: self.current_line,
+            current_line: self.current_line.get(),
         });
         node
     }
 
-    fn create_comment(&mut self, text: StrTendril) -> Self::Handle {
+    fn create_comment(&self, text: StrTendril) -> Self::Handle {
         let node = self.new_parse_node();
         self.send_op(ParseOperation::CreateComment {
             text: String::from(text),
@@ -753,7 +756,7 @@ impl TreeSink for Sink {
         node
     }
 
-    fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> ParseNode {
+    fn create_pi(&self, target: StrTendril, data: StrTendril) -> ParseNode {
         let node = self.new_parse_node();
         self.send_op(ParseOperation::CreatePI {
             node: node.id,
@@ -764,7 +767,7 @@ impl TreeSink for Sink {
     }
 
     fn associate_with_form(
-        &mut self,
+        &self,
         target: &Self::Handle,
         form: &Self::Handle,
         nodes: (&Self::Handle, Option<&Self::Handle>),
@@ -779,7 +782,7 @@ impl TreeSink for Sink {
     }
 
     fn append_before_sibling(
-        &mut self,
+        &self,
         sibling: &Self::Handle,
         new_node: HtmlNodeOrText<Self::Handle>,
     ) {
@@ -794,7 +797,7 @@ impl TreeSink for Sink {
     }
 
     fn append_based_on_parent_node(
-        &mut self,
+        &self,
         elem: &Self::Handle,
         prev_elem: &Self::Handle,
         child: HtmlNodeOrText<Self::Handle>,
@@ -810,11 +813,11 @@ impl TreeSink for Sink {
         });
     }
 
-    fn parse_error(&mut self, msg: Cow<'static, str>) {
+    fn parse_error(&self, msg: Cow<'static, str>) {
         debug!("Parse error: {}", msg);
     }
 
-    fn set_quirks_mode(&mut self, mode: QuirksMode) {
+    fn set_quirks_mode(&self, mode: QuirksMode) {
         let mode = match mode {
             QuirksMode::Quirks => ServoQuirksMode::Quirks,
             QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks,
@@ -823,7 +826,7 @@ impl TreeSink for Sink {
         self.send_op(ParseOperation::SetQuirksMode { mode });
     }
 
-    fn append(&mut self, parent: &Self::Handle, child: HtmlNodeOrText<Self::Handle>) {
+    fn append(&self, parent: &Self::Handle, child: HtmlNodeOrText<Self::Handle>) {
         let child = match child {
             HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
             HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text)),
@@ -835,7 +838,7 @@ impl TreeSink for Sink {
     }
 
     fn append_doctype_to_document(
-        &mut self,
+        &self,
         name: StrTendril,
         public_id: StrTendril,
         system_id: StrTendril,
@@ -847,7 +850,7 @@ impl TreeSink for Sink {
         });
     }
 
-    fn add_attrs_if_missing(&mut self, target: &Self::Handle, html_attrs: Vec<HtmlAttribute>) {
+    fn add_attrs_if_missing(&self, target: &Self::Handle, html_attrs: Vec<HtmlAttribute>) {
         let attrs = html_attrs
             .into_iter()
             .map(|attr| Attribute {
@@ -861,19 +864,19 @@ impl TreeSink for Sink {
         });
     }
 
-    fn remove_from_parent(&mut self, target: &Self::Handle) {
+    fn remove_from_parent(&self, target: &Self::Handle) {
         self.send_op(ParseOperation::RemoveFromParent { target: target.id });
     }
 
-    fn mark_script_already_started(&mut self, node: &Self::Handle) {
+    fn mark_script_already_started(&self, node: &Self::Handle) {
         self.send_op(ParseOperation::MarkScriptAlreadyStarted { node: node.id });
     }
 
-    fn complete_script(&mut self, _: &Self::Handle) -> NextParserState {
+    fn complete_script(&self, _: &Self::Handle) -> NextParserState {
         panic!("complete_script should not be called here!");
     }
 
-    fn reparent_children(&mut self, parent: &Self::Handle, new_parent: &Self::Handle) {
+    fn reparent_children(&self, parent: &Self::Handle, new_parent: &Self::Handle) {
         self.send_op(ParseOperation::ReparentChildren {
             parent: parent.id,
             new_parent: new_parent.id,
@@ -887,11 +890,11 @@ impl TreeSink for Sink {
         node_data.is_integration_point
     }
 
-    fn set_current_line(&mut self, line_number: u64) {
-        self.current_line = line_number;
+    fn set_current_line(&self, line_number: u64) {
+        self.current_line.set(line_number);
     }
 
-    fn pop(&mut self, node: &Self::Handle) {
+    fn pop(&self, node: &Self::Handle) {
         self.send_op(ParseOperation::Pop { node: node.id });
     }
 }
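With every sink and tokenizer method taking &self and each internal borrow lasting only a single statement, a callback fired mid-feed can safely re-enter the same object, which is the whole point of the reentrancy work. A toy demonstration of the panic-free counterpart to the BorrowMutError sketch above (hypothetical types, not Servo's):

    use std::cell::RefCell;
    use std::collections::VecDeque;

    struct Tokenizer {
        input: RefCell<VecDeque<String>>,
    }

    impl Tokenizer {
        fn push(&self, chunk: &str) {
            self.input.borrow_mut().push_back(chunk.to_string());
        }

        fn feed(&self, on_script: &dyn Fn(&Tokenizer)) {
            loop {
                // Take one chunk; this borrow ends at the semicolon,
                // before any callback runs.
                let next = self.input.borrow_mut().pop_front();
                match next {
                    Some(chunk) if chunk.contains("<script>") => on_script(self),
                    Some(_) => {},
                    None => break,
                }
            }
        }
    }

    fn main() {
        let tokenizer = Tokenizer { input: RefCell::new(VecDeque::new()) };
        tokenizer.push("<p>hello</p>");
        tokenizer.push("<script>document.write(...)</script>");
        // The callback re-enters the tokenizer mid-feed without panicking.
        tokenizer.feed(&|t| t.push("<p>written by script</p>"));
        assert!(tokenizer.input.borrow().is_empty());
    }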
@@ -4,6 +4,7 @@
 
 #![allow(crown::unrooted_must_root)]
 
+use std::cell::Cell;
 use std::io;
 
 use html5ever::buffer_queue::BufferQueue;
@@ -47,7 +48,7 @@ impl Tokenizer {
         let sink = Sink {
             base_url: url,
             document: Dom::from_ref(document),
-            current_line: 1,
+            current_line: Cell::new(1),
             script: Default::default(),
             parsing_algorithm,
         };
@@ -78,7 +79,7 @@ impl Tokenizer {
         Tokenizer { inner }
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
+    pub fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         match self.inner.feed(input) {
             TokenizerResult::Done => TokenizerResult::Done,
             TokenizerResult::Script(script) => {
@@ -87,7 +88,7 @@ impl Tokenizer {
         }
     }
 
-    pub fn end(&mut self) {
+    pub fn end(&self) {
         self.inner.end();
     }
 
@@ -95,7 +96,7 @@ impl Tokenizer {
         &self.inner.sink.sink.base_url
     }
 
-    pub fn set_plaintext_state(&mut self) {
+    pub fn set_plaintext_state(&self) {
         self.inner.set_plaintext_state();
     }
 }
@@ -4,7 +4,6 @@
 
 use std::borrow::Cow;
 use std::cell::Cell;
-use std::mem;
 
 use base::id::PipelineId;
 use base64::engine::general_purpose;
@@ -102,13 +101,13 @@ pub struct ServoParser {
     /// Input received from network.
     #[ignore_malloc_size_of = "Defined in html5ever"]
     #[no_trace]
-    network_input: DomRefCell<BufferQueue>,
+    network_input: BufferQueue,
     /// Input received from script. Used only to support document.write().
     #[ignore_malloc_size_of = "Defined in html5ever"]
     #[no_trace]
-    script_input: DomRefCell<BufferQueue>,
+    script_input: BufferQueue,
     /// The tokenizer of this parser.
-    tokenizer: DomRefCell<Tokenizer>,
+    tokenizer: Tokenizer,
     /// Whether to expect any further input from the associated network request.
     last_chunk_received: Cell<bool>,
     /// Whether this parser should avoid passing any further data to the tokenizer.
@@ -122,10 +121,10 @@ pub struct ServoParser {
     /// We do a quick-and-dirty parse of the input looking for resources to prefetch.
     // TODO: if we had speculative parsing, we could do this when speculatively
     // building the DOM. https://github.com/servo/servo/pull/19203
-    prefetch_tokenizer: DomRefCell<prefetch::Tokenizer>,
+    prefetch_tokenizer: prefetch::Tokenizer,
     #[ignore_malloc_size_of = "Defined in html5ever"]
     #[no_trace]
-    prefetch_input: DomRefCell<BufferQueue>,
+    prefetch_input: BufferQueue,
 }
 
 pub struct ElementAttribute {
@@ -147,7 +146,7 @@ impl ElementAttribute {
 
 impl ServoParser {
     pub fn parser_is_not_active(&self) -> bool {
-        self.can_write() || self.tokenizer.try_borrow_mut().is_ok()
+        self.can_write()
     }
 
     pub fn parse_html_document(document: &Document, input: Option<DOMString>, url: ServoUrl) {
@@ -302,12 +301,9 @@ impl ServoParser {
         assert!(self.suspended.get());
         self.suspended.set(false);
 
-        mem::swap(
-            &mut *self.script_input.borrow_mut(),
-            &mut *self.network_input.borrow_mut(),
-        );
-        while let Some(chunk) = self.script_input.borrow_mut().pop_front() {
-            self.network_input.borrow_mut().push_back(chunk);
+        self.script_input.swap_with(&self.network_input);
+        while let Some(chunk) = self.script_input.pop_front() {
+            self.network_input.push_back(chunk);
         }
 
         let script_nesting_level = self.script_nesting_level.get();
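These call sites read naturally once BufferQueue itself uses interior mutability: with html5ever 0.28 its mutators (push_back, pop_front, swap_with, replace_with, as exercised throughout this diff) take &self, which is what lets the DomRefCell wrappers around network_input and script_input disappear. A toy stand-in, assuming only the methods this diff actually uses:

    use std::cell::RefCell;
    use std::collections::VecDeque;

    // Toy equivalent of a queue with interior mutability: every method takes
    // &self and the RefCell is an internal detail, so callers no longer need
    // to wrap the queue in their own cell.
    #[derive(Default)]
    struct BufferQueue {
        buffers: RefCell<VecDeque<String>>,
    }

    impl BufferQueue {
        fn push_back(&self, buffer: String) {
            self.buffers.borrow_mut().push_back(buffer);
        }
        fn pop_front(&self) -> Option<String> {
            self.buffers.borrow_mut().pop_front()
        }
        fn is_empty(&self) -> bool {
            self.buffers.borrow().is_empty()
        }
        fn swap_with(&self, other: &BufferQueue) {
            std::mem::swap(
                &mut *self.buffers.borrow_mut(),
                &mut *other.buffers.borrow_mut(),
            );
        }
        fn replace_with(&self, other: BufferQueue) {
            *self.buffers.borrow_mut() = other.buffers.into_inner();
        }
    }

    fn main() {
        let script_input = BufferQueue::default();
        let network_input = BufferQueue::default();
        network_input.push_back("<p>".to_string());
        script_input.swap_with(&network_input);
        assert!(network_input.is_empty());
        assert_eq!(script_input.pop_front().as_deref(), Some("<p>"));
        script_input.replace_with(BufferQueue::default());
    }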
@@ -335,9 +331,7 @@ impl ServoParser {
             // parser is suspended, we just append everything to the
             // script input and abort these steps.
             for chunk in text {
-                self.script_input
-                    .borrow_mut()
-                    .push_back(String::from(chunk).into());
+                self.script_input.push_back(String::from(chunk).into());
             }
             return;
         }
@@ -345,21 +339,21 @@ impl ServoParser {
         // There is no pending parsing blocking script, so all previous calls
         // to document.write() should have seen their entire input tokenized
         // and process, with nothing pushed to the parser script input.
-        assert!(self.script_input.borrow().is_empty());
+        assert!(self.script_input.is_empty());
 
-        let mut input = BufferQueue::default();
+        let input = BufferQueue::default();
         for chunk in text {
             input.push_back(String::from(chunk).into());
         }
 
-        self.tokenize(|tokenizer| tokenizer.feed(&mut input));
+        self.tokenize(|tokenizer| tokenizer.feed(&input));
 
         if self.suspended.get() {
             // Parser got suspended, insert remaining input at end of
             // script input, following anything written by scripts executed
             // reentrantly during this call.
             while let Some(chunk) = input.pop_front() {
-                self.script_input.borrow_mut().push_back(chunk);
+                self.script_input.push_back(chunk);
             }
             return;
         }
@@ -389,15 +383,15 @@ impl ServoParser {
         self.aborted.set(true);
 
         // Step 1.
-        *self.script_input.borrow_mut() = BufferQueue::default();
-        *self.network_input.borrow_mut() = BufferQueue::default();
+        self.script_input.replace_with(BufferQueue::default());
+        self.network_input.replace_with(BufferQueue::default());
 
         // Step 2.
         self.document
             .set_ready_state(DocumentReadyState::Interactive);
 
         // Step 3.
-        self.tokenizer.borrow_mut().end();
+        self.tokenizer.end();
         self.document.set_current_parser(None);
 
         // Step 4.
@@ -416,16 +410,16 @@ impl ServoParser {
             document: Dom::from_ref(document),
             bom_sniff: DomRefCell::new(Some(Vec::with_capacity(3))),
             network_decoder: DomRefCell::new(Some(NetworkDecoder::new(document.encoding()))),
-            network_input: DomRefCell::new(BufferQueue::default()),
-            script_input: DomRefCell::new(BufferQueue::default()),
-            tokenizer: DomRefCell::new(tokenizer),
+            network_input: BufferQueue::default(),
+            script_input: BufferQueue::default(),
+            tokenizer,
             last_chunk_received: Cell::new(false),
             suspended: Default::default(),
             script_nesting_level: Default::default(),
             aborted: Default::default(),
             script_created_parser: kind == ParserKind::ScriptCreated,
-            prefetch_tokenizer: DomRefCell::new(prefetch::Tokenizer::new(document)),
-            prefetch_input: DomRefCell::new(BufferQueue::default()),
+            prefetch_tokenizer: prefetch::Tokenizer::new(document),
+            prefetch_input: BufferQueue::default(),
         }
     }
 
@@ -454,15 +448,12 @@ impl ServoParser {
             // to prefetch. If the user script uses `document.write()`
             // to overwrite the network input, this prefetching may
            // have been wasted, but in most cases it won't.
-            let mut prefetch_input = self.prefetch_input.borrow_mut();
-            prefetch_input.push_back(chunk.clone());
-            self.prefetch_tokenizer
-                .borrow_mut()
-                .feed(&mut prefetch_input);
+            self.prefetch_input.push_back(chunk.clone());
+            self.prefetch_tokenizer.feed(&self.prefetch_input);
        }
        // Push the chunk into the network input stream,
        // which is tokenized lazily.
-        self.network_input.borrow_mut().push_back(chunk);
+        self.network_input.push_back(chunk);
    }
 
    fn push_bytes_input_chunk(&self, chunk: Vec<u8>) {
@@ -513,7 +504,7 @@ impl ServoParser {
            iframe: TimerMetadataFrameType::RootWindow,
            incremental: TimerMetadataReflowType::FirstReflow,
        };
-        let profiler_category = self.tokenizer.borrow().profiler_category();
+        let profiler_category = self.tokenizer.profiler_category();
        profile(
            profiler_category,
            Some(metadata),
@@ -527,7 +518,7 @@ impl ServoParser {
    }
 
    fn do_parse_sync(&self) {
-        assert!(self.script_input.borrow().is_empty());
+        assert!(self.script_input.is_empty());
 
        // This parser will continue to parse while there is either pending input or
        // the parser remains unsuspended.
@@ -536,17 +527,17 @@ impl ServoParser {
            if let Some(decoder) = self.network_decoder.borrow_mut().take() {
                let chunk = decoder.finish();
                if !chunk.is_empty() {
-                    self.network_input.borrow_mut().push_back(chunk);
+                    self.network_input.push_back(chunk);
                }
            }
        }
-        self.tokenize(|tokenizer| tokenizer.feed(&mut self.network_input.borrow_mut()));
+        self.tokenize(|tokenizer| tokenizer.feed(&self.network_input));
 
        if self.suspended.get() {
            return;
        }
 
-        assert!(self.network_input.borrow().is_empty());
+        assert!(self.network_input.is_empty());
 
        if self.last_chunk_received.get() {
            self.finish();
@@ -570,16 +561,16 @@ impl ServoParser {
        }
    }
 
-    fn tokenize<F>(&self, mut feed: F)
+    fn tokenize<F>(&self, feed: F)
    where
-        F: FnMut(&mut Tokenizer) -> TokenizerResult<DomRoot<HTMLScriptElement>>,
+        F: Fn(&Tokenizer) -> TokenizerResult<DomRoot<HTMLScriptElement>>,
    {
        loop {
            assert!(!self.suspended.get());
            assert!(!self.aborted.get());
 
            self.document.reflow_if_reflow_timer_expired();
-            let script = match feed(&mut self.tokenizer.borrow_mut()) {
+            let script = match feed(&self.tokenizer) {
                TokenizerResult::Done => return,
                TokenizerResult::Script(script) => script,
            };
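tokenize can relax its bound from FnMut(&mut Tokenizer) to Fn(&Tokenizer) because the closures passed to it, like |tokenizer| tokenizer.feed(&input), no longer receive or capture anything mutably. The practical difference, sketched with plain String in place of the tokenizer type:

    // With &mut receivers, the driver needed FnMut and a mut binding:
    fn tokenize_old<F: FnMut(&mut String)>(target: &mut String, mut feed: F) {
        feed(target);
    }

    // With &self-style receivers, Fn suffices and nothing is declared mut:
    fn tokenize_new<F: Fn(&String)>(target: &String, feed: F) {
        feed(target);
        feed(target); // an Fn closure can also be called any number of times
    }

    fn main() {
        let mut s = String::from("a");
        tokenize_old(&mut s, |t| t.push('b'));
        tokenize_new(&s, |t| assert!(t.starts_with("ab")));
    }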
@ -617,8 +608,8 @@ impl ServoParser {
|
||||||
fn finish(&self) {
|
fn finish(&self) {
|
||||||
assert!(!self.suspended.get());
|
assert!(!self.suspended.get());
|
||||||
assert!(self.last_chunk_received.get());
|
assert!(self.last_chunk_received.get());
|
||||||
assert!(self.script_input.borrow().is_empty());
|
assert!(self.script_input.is_empty());
|
||||||
assert!(self.network_input.borrow().is_empty());
|
assert!(self.network_input.is_empty());
|
||||||
assert!(self.network_decoder.borrow().is_none());
|
assert!(self.network_decoder.borrow().is_none());
|
||||||
|
|
||||||
// Step 1.
|
// Step 1.
|
||||||
|
@ -626,11 +617,11 @@ impl ServoParser {
|
||||||
.set_ready_state(DocumentReadyState::Interactive);
|
.set_ready_state(DocumentReadyState::Interactive);
|
||||||
|
|
||||||
// Step 2.
|
// Step 2.
|
||||||
self.tokenizer.borrow_mut().end();
|
self.tokenizer.end();
|
||||||
self.document.set_current_parser(None);
|
self.document.set_current_parser(None);
|
||||||
|
|
||||||
// Steps 3-12 are in another castle, namely finish_load.
|
// Steps 3-12 are in another castle, namely finish_load.
|
||||||
let url = self.tokenizer.borrow().url().clone();
|
let url = self.tokenizer.url().clone();
|
||||||
self.document.finish_load(LoadType::PageSource(url));
|
self.document.finish_load(LoadType::PageSource(url));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -674,19 +665,19 @@ enum Tokenizer {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Tokenizer {
|
impl Tokenizer {
|
||||||
fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
|
fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
|
||||||
match *self {
|
match *self {
|
||||||
Tokenizer::Html(ref mut tokenizer) => tokenizer.feed(input),
|
Tokenizer::Html(ref tokenizer) => tokenizer.feed(input),
|
||||||
Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.feed(input),
|
Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.feed(input),
|
||||||
Tokenizer::Xml(ref mut tokenizer) => tokenizer.feed(input),
|
Tokenizer::Xml(ref tokenizer) => tokenizer.feed(input),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn end(&mut self) {
|
fn end(&self) {
|
||||||
match *self {
|
match *self {
|
||||||
Tokenizer::Html(ref mut tokenizer) => tokenizer.end(),
|
Tokenizer::Html(ref tokenizer) => tokenizer.end(),
|
||||||
Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.end(),
|
Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.end(),
|
||||||
Tokenizer::Xml(ref mut tokenizer) => tokenizer.end(),
|
Tokenizer::Xml(ref tokenizer) => tokenizer.end(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -698,10 +689,10 @@ impl Tokenizer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_plaintext_state(&mut self) {
|
fn set_plaintext_state(&self) {
|
||||||
match *self {
|
match *self {
|
||||||
Tokenizer::Html(ref mut tokenizer) => tokenizer.set_plaintext_state(),
|
Tokenizer::Html(ref tokenizer) => tokenizer.set_plaintext_state(),
|
||||||
Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.set_plaintext_state(),
|
Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.set_plaintext_state(),
|
||||||
Tokenizer::Xml(_) => unimplemented!(),
|
Tokenizer::Xml(_) => unimplemented!(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -852,7 +843,7 @@ impl FetchResponseListener for ParserContext {
|
||||||
let page = "<pre>\n".into();
|
let page = "<pre>\n".into();
|
||||||
parser.push_string_input_chunk(page);
|
parser.push_string_input_chunk(page);
|
||||||
parser.parse_sync();
|
parser.parse_sync();
|
||||||
parser.tokenizer.borrow_mut().set_plaintext_state();
|
parser.tokenizer.set_plaintext_state();
|
||||||
},
|
},
|
||||||
(mime::TEXT, mime::HTML, _) => match error {
|
(mime::TEXT, mime::HTML, _) => match error {
|
||||||
Some(NetworkError::SslValidation(reason, bytes)) => {
|
Some(NetworkError::SslValidation(reason, bytes)) => {
|
||||||
|
@ -1040,7 +1031,7 @@ pub struct Sink {
|
||||||
#[no_trace]
|
#[no_trace]
|
||||||
base_url: ServoUrl,
|
base_url: ServoUrl,
|
||||||
document: Dom<Document>,
|
document: Dom<Document>,
|
||||||
current_line: u64,
|
current_line: Cell<u64>,
|
||||||
script: MutNullableDom<HTMLScriptElement>,
|
script: MutNullableDom<HTMLScriptElement>,
|
||||||
parsing_algorithm: ParsingAlgorithm,
|
parsing_algorithm: ParsingAlgorithm,
|
||||||
}
|
}
|
||||||
|
@@ -1068,12 +1059,12 @@ impl TreeSink for Sink {
     type Handle = Dom<Node>;

     #[allow(crown::unrooted_must_root)]
-    fn get_document(&mut self) -> Dom<Node> {
+    fn get_document(&self) -> Dom<Node> {
         Dom::from_ref(self.document.upcast())
     }

     #[allow(crown::unrooted_must_root)]
-    fn get_template_contents(&mut self, target: &Dom<Node>) -> Dom<Node> {
+    fn get_template_contents(&self, target: &Dom<Node>) -> Dom<Node> {
         let template = target
             .downcast::<HTMLTemplateElement>()
             .expect("tried to get template contents of non-HTMLTemplateElement in HTML parsing");
@@ -1096,7 +1087,7 @@ impl TreeSink for Sink {

     #[allow(crown::unrooted_must_root)]
     fn create_element(
-        &mut self,
+        &self,
         name: QualName,
         attrs: Vec<Attribute>,
         _flags: ElementFlags,
@@ -1109,20 +1100,20 @@ impl TreeSink for Sink {
             name,
             attrs,
             &self.document,
-            ElementCreator::ParserCreated(self.current_line),
+            ElementCreator::ParserCreated(self.current_line.get()),
             self.parsing_algorithm,
         );
         Dom::from_ref(element.upcast())
     }

     #[allow(crown::unrooted_must_root)]
-    fn create_comment(&mut self, text: StrTendril) -> Dom<Node> {
+    fn create_comment(&self, text: StrTendril) -> Dom<Node> {
         let comment = Comment::new(DOMString::from(String::from(text)), &self.document, None);
         Dom::from_ref(comment.upcast())
     }

     #[allow(crown::unrooted_must_root)]
-    fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Dom<Node> {
+    fn create_pi(&self, target: StrTendril, data: StrTendril) -> Dom<Node> {
         let doc = &*self.document;
         let pi = ProcessingInstruction::new(
             DOMString::from(String::from(target)),
@@ -1133,7 +1124,7 @@ impl TreeSink for Sink {
     }

     fn associate_with_form(
-        &mut self,
+        &self,
         target: &Dom<Node>,
         form: &Dom<Node>,
         nodes: (&Dom<Node>, Option<&Dom<Node>>),
@@ -1163,7 +1154,7 @@ impl TreeSink for Sink {
     }

     #[allow(crown::unrooted_must_root)]
-    fn append_before_sibling(&mut self, sibling: &Dom<Node>, new_node: NodeOrText<Dom<Node>>) {
+    fn append_before_sibling(&self, sibling: &Dom<Node>, new_node: NodeOrText<Dom<Node>>) {
         let parent = sibling
             .GetParentNode()
             .expect("append_before_sibling called on node without parent");
@@ -1171,11 +1162,11 @@ impl TreeSink for Sink {
         insert(&parent, Some(sibling), new_node, self.parsing_algorithm);
     }

-    fn parse_error(&mut self, msg: Cow<'static, str>) {
+    fn parse_error(&self, msg: Cow<'static, str>) {
         debug!("Parse error: {}", msg);
     }

-    fn set_quirks_mode(&mut self, mode: QuirksMode) {
+    fn set_quirks_mode(&self, mode: QuirksMode) {
         let mode = match mode {
             QuirksMode::Quirks => ServoQuirksMode::Quirks,
             QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks,
@@ -1185,13 +1176,13 @@ impl TreeSink for Sink {
     }

     #[allow(crown::unrooted_must_root)]
-    fn append(&mut self, parent: &Dom<Node>, child: NodeOrText<Dom<Node>>) {
+    fn append(&self, parent: &Dom<Node>, child: NodeOrText<Dom<Node>>) {
         insert(parent, None, child, self.parsing_algorithm);
     }

     #[allow(crown::unrooted_must_root)]
     fn append_based_on_parent_node(
-        &mut self,
+        &self,
         elem: &Dom<Node>,
         prev_elem: &Dom<Node>,
         child: NodeOrText<Dom<Node>>,
@@ -1204,7 +1195,7 @@ impl TreeSink for Sink {
     }

     fn append_doctype_to_document(
-        &mut self,
+        &self,
         name: StrTendril,
         public_id: StrTendril,
         system_id: StrTendril,
@@ -1221,7 +1212,7 @@ impl TreeSink for Sink {
             .expect("Appending failed");
     }

-    fn add_attrs_if_missing(&mut self, target: &Dom<Node>, attrs: Vec<Attribute>) {
+    fn add_attrs_if_missing(&self, target: &Dom<Node>, attrs: Vec<Attribute>) {
         let elem = target
             .downcast::<Element>()
             .expect("tried to set attrs on non-Element in HTML parsing");
@@ -1234,20 +1225,20 @@ impl TreeSink for Sink {
         }
     }

-    fn remove_from_parent(&mut self, target: &Dom<Node>) {
+    fn remove_from_parent(&self, target: &Dom<Node>) {
         if let Some(ref parent) = target.GetParentNode() {
             parent.RemoveChild(target).unwrap();
         }
     }

-    fn mark_script_already_started(&mut self, node: &Dom<Node>) {
+    fn mark_script_already_started(&self, node: &Dom<Node>) {
         let script = node.downcast::<HTMLScriptElement>();
         if let Some(script) = script {
             script.set_already_started(true)
         }
     }

-    fn complete_script(&mut self, node: &Dom<Node>) -> NextParserState {
+    fn complete_script(&self, node: &Dom<Node>) -> NextParserState {
         if let Some(script) = node.downcast() {
             self.script.set(Some(script));
             NextParserState::Suspend
@@ -1256,7 +1247,7 @@ impl TreeSink for Sink {
         }
     }

-    fn reparent_children(&mut self, node: &Dom<Node>, new_parent: &Dom<Node>) {
+    fn reparent_children(&self, node: &Dom<Node>, new_parent: &Dom<Node>) {
         while let Some(ref child) = node.GetFirstChild() {
             new_parent.AppendChild(child).unwrap();
         }
@@ -1273,11 +1264,11 @@ impl TreeSink for Sink {
         })
     }

-    fn set_current_line(&mut self, line_number: u64) {
-        self.current_line = line_number;
+    fn set_current_line(&self, line_number: u64) {
+        self.current_line.set(line_number);
     }

-    fn pop(&mut self, node: &Dom<Node>) {
+    fn pop(&self, node: &Dom<Node>) {
         let node = DomRoot::from_ref(&**node);
         vtable_for(&node).pop();
     }
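Every TreeSink method in this impl drops its &mut receiver. The motivation is reentrancy: complete_script hands a script element back to the caller, and that script may call document.write() and re-enter the parser while the outer call is still on the stack. A sink with &mut methods has to live behind a RefCell, and a reentrant call then means a nested borrow_mut(). The sketch below (hypothetical MutSink/SharedSink types, not Servo's) contrasts the two shapes:

    use std::cell::{Cell, RefCell};

    struct MutSink {
        line: u64,
    }

    // &mut methods force the owner to wrap the sink in RefCell; a
    // reentrant parse becomes a nested borrow_mut().
    fn feed_mut(sink: &RefCell<MutSink>) {
        let mut outer = sink.borrow_mut();
        outer.line += 1;
        // A document.write() arriving here would need sink.borrow_mut()
        // again while `outer` is still alive: a BorrowMutError panic.
    }

    struct SharedSink {
        line: Cell<u64>,
    }

    // &self methods never hold a whole-sink borrow, so a nested call is
    // just another shared reference.
    fn feed_shared(sink: &SharedSink, reenter: bool) {
        sink.line.set(sink.line.get() + 1);
        if reenter {
            feed_shared(sink, false);
        }
    }

    fn main() {
        feed_mut(&RefCell::new(MutSink { line: 0 }));
        let sink = SharedSink { line: Cell::new(0) };
        feed_shared(&sink, true);
        assert_eq!(sink.line.get(), 2);
    }

With &self plus per-field Cells, no whole-sink borrow is held across a callback, so the nested call in feed_shared is safe by construction.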
@@ -2,6 +2,8 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

+use std::cell::{Cell, RefCell};
+
 use base::id::PipelineId;
 use html5ever::buffer_queue::BufferQueue;
 use html5ever::tokenizer::states::RawKind;
@@ -41,7 +43,7 @@ impl Tokenizer {
         let sink = PrefetchSink {
             origin: document.origin().immutable().clone(),
             pipeline_id: document.global().pipeline_id(),
-            base_url: None,
+            base_url: RefCell::new(None),
             document_url: document.url(),
             referrer: document.global().get_referrer(),
             referrer_policy: document.get_referrer_policy(),
@@ -49,14 +51,14 @@ impl Tokenizer {
             // Initially we set prefetching to false, and only set it
             // true after the first script tag, since that is what will
             // block the main parser.
-            prefetching: false,
+            prefetching: Cell::new(false),
         };
         let options = Default::default();
         let inner = HtmlTokenizer::new(sink, options);
         Tokenizer { inner }
     }

-    pub fn feed(&mut self, input: &mut BufferQueue) {
+    pub fn feed(&self, input: &BufferQueue) {
         while let TokenizerResult::Script(PrefetchHandle) = self.inner.feed(input) {}
     }
 }
@@ -70,14 +72,14 @@ struct PrefetchSink {
     #[no_trace]
     document_url: ServoUrl,
     #[no_trace]
-    base_url: Option<ServoUrl>,
+    base_url: RefCell<Option<ServoUrl>>,
     #[no_trace]
     referrer: Referrer,
     #[no_trace]
     referrer_policy: Option<ReferrerPolicy>,
     #[no_trace]
     resource_threads: ResourceThreads,
-    prefetching: bool,
+    prefetching: Cell<bool>,
 }

 /// The prefetch tokenizer produces trivial results
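ServoUrl is not Copy, so base_url gets RefCell rather than Cell: reads take a shared borrow, writes a short-lived exclusive one. A minimal sketch of the check-then-write shape used by the base-tag handling below, assuming a hypothetical PrefetchState with String in place of ServoUrl:

    use std::cell::RefCell;

    struct PrefetchState {
        base_url: RefCell<Option<String>>,
    }

    impl PrefetchState {
        fn set_base(&self, url: String) {
            // The borrow() temporary is dropped as soon as the condition
            // has been evaluated, so the borrow_mut() below never
            // overlaps it.
            if self.base_url.borrow().is_none() {
                *self.base_url.borrow_mut() = Some(url);
            }
        }
    }

    fn main() {
        let state = PrefetchState { base_url: RefCell::new(None) };
        state.set_base("https://example.com/".to_owned());
        state.set_base("https://ignored.example/".to_owned()); // first write wins
        assert_eq!(state.base_url.borrow().as_deref(), Some("https://example.com/"));
    }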
@@ -85,17 +87,13 @@ struct PrefetchHandle;

 impl TokenSink for PrefetchSink {
     type Handle = PrefetchHandle;
-    fn process_token(
-        &mut self,
-        token: Token,
-        _line_number: u64,
-    ) -> TokenSinkResult<PrefetchHandle> {
+    fn process_token(&self, token: Token, _line_number: u64) -> TokenSinkResult<PrefetchHandle> {
         let tag = match token {
             Token::TagToken(ref tag) => tag,
             _ => return TokenSinkResult::Continue,
         };
         match (tag.kind, &tag.name) {
-            (TagKind::StartTag, &local_name!("script")) if self.prefetching => {
+            (TagKind::StartTag, &local_name!("script")) if self.prefetching.get() => {
                 if let Some(url) = self.get_url(tag, local_name!("src")) {
                     debug!("Prefetch script {}", url);
                     let cors_setting = self.get_cors_settings(tag, local_name!("crossorigin"));
@@ -123,7 +121,7 @@ impl TokenSink for PrefetchSink {
                 }
                 TokenSinkResult::RawData(RawKind::ScriptData)
             },
-            (TagKind::StartTag, &local_name!("img")) if self.prefetching => {
+            (TagKind::StartTag, &local_name!("img")) if self.prefetching.get() => {
                 if let Some(url) = self.get_url(tag, local_name!("src")) {
                     debug!("Prefetch {} {}", tag.name, url);
                     let request = image_fetch_request(
@@ -141,7 +139,7 @@ impl TokenSink for PrefetchSink {
                 }
                 TokenSinkResult::Continue
             },
-            (TagKind::StartTag, &local_name!("link")) if self.prefetching => {
+            (TagKind::StartTag, &local_name!("link")) if self.prefetching.get() => {
                 if let Some(rel) = self.get_attr(tag, local_name!("rel")) {
                     if rel.value.eq_ignore_ascii_case("stylesheet") {
                         if let Some(url) = self.get_url(tag, local_name!("href")) {
@@ -176,14 +174,14 @@ impl TokenSink for PrefetchSink {
             },
             (TagKind::EndTag, &local_name!("script")) => {
                 // After the first script tag, the main parser is blocked, so it's worth prefetching.
-                self.prefetching = true;
+                self.prefetching.set(true);
                 TokenSinkResult::Script(PrefetchHandle)
             },
             (TagKind::StartTag, &local_name!("base")) => {
                 if let Some(url) = self.get_url(tag, local_name!("href")) {
-                    if self.base_url.is_none() {
+                    if self.base_url.borrow().is_none() {
                         debug!("Setting base {}", url);
-                        self.base_url = Some(url);
+                        *self.base_url.borrow_mut() = Some(url);
                     }
                 }
                 TokenSinkResult::Continue
@@ -200,7 +198,8 @@ impl PrefetchSink {

     fn get_url(&self, tag: &Tag, name: LocalName) -> Option<ServoUrl> {
         let attr = self.get_attr(tag, name)?;
-        let base = self.base_url.as_ref().unwrap_or(&self.document_url);
+        let base_url = self.base_url.borrow();
+        let base = base_url.as_ref().unwrap_or(&self.document_url);
         ServoUrl::parse_with_base(Some(base), &attr.value).ok()
     }

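The two-step binding in get_url is deliberate: the Ref guard returned by borrow() must outlive the &ServoUrl borrowed out of it, so the guard is bound to a local first. A reduced illustration of the same lifetime rule, with a hypothetical resolve function and String in place of ServoUrl:

    use std::cell::RefCell;

    fn resolve(stored: &RefCell<Option<String>>, fallback: &str) -> String {
        // Binding the guard keeps the Ref alive while `base` borrows from
        // it; `let base = stored.borrow().as_deref().unwrap_or(fallback);`
        // would drop the temporary guard at the end of the statement and
        // fail to compile.
        let guard = stored.borrow();
        let base = guard.as_deref().unwrap_or(fallback);
        format!("{base}/resource")
    }

    fn main() {
        let base = RefCell::new(Some(String::from("https://example.com")));
        assert_eq!(resolve(&base, "https://fallback"), "https://example.com/resource");
    }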
@@ -4,6 +4,8 @@

 #![allow(crown::unrooted_must_root)]

+use std::cell::Cell;
+
 use html5ever::tokenizer::TokenizerResult;
 use js::jsapi::JSTracer;
 use servo_url::ServoUrl;
@@ -30,7 +32,7 @@ impl Tokenizer {
         let sink = Sink {
             base_url: url,
             document: Dom::from_ref(document),
-            current_line: 1,
+            current_line: Cell::new(1),
             script: Default::default(),
             parsing_algorithm: ParsingAlgorithm::Normal,
         };
@@ -41,7 +43,7 @@ impl Tokenizer {
         Tokenizer { inner: tok }
     }

-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
+    pub fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         self.inner.run(input);
         match self.inner.sink.sink.script.take() {
             Some(script) => TokenizerResult::Script(script),
@@ -49,7 +51,7 @@ impl Tokenizer {
         }
     }

-    pub fn end(&mut self) {
+    pub fn end(&self) {
         self.inner.end()
     }
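Both tokenizer front ends also change feed to take &self and a shared &BufferQueue, which implies the updated html5ever buffer queue is internally mutable: the active parse loop and a reentrant document.write() can push into and drain the same queue. A hand-rolled stand-in for such a queue (not html5ever's BufferQueue type):

    use std::cell::RefCell;
    use std::collections::VecDeque;

    struct InputQueue {
        buffers: RefCell<VecDeque<String>>,
    }

    impl InputQueue {
        // Pushing needs only a shared reference, so script running
        // mid-parse can append input to the queue being drained.
        fn push_back(&self, chunk: String) {
            self.buffers.borrow_mut().push_back(chunk);
        }

        fn pop_front(&self) -> Option<String> {
            self.buffers.borrow_mut().pop_front()
        }
    }

    fn main() {
        let queue = InputQueue { buffers: RefCell::new(VecDeque::new()) };
        queue.push_back("<p>outer".to_owned());
        queue.push_back("<p>written mid-parse".to_owned());
        while let Some(chunk) = queue.pop_front() {
            println!("tokenizing {chunk}");
        }
    }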
@@ -545,9 +545,6 @@ impl Window {

     // see note at https://dom.spec.whatwg.org/#concept-event-dispatch step 2
     pub fn dispatch_event_with_target_override(&self, event: &Event) -> EventStatus {
-        if self.has_document() {
-            assert!(self.Document().can_invoke_script());
-        }
         event.dispatch(self.upcast(), true)
     }
 }
33 tests/wpt/mozilla/meta/MANIFEST.json vendored
@@ -13776,6 +13776,39 @@
                 {}
             ]
         ],
+        "parser-reentrancy-customelement.window.js": [
+            "0df997aa6c521ef31c32c4054568ed005e011663",
+            [
+                "mozilla/parser-reentrancy-customelement.window.html?async",
+                {
+                    "script_metadata": [
+                        [
+                            "variant",
+                            "?default"
+                        ],
+                        [
+                            "variant",
+                            "?async"
+                        ]
+                    ]
+                }
+            ],
+            [
+                "mozilla/parser-reentrancy-customelement.window.html?default",
+                {
+                    "script_metadata": [
+                        [
+                            "variant",
+                            "?default"
+                        ],
+                        [
+                            "variant",
+                            "?async"
+                        ]
+                    ]
+                }
+            ]
+        ],
         "partial_shadow_dom.html": [
             "74e308f94036a6dbf5c4223cd3d229f49ffceb4e",
             [
6 tests/wpt/mozilla/meta/mozilla/parser-reentrancy-customelement.window.js.ini vendored Normal file
@@ -0,0 +1,6 @@
+[parser-reentrancy-customelement.window.html?default]
+  prefs: ["dom.servoparser.async_html_tokenizer.enabled:false"]
+
+[parser-reentrancy-customelement.window.html?async]
+  expected: CRASH
+  prefs: ["dom.servoparser.async_html_tokenizer.enabled:true"]
22 tests/wpt/mozilla/tests/mozilla/parser-reentrancy-customelement.window.js vendored Normal file
@@ -0,0 +1,22 @@
+// META: variant=?default
+// META: variant=?async
+
+let script = "\
+  class PopupInfo extends HTMLElement { \
+    connectedCallback() { \
+      frameElement.globalTest.step_timeout(() => frameElement.globalTest.done(), 0); \
+      document.open(); \
+      document.write('did not panic'); \
+      document.close(); \
+    } \
+  } \
+  \
+  customElements.define('popup-info', PopupInfo); \
+";
+
+async_test(function(t) {
+  let iframe = document.createElement('iframe');
+  iframe.globalTest = t;
+  iframe.srcdoc = "<script>" + script + "<" + "/script><popup-info></popup-info>";
+  document.body.appendChild(iframe);
+}, "Dynamic markup insertion during custom element callbacks does not panic");