diff --git a/Cargo.lock b/Cargo.lock
index 07d9b2fcaca..fd8cc6914b7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1300,7 +1300,7 @@ dependencies = [
 [[package]]
 name = "derive_common"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "darling",
  "proc-macro2",
@@ -1468,7 +1468,7 @@ dependencies = [
 [[package]]
 name = "dom"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "bitflags 2.6.0",
 ]
@@ -2852,9 +2852,9 @@ dependencies = [
 
 [[package]]
 name = "html5ever"
-version = "0.27.0"
+version = "0.28.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4"
+checksum = "0ff6858c1f7e2a470c5403091866fa95b36fe0dbac5d771f932c15e5ff1ee501"
 dependencies = [
  "log",
  "mac",
@@ -4073,7 +4073,7 @@ dependencies = [
 [[package]]
 name = "malloc_size_of"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "accountable-refcell",
  "app_units",
@@ -4123,9 +4123,9 @@ dependencies = [
 
 [[package]]
 name = "markup5ever"
-version = "0.12.1"
+version = "0.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45"
+checksum = "d581ff8be69d08a2efa23a959d81aa22b739073f749f067348bd4f4ba4b69195"
 dependencies = [
  "log",
  "phf 0.11.2",
@@ -5756,7 +5756,7 @@ checksum = "0495e4577c672de8254beb68d01a9b62d0e8a13c099edecdbedccce3223cd29f"
 [[package]]
 name = "selectors"
 version = "0.24.0"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "bitflags 2.6.0",
  "cssparser",
@@ -6070,7 +6070,7 @@ dependencies = [
 [[package]]
 name = "servo_arc"
 version = "0.2.0"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "serde",
  "stable_deref_trait",
@@ -6079,7 +6079,7 @@ dependencies = [
 [[package]]
 name = "servo_atoms"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "string_cache",
  "string_cache_codegen",
@@ -6277,7 +6277,7 @@ checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
 [[package]]
 name = "size_of_test"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "static_assertions",
 ]
@@ -6418,7 +6418,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
 [[package]]
 name = "static_prefs"
 version = "0.1.0"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 
 [[package]]
 name = "strck"
@@ -6471,7 +6471,7 @@ dependencies = [
 [[package]]
 name = "style"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "app_units",
  "arrayvec",
@@ -6530,7 +6530,7 @@ dependencies = [
 [[package]]
 name = "style_config"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "lazy_static",
 ]
@@ -6538,7 +6538,7 @@ dependencies = [
 [[package]]
 name = "style_derive"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "darling",
  "derive_common",
@@ -6569,7 +6569,7 @@ dependencies = [
 [[package]]
 name = "style_traits"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "app_units",
  "bitflags 2.6.0",
@@ -6918,7 +6918,7 @@ dependencies = [
 [[package]]
 name = "to_shmem"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "cssparser",
  "servo_arc",
@@ -6931,7 +6931,7 @@ dependencies = [
 [[package]]
 name = "to_shmem_derive"
 version = "0.0.1"
-source = "git+https://github.com/servo/stylo?branch=2024-07-16#33c26645a906afbe35821ba567c7c92d96a3ba14"
+source = "git+https://github.com/servo/stylo?branch=2024-07-16#947990669824c192736f63f982e38b7e62150688"
 dependencies = [
  "darling",
  "derive_common",
@@ -8431,9 +8431,9 @@ checksum = "539a77ee7c0de333dcc6da69b177380a0b81e0dacfa4f7344c465a36871ee601"
 
 [[package]]
 name = "xml5ever"
-version = "0.18.1"
+version = "0.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bbb26405d8e919bc1547a5aa9abc95cbfa438f04844f5fdd9dc7596b748bf69"
+checksum = "d7b906d34d867d216b2d79fb0e9470aaa7f4948ea86b44c27846efedd596076c"
 dependencies = [
  "log",
  "mac",
diff --git a/Cargo.toml b/Cargo.toml
index 471fb040f96..41262ea6189 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -58,7 +58,7 @@ gstreamer-gl-sys = "0.22"
 gstreamer-sys = "0.22"
 gstreamer-video = "0.22"
 headers = "0.3"
-html5ever = "0.27"
+html5ever = "0.28"
 http = "0.2"
 hyper = "0.14"
 hyper-rustls = { version = "0.24", default-features = false, features = ["acceptor", "http1", "http2", "logging", "tls12", "webpki-tokio"] }
@@ -142,7 +142,7 @@ wgpu-core = { git = "https://github.com/gfx-rs/wgpu", rev = "781b54a8b9cee1a2cb22bda565662edec52eb70e" }
 wgpu-types = { git = "https://github.com/gfx-rs/wgpu", rev = "781b54a8b9cee1a2cb22bda565662edec52eb70e" }
 windows-sys = "0.59"
 xi-unicode = "0.1.0"
-xml5ever = "0.18"
+xml5ever = "0.19"
 
 [profile.release]
 opt-level = 3
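Context for the source changes below: html5ever 0.28 and xml5ever 0.19 rework the sink traits so that `TreeSink`/`TokenSink` methods take `&self` rather than `&mut self`. Every mechanical change in the Servo diffs that follow flows from that: sinks move their mutable state into `Cell`/`RefCell`. A minimal sketch of the pattern, using a simplified stand-in trait rather than the real html5ever one:

```rust
// Sketch only: a stand-in for the html5ever 0.28 sink style, not Servo code.
use std::cell::Cell;

trait SinkLike {
    // 0.28-style: shared receiver, so the tokenizer can hold the sink
    // behind a plain reference while scripts re-enter it.
    fn set_current_line(&self, line: u64);
}

struct Sink {
    // Interior mutability replaces direct field mutation through &mut self.
    current_line: Cell<u64>,
}

impl SinkLike for Sink {
    fn set_current_line(&self, line: u64) {
        self.current_line.set(line);
    }
}

fn main() {
    let sink = Sink { current_line: Cell::new(1) };
    sink.set_current_line(42);
    assert_eq!(sink.current_line.get(), 42);
}
```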
diff --git a/components/script/dom/document.rs b/components/script/dom/document.rs
index 048628e805f..9151a2be9a7 100644
--- a/components/script/dom/document.rs
+++ b/components/script/dom/document.rs
@@ -2670,18 +2670,6 @@ impl Document {
         self.current_parser.get()
     }
 
-    pub fn can_invoke_script(&self) -> bool {
-        match self.get_current_parser() {
-            Some(parser) => {
-                // It is safe to run script if the parser is not actively parsing,
-                // or if it is impossible to interact with the token stream.
-                parser.parser_is_not_active() ||
-                    self.throw_on_dynamic_markup_insertion_counter.get() > 0
-            },
-            None => true,
-        }
-    }
-
     /// Iterate over all iframes in the document.
     pub fn iter_iframes(&self) -> impl Iterator<Item = DomRoot<HTMLIFrameElement>> + '_ {
         self.upcast::<Node>()
diff --git a/components/script/dom/eventtarget.rs b/components/script/dom/eventtarget.rs
index aa85c5ac090..26756cefd89 100644
--- a/components/script/dom/eventtarget.rs
+++ b/components/script/dom/eventtarget.rs
@@ -395,11 +395,6 @@ impl EventTarget {
     }
 
     pub fn dispatch_event(&self, event: &Event) -> EventStatus {
-        if let Some(window) = self.global().downcast::<Window>() {
-            if window.has_document() {
-                assert!(window.Document().can_invoke_script());
-            }
-        };
         event.dispatch(self, false)
     }
 
diff --git a/components/script/dom/servoparser/async_html.rs b/components/script/dom/servoparser/async_html.rs
index 7200b9ed5c6..ed405c9ca61 100644
--- a/components/script/dom/servoparser/async_html.rs
+++ b/components/script/dom/servoparser/async_html.rs
@@ -5,7 +5,7 @@
 #![allow(crown::unrooted_must_root)]
 
 use std::borrow::Cow;
-use std::cell::Cell;
+use std::cell::{Cell, Ref, RefCell, RefMut};
 use std::collections::vec_deque::VecDeque;
 use std::collections::HashMap;
 use std::thread;
@@ -171,7 +171,7 @@ enum ToHtmlTokenizerMsg {
 }
 
 fn create_buffer_queue(mut buffers: VecDeque<SendTendril<UTF8>>) -> BufferQueue {
-    let mut buffer_queue = BufferQueue::default();
+    let buffer_queue = BufferQueue::default();
     while let Some(st) = buffers.pop_front() {
         buffer_queue.push_back(StrTendril::from(st));
     }
@@ -214,7 +214,7 @@ pub struct Tokenizer {
     #[no_trace]
     html_tokenizer_sender: Sender<ToHtmlTokenizerMsg>,
     #[ignore_malloc_size_of = "Defined in std"]
-    nodes: HashMap<ParseNodeId, Dom<Node>>,
+    nodes: RefCell<HashMap<ParseNodeId, Dom<Node>>>,
     #[no_trace]
     url: ServoUrl,
     parsing_algorithm: ParsingAlgorithm,
@@ -236,17 +236,17 @@ impl Tokenizer {
             None => ParsingAlgorithm::Normal,
         };
 
-        let mut tokenizer = Tokenizer {
+        let tokenizer = Tokenizer {
             document: Dom::from_ref(document),
             receiver: tokenizer_receiver,
             html_tokenizer_sender: to_html_tokenizer_sender,
-            nodes: HashMap::new(),
+            nodes: RefCell::new(HashMap::new()),
             url,
             parsing_algorithm: algorithm,
         };
 
         tokenizer.insert_node(0, Dom::from_ref(document.upcast()));
 
-        let mut sink = Sink::new(to_tokenizer_sender.clone());
+        let sink = Sink::new(to_tokenizer_sender.clone());
         let mut ctxt_parse_node = None;
         let mut form_parse_node = None;
         let mut fragment_context_is_some = false;
@@ -283,7 +283,7 @@ impl Tokenizer {
         tokenizer
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
+    pub fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         let mut send_tendrils = VecDeque::new();
         while let Some(str) = input.pop_front() {
             send_tendrils.push_back(SendTendril::from(str));
@@ -306,7 +306,7 @@ impl Tokenizer {
                 ToTokenizerMsg::ProcessOperation(parse_op) => self.process_operation(parse_op),
                 ToTokenizerMsg::TokenizerResultDone { updated_input } => {
                     let buffer_queue = create_buffer_queue(updated_input);
-                    *input = buffer_queue;
+                    input.replace_with(buffer_queue);
                     return TokenizerResult::Done;
                 },
                 ToTokenizerMsg::TokenizerResultScript {
@@ -314,7 +314,7 @@ impl Tokenizer {
                     updated_input,
                 } => {
                     let buffer_queue = create_buffer_queue(updated_input);
-                    *input = buffer_queue;
+                    input.replace_with(buffer_queue);
                     let script = self.get_node(&script.id);
                     return TokenizerResult::Script(DomRoot::from_ref(script.downcast().unwrap()));
                 },
@@ -323,7 +323,7 @@ impl Tokenizer {
         }
     }
 
-    pub fn end(&mut self) {
+    pub fn end(&self) {
         self.html_tokenizer_sender
             .send(ToHtmlTokenizerMsg::End)
             .unwrap();
@@ -344,21 +344,23 @@ impl Tokenizer {
         &self.url
     }
 
-    pub fn set_plaintext_state(&mut self) {
+    pub fn set_plaintext_state(&self) {
         self.html_tokenizer_sender
             .send(ToHtmlTokenizerMsg::SetPlainTextState)
             .unwrap();
     }
 
-    fn insert_node(&mut self, id: ParseNodeId, node: Dom<Node>) {
-        assert!(self.nodes.insert(id, node).is_none());
+    fn insert_node(&self, id: ParseNodeId, node: Dom<Node>) {
+        assert!(self.nodes.borrow_mut().insert(id, node).is_none());
     }
 
-    fn get_node<'a>(&'a self, id: &ParseNodeId) -> &'a Dom<Node> {
-        self.nodes.get(id).expect("Node not found!")
+    fn get_node<'a>(&'a self, id: &ParseNodeId) -> Ref<'a, Dom<Node>> {
+        Ref::map(self.nodes.borrow(), |nodes| {
+            nodes.get(id).expect("Node not found!")
+        })
     }
 
-    fn append_before_sibling(&mut self, sibling: ParseNodeId, node: NodeOrText) {
+    fn append_before_sibling(&self, sibling: ParseNodeId, node: NodeOrText) {
         let node = match node {
             NodeOrText::Node(n) => {
                 HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id)))
@@ -373,7 +375,7 @@ impl Tokenizer {
         super::insert(parent, Some(sibling), node, self.parsing_algorithm);
     }
 
-    fn append(&mut self, parent: ParseNodeId, node: NodeOrText) {
+    fn append(&self, parent: ParseNodeId, node: NodeOrText) {
         let node = match node {
             NodeOrText::Node(n) => {
                 HtmlNodeOrText::AppendNode(Dom::from_ref(&**self.get_node(&n.id)))
@@ -398,7 +400,7 @@ impl Tokenizer {
         x.is_in_same_home_subtree(y)
     }
 
-    fn process_operation(&mut self, op: ParseOperation) {
+    fn process_operation(&self, op: ParseOperation) {
         let document = DomRoot::from_ref(&**self.get_node(&0));
         let document = document
             .downcast::<Document>()
@@ -469,8 +471,8 @@ impl Tokenizer {
                     .expect("Appending failed");
             },
             ParseOperation::AddAttrsIfMissing { target, attrs } => {
-                let elem = self
-                    .get_node(&target)
+                let node = self.get_node(&target);
+                let elem = node
                     .downcast::<Element>()
                     .expect("tried to set attrs on non-Element in HTML parsing");
                 for attr in attrs {
@@ -479,11 +481,12 @@ impl Tokenizer {
             },
             ParseOperation::RemoveFromParent { target } => {
                 if let Some(ref parent) = self.get_node(&target).GetParentNode() {
-                    parent.RemoveChild(self.get_node(&target)).unwrap();
+                    parent.RemoveChild(&self.get_node(&target)).unwrap();
                 }
             },
             ParseOperation::MarkScriptAlreadyStarted { node } => {
-                let script = self.get_node(&node).downcast::<HTMLScriptElement>();
+                let node = self.get_node(&node);
+                let script = node.downcast::<HTMLScriptElement>();
                 if let Some(script) = script {
                     script.set_already_started(true)
                 }
@@ -525,7 +528,7 @@ impl Tokenizer {
                 }
             },
             ParseOperation::Pop { node } => {
-                vtable_for(self.get_node(&node)).pop();
+                vtable_for(&self.get_node(&node)).pop();
             },
             ParseOperation::CreatePI { node, target, data } => {
                 let pi = ProcessingInstruction::new(
@@ -555,7 +558,7 @@ fn run(
         ..Default::default()
     };
 
-    let mut html_tokenizer = if fragment_context_is_some {
+    let html_tokenizer = if fragment_context_is_some {
        let tb =
            TreeBuilder::new_for_fragment(sink, ctxt_parse_node.unwrap(), form_parse_node, options);
 
@@ -575,8 +578,8 @@ fn run(
             .expect("Unexpected channel panic in html parser thread")
         {
             ToHtmlTokenizerMsg::Feed { input } => {
-                let mut input = create_buffer_queue(input);
-                let res = html_tokenizer.feed(&mut input);
+                let input = create_buffer_queue(input);
+                let res = html_tokenizer.feed(&input);
 
                 // Gather changes to 'input' and place them in 'updated_input',
                 // which will be sent to the main thread to update feed method's 'input'
@@ -611,8 +614,8 @@ struct ParseNodeData {
 }
 
 pub struct Sink {
-    current_line: u64,
-    parse_node_data: HashMap<ParseNodeId, ParseNodeData>,
+    current_line: Cell<u64>,
+    parse_node_data: RefCell<HashMap<ParseNodeId, ParseNodeData>>,
     next_parse_node_id: Cell<ParseNodeId>,
     document_node: ParseNode,
     sender: Sender<ToTokenizerMsg>,
 }
 
@@ -620,9 +623,9 @@ impl Sink {
     fn new(sender: Sender<ToTokenizerMsg>) -> Sink {
-        let mut sink = Sink {
-            current_line: 1,
-            parse_node_data: HashMap::new(),
+        let sink = Sink {
+            current_line: Cell::new(1),
+            parse_node_data: RefCell::new(HashMap::new()),
             next_parse_node_id: Cell::new(1),
             document_node: ParseNode {
                 id: 0,
@@ -635,7 +638,7 @@ impl Sink {
         sink
     }
 
-    fn new_parse_node(&mut self) -> ParseNode {
+    fn new_parse_node(&self) -> ParseNode {
         let id = self.next_parse_node_id.get();
         let data = ParseNodeData::default();
         self.insert_parse_node_data(id, data);
@@ -652,20 +655,20 @@ impl Sink {
             .unwrap();
     }
 
-    fn insert_parse_node_data(&mut self, id: ParseNodeId, data: ParseNodeData) {
-        assert!(self.parse_node_data.insert(id, data).is_none());
+    fn insert_parse_node_data(&self, id: ParseNodeId, data: ParseNodeData) {
+        assert!(self.parse_node_data.borrow_mut().insert(id, data).is_none());
     }
 
-    fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> &'a ParseNodeData {
-        self.parse_node_data
-            .get(id)
-            .expect("Parse Node data not found!")
+    fn get_parse_node_data<'a>(&'a self, id: &'a ParseNodeId) -> Ref<'a, ParseNodeData> {
+        Ref::map(self.parse_node_data.borrow(), |data| {
+            data.get(id).expect("Parse Node data not found!")
+        })
     }
 
-    fn get_parse_node_data_mut<'a>(&'a mut self, id: &'a ParseNodeId) -> &'a mut ParseNodeData {
-        self.parse_node_data
-            .get_mut(id)
-            .expect("Parse Node data not found!")
+    fn get_parse_node_data_mut<'a>(&'a self, id: &'a ParseNodeId) -> RefMut<'a, ParseNodeData> {
+        RefMut::map(self.parse_node_data.borrow_mut(), |data| {
+            data.get_mut(id).expect("Parse Node data not found!")
+        })
     }
 }
 
@@ -678,17 +681,17 @@ impl TreeSink for Sink {
 
     type Handle = ParseNode;
 
-    fn get_document(&mut self) -> Self::Handle {
+    fn get_document(&self) -> Self::Handle {
         self.document_node.clone()
     }
 
-    fn get_template_contents(&mut self, target: &Self::Handle) -> Self::Handle {
+    fn get_template_contents(&self, target: &Self::Handle) -> Self::Handle {
         if let Some(ref contents) = self.get_parse_node_data(&target.id).contents {
             return contents.clone();
         }
         let node = self.new_parse_node();
         {
-            let data = self.get_parse_node_data_mut(&target.id);
+            let mut data = self.get_parse_node_data_mut(&target.id);
             data.contents = Some(node.clone());
         }
         self.send_op(ParseOperation::GetTemplateContents {
@@ -711,7 +714,7 @@ impl TreeSink for Sink {
     }
 
     fn create_element(
-        &mut self,
+        &self,
         name: QualName,
         html_attrs: Vec<HtmlAttribute>,
         _flags: ElementFlags,
@@ -719,7 +722,7 @@ impl TreeSink for Sink {
         let mut node = self.new_parse_node();
         node.qual_name = Some(name.clone());
         {
-            let node_data = self.get_parse_node_data_mut(&node.id);
+            let mut node_data = self.get_parse_node_data_mut(&node.id);
             node_data.is_integration_point = html_attrs.iter().any(|attr| {
                 let attr_value = &String::from(attr.value.clone());
                 (attr.name.local == local_name!("encoding") && attr.name.ns == ns!()) &&
@@ -739,12 +742,12 @@ impl TreeSink for Sink {
             node: node.id,
             name,
             attrs,
-            current_line: self.current_line,
+            current_line: self.current_line.get(),
         });
         node
     }
 
-    fn create_comment(&mut self, text: StrTendril) -> Self::Handle {
+    fn create_comment(&self, text: StrTendril) -> Self::Handle {
         let node = self.new_parse_node();
         self.send_op(ParseOperation::CreateComment {
             text: String::from(text),
@@ -753,7 +756,7 @@ impl TreeSink for Sink {
         node
     }
 
-    fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> ParseNode {
+    fn create_pi(&self, target: StrTendril, data: StrTendril) -> ParseNode {
         let node = self.new_parse_node();
         self.send_op(ParseOperation::CreatePI {
             node: node.id,
@@ -764,7 +767,7 @@ impl TreeSink for Sink {
     }
 
     fn associate_with_form(
-        &mut self,
+        &self,
         target: &Self::Handle,
         form: &Self::Handle,
         nodes: (&Self::Handle, Option<&Self::Handle>),
@@ -779,7 +782,7 @@ impl TreeSink for Sink {
     }
 
     fn append_before_sibling(
-        &mut self,
+        &self,
         sibling: &Self::Handle,
         new_node: HtmlNodeOrText<Self::Handle>,
     ) {
@@ -794,7 +797,7 @@ impl TreeSink for Sink {
     }
 
     fn append_based_on_parent_node(
-        &mut self,
+        &self,
         elem: &Self::Handle,
         prev_elem: &Self::Handle,
         child: HtmlNodeOrText<Self::Handle>,
@@ -810,11 +813,11 @@ impl TreeSink for Sink {
         });
     }
 
-    fn parse_error(&mut self, msg: Cow<'static, str>) {
+    fn parse_error(&self, msg: Cow<'static, str>) {
         debug!("Parse error: {}", msg);
     }
 
-    fn set_quirks_mode(&mut self, mode: QuirksMode) {
+    fn set_quirks_mode(&self, mode: QuirksMode) {
         let mode = match mode {
             QuirksMode::Quirks => ServoQuirksMode::Quirks,
             QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks,
@@ -823,7 +826,7 @@ impl TreeSink for Sink {
         self.send_op(ParseOperation::SetQuirksMode { mode });
     }
 
-    fn append(&mut self, parent: &Self::Handle, child: HtmlNodeOrText<Self::Handle>) {
+    fn append(&self, parent: &Self::Handle, child: HtmlNodeOrText<Self::Handle>) {
         let child = match child {
             HtmlNodeOrText::AppendNode(node) => NodeOrText::Node(node),
             HtmlNodeOrText::AppendText(text) => NodeOrText::Text(String::from(text)),
@@ -835,7 +838,7 @@ impl TreeSink for Sink {
     }
 
     fn append_doctype_to_document(
-        &mut self,
+        &self,
         name: StrTendril,
         public_id: StrTendril,
         system_id: StrTendril,
@@ -847,7 +850,7 @@ impl TreeSink for Sink {
         });
     }
 
-    fn add_attrs_if_missing(&mut self, target: &Self::Handle, html_attrs: Vec<HtmlAttribute>) {
+    fn add_attrs_if_missing(&self, target: &Self::Handle, html_attrs: Vec<HtmlAttribute>) {
         let attrs = html_attrs
             .into_iter()
             .map(|attr| Attribute {
@@ -861,19 +864,19 @@ impl TreeSink for Sink {
         });
     }
 
-    fn remove_from_parent(&mut self, target: &Self::Handle) {
+    fn remove_from_parent(&self, target: &Self::Handle) {
         self.send_op(ParseOperation::RemoveFromParent { target: target.id });
     }
 
-    fn mark_script_already_started(&mut self, node: &Self::Handle) {
+    fn mark_script_already_started(&self, node: &Self::Handle) {
         self.send_op(ParseOperation::MarkScriptAlreadyStarted { node: node.id });
     }
 
-    fn complete_script(&mut self, _: &Self::Handle) -> NextParserState {
+    fn complete_script(&self, _: &Self::Handle) -> NextParserState {
         panic!("complete_script should not be called here!");
     }
 
-    fn reparent_children(&mut self, parent: &Self::Handle, new_parent: &Self::Handle) {
+    fn reparent_children(&self, parent: &Self::Handle, new_parent: &Self::Handle) {
         self.send_op(ParseOperation::ReparentChildren {
             parent: parent.id,
             new_parent: new_parent.id,
@@ -887,11 +890,11 @@ impl TreeSink for Sink {
         node_data.is_integration_point
     }
 
-    fn set_current_line(&mut self, line_number: u64) {
-        self.current_line = line_number;
+    fn set_current_line(&self, line_number: u64) {
+        self.current_line.set(line_number);
     }
 
-    fn pop(&mut self, node: &Self::Handle) {
+    fn pop(&self, node: &Self::Handle) {
         self.send_op(ParseOperation::Pop { node: node.id });
     }
 }
diff --git a/components/script/dom/servoparser/html.rs b/components/script/dom/servoparser/html.rs
index 3066abcbe61..b3184923609 100644
--- a/components/script/dom/servoparser/html.rs
+++ b/components/script/dom/servoparser/html.rs
@@ -4,6 +4,7 @@
 
 #![allow(crown::unrooted_must_root)]
 
+use std::cell::Cell;
 use std::io;
 
 use html5ever::buffer_queue::BufferQueue;
@@ -47,7 +48,7 @@ impl Tokenizer {
         let sink = Sink {
             base_url: url,
             document: Dom::from_ref(document),
-            current_line: 1,
+            current_line: Cell::new(1),
             script: Default::default(),
             parsing_algorithm,
         };
@@ -78,7 +79,7 @@ impl Tokenizer {
         Tokenizer { inner }
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
+    pub fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         match self.inner.feed(input) {
             TokenizerResult::Done => TokenizerResult::Done,
             TokenizerResult::Script(script) => {
@@ -87,7 +88,7 @@ impl Tokenizer {
         }
     }
 
-    pub fn end(&mut self) {
+    pub fn end(&self) {
         self.inner.end();
     }
 
@@ -95,7 +96,7 @@ impl Tokenizer {
         &self.inner.sink.sink.base_url
     }
 
-    pub fn set_plaintext_state(&mut self) {
+    pub fn set_plaintext_state(&self) {
         self.inner.set_plaintext_state();
     }
 }
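A note on the `Ref::map` pattern adopted in async_html.rs above: `get_node` and `get_parse_node_data` now return mapped `Ref`/`RefMut` guards into a `RefCell`'d map instead of plain references. A self-contained sketch of that pattern (types simplified; `u32` and `String` stand in for `ParseNodeId` and `Dom<Node>`):

```rust
// Sketch of the mapped-guard pattern; simplified types, not Servo code.
use std::cell::{Ref, RefCell};
use std::collections::HashMap;

struct Nodes {
    map: RefCell<HashMap<u32, String>>,
}

impl Nodes {
    // Returns a borrow of one entry; the whole map stays borrowed
    // for as long as the returned Ref is alive.
    fn get<'a>(&'a self, id: &u32) -> Ref<'a, String> {
        Ref::map(self.map.borrow(), |m| m.get(id).expect("Node not found!"))
    }
}

fn main() {
    let nodes = Nodes { map: RefCell::new(HashMap::new()) };
    nodes.map.borrow_mut().insert(0, "document".to_string());
    assert_eq!(*nodes.get(&0), "document");
}
```

This is also why several `process_operation` arms above gained a `let node = self.get_node(...);` binding before calling `downcast()`: the guard needs a named owner so it outlives the reference taken from it.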
diff --git a/components/script/dom/servoparser/mod.rs b/components/script/dom/servoparser/mod.rs
index 5323012e979..28806a5df0c 100644
--- a/components/script/dom/servoparser/mod.rs
+++ b/components/script/dom/servoparser/mod.rs
@@ -4,7 +4,6 @@
 
 use std::borrow::Cow;
 use std::cell::Cell;
-use std::mem;
 
 use base::id::PipelineId;
 use base64::engine::general_purpose;
@@ -102,13 +101,13 @@ pub struct ServoParser {
     /// Input received from network.
     #[ignore_malloc_size_of = "Defined in html5ever"]
     #[no_trace]
-    network_input: DomRefCell<BufferQueue>,
+    network_input: BufferQueue,
     /// Input received from script. Used only to support document.write().
     #[ignore_malloc_size_of = "Defined in html5ever"]
     #[no_trace]
-    script_input: DomRefCell<BufferQueue>,
+    script_input: BufferQueue,
     /// The tokenizer of this parser.
-    tokenizer: DomRefCell<Tokenizer>,
+    tokenizer: Tokenizer,
     /// Whether to expect any further input from the associated network request.
     last_chunk_received: Cell<bool>,
     /// Whether this parser should avoid passing any further data to the tokenizer.
@@ -122,10 +121,10 @@ pub struct ServoParser {
     /// We do a quick-and-dirty parse of the input looking for resources to prefetch.
     // TODO: if we had speculative parsing, we could do this when speculatively
     // building the DOM. https://github.com/servo/servo/pull/19203
-    prefetch_tokenizer: DomRefCell<prefetch::Tokenizer>,
+    prefetch_tokenizer: prefetch::Tokenizer,
     #[ignore_malloc_size_of = "Defined in html5ever"]
     #[no_trace]
-    prefetch_input: DomRefCell<BufferQueue>,
+    prefetch_input: BufferQueue,
 }
 
 pub struct ElementAttribute {
@@ -147,7 +146,7 @@ impl ElementAttribute {
 
 impl ServoParser {
     pub fn parser_is_not_active(&self) -> bool {
-        self.can_write() || self.tokenizer.try_borrow_mut().is_ok()
+        self.can_write()
     }
 
     pub fn parse_html_document(document: &Document, input: Option<DOMString>, url: ServoUrl) {
@@ -302,12 +301,9 @@ impl ServoParser {
         assert!(self.suspended.get());
         self.suspended.set(false);
 
-        mem::swap(
-            &mut *self.script_input.borrow_mut(),
-            &mut *self.network_input.borrow_mut(),
-        );
-        while let Some(chunk) = self.script_input.borrow_mut().pop_front() {
-            self.network_input.borrow_mut().push_back(chunk);
+        self.script_input.swap_with(&self.network_input);
+        while let Some(chunk) = self.script_input.pop_front() {
+            self.network_input.push_back(chunk);
         }
 
         let script_nesting_level = self.script_nesting_level.get();
@@ -335,9 +331,7 @@ impl ServoParser {
             // parser is suspended, we just append everything to the
             // script input and abort these steps.
             for chunk in text {
-                self.script_input
-                    .borrow_mut()
-                    .push_back(String::from(chunk).into());
+                self.script_input.push_back(String::from(chunk).into());
             }
             return;
         }
@@ -345,21 +339,21 @@ impl ServoParser {
         // There is no pending parsing blocking script, so all previous calls
         // to document.write() should have seen their entire input tokenized
         // and process, with nothing pushed to the parser script input.
-        assert!(self.script_input.borrow().is_empty());
+        assert!(self.script_input.is_empty());
 
-        let mut input = BufferQueue::default();
+        let input = BufferQueue::default();
         for chunk in text {
             input.push_back(String::from(chunk).into());
         }
 
-        self.tokenize(|tokenizer| tokenizer.feed(&mut input));
+        self.tokenize(|tokenizer| tokenizer.feed(&input));
 
         if self.suspended.get() {
             // Parser got suspended, insert remaining input at end of
             // script input, following anything written by scripts executed
             // reentrantly during this call.
             while let Some(chunk) = input.pop_front() {
-                self.script_input.borrow_mut().push_back(chunk);
+                self.script_input.push_back(chunk);
             }
             return;
         }
@@ -389,15 +383,15 @@ impl ServoParser {
         self.aborted.set(true);
 
         // Step 1.
-        *self.script_input.borrow_mut() = BufferQueue::default();
-        *self.network_input.borrow_mut() = BufferQueue::default();
+        self.script_input.replace_with(BufferQueue::default());
+        self.network_input.replace_with(BufferQueue::default());
 
         // Step 2.
         self.document
             .set_ready_state(DocumentReadyState::Interactive);
 
         // Step 3.
-        self.tokenizer.borrow_mut().end();
+        self.tokenizer.end();
         self.document.set_current_parser(None);
 
         // Step 4.
@@ -416,16 +410,16 @@ impl ServoParser {
             document: Dom::from_ref(document),
             bom_sniff: DomRefCell::new(Some(Vec::with_capacity(3))),
             network_decoder: DomRefCell::new(Some(NetworkDecoder::new(document.encoding()))),
-            network_input: DomRefCell::new(BufferQueue::default()),
-            script_input: DomRefCell::new(BufferQueue::default()),
-            tokenizer: DomRefCell::new(tokenizer),
+            network_input: BufferQueue::default(),
+            script_input: BufferQueue::default(),
+            tokenizer,
             last_chunk_received: Cell::new(false),
             suspended: Default::default(),
             script_nesting_level: Default::default(),
             aborted: Default::default(),
             script_created_parser: kind == ParserKind::ScriptCreated,
-            prefetch_tokenizer: DomRefCell::new(prefetch::Tokenizer::new(document)),
-            prefetch_input: DomRefCell::new(BufferQueue::default()),
+            prefetch_tokenizer: prefetch::Tokenizer::new(document),
+            prefetch_input: BufferQueue::default(),
         }
     }
 
@@ -454,15 +448,12 @@ impl ServoParser {
             // to prefetch. If the user script uses `document.write()`
             // to overwrite the network input, this prefetching may
             // have been wasted, but in most cases it won't.
-            let mut prefetch_input = self.prefetch_input.borrow_mut();
-            prefetch_input.push_back(chunk.clone());
-            self.prefetch_tokenizer
-                .borrow_mut()
-                .feed(&mut prefetch_input);
+            self.prefetch_input.push_back(chunk.clone());
+            self.prefetch_tokenizer.feed(&self.prefetch_input);
         }
         // Push the chunk into the network input stream,
         // which is tokenized lazily.
-        self.network_input.borrow_mut().push_back(chunk);
+        self.network_input.push_back(chunk);
     }
 
     fn push_bytes_input_chunk(&self, chunk: Vec<u8>) {
@@ -513,7 +504,7 @@ impl ServoParser {
             iframe: TimerMetadataFrameType::RootWindow,
             incremental: TimerMetadataReflowType::FirstReflow,
         };
-        let profiler_category = self.tokenizer.borrow().profiler_category();
+        let profiler_category = self.tokenizer.profiler_category();
         profile(
             profiler_category,
             Some(metadata),
@@ -527,7 +518,7 @@ impl ServoParser {
     }
 
     fn do_parse_sync(&self) {
-        assert!(self.script_input.borrow().is_empty());
+        assert!(self.script_input.is_empty());
 
         // This parser will continue to parse while there is either pending input or
         // the parser remains unsuspended.
@@ -536,17 +527,17 @@ impl ServoParser {
             if let Some(decoder) = self.network_decoder.borrow_mut().take() {
                 let chunk = decoder.finish();
                 if !chunk.is_empty() {
-                    self.network_input.borrow_mut().push_back(chunk);
+                    self.network_input.push_back(chunk);
                 }
             }
         }
 
-        self.tokenize(|tokenizer| tokenizer.feed(&mut self.network_input.borrow_mut()));
+        self.tokenize(|tokenizer| tokenizer.feed(&self.network_input));
 
         if self.suspended.get() {
             return;
         }
 
-        assert!(self.network_input.borrow().is_empty());
+        assert!(self.network_input.is_empty());
 
         if self.last_chunk_received.get() {
             self.finish();
@@ -570,16 +561,16 @@ impl ServoParser {
         }
     }
 
-    fn tokenize<F>(&self, mut feed: F)
+    fn tokenize<F>(&self, feed: F)
     where
-        F: FnMut(&mut Tokenizer) -> TokenizerResult<DomRoot<HTMLScriptElement>>,
+        F: Fn(&Tokenizer) -> TokenizerResult<DomRoot<HTMLScriptElement>>,
     {
         loop {
             assert!(!self.suspended.get());
             assert!(!self.aborted.get());
 
             self.document.reflow_if_reflow_timer_expired();
-            let script = match feed(&mut self.tokenizer.borrow_mut()) {
+            let script = match feed(&self.tokenizer) {
                 TokenizerResult::Done => return,
                 TokenizerResult::Script(script) => script,
             };
@@ -617,8 +608,8 @@ impl ServoParser {
     fn finish(&self) {
         assert!(!self.suspended.get());
         assert!(self.last_chunk_received.get());
-        assert!(self.script_input.borrow().is_empty());
-        assert!(self.network_input.borrow().is_empty());
+        assert!(self.script_input.is_empty());
+        assert!(self.network_input.is_empty());
         assert!(self.network_decoder.borrow().is_none());
 
         // Step 1.
@@ -626,11 +617,11 @@ impl ServoParser {
             .set_ready_state(DocumentReadyState::Interactive);
 
         // Step 2.
-        self.tokenizer.borrow_mut().end();
+        self.tokenizer.end();
         self.document.set_current_parser(None);
 
         // Steps 3-12 are in another castle, namely finish_load.
-        let url = self.tokenizer.borrow().url().clone();
+        let url = self.tokenizer.url().clone();
         self.document.finish_load(LoadType::PageSource(url));
     }
 }
@@ -674,19 +665,19 @@ enum Tokenizer {
 }
 
 impl Tokenizer {
-    fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
+    fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         match *self {
-            Tokenizer::Html(ref mut tokenizer) => tokenizer.feed(input),
-            Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.feed(input),
-            Tokenizer::Xml(ref mut tokenizer) => tokenizer.feed(input),
+            Tokenizer::Html(ref tokenizer) => tokenizer.feed(input),
+            Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.feed(input),
+            Tokenizer::Xml(ref tokenizer) => tokenizer.feed(input),
         }
     }
 
-    fn end(&mut self) {
+    fn end(&self) {
         match *self {
-            Tokenizer::Html(ref mut tokenizer) => tokenizer.end(),
-            Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.end(),
-            Tokenizer::Xml(ref mut tokenizer) => tokenizer.end(),
+            Tokenizer::Html(ref tokenizer) => tokenizer.end(),
+            Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.end(),
+            Tokenizer::Xml(ref tokenizer) => tokenizer.end(),
         }
     }
 
@@ -698,10 +689,10 @@ impl Tokenizer {
         }
     }
 
-    fn set_plaintext_state(&mut self) {
+    fn set_plaintext_state(&self) {
         match *self {
-            Tokenizer::Html(ref mut tokenizer) => tokenizer.set_plaintext_state(),
-            Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.set_plaintext_state(),
+            Tokenizer::Html(ref tokenizer) => tokenizer.set_plaintext_state(),
+            Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.set_plaintext_state(),
             Tokenizer::Xml(_) => unimplemented!(),
         }
     }
@@ -852,7 +843,7 @@ impl FetchResponseListener for ParserContext {
                 let page = "<pre>\n".into();
                 parser.push_string_input_chunk(page);
                 parser.parse_sync();
-                parser.tokenizer.borrow_mut().set_plaintext_state();
+                parser.tokenizer.set_plaintext_state();
             },
             (mime::TEXT, mime::HTML, _) => match error {
                 Some(NetworkError::SslValidation(reason, bytes)) => {
@@ -1040,7 +1031,7 @@ pub struct Sink {
     #[no_trace]
     base_url: ServoUrl,
     document: Dom<Document>,
-    current_line: u64,
+    current_line: Cell<u64>,
     script: MutNullableDom<HTMLScriptElement>,
     parsing_algorithm: ParsingAlgorithm,
 }
@@ -1068,12 +1059,12 @@ impl TreeSink for Sink {
     type Handle = Dom<Node>;
 
     #[allow(crown::unrooted_must_root)]
-    fn get_document(&mut self) -> Dom<Node> {
+    fn get_document(&self) -> Dom<Node> {
         Dom::from_ref(self.document.upcast())
     }
 
     #[allow(crown::unrooted_must_root)]
-    fn get_template_contents(&mut self, target: &Dom<Node>) -> Dom<Node> {
+    fn get_template_contents(&self, target: &Dom<Node>) -> Dom<Node> {
         let template = target
             .downcast::<HTMLTemplateElement>()
             .expect("tried to get template contents of non-HTMLTemplateElement in HTML parsing");
@@ -1096,7 +1087,7 @@ impl TreeSink for Sink {
 
     #[allow(crown::unrooted_must_root)]
     fn create_element(
-        &mut self,
+        &self,
         name: QualName,
         attrs: Vec<Attribute>,
         _flags: ElementFlags,
@@ -1109,20 +1100,20 @@ impl TreeSink for Sink {
             name,
             attrs,
             &self.document,
-            ElementCreator::ParserCreated(self.current_line),
+            ElementCreator::ParserCreated(self.current_line.get()),
             self.parsing_algorithm,
         );
         Dom::from_ref(element.upcast())
     }
 
     #[allow(crown::unrooted_must_root)]
-    fn create_comment(&mut self, text: StrTendril) -> Dom<Node> {
+    fn create_comment(&self, text: StrTendril) -> Dom<Node> {
         let comment = Comment::new(DOMString::from(String::from(text)), &self.document, None);
         Dom::from_ref(comment.upcast())
     }
 
     #[allow(crown::unrooted_must_root)]
-    fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Dom<Node> {
+    fn create_pi(&self, target: StrTendril, data: StrTendril) -> Dom<Node> {
         let doc = &*self.document;
         let pi = ProcessingInstruction::new(
             DOMString::from(String::from(target)),
@@ -1133,7 +1124,7 @@ impl TreeSink for Sink {
     }
 
     fn associate_with_form(
-        &mut self,
+        &self,
         target: &Dom<Node>,
         form: &Dom<Node>,
         nodes: (&Dom<Node>, Option<&Dom<Node>>),
@@ -1163,7 +1154,7 @@ impl TreeSink for Sink {
     }
 
     #[allow(crown::unrooted_must_root)]
-    fn append_before_sibling(&mut self, sibling: &Dom<Node>, new_node: NodeOrText<Dom<Node>>) {
+    fn append_before_sibling(&self, sibling: &Dom<Node>, new_node: NodeOrText<Dom<Node>>) {
         let parent = sibling
             .GetParentNode()
             .expect("append_before_sibling called on node without parent");
@@ -1171,11 +1162,11 @@ impl TreeSink for Sink {
         insert(&parent, Some(sibling), new_node, self.parsing_algorithm);
     }
 
-    fn parse_error(&mut self, msg: Cow<'static, str>) {
+    fn parse_error(&self, msg: Cow<'static, str>) {
         debug!("Parse error: {}", msg);
     }
 
-    fn set_quirks_mode(&mut self, mode: QuirksMode) {
+    fn set_quirks_mode(&self, mode: QuirksMode) {
         let mode = match mode {
             QuirksMode::Quirks => ServoQuirksMode::Quirks,
             QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks,
@@ -1185,13 +1176,13 @@ impl TreeSink for Sink {
     }
 
     #[allow(crown::unrooted_must_root)]
-    fn append(&mut self, parent: &Dom<Node>, child: NodeOrText<Dom<Node>>) {
+    fn append(&self, parent: &Dom<Node>, child: NodeOrText<Dom<Node>>) {
         insert(parent, None, child, self.parsing_algorithm);
     }
 
     #[allow(crown::unrooted_must_root)]
     fn append_based_on_parent_node(
-        &mut self,
+        &self,
         elem: &Dom<Node>,
         prev_elem: &Dom<Node>,
         child: NodeOrText<Dom<Node>>,
@@ -1204,7 +1195,7 @@ impl TreeSink for Sink {
     }
 
     fn append_doctype_to_document(
-        &mut self,
+        &self,
         name: StrTendril,
         public_id: StrTendril,
         system_id: StrTendril,
@@ -1221,7 +1212,7 @@ impl TreeSink for Sink {
             .expect("Appending failed");
     }
 
-    fn add_attrs_if_missing(&mut self, target: &Dom<Node>, attrs: Vec<Attribute>) {
+    fn add_attrs_if_missing(&self, target: &Dom<Node>, attrs: Vec<Attribute>) {
         let elem = target
             .downcast::<Element>()
             .expect("tried to set attrs on non-Element in HTML parsing");
@@ -1234,20 +1225,20 @@ impl TreeSink for Sink {
         }
     }
 
-    fn remove_from_parent(&mut self, target: &Dom<Node>) {
+    fn remove_from_parent(&self, target: &Dom<Node>) {
         if let Some(ref parent) = target.GetParentNode() {
             parent.RemoveChild(target).unwrap();
         }
     }
 
-    fn mark_script_already_started(&mut self, node: &Dom<Node>) {
+    fn mark_script_already_started(&self, node: &Dom<Node>) {
         let script = node.downcast::<HTMLScriptElement>();
         if let Some(script) = script {
             script.set_already_started(true)
         }
     }
 
-    fn complete_script(&mut self, node: &Dom<Node>) -> NextParserState {
+    fn complete_script(&self, node: &Dom<Node>) -> NextParserState {
         if let Some(script) = node.downcast() {
             self.script.set(Some(script));
             NextParserState::Suspend
@@ -1256,7 +1247,7 @@ impl TreeSink for Sink {
         }
     }
 
-    fn reparent_children(&mut self, node: &Dom<Node>, new_parent: &Dom<Node>) {
+    fn reparent_children(&self, node: &Dom<Node>, new_parent: &Dom<Node>) {
         while let Some(ref child) = node.GetFirstChild() {
             new_parent.AppendChild(child).unwrap();
         }
@@ -1273,11 +1264,11 @@ impl TreeSink for Sink {
             })
     }
 
-    fn set_current_line(&mut self, line_number: u64) {
-        self.current_line = line_number;
+    fn set_current_line(&self, line_number: u64) {
+        self.current_line.set(line_number);
     }
 
-    fn pop(&mut self, node: &Dom<Node>) {
+    fn pop(&self, node: &Dom<Node>) {
         let node = DomRoot::from_ref(&**node);
         vtable_for(&node).pop();
     }
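The `DomRefCell` wrappers around `network_input`, `script_input`, the tokenizer, and the prefetch state could be dropped because `BufferQueue` in html5ever 0.28 is itself internally mutable: every method the parser calls above (`push_back`, `pop_front`, `is_empty`, `replace_with`, `swap_with`) takes `&self`. A toy model of that interface, assuming only the methods this patch actually uses:

```rust
// Toy stand-in for html5ever 0.28's internally mutable BufferQueue;
// only models the methods used in this patch, not the real crate.
use std::cell::RefCell;
use std::collections::VecDeque;

#[derive(Default)]
struct BufferQueue {
    buffers: RefCell<VecDeque<String>>,
}

impl BufferQueue {
    fn push_back(&self, buf: String) {
        self.buffers.borrow_mut().push_back(buf);
    }
    fn pop_front(&self) -> Option<String> {
        self.buffers.borrow_mut().pop_front()
    }
    fn is_empty(&self) -> bool {
        self.buffers.borrow().is_empty()
    }
    fn replace_with(&self, other: BufferQueue) {
        *self.buffers.borrow_mut() = other.buffers.into_inner();
    }
    fn swap_with(&self, other: &BufferQueue) {
        self.buffers.swap(&other.buffers);
    }
}

fn main() {
    let network_input = BufferQueue::default();
    network_input.push_back("<p>hello".to_string());
    assert!(!network_input.is_empty());
    assert_eq!(network_input.pop_front().as_deref(), Some("<p>hello"));
    assert!(network_input.is_empty());
}
```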
diff --git a/components/script/dom/servoparser/prefetch.rs b/components/script/dom/servoparser/prefetch.rs
index 1f57ccc0a94..a96872590a3 100644
--- a/components/script/dom/servoparser/prefetch.rs
+++ b/components/script/dom/servoparser/prefetch.rs
@@ -2,6 +2,8 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
+use std::cell::{Cell, RefCell};
+
 use base::id::PipelineId;
 use html5ever::buffer_queue::BufferQueue;
 use html5ever::tokenizer::states::RawKind;
@@ -41,7 +43,7 @@ impl Tokenizer {
         let sink = PrefetchSink {
             origin: document.origin().immutable().clone(),
             pipeline_id: document.global().pipeline_id(),
-            base_url: None,
+            base_url: RefCell::new(None),
             document_url: document.url(),
             referrer: document.global().get_referrer(),
             referrer_policy: document.get_referrer_policy(),
@@ -49,14 +51,14 @@ impl Tokenizer {
             // Initially we set prefetching to false, and only set it
             // true after the first script tag, since that is what will
             // block the main parser.
-            prefetching: false,
+            prefetching: Cell::new(false),
         };
         let options = Default::default();
         let inner = HtmlTokenizer::new(sink, options);
         Tokenizer { inner }
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) {
+    pub fn feed(&self, input: &BufferQueue) {
         while let TokenizerResult::Script(PrefetchHandle) = self.inner.feed(input) {}
     }
 }
@@ -70,14 +72,14 @@ struct PrefetchSink {
     #[no_trace]
     document_url: ServoUrl,
     #[no_trace]
-    base_url: Option<ServoUrl>,
+    base_url: RefCell<Option<ServoUrl>>,
     #[no_trace]
     referrer: Referrer,
     #[no_trace]
     referrer_policy: Option<ReferrerPolicy>,
     #[no_trace]
     resource_threads: ResourceThreads,
-    prefetching: bool,
+    prefetching: Cell<bool>,
 }
 
 /// The prefetch tokenizer produces trivial results
@@ -85,17 +87,13 @@ struct PrefetchHandle;
 
 impl TokenSink for PrefetchSink {
     type Handle = PrefetchHandle;
-    fn process_token(
-        &mut self,
-        token: Token,
-        _line_number: u64,
-    ) -> TokenSinkResult<Self::Handle> {
+    fn process_token(&self, token: Token, _line_number: u64) -> TokenSinkResult<Self::Handle> {
         let tag = match token {
             Token::TagToken(ref tag) => tag,
             _ => return TokenSinkResult::Continue,
         };
         match (tag.kind, &tag.name) {
-            (TagKind::StartTag, &local_name!("script")) if self.prefetching => {
+            (TagKind::StartTag, &local_name!("script")) if self.prefetching.get() => {
                 if let Some(url) = self.get_url(tag, local_name!("src")) {
                     debug!("Prefetch script {}", url);
                     let cors_setting = self.get_cors_settings(tag, local_name!("crossorigin"));
@@ -123,7 +121,7 @@ impl TokenSink for PrefetchSink {
                 }
                 TokenSinkResult::RawData(RawKind::ScriptData)
             },
-            (TagKind::StartTag, &local_name!("img")) if self.prefetching => {
+            (TagKind::StartTag, &local_name!("img")) if self.prefetching.get() => {
                 if let Some(url) = self.get_url(tag, local_name!("src")) {
                     debug!("Prefetch {} {}", tag.name, url);
                     let request = image_fetch_request(
@@ -141,7 +139,7 @@ impl TokenSink for PrefetchSink {
                 }
                 TokenSinkResult::Continue
             },
-            (TagKind::StartTag, &local_name!("link")) if self.prefetching => {
+            (TagKind::StartTag, &local_name!("link")) if self.prefetching.get() => {
                 if let Some(rel) = self.get_attr(tag, local_name!("rel")) {
                     if rel.value.eq_ignore_ascii_case("stylesheet") {
                         if let Some(url) = self.get_url(tag, local_name!("href")) {
@@ -176,14 +174,14 @@ impl TokenSink for PrefetchSink {
             },
             (TagKind::EndTag, &local_name!("script")) => {
                 // After the first script tag, the main parser is blocked, so it's worth prefetching.
-                self.prefetching = true;
+                self.prefetching.set(true);
                 TokenSinkResult::Script(PrefetchHandle)
             },
             (TagKind::StartTag, &local_name!("base")) => {
                 if let Some(url) = self.get_url(tag, local_name!("href")) {
-                    if self.base_url.is_none() {
+                    if self.base_url.borrow().is_none() {
                         debug!("Setting base {}", url);
-                        self.base_url = Some(url);
+                        *self.base_url.borrow_mut() = Some(url);
                     }
                 }
                 TokenSinkResult::Continue
@@ -200,7 +198,8 @@ impl PrefetchSink {
 
     fn get_url(&self, tag: &Tag, name: LocalName) -> Option<ServoUrl> {
         let attr = self.get_attr(tag, name)?;
-        let base = self.base_url.as_ref().unwrap_or(&self.document_url);
+        let base_url = self.base_url.borrow();
+        let base = base_url.as_ref().unwrap_or(&self.document_url);
         ServoUrl::parse_with_base(Some(base), &attr.value).ok()
     }
 
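The small rewrite in `get_url` above is forced by the new `RefCell`: `borrow()` returns a guard, and a reference taken out of the guard must not outlive it, so the guard gets its own binding. In miniature:

```rust
// Why get_url needs a named binding for the borrow; simplified types.
use std::cell::RefCell;

fn main() {
    let base_url: RefCell<Option<String>> = RefCell::new(Some("https://example.com/".into()));
    let document_url = String::from("https://fallback.example/");

    // Written as one expression in a `let`, the temporary Ref guard would be
    // dropped at the end of the statement while `base` still points into it,
    // and the borrow checker rejects that. Naming the guard fixes the lifetime.
    let borrowed = base_url.borrow();
    let base = borrowed.as_ref().unwrap_or(&document_url);
    assert_eq!(base.as_str(), "https://example.com/");
}
```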
diff --git a/components/script/dom/servoparser/xml.rs b/components/script/dom/servoparser/xml.rs
index 26b43910100..d0d7b153b99 100644
--- a/components/script/dom/servoparser/xml.rs
+++ b/components/script/dom/servoparser/xml.rs
@@ -4,6 +4,8 @@
 
 #![allow(crown::unrooted_must_root)]
 
+use std::cell::Cell;
+
 use html5ever::tokenizer::TokenizerResult;
 use js::jsapi::JSTracer;
 use servo_url::ServoUrl;
@@ -30,7 +32,7 @@ impl Tokenizer {
         let sink = Sink {
             base_url: url,
             document: Dom::from_ref(document),
-            current_line: 1,
+            current_line: Cell::new(1),
             script: Default::default(),
             parsing_algorithm: ParsingAlgorithm::Normal,
         };
@@ -41,7 +43,7 @@ impl Tokenizer {
         Tokenizer { inner: tok }
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
+    pub fn feed(&self, input: &BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         self.inner.run(input);
         match self.inner.sink.sink.script.take() {
             Some(script) => TokenizerResult::Script(script),
@@ -49,7 +51,7 @@ impl Tokenizer {
         }
     }
 
-    pub fn end(&mut self) {
+    pub fn end(&self) {
         self.inner.end()
     }
 
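For the XML path, `feed` keeps the existing hand-off: the sink stashes a pending script while `run` drains the input, and `feed` then `take`s it, mapping the empty case to `Done`. The shape of that hand-off, with `Cell<Option<T>>` standing in for Servo's `MutNullableDom`:

```rust
// Sketch of the script hand-off; Cell<Option<T>> stands in for MutNullableDom.
use std::cell::Cell;

enum TokenizerResult<T> {
    Done,
    Script(T),
}

struct Sink {
    script: Cell<Option<String>>,
}

impl Sink {
    fn feed(&self) -> TokenizerResult<String> {
        // take() empties the cell, so a script is reported at most once.
        match self.script.take() {
            Some(script) => TokenizerResult::Script(script),
            None => TokenizerResult::Done,
        }
    }
}

fn main() {
    let sink = Sink { script: Cell::new(Some("app.js".to_string())) };
    assert!(matches!(sink.feed(), TokenizerResult::Script(_)));
    assert!(matches!(sink.feed(), TokenizerResult::Done));
}
```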
diff --git a/components/script/dom/window.rs b/components/script/dom/window.rs
index 00f03df0e21..d89c7ebecfe 100644
--- a/components/script/dom/window.rs
+++ b/components/script/dom/window.rs
@@ -545,9 +545,6 @@ impl Window {
 
     // see note at https://dom.spec.whatwg.org/#concept-event-dispatch step 2
     pub fn dispatch_event_with_target_override(&self, event: &Event) -> EventStatus {
-        if self.has_document() {
-            assert!(self.Document().can_invoke_script());
-        }
         event.dispatch(self.upcast(), true)
     }
 }
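The `can_invoke_script` assertions removed here (and in document.rs and eventtarget.rs above) guarded event dispatch against running script while the parser was mid-operation. With the tokenizer no longer behind a `DomRefCell`, that check cannot be expressed the same way, and the reentrancy it policed now surfaces as `RefCell` borrow conflicts inside the sinks; the new WPT test below makes one such case reproducible (note the expected CRASH for the `?async` variant). The hazard in miniature:

```rust
// Deliberately panicking sketch of parser reentrancy: an outer RefCell
// borrow is still live when re-entrant "script" tries to borrow mutably.
use std::cell::RefCell;

struct Parser {
    nodes: RefCell<Vec<&'static str>>,
}

impl Parser {
    fn process(&self) {
        let nodes = self.nodes.borrow(); // borrow held across script execution
        self.run_script(); // e.g. a custom element callback calling document.write()
        println!("{} nodes", nodes.len());
    }

    fn run_script(&self) {
        // Panics with "already borrowed: BorrowMutError" -- the same class of
        // crash the parser-reentrancy-customelement test exercises.
        self.nodes.borrow_mut().push("div");
    }
}

fn main() {
    let parser = Parser { nodes: RefCell::new(Vec::new()) };
    parser.process();
}
```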
diff --git a/tests/wpt/mozilla/meta/MANIFEST.json b/tests/wpt/mozilla/meta/MANIFEST.json
index fd72059ce36..159822ee27d 100644
--- a/tests/wpt/mozilla/meta/MANIFEST.json
+++ b/tests/wpt/mozilla/meta/MANIFEST.json
@@ -13776,6 +13776,39 @@
       {}
      ]
     ],
+    "parser-reentrancy-customelement.window.js": [
+     "0df997aa6c521ef31c32c4054568ed005e011663",
+     [
+      "mozilla/parser-reentrancy-customelement.window.html?async",
+      {
+       "script_metadata": [
+        [
+         "variant",
+         "?default"
+        ],
+        [
+         "variant",
+         "?async"
+        ]
+       ]
+      }
+     ],
+     [
+      "mozilla/parser-reentrancy-customelement.window.html?default",
+      {
+       "script_metadata": [
+        [
+         "variant",
+         "?default"
+        ],
+        [
+         "variant",
+         "?async"
+        ]
+       ]
+      }
+     ]
+    ],
     "partial_shadow_dom.html": [
      "74e308f94036a6dbf5c4223cd3d229f49ffceb4e",
      [
diff --git a/tests/wpt/mozilla/meta/mozilla/parser-reentrancy-customelement.window.js.ini b/tests/wpt/mozilla/meta/mozilla/parser-reentrancy-customelement.window.js.ini
new file mode 100644
index 00000000000..68996501463
--- /dev/null
+++ b/tests/wpt/mozilla/meta/mozilla/parser-reentrancy-customelement.window.js.ini
@@ -0,0 +1,6 @@
+[parser-reentrancy-customelement.window.html?default]
+  prefs: ["dom.servoparser.async_html_tokenizer.enabled:false"]
+
+[parser-reentrancy-customelement.window.html?async]
+  expected: CRASH
+  prefs: ["dom.servoparser.async_html_tokenizer.enabled:true"]
\ No newline at end of file
diff --git a/tests/wpt/mozilla/tests/mozilla/parser-reentrancy-customelement.window.js b/tests/wpt/mozilla/tests/mozilla/parser-reentrancy-customelement.window.js
new file mode 100644
index 00000000000..0df997aa6c5
--- /dev/null
+++ b/tests/wpt/mozilla/tests/mozilla/parser-reentrancy-customelement.window.js
@@ -0,0 +1,22 @@
+// META: variant=?default
+// META: variant=?async
+
+  let script = "\
+  class PopupInfo extends HTMLElement { \
+    connectedCallback() { \
+      frameElement.globalTest.step_timeout(() => frameElement.globalTest.done(), 0); \
+      document.open(); \
+      document.write('did not panic'); \
+      document.close(); \
+    } \
+  } \
+\
+customElements.define('popup-info', PopupInfo); \
+";
+
+  async_test(function(t) {
+    let iframe = document.createElement('iframe');
+    iframe.globalTest = t;
+    iframe.srcdoc = "