Mark the page source as loaded only after parsing is done

Anthony Ramine 2017-01-18 14:04:19 +01:00
parent d5442b87fc
commit 0f244d6948
10 changed files with 54 additions and 43 deletions
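
The hunks below move the point where the page-source load is reported as finished: instead of the fetch listener finishing it as soon as the network response ends, the parser's finish path now processes deferred scripts and only then calls finish_load(LoadType::PageSource(url)). A minimal, self-contained toy sketch of that ordering follows; the types and function names in it are illustrative stand-ins, not Servo's own, except where they echo identifiers visible in the diff:

    // Toy model of the ordering this commit establishes; not Servo code.
    struct Document {
        pending_loads: usize,
    }

    impl Document {
        fn process_deferred_scripts(&mut self) {
            // Run the scripts that were deferred until parsing finished.
        }

        fn finish_load(&mut self) {
            // The page source no longer blocks the document's load event.
            self.pending_loads -= 1;
        }
    }

    fn finish_parsing(document: &mut Document) {
        document.process_deferred_scripts();
        // Only now, with parsing and deferred scripts done, is the page
        // source marked as loaded.
        document.finish_load();
    }

    fn main() {
        let mut document = Document { pending_loads: 1 };
        finish_parsing(&mut document);
        assert_eq!(document.pending_loads, 0);
    }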

@@ -93,6 +93,10 @@ impl Tokenizer {
         self.inner.end();
     }
 
+    pub fn url(&self) -> &ServoUrl {
+        &self.inner.sink().sink().base_url
+    }
+
     pub fn set_plaintext_state(&mut self) {
         self.inner.set_plaintext_state();
     }

@@ -110,7 +110,8 @@ impl ServoParser {
         let url = context_document.url();
 
         // Step 1.
-        let loader = DocumentLoader::new(&*context_document.loader());
+        let loader = DocumentLoader::new_with_threads(context_document.loader().resource_threads().clone(),
+                                                      Some(url.clone()));
         let document = Document::new(window, None, Some(url.clone()),
                                      context_document.origin().alias(),
                                      IsHTMLDocument::HTMLDocument,
@@ -351,14 +352,17 @@ impl ServoParser {
         self.document.set_current_parser(None);
 
         if self.pipeline.is_some() {
             // Initial reflow.
             self.document.disarm_reflow_timeout();
             self.document.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
             let window = self.document.window();
             window.reflow(ReflowGoal::ForDisplay, ReflowQueryType::NoQuery, ReflowReason::FirstLoad);
         }
 
-        // Step 3.
+        // Steps 3-12 are in other castles, namely process_deferred_scripts and finish_load.
+        let url = self.tokenizer.borrow().url().clone();
         self.document.process_deferred_scripts();
+        self.document.finish_load(LoadType::PageSource(url));
     }
 }
@@ -401,6 +405,13 @@ impl Tokenizer {
         }
     }
 
+    fn url(&self) -> &ServoUrl {
+        match *self {
+            Tokenizer::Html(ref tokenizer) => tokenizer.url(),
+            Tokenizer::Xml(ref tokenizer) => tokenizer.url(),
+        }
+    }
+
     fn set_plaintext_state(&mut self) {
         match *self {
             Tokenizer::Html(ref mut tokenizer) => tokenizer.set_plaintext_state(),
@@ -558,9 +569,6 @@ impl FetchResponseListener for ParserContext {
             debug!("Failed to load page URL {}, error: {:?}", self.url, err);
         }
 
-        parser.document
-            .finish_load(LoadType::PageSource(self.url.clone()));
-
         parser.last_chunk_received.set(true);
         if !parser.suspended.get() {
             parser.parse_sync();

@@ -71,6 +71,10 @@ impl Tokenizer {
     pub fn end(&mut self) {
         self.inner.end()
     }
+
+    pub fn url(&self) -> &ServoUrl {
+        &self.inner.sink().sink().base_url
+    }
 }
 
 #[allow(unsafe_code)]