Mirror of https://github.com/servo/servo.git
Added Async HTML Tokenizer
parent 80488c4494
commit 161ff15d54
2 changed files with 502 additions and 1 deletion
@@ -51,6 +51,7 @@ use std::cell::Cell;
 use std::mem;
 use style::context::QuirksMode as ServoQuirksMode;
 
+mod async_html;
 mod html;
 mod xml;
 
@@ -138,6 +139,7 @@ impl ServoParser {
         // Step 11.
         let form = context_node.inclusive_ancestors()
             .find(|element| element.is::<HTMLFormElement>());
+
         let fragment_context = FragmentContext {
             context_elem: context_node,
             form_elem: form.r(),
@@ -145,7 +147,7 @@ impl ServoParser {
 
         let parser = ServoParser::new(&document,
                                       Tokenizer::Html(self::html::Tokenizer::new(&document,
-                                                                                 url.clone(),
+                                                                                 url,
                                                                                  Some(fragment_context))),
                                       LastChunkState::Received,
                                       ParserKind::Normal);
@@ -485,6 +487,7 @@ enum ParserKind {
 #[must_root]
 enum Tokenizer {
     Html(self::html::Tokenizer),
+    AsyncHtml(self::async_html::Tokenizer),
     Xml(self::xml::Tokenizer),
 }
 
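The hunk above is the heart of the change: a new AsyncHtml variant wraps the tokenizer implemented in async_html.rs, and the hunks that follow give every Tokenizer method a matching arm. Because Rust requires match expressions to be exhaustive, the code will not compile until each dispatch site handles the new variant, which is why the rest of the diff is mechanical. A minimal standalone sketch of this dispatch pattern follows; the types and the simplified feed signature are stand-ins for illustration, not Servo's actual API:

// Simplified stand-ins for the real tokenizer types.
struct HtmlTokenizer;
struct AsyncHtmlTokenizer;

impl HtmlTokenizer {
    fn feed(&mut self, input: &str) -> usize { input.len() }
}
impl AsyncHtmlTokenizer {
    fn feed(&mut self, input: &str) -> usize { input.len() }
}

// Adding a variant here forces every match over Tokenizer to handle it.
enum Tokenizer {
    Html(HtmlTokenizer),
    AsyncHtml(AsyncHtmlTokenizer),
}

impl Tokenizer {
    fn feed(&mut self, input: &str) -> usize {
        match *self {
            Tokenizer::Html(ref mut t) => t.feed(input),
            Tokenizer::AsyncHtml(ref mut t) => t.feed(input),
        }
    }
}

fn main() {
    let mut tokenizers = vec![
        Tokenizer::Html(HtmlTokenizer),
        Tokenizer::AsyncHtml(AsyncHtmlTokenizer),
    ];
    for tok in &mut tokenizers {
        // Both variants expose the same feed() through the enum.
        assert_eq!(tok.feed("<p>hi</p>"), 9);
    }
}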
@@ -492,6 +495,7 @@ impl Tokenizer {
     fn feed(&mut self, input: &mut BufferQueue) -> Result<(), Root<HTMLScriptElement>> {
         match *self {
             Tokenizer::Html(ref mut tokenizer) => tokenizer.feed(input),
+            Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.feed(input),
             Tokenizer::Xml(ref mut tokenizer) => tokenizer.feed(input),
         }
     }
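Judging from the signature, feed returns Ok(()) when the queued input has been consumed and Err with a script element when tokenization has to pause so that script can run before parsing resumes. A rough sketch of how a caller might drive a feed with that Ok/Err shape; PendingScript, the String chunks, and the byte counting are placeholders for illustration, not Servo's Root<HTMLScriptElement> or BufferQueue:

use std::collections::VecDeque;

// Placeholder for the pending <script> that the real feed() hands back.
struct PendingScript(String);

struct Tokenizer {
    processed: usize,
}

impl Tokenizer {
    // Drain the input queue; stop early when a script must be executed
    // before tokenization can continue (the Err case of feed() above).
    fn feed(&mut self, input: &mut VecDeque<String>) -> Result<(), PendingScript> {
        while let Some(chunk) = input.pop_front() {
            if chunk.contains("<script") {
                return Err(PendingScript(chunk));
            }
            self.processed += chunk.len();
        }
        Ok(())
    }
}

fn main() {
    let mut tok = Tokenizer { processed: 0 };
    let mut input: VecDeque<String> =
        vec!["<p>hi</p>".to_string(), "<script src=a.js></script>".to_string()].into();
    // Keep feeding until the queue is empty, running any script that blocks
    // the tokenizer before resuming, mirroring the Ok/Err contract above.
    loop {
        match tok.feed(&mut input) {
            Ok(()) => break,
            Err(PendingScript(src)) => println!("would execute: {}", src),
        }
    }
    println!("{} bytes tokenized", tok.processed);
}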
@@ -499,6 +503,7 @@ impl Tokenizer {
     fn end(&mut self) {
         match *self {
             Tokenizer::Html(ref mut tokenizer) => tokenizer.end(),
+            Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.end(),
             Tokenizer::Xml(ref mut tokenizer) => tokenizer.end(),
         }
     }
@@ -506,6 +511,7 @@ impl Tokenizer {
     fn url(&self) -> &ServoUrl {
         match *self {
             Tokenizer::Html(ref tokenizer) => tokenizer.url(),
+            Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.url(),
             Tokenizer::Xml(ref tokenizer) => tokenizer.url(),
         }
     }
@@ -513,6 +519,7 @@ impl Tokenizer {
     fn set_plaintext_state(&mut self) {
         match *self {
             Tokenizer::Html(ref mut tokenizer) => tokenizer.set_plaintext_state(),
+            Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.set_plaintext_state(),
             Tokenizer::Xml(_) => unimplemented!(),
         }
     }
@@ -520,6 +527,7 @@ impl Tokenizer {
     fn profiler_category(&self) -> ProfilerCategory {
         match *self {
             Tokenizer::Html(_) => ProfilerCategory::ScriptParseHTML,
+            Tokenizer::AsyncHtml(_) => ProfilerCategory::ScriptParseHTML,
             Tokenizer::Xml(_) => ProfilerCategory::ScriptParseXML,
         }
     }
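Both HTML tokenizers report under ProfilerCategory::ScriptParseHTML, so switching between the synchronous and asynchronous implementations does not fragment the parse-time profiles. As a rough illustration of how such a category can label timed work: the profiled helper and the printing below are inventions for this sketch; Servo routes timing through its profile_traits machinery instead:

use std::time::Instant;

// Illustrative stand-in for the profiler categories returned above.
#[derive(Debug, Clone, Copy)]
enum ProfilerCategory {
    ScriptParseHTML,
    ScriptParseXML,
}

// Time a closure and report it under the given category.
fn profiled<T>(category: ProfilerCategory, work: impl FnOnce() -> T) -> T {
    let start = Instant::now();
    let result = work();
    println!("{:?} took {:?}", category, start.elapsed());
    result
}

fn main() {
    // Both HTML tokenizer variants would report as ScriptParseHTML, so their
    // samples stay comparable; the XML tokenizer reports separately.
    let html_bytes = profiled(ProfilerCategory::ScriptParseHTML, || "<p>hi</p>".len());
    let xml_bytes = profiled(ProfilerCategory::ScriptParseXML, || "<x/>".len());
    println!("{} HTML bytes, {} XML bytes", html_bytes, xml_bytes);
}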