Refactor ServoParser Tokenizer to return TokenizerResult

This commit is contained in:
Nipun Garg 2020-01-27 23:46:37 +05:30 committed by Martin Robinson
parent 7aaad0aa7e
commit 41fe94244a
No known key found for this signature in database
GPG key ID: D56AA4FA55EFE6F8
4 changed files with 22 additions and 14 deletions

View file

@@ -272,7 +272,8 @@ impl Tokenizer {
         tokenizer
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
+    #[must_use]
+    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         let mut send_tendrils = VecDeque::new();
         while let Some(str) = input.pop_front() {
             send_tendrils.push_back(SendTendril::from(str));
@@ -296,7 +297,7 @@ impl Tokenizer {
             ToTokenizerMsg::TokenizerResultDone { updated_input } => {
                 let buffer_queue = create_buffer_queue(updated_input);
                 *input = buffer_queue;
-                return Ok(());
+                return TokenizerResult::Done;
             },
             ToTokenizerMsg::TokenizerResultScript {
                 script,
@@ -305,7 +306,7 @@ impl Tokenizer {
                 let buffer_queue = create_buffer_queue(updated_input);
                 *input = buffer_queue;
                 let script = self.get_node(&script.id);
-                return Err(DomRoot::from_ref(script.downcast().unwrap()));
+                return TokenizerResult::Script(DomRoot::from_ref(script.downcast().unwrap()));
            },
            ToTokenizerMsg::End => unreachable!(),
        };

View file

@@ -77,10 +77,13 @@ impl Tokenizer {
         Tokenizer { inner: inner }
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
+    #[must_use]
+    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         match self.inner.feed(input) {
-            TokenizerResult::Done => Ok(()),
-            TokenizerResult::Script(script) => Err(DomRoot::from_ref(script.downcast().unwrap())),
+            TokenizerResult::Done => TokenizerResult::Done,
+            TokenizerResult::Script(script) => {
+                TokenizerResult::Script(DomRoot::from_ref(script.downcast().unwrap()))
+            },
         }
     }

View file

@@ -43,6 +43,7 @@ use encoding_rs::Encoding;
 use html5ever::buffer_queue::BufferQueue;
 use html5ever::tendril::fmt::UTF8;
 use html5ever::tendril::{ByteTendril, StrTendril, TendrilSink};
+use html5ever::tokenizer::TokenizerResult;
 use html5ever::tree_builder::{ElementFlags, NextParserState, NodeOrText, QuirksMode, TreeSink};
 use html5ever::{Attribute, ExpandedName, LocalName, QualName};
 use hyper_serde::Serde;
@@ -589,7 +590,7 @@ impl ServoParser {
     fn tokenize<F>(&self, mut feed: F)
     where
-        F: FnMut(&mut Tokenizer) -> Result<(), DomRoot<HTMLScriptElement>>,
+        F: FnMut(&mut Tokenizer) -> TokenizerResult<DomRoot<HTMLScriptElement>>,
     {
         loop {
             assert!(!self.suspended.get());
@@ -597,8 +598,8 @@ impl ServoParser {
             self.document.reflow_if_reflow_timer_expired();
             let script = match feed(&mut *self.tokenizer.borrow_mut()) {
-                Ok(()) => return,
-                Err(script) => script,
+                TokenizerResult::Done => return,
+                TokenizerResult::Script(script) => script,
             };
 
             // https://html.spec.whatwg.org/multipage/#parsing-main-incdata
@@ -691,7 +692,8 @@ enum Tokenizer {
 }
 
 impl Tokenizer {
-    fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
+    #[must_use]
+    fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         match *self {
             Tokenizer::Html(ref mut tokenizer) => tokenizer.feed(input),
             Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.feed(input),

View file

@@ -10,6 +10,7 @@ use crate::dom::document::Document;
 use crate::dom::htmlscriptelement::HTMLScriptElement;
 use crate::dom::node::Node;
 use crate::dom::servoparser::{ParsingAlgorithm, Sink};
+use html5ever::tokenizer::TokenizerResult;
 use js::jsapi::JSTracer;
 use servo_url::ServoUrl;
 use xml5ever::buffer_queue::BufferQueue;
@@ -39,12 +40,13 @@ impl Tokenizer {
         Tokenizer { inner: tok }
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
+    #[must_use]
+    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
         self.inner.run(input);
-        if let Some(script) = self.inner.sink.sink.script.take() {
-            return Err(script);
+        match self.inner.sink.sink.script.take() {
+            Some(script) => TokenizerResult::Script(script),
+            None => TokenizerResult::Done,
         }
-        Ok(())
     }
 
     pub fn end(&mut self) {