Auto merge of #25623 - nipunG314:25516, r=mrobinson

Refactor ServoParser Tokenizer to return TokenizerResult

As stated in #25516, this PR refactors the feed functions in the following files to return a TokenizerResult instead of a Result:

- components/script/dom/servoparser/async_html.rs
- components/script/dom/servoparser/html.rs
- components/script/dom/servoparser/xml.rs
- components/script/dom/servoparser/mod.rs

Originally, these functions were returning the Err values for situations that didn't actually result in an error. This PR fixes that.

---
<!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `___` with appropriate data: -->
- [X] `./mach build -d` does not report any errors
- [X] `./mach test-tidy` does not report any errors
- [X] These changes fix #25516

<!-- Either: -->
- [X] These changes do not require tests because it is a refactor.

<!-- Also, please make sure that "Allow edits from maintainers" checkbox is checked, so that we can help you if you get stuck somewhere along the way.-->

<!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. -->
This commit is contained in:
bors-servo 2023-06-22 14:21:16 +02:00 committed by GitHub
commit 5ac7cdc18d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 22 additions and 14 deletions

View file

@ -272,7 +272,8 @@ impl Tokenizer {
tokenizer
}
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
#[must_use]
pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
let mut send_tendrils = VecDeque::new();
while let Some(str) = input.pop_front() {
send_tendrils.push_back(SendTendril::from(str));
@ -296,7 +297,7 @@ impl Tokenizer {
ToTokenizerMsg::TokenizerResultDone { updated_input } => {
let buffer_queue = create_buffer_queue(updated_input);
*input = buffer_queue;
return Ok(());
return TokenizerResult::Done;
},
ToTokenizerMsg::TokenizerResultScript {
script,
@ -305,7 +306,7 @@ impl Tokenizer {
let buffer_queue = create_buffer_queue(updated_input);
*input = buffer_queue;
let script = self.get_node(&script.id);
return Err(DomRoot::from_ref(script.downcast().unwrap()));
return TokenizerResult::Script(DomRoot::from_ref(script.downcast().unwrap()));
},
ToTokenizerMsg::End => unreachable!(),
};

View file

@ -77,10 +77,13 @@ impl Tokenizer {
Tokenizer { inner: inner }
}
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
#[must_use]
pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
match self.inner.feed(input) {
TokenizerResult::Done => Ok(()),
TokenizerResult::Script(script) => Err(DomRoot::from_ref(script.downcast().unwrap())),
TokenizerResult::Done => TokenizerResult::Done,
TokenizerResult::Script(script) => {
TokenizerResult::Script(DomRoot::from_ref(script.downcast().unwrap()))
},
}
}

View file

@ -43,6 +43,7 @@ use encoding_rs::Encoding;
use html5ever::buffer_queue::BufferQueue;
use html5ever::tendril::fmt::UTF8;
use html5ever::tendril::{ByteTendril, StrTendril, TendrilSink};
use html5ever::tokenizer::TokenizerResult;
use html5ever::tree_builder::{ElementFlags, NextParserState, NodeOrText, QuirksMode, TreeSink};
use html5ever::{Attribute, ExpandedName, LocalName, QualName};
use hyper_serde::Serde;
@ -589,7 +590,7 @@ impl ServoParser {
fn tokenize<F>(&self, mut feed: F)
where
F: FnMut(&mut Tokenizer) -> Result<(), DomRoot<HTMLScriptElement>>,
F: FnMut(&mut Tokenizer) -> TokenizerResult<DomRoot<HTMLScriptElement>>,
{
loop {
assert!(!self.suspended.get());
@ -597,8 +598,8 @@ impl ServoParser {
self.document.reflow_if_reflow_timer_expired();
let script = match feed(&mut *self.tokenizer.borrow_mut()) {
Ok(()) => return,
Err(script) => script,
TokenizerResult::Done => return,
TokenizerResult::Script(script) => script,
};
// https://html.spec.whatwg.org/multipage/#parsing-main-incdata
@ -691,7 +692,8 @@ enum Tokenizer {
}
impl Tokenizer {
fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
#[must_use]
fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
match *self {
Tokenizer::Html(ref mut tokenizer) => tokenizer.feed(input),
Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.feed(input),

View file

@ -10,6 +10,7 @@ use crate::dom::document::Document;
use crate::dom::htmlscriptelement::HTMLScriptElement;
use crate::dom::node::Node;
use crate::dom::servoparser::{ParsingAlgorithm, Sink};
use html5ever::tokenizer::TokenizerResult;
use js::jsapi::JSTracer;
use servo_url::ServoUrl;
use xml5ever::buffer_queue::BufferQueue;
@ -39,12 +40,13 @@ impl Tokenizer {
Tokenizer { inner: tok }
}
pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
#[must_use]
pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<DomRoot<HTMLScriptElement>> {
self.inner.run(input);
if let Some(script) = self.inner.sink.sink.script.take() {
return Err(script);
match self.inner.sink.sink.script.take() {
Some(script) => TokenizerResult::Script(script),
None => TokenizerResult::Done,
}
Ok(())
}
pub fn end(&mut self) {