Mirror of https://github.com/servo/servo.git

commit 49a5e84fb1
parent aeac382058

    Responding to review comments

4 changed files with 127 additions and 105 deletions
@@ -25,6 +25,7 @@ use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
 use net_traits::request::{Destination, RequestBuilder};
 use net_traits::response::{Response, ResponseInit};
 use net_traits::storage_thread::StorageThreadMsg;
+use net_traits::DiscardFetch;
 use net_traits::FetchTaskTarget;
 use net_traits::WebSocketNetworkEvent;
 use net_traits::{CookieSource, CoreResourceMsg, CoreResourceThread};
@@ -248,7 +249,7 @@ impl ResourceChannelManager {
             ),
             FetchChannels::Prefetch => {
                 self.resource_manager
-                    .fetch(req_init, None, (), http_state, None)
+                    .fetch(req_init, None, DiscardFetch, http_state, None)
             },
         },
         CoreResourceMsg::DeleteCookies(request) => {
@@ -237,7 +237,12 @@ impl FetchTaskTarget for IpcSender<FetchResponseMsg> {
     }
 }
 
-impl FetchTaskTarget for () {
+/// A fetch task that discards all data it's sent,
+/// useful when speculatively prefetching data that we don't need right
+/// now, but might need in the future.
+pub struct DiscardFetch;
+
+impl FetchTaskTarget for DiscardFetch {
     fn process_request_body(&mut self, _: &Request) {}
 
     fn process_request_eof(&mut self, _: &Request) {}
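
The two hunks above replace the anonymous `impl FetchTaskTarget for ()` with a named `DiscardFetch` type in net_traits and pass it at the `FetchChannels::Prefetch` call site. The point of a discarding target is that the fetch still runs end to end (so the HTTP cache gets populated) while every notification is dropped. Below is a minimal, self-contained sketch of that pattern; the `FetchTarget` trait and the `Request`/`Response` types are simplified stand-ins, not Servo's actual net_traits API.

// Sketch only: simplified stand-ins for the net_traits types.
struct Request;
struct Response;

trait FetchTarget {
    fn process_request_body(&mut self, request: &Request);
    fn process_request_eof(&mut self, request: &Request);
    fn process_response(&mut self, response: &Response);
    fn process_response_eof(&mut self, response: &Response);
}

/// Discards every fetch event: the fetch itself still runs (warming the
/// HTTP cache), but nothing is forwarded anywhere.
struct DiscardFetch;

impl FetchTarget for DiscardFetch {
    fn process_request_body(&mut self, _: &Request) {}
    fn process_request_eof(&mut self, _: &Request) {}
    fn process_response(&mut self, _: &Response) {}
    fn process_response_eof(&mut self, _: &Response) {}
}

fn main() {
    let mut target = DiscardFetch;
    // A fetch driver would invoke these as the request and response progress;
    // with DiscardFetch every event is a no-op.
    target.process_request_eof(&Request);
    target.process_response(&Response);
}

Compared with implementing the trait for `()`, a named unit struct documents intent at the call site.
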
@@ -102,7 +102,9 @@ pub struct ServoParser {
     aborted: Cell<bool>,
     /// <https://html.spec.whatwg.org/multipage/#script-created-parser>
     script_created_parser: bool,
-    /// We do a quick-and-dirty parse of the input looking for resources to prefetch
+    /// We do a quick-and-dirty parse of the input looking for resources to prefetch.
+    // TODO: if we had speculative parsing, we could do this when speculatively
+    // building the DOM. https://github.com/servo/servo/pull/19203
     prefetch_tokenizer: DomRefCell<prefetch::Tokenizer>,
     #[ignore_malloc_size_of = "Defined in html5ever"]
     prefetch_input: DomRefCell<BufferQueue>,
@@ -433,7 +435,9 @@ impl ServoParser {
     }
 
     fn push_tendril_input_chunk(&self, chunk: StrTendril) {
-        if !chunk.is_empty() {
+        if chunk.is_empty() {
+            return;
+        }
             // Per https://github.com/whatwg/html/issues/1495
             // stylesheets should not be loaded for documents
             // without browsing contexts.
@@ -449,7 +453,7 @@ impl ServoParser {
             // have been wasted, but in most cases it won't.
             let mut prefetch_input = self.prefetch_input.borrow_mut();
             prefetch_input.push_back(chunk.clone());
-            let _ = self.prefetch_tokenizer
+            self.prefetch_tokenizer
                 .borrow_mut()
                 .feed(&mut *prefetch_input);
         }
@@ -457,7 +461,6 @@ impl ServoParser {
         // which is tokenized lazily.
         self.network_input.borrow_mut().push_back(chunk);
         }
-    }
 
     fn push_bytes_input_chunk(&self, chunk: Vec<u8>) {
         // For byte input, we convert it to text using the network decoder.
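
The three ServoParser hunks above touch the same function: `push_tendril_input_chunk` now bails out early on an empty chunk, and the `let _ =` is gone because the prefetch tokenizer's `feed` no longer returns a value. The underlying shape is that each incoming chunk is cloned into a prefetch input queue that is scanned eagerly for fetchable URLs, while the original chunk goes into the network input queue that the real tokenizer consumes lazily. Here is a self-contained sketch of that shape, using `RefCell<Vec<String>>` in place of Servo's `DomRefCell<BufferQueue>`; names are illustrative only.

use std::cell::RefCell;

struct Parser {
    // Eagerly scanned copy of the input, used only to discover prefetchable URLs.
    prefetch_input: RefCell<Vec<String>>,
    // The real input stream, tokenized lazily by the main parser.
    network_input: RefCell<Vec<String>>,
}

impl Parser {
    fn push_chunk(&self, chunk: String) {
        if chunk.is_empty() {
            return;
        }
        // Feed a clone to the prefetch scanner ...
        self.prefetch_input.borrow_mut().push(chunk.clone());
        self.scan_for_prefetches();
        // ... and keep the original for the main parser.
        self.network_input.borrow_mut().push(chunk);
    }

    fn scan_for_prefetches(&self) {
        for chunk in self.prefetch_input.borrow_mut().drain(..) {
            // A real implementation would tokenize here and issue prefetch fetches.
            let _ = chunk;
        }
    }
}

fn main() {
    let parser = Parser {
        prefetch_input: RefCell::new(Vec::new()),
        network_input: RefCell::new(Vec::new()),
    };
    parser.push_chunk(String::from("<script src='app.js'></script>"));
    assert_eq!(parser.network_input.borrow().len(), 1);
}
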
@@ -471,7 +474,6 @@ impl ServoParser {
     }
 
     fn push_string_input_chunk(&self, chunk: String) {
-        // Convert the chunk to a tendril so cloning it isn't expensive.
        // The input has already been decoded as a string, so doesn't need
        // to be decoded by the network decoder again.
        let chunk = StrTendril::from(chunk);
@@ -9,6 +9,7 @@ use crate::dom::htmlimageelement::image_fetch_request
 use crate::dom::htmlscriptelement::script_fetch_request;
 use crate::stylesheet_loader::stylesheet_fetch_request;
 use html5ever::buffer_queue::BufferQueue;
+use html5ever::tokenizer::states::RawKind;
 use html5ever::tokenizer::Tag;
 use html5ever::tokenizer::TagKind;
 use html5ever::tokenizer::Token;
@@ -63,8 +64,8 @@ impl Tokenizer {
         Tokenizer { inner }
     }
 
-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<()> {
-        self.inner.feed(input)
+    pub fn feed(&mut self, input: &mut BufferQueue) {
+        while let TokenizerResult::Script(PrefetchHandle) = self.inner.feed(input) {}
     }
 }
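
html5ever's `Tokenizer::feed` pauses and returns `TokenizerResult::Script(handle)` whenever the sink answers a token with `TokenSinkResult::Script(...)`; the prefetch tokenizer never actually needs to stop and run a script, so the wrapper simply keeps calling `feed` until the input is exhausted. A small runnable sketch of that drain loop follows, with a hand-rolled stand-in for `TokenizerResult` and the inner tokenizer; only the loop shape mirrors the change above.

// Sketch of the drain loop, with a simplified stand-in for
// html5ever::tokenizer::TokenizerResult.
enum TokenizerResult<H> {
    // The sink asked the tokenizer to pause (e.g. to run a script).
    Script(H),
    // The current input buffer is exhausted.
    Done,
}

struct PrefetchHandle;

struct Inner {
    pauses_left: u32,
}

impl Inner {
    fn feed(&mut self, _input: &mut Vec<String>) -> TokenizerResult<PrefetchHandle> {
        if self.pauses_left > 0 {
            self.pauses_left -= 1;
            TokenizerResult::Script(PrefetchHandle)
        } else {
            TokenizerResult::Done
        }
    }
}

fn feed_all(inner: &mut Inner, input: &mut Vec<String>) {
    // The prefetch scan never actually runs scripts, so every pause is
    // immediately resumed by calling feed again until the input runs out.
    while let TokenizerResult::Script(PrefetchHandle) = inner.feed(input) {}
}

fn main() {
    let mut inner = Inner { pauses_left: 3 };
    let mut input = vec![String::from("<script src='a.js'></script>")];
    feed_all(&mut inner, &mut input);
}
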
@@ -79,10 +80,20 @@ struct PrefetchSink {
     prefetching: bool,
 }
 
+/// The prefetch tokenizer produces trivial results
+struct PrefetchHandle;
+
 impl TokenSink for PrefetchSink {
-    type Handle = ();
-    fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
-        if let Token::TagToken(ref tag) = token {
+    type Handle = PrefetchHandle;
+    fn process_token(
+        &mut self,
+        token: Token,
+        _line_number: u64,
+    ) -> TokenSinkResult<PrefetchHandle> {
+        let tag = match token {
+            Token::TagToken(ref tag) => tag,
+            _ => return TokenSinkResult::Continue,
+        };
         match (tag.kind, &tag.name) {
             (TagKind::StartTag, local_name!("script")) if self.prefetching => {
                 if let Some(url) = self.get_url(tag, local_name!("src")) {
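
This hunk changes the sink's associated `Handle` type from `()` to a dedicated `PrefetchHandle` unit struct (the value the tokenizer hands back on `Script` results), and replaces the outer `if let` with a `match` that returns early for non-tag tokens, so each tag arm can return its own `TokenSinkResult`. The runnable sketch below shows that contract in miniature; the trait and enums here are simplified stand-ins for html5ever's `TokenSink`, `Token`, and `TokenSinkResult`, not the real definitions.

// Sketch of the shape of a token sink with an associated Handle type.
enum Token {
    Tag(String),
    Character(char),
}

enum TokenSinkResult<H> {
    Continue,
    // Hand a value of the sink's Handle type back to the tokenizer.
    Script(H),
}

trait TokenSink {
    // Whatever the sink wants the tokenizer to hand back on Script results.
    type Handle;
    fn process_token(&mut self, token: Token) -> TokenSinkResult<Self::Handle>;
}

/// The prefetch sink never needs to pass data back, so a unit struct is enough.
struct PrefetchHandle;

struct PrefetchSink;

impl TokenSink for PrefetchSink {
    type Handle = PrefetchHandle;

    fn process_token(&mut self, token: Token) -> TokenSinkResult<PrefetchHandle> {
        // Early-return on anything that is not a tag, then let each tag
        // pick its own result, mirroring the restructured process_token above.
        let name = match token {
            Token::Tag(name) => name,
            _ => return TokenSinkResult::Continue,
        };
        match name.as_str() {
            "script" => TokenSinkResult::Script(PrefetchHandle),
            _ => TokenSinkResult::Continue,
        }
    }
}

fn main() {
    let mut sink = PrefetchSink;
    assert!(matches!(
        sink.process_token(Token::Tag(String::from("script"))),
        TokenSinkResult::Script(PrefetchHandle)
    ));
    assert!(matches!(
        sink.process_token(Token::Character('x')),
        TokenSinkResult::Continue
    ));
}
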
@@ -105,18 +116,17 @@ impl TokenSink for PrefetchSink {
                         .resource_threads
                         .send(CoreResourceMsg::Fetch(request, FetchChannels::Prefetch));
                 }
-                // Don't prefetch inside script
-                self.prefetching = false;
+                TokenSinkResult::RawData(RawKind::ScriptData)
             },
             (TagKind::StartTag, local_name!("img")) if self.prefetching => {
                 if let Some(url) = self.get_url(tag, local_name!("src")) {
                     debug!("Prefetch {} {}", tag.name, url);
-                    let request =
-                        image_fetch_request(url, self.origin.clone(), self.pipeline_id);
+                    let request = image_fetch_request(url, self.origin.clone(), self.pipeline_id);
                     let _ = self
                         .resource_threads
                         .send(CoreResourceMsg::Fetch(request, FetchChannels::Prefetch));
                 }
+                TokenSinkResult::Continue
             },
             (TagKind::StartTag, local_name!("link")) if self.prefetching => {
                 if let Some(rel) = self.get_attr(tag, local_name!("rel")) {
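
The behavioural change in this hunk: on a `<script src>` start tag the sink no longer turns prefetching off; instead it returns `TokenSinkResult::RawData(RawKind::ScriptData)`, which asks the html5ever tokenizer to switch into its script-data (raw text) state so the bytes inside the script element are not scanned as markup. The `img` arm now explicitly returns `TokenSinkResult::Continue`. The toy scanner below only illustrates why a raw-text state matters; it shares no code with html5ever.

// Toy illustration: inside <script>, '<' does not start a tag, so the
// scanner must stop treating script bytes as markup until </script>.
enum State {
    Data,
    ScriptData,
}

fn scan_tags(input: &str) -> Vec<String> {
    let mut state = State::Data;
    let mut tags = Vec::new();
    let mut rest = input;
    while let Some(start) = rest.find('<') {
        let after = &rest[start + 1..];
        let end = match after.find('>') {
            Some(end) => end,
            None => break,
        };
        let name = &after[..end];
        rest = &after[end + 1..];
        match state {
            State::Data => {
                tags.push(name.to_string());
                if name == "script" {
                    // Equivalent of TokenSinkResult::RawData(RawKind::ScriptData):
                    // stop interpreting markup until </script>.
                    state = State::ScriptData;
                }
            },
            State::ScriptData => {
                if name == "/script" {
                    tags.push(name.to_string());
                    state = State::Data;
                }
                // Anything else inside the script is ignored, not a tag.
            },
        }
    }
    tags
}

fn main() {
    let tags = scan_tags("<script>if (a < b) { x = '<img src=fake>'; }</script><img src=real>");
    assert_eq!(tags, vec!["script", "/script", "img src=real"]);
}
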
@@ -144,21 +154,25 @@ impl TokenSink for PrefetchSink {
                         }
                     }
                 }
+                TokenSinkResult::Continue
+            },
+            (TagKind::StartTag, local_name!("script")) => {
+                TokenSinkResult::RawData(RawKind::ScriptData)
             },
             (TagKind::EndTag, local_name!("script")) => {
                 // After the first script tag, the main parser is blocked, so it's worth prefetching.
                 self.prefetching = true;
+                TokenSinkResult::Script(PrefetchHandle)
             },
             (TagKind::StartTag, local_name!("base")) => {
                 if let Some(url) = self.get_url(tag, local_name!("href")) {
                     debug!("Setting base {}", url);
                     self.base = url;
                 }
-            },
-            _ => {},
-            }
-        }
                 TokenSinkResult::Continue
+            },
+            _ => TokenSinkResult::Continue,
+        }
     }
 }
 
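
Taken together, the PrefetchSink hunks leave `process_token` choosing one of a handful of outcomes per tag. The runnable sketch below restates that decision table with illustrative names (the `<base>` arm, which only records the base URL, is left out); it is a summary of the diff above, not Servo code.

// Compact restatement of the prefetch decision table; names are illustrative.
#[derive(Debug, PartialEq)]
enum Action {
    // Issue a prefetch fetch, then keep tokenizing normally.
    // (For <link>, only rel=stylesheet href URLs are actually fetched.)
    PrefetchAndContinue,
    // Issue a prefetch fetch, then switch the tokenizer to script-data raw mode.
    PrefetchAndSkipScriptBody,
    // No fetch; just switch the tokenizer to script-data raw mode.
    SkipScriptBody,
    // Enable prefetching: the main parser is now blocked on the first script.
    StartPrefetching,
    Continue,
}

enum Kind {
    Start,
    End,
}

fn decide(kind: Kind, name: &str, prefetching: bool) -> Action {
    match (kind, name, prefetching) {
        (Kind::Start, "script", true) => Action::PrefetchAndSkipScriptBody,
        (Kind::Start, "img", true) | (Kind::Start, "link", true) => Action::PrefetchAndContinue,
        (Kind::Start, "script", false) => Action::SkipScriptBody,
        (Kind::End, "script", _) => Action::StartPrefetching,
        _ => Action::Continue,
    }
}

fn main() {
    // Before the first </script>, script bodies are skipped but nothing is fetched.
    assert_eq!(decide(Kind::Start, "script", false), Action::SkipScriptBody);
    // The first </script> turns prefetching on.
    assert_eq!(decide(Kind::End, "script", false), Action::StartPrefetching);
    // Once prefetching, script/img/link start tags trigger speculative fetches.
    assert_eq!(decide(Kind::Start, "img", true), Action::PrefetchAndContinue);
    assert_eq!(decide(Kind::Start, "script", true), Action::PrefetchAndSkipScriptBody);
}
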