Fixes #8640. This commit adds a neterror page that displays very basic information about what went wrong with the request, which is an improvement over the current blank page. It also fixes certificate validation errors not triggering the cert error page, since the string being checked seems to have turned lowercase for some reason.
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

//! The bulk of the HTML parser integration is in `script::parse::html`.
//! This module is mostly about its interaction with DOM memory management.

use document_loader::LoadType;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::ServoHTMLParserBinding;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::trace::JSTraceable;
use dom::document::Document;
use dom::node::Node;
use dom::window::Window;
use encoding::all::UTF_8;
use encoding::types::{DecoderTrap, Encoding};
use html5ever::tokenizer;
use html5ever::tree_builder;
use html5ever::tree_builder::{TreeBuilder, TreeBuilderOpts};
use hyper::header::ContentType;
use hyper::mime::{Mime, SubLevel, TopLevel};
use js::jsapi::JSTracer;
use msg::constellation_msg::{PipelineId, SubpageId};
use net_traits::{AsyncResponseListener, Metadata, NetworkError};
use network_listener::PreInvoke;
use parse::{TrustedParser, ParserRef, Parser};
use profile_traits::time::ProfilerCategory;
use profile_traits::time::{profile, TimerMetadata, TimerMetadataReflowType, TimerMetadataFrameType};
use script_thread::ScriptThread;
use std::cell::Cell;
use std::default::Default;
use url::Url;
use util::resource_files::read_resource_file;

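// `Sink` is handed to html5ever's tree builder; the `TreeSink` implementation
// that turns its tree-building operations into DOM nodes lives with the rest of
// the parser integration in `script::parse::html` (see the module docs above).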
#[must_root]
#[derive(JSTraceable, HeapSizeOf)]
pub struct Sink {
    pub base_url: Option<Url>,
    pub document: JS<Document>,
}

/// FragmentContext is used only to pass this group of related values
/// into functions.
#[derive(Copy, Clone)]
pub struct FragmentContext<'a> {
    pub context_elem: &'a Node,
    pub form_elem: Option<&'a Node>,
}

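/// An html5ever tokenizer feeding a tree builder whose handles are Servo DOM
/// nodes (`JS<Node>`), built through `Sink`.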
pub type Tokenizer = tokenizer::Tokenizer<TreeBuilder<JS<Node>, Sink>>;

/// The context required for asynchronously fetching a document and parsing it progressively.
pub struct ParserContext {
    /// The parser that initiated the request.
    parser: Option<TrustedParser>,
    /// Is this a synthesized document?
    is_synthesized_document: bool,
    /// The pipeline associated with this document.
    id: PipelineId,
    /// The subpage associated with this document.
    subpage: Option<SubpageId>,
    /// The URL for this document.
    url: Url,
}

impl ParserContext {
    pub fn new(id: PipelineId, subpage: Option<SubpageId>, url: Url) -> ParserContext {
        ParserContext {
            parser: None,
            is_synthesized_document: false,
            id: id,
            subpage: subpage,
            url: url,
        }
    }
}

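// `ParserContext` is registered as the listener for the page's network request:
// `headers_available`, `data_available` and `response_complete` below are called
// as the response arrives and drive the parser incrementally.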
impl AsyncResponseListener for ParserContext {
    fn headers_available(&mut self, meta_result: Result<Metadata, NetworkError>) {
        let mut ssl_error = None;
        let metadata = match meta_result {
            Ok(meta) => Some(meta),
            Err(NetworkError::SslValidation(url, reason)) => {
                ssl_error = Some(reason);
                let mut meta = Metadata::default(url);
                let mime: Option<Mime> = "text/html".parse().ok();
                meta.set_content_type(mime.as_ref());
                Some(meta)
            },
            Err(_) => None,
        };
        let content_type = metadata.clone().and_then(|meta| meta.content_type);
        let parser = match ScriptThread::page_headers_available(&self.id,
                                                                self.subpage.as_ref(),
                                                                metadata) {
            Some(parser) => parser,
            None => return,
        };

        let parser = parser.r();
        self.parser = Some(match parser {
            ParserRef::HTML(parser) => TrustedParser::HTML(
                Trusted::new(parser)),
            ParserRef::XML(parser) => TrustedParser::XML(
                Trusted::new(parser)),
        });

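        // Dispatch on the response's Content-Type. Several types are handled by
        // synthesizing an HTML document around the resource (images, text/plain,
        // unknown types); text/html is parsed directly unless an SSL validation
        // error was recorded above, in which case an error page is shown instead.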
        match content_type {
            Some(ContentType(Mime(TopLevel::Image, _, _))) => {
                self.is_synthesized_document = true;
                let page = format!("<html><body><img src='{}' /></body></html>", self.url);
                parser.pending_input().borrow_mut().push(page);
                parser.parse_sync();
            },
            Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) => {
                // https://html.spec.whatwg.org/multipage/#read-text
                let page = format!("<pre>\n");
                parser.pending_input().borrow_mut().push(page);
                parser.parse_sync();
                parser.set_plaintext_state();
            },
            Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, _))) => { // Handle text/html
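                // An SSL validation failure was recorded above; replace the page
                // with the bundled badcert.html resource, substituting the failure
                // reason into its ${reason} placeholder.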
                if let Some(reason) = ssl_error {
                    self.is_synthesized_document = true;
                    let page_bytes = read_resource_file("badcert.html").unwrap();
                    let page = String::from_utf8(page_bytes).unwrap();
                    let page = page.replace("${reason}", &reason);
                    parser.pending_input().borrow_mut().push(page);
                    parser.parse_sync();
                }
            },
            Some(ContentType(Mime(TopLevel::Text, SubLevel::Xml, _))) => {}, // Handle text/xml
            Some(ContentType(Mime(toplevel, sublevel, _))) => {
                if toplevel.as_str() == "application" && sublevel.as_str() == "xhtml+xml" {
                    // Handle xhtml (application/xhtml+xml).
                    return;
                }

                // Show warning page for unknown mime types.
                let page = format!("<html><body><p>Unknown content type ({}/{}).</p></body></html>",
                                   toplevel.as_str(), sublevel.as_str());
                self.is_synthesized_document = true;
                parser.pending_input().borrow_mut().push(page);
                parser.parse_sync();
            },
            None => {
                // No content-type header.
                // Merge with #4212 when fixed.
            }
        }
    }

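    // Each network chunk is decoded as UTF-8 (invalid byte sequences are
    // replaced) and fed to the parser, unless this document was synthesized
    // above, in which case the real response body is ignored.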
    fn data_available(&mut self, payload: Vec<u8>) {
        if !self.is_synthesized_document {
            // FIXME: use Vec<u8> (html5ever #34)
            let data = UTF_8.decode(&payload, DecoderTrap::Replace).unwrap();
            let parser = match self.parser.as_ref() {
                Some(parser) => parser.root(),
                None => return,
            };
            parser.r().parse_chunk(data);
        }
    }

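    // On completion, internal network errors are turned into the bundled
    // neterror.html page (certificate errors were already handled in
    // `headers_available`); the page-source load is then marked finished and
    // any remaining input is parsed.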
    fn response_complete(&mut self, status: Result<(), NetworkError>) {
        let parser = match self.parser.as_ref() {
            Some(parser) => parser.root(),
            None => return,
        };

        if let Err(NetworkError::Internal(ref reason)) = status {
            // Show an error page for network errors,
            // certificate errors are handled earlier.
            self.is_synthesized_document = true;
            let parser = parser.r();
            let page_bytes = read_resource_file("neterror.html").unwrap();
            let page = String::from_utf8(page_bytes).unwrap();
            let page = page.replace("${reason}", reason);
            parser.pending_input().borrow_mut().push(page);
            parser.parse_sync();
        } else if let Err(err) = status {
            // TODO(Savago): we should send a notification to callers #5463.
            debug!("Failed to load page URL {}, error: {:?}", self.url, err);
        }

        parser.r().document().finish_load(LoadType::PageSource(self.url.clone()));

        parser.r().last_chunk_received().set(true);
        if !parser.r().is_suspended() {
            parser.r().parse_sync();
        }
    }
}

impl PreInvoke for ParserContext {}

#[dom_struct]
pub struct ServoHTMLParser {
    reflector_: Reflector,
    #[ignore_heap_size_of = "Defined in html5ever"]
    tokenizer: DOMRefCell<Tokenizer>,
    /// Input chunks received but not yet passed to the parser.
    pending_input: DOMRefCell<Vec<String>>,
    /// The document associated with this parser.
    document: JS<Document>,
    /// True if this parser should avoid passing any further data to the tokenizer.
    suspended: Cell<bool>,
    /// True if no further input is expected from the associated network request.
    last_chunk_received: Cell<bool>,
    /// The pipeline associated with this parse, unavailable if this parse does not
    /// correspond to a page load.
    pipeline: Option<PipelineId>,
}

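// `Parser` is implemented for `&'a ServoHTMLParser` because its methods take
// `self` by value: `parse_chunk` queues a chunk and parses it unless the parser
// is suspended, and `finish` shuts the tokenizer down once all pending input
// has been consumed.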
impl<'a> Parser for &'a ServoHTMLParser {
    fn parse_chunk(self, input: String) {
        self.document.set_current_parser(Some(ParserRef::HTML(self)));
        self.pending_input.borrow_mut().push(input);
        if !self.is_suspended() {
            self.parse_sync();
        }
    }

    fn finish(self) {
        assert!(!self.suspended.get());
        assert!(self.pending_input.borrow().is_empty());

        self.tokenizer.borrow_mut().end();
        debug!("finished parsing");

        self.document.set_current_parser(None);

        if let Some(pipeline) = self.pipeline {
            ScriptThread::parsing_complete(pipeline);
        }
    }
}

impl ServoHTMLParser {
    #[allow(unrooted_must_root)]
    pub fn new(base_url: Option<Url>, document: &Document, pipeline: Option<PipelineId>)
               -> Root<ServoHTMLParser> {
        let sink = Sink {
            base_url: base_url,
            document: JS::from_ref(document),
        };

        let tb = TreeBuilder::new(sink, TreeBuilderOpts {
            ignore_missing_rules: true,
            .. Default::default()
        });

        let tok = tokenizer::Tokenizer::new(tb, Default::default());

        let parser = ServoHTMLParser {
            reflector_: Reflector::new(),
            tokenizer: DOMRefCell::new(tok),
            pending_input: DOMRefCell::new(vec!()),
            document: JS::from_ref(document),
            suspended: Cell::new(false),
            last_chunk_received: Cell::new(false),
            pipeline: pipeline,
        };

        reflect_dom_object(box parser, GlobalRef::Window(document.window()),
                           ServoHTMLParserBinding::Wrap)
    }

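    /// Create a parser for fragment parsing (e.g. for `innerHTML`): the tree
    /// builder is given the fragment's context element and optional form element
    /// owner, and the tokenizer starts in the state the context element requires.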
    #[allow(unrooted_must_root)]
    pub fn new_for_fragment(base_url: Option<Url>, document: &Document,
                            fragment_context: FragmentContext) -> Root<ServoHTMLParser> {
        let sink = Sink {
            base_url: base_url,
            document: JS::from_ref(document),
        };

        let tb_opts = TreeBuilderOpts {
            ignore_missing_rules: true,
            .. Default::default()
        };
        let tb = TreeBuilder::new_for_fragment(sink,
                                               JS::from_ref(fragment_context.context_elem),
                                               fragment_context.form_elem.map(|n| JS::from_ref(n)),
                                               tb_opts);

        let tok_opts = tokenizer::TokenizerOpts {
            initial_state: Some(tb.tokenizer_state_for_context_elem()),
            .. Default::default()
        };
        let tok = tokenizer::Tokenizer::new(tb, tok_opts);

        let parser = ServoHTMLParser {
            reflector_: Reflector::new(),
            tokenizer: DOMRefCell::new(tok),
            pending_input: DOMRefCell::new(vec!()),
            document: JS::from_ref(document),
            suspended: Cell::new(false),
            last_chunk_received: Cell::new(true),
            pipeline: None,
        };

        reflect_dom_object(box parser, GlobalRef::Window(document.window()),
                           ServoHTMLParserBinding::Wrap)
    }

    #[inline]
    pub fn tokenizer(&self) -> &DOMRefCell<Tokenizer> {
        &self.tokenizer
    }

    pub fn set_plaintext_state(&self) {
        self.tokenizer.borrow_mut().set_plaintext_state()
    }

    pub fn end_tokenizer(&self) {
        self.tokenizer.borrow_mut().end()
    }

    pub fn pending_input(&self) -> &DOMRefCell<Vec<String>> {
        &self.pending_input
    }
}

impl ServoHTMLParser {
    pub fn parse_sync(&self) {
        let metadata = TimerMetadata {
            url: self.document.url().as_str().into(),
            iframe: TimerMetadataFrameType::RootWindow,
            incremental: TimerMetadataReflowType::FirstReflow,
        };
        profile(ProfilerCategory::ScriptParseHTML,
                Some(metadata),
                self.document.window().time_profiler_chan().clone(),
                || self.do_parse_sync())
    }

    fn do_parse_sync(&self) {
        // This parser will continue to parse while there is pending input and
        // the parser has not been suspended.
        loop {
            self.document.reflow_if_reflow_timer_expired();
            let mut pending_input = self.pending_input.borrow_mut();
            if !pending_input.is_empty() {
                let chunk = pending_input.remove(0);
                self.tokenizer.borrow_mut().feed(chunk.into());
            } else {
                self.tokenizer.borrow_mut().run();
            }

            // Document parsing is blocked on an external resource.
            if self.suspended.get() {
                return;
            }

            if pending_input.is_empty() {
                break;
            }
        }

        if self.last_chunk_received.get() {
            self.finish();
        }
    }

    pub fn window(&self) -> &Window {
        self.document.window()
    }

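    /// Stop feeding the tokenizer until `resume` is called, typically because
    /// document parsing is blocked on an external resource such as a script
    /// that is still loading.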
    pub fn suspend(&self) {
        assert!(!self.suspended.get());
        self.suspended.set(true);
    }

    pub fn resume(&self) {
        assert!(self.suspended.get());
        self.suspended.set(false);
        self.parse_sync();
    }

    pub fn is_suspended(&self) -> bool {
        self.suspended.get()
    }

    pub fn document(&self) -> &Document {
        &self.document
    }

    pub fn last_chunk_received(&self) -> &Cell<bool> {
        &self.last_chunk_received
    }
}

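// The html5ever tree builder holds `JS<Node>` handles that the JS garbage
// collector cannot discover on its own, so the tokenizer is traced by hand:
// `Tracer` forwards each handle to the JS tracer, and the `Sink` itself is
// traced afterwards.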
struct Tracer {
    trc: *mut JSTracer,
}

impl tree_builder::Tracer for Tracer {
    type Handle = JS<Node>;
    #[allow(unrooted_must_root)]
    fn trace_handle(&self, node: &JS<Node>) {
        node.trace(self.trc);
    }
}

impl JSTraceable for Tokenizer {
    fn trace(&self, trc: *mut JSTracer) {
        let tracer = Tracer {
            trc: trc,
        };
        let tracer = &tracer as &tree_builder::Tracer<Handle=JS<Node>>;

        let tree_builder = self.sink();
        tree_builder.trace_handles(tracer);
        tree_builder.sink().trace(trc);
    }
}