mirror of
https://github.com/servo/servo.git
synced 2025-08-03 04:30:10 +01:00
Merge script::parse and script::dom::servoparser

parent 1405be6917
commit 4b813e0bdc

9 changed files with 212 additions and 223 deletions
@@ -18,9 +18,9 @@ use dom::bindings::str::DOMString;
 use dom::document::{Document, IsHTMLDocument};
 use dom::document::DocumentSource;
 use dom::globalscope::GlobalScope;
+use dom::servoparser::html::{ParseContext, parse_html};
+use dom::servoparser::xml::{self, parse_xml};
 use dom::window::Window;
-use parse::html::{ParseContext, parse_html};
-use parse::xml::{self, parse_xml};
 
 #[dom_struct]
 pub struct DOMParser {
@@ -47,6 +47,7 @@ use dom::htmltextareaelement::{HTMLTextAreaElement, LayoutHTMLTextAreaElementHel
 use dom::nodelist::NodeList;
 use dom::processinginstruction::ProcessingInstruction;
 use dom::range::WeakRangeVec;
+use dom::servoparser::html::parse_html_fragment;
 use dom::svgsvgelement::{SVGSVGElement, LayoutSVGSVGElementHelpers};
 use dom::text::Text;
 use dom::virtualmethods::{VirtualMethods, vtable_for};
@@ -59,7 +60,6 @@ use html5ever::tree_builder::QuirksMode;
 use js::jsapi::{JSContext, JSObject, JSRuntime};
 use libc::{self, c_void, uintptr_t};
 use msg::constellation_msg::PipelineId;
-use parse::html::parse_html_fragment;
 use ref_slice::ref_slice;
 use script_layout_interface::{HTMLCanvasData, OpaqueStyleAndLayoutData, SVGSVGData};
 use script_layout_interface::{LayoutElementType, LayoutNodeType, TrustedNodeAddress};
@@ -23,7 +23,6 @@ use dom::htmltemplateelement::HTMLTemplateElement;
 use dom::node::{document_from_node, window_from_node};
 use dom::node::Node;
 use dom::processinginstruction::ProcessingInstruction;
-use dom::servoparser::{ServoParser, Tokenizer};
 use dom::text::Text;
 use html5ever::Attribute;
 use html5ever::serialize::{AttrRef, Serializable, Serializer};
@@ -34,10 +33,10 @@ use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts};
 use html5ever::tree_builder::{NextParserState, NodeOrText, QuirksMode};
 use html5ever::tree_builder::{TreeBuilder, TreeBuilderOpts, TreeSink};
 use msg::constellation_msg::PipelineId;
-use parse::Sink;
 use std::borrow::Cow;
 use std::io::{self, Write};
 use string_cache::QualName;
+use super::{LastChunkState, ServoParser, Sink, Tokenizer};
 use url::Url;
 
 fn insert(parent: &Node, reference_child: Option<&Node>, child: NodeOrText<JS<Node>>) {
@@ -279,7 +278,8 @@ pub fn parse_html(document: &Document,
             let tb = TreeBuilder::new(sink, options);
             let tok = HtmlTokenizer::new(tb, Default::default());
 
-            ServoParser::new(document, owner, Tokenizer::HTML(tok), false)
+            ServoParser::new(
+                document, owner, Tokenizer::HTML(tok), LastChunkState::NotReceived)
         },
         ParseContext::Fragment(fc) => {
             let tb = TreeBuilder::new_for_fragment(
@@ -294,7 +294,8 @@ pub fn parse_html(document: &Document,
             };
             let tok = HtmlTokenizer::new(tb, tok_options);
 
-            ServoParser::new(document, None, Tokenizer::HTML(tok), true)
+            ServoParser::new(
+                document, None, Tokenizer::HTML(tok), LastChunkState::Received)
         }
     };
     parser.parse_chunk(String::from(input));
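The two call sites above swap a bare boolean argument for an explicit LastChunkState value (the enum itself is added to the servoparser module further down). A minimal, self-contained sketch of the pattern, using a hypothetical Parser type and new_parser function rather than Servo's actual ServoParser API:

// Sketch only: `Parser` and `new_parser` are hypothetical stand-ins for
// ServoParser and ServoParser::new; the enum mirrors the one added in this commit.
#[derive(PartialEq)]
enum LastChunkState {
    Received,
    NotReceived,
}

struct Parser {
    last_chunk_received: bool,
}

fn new_parser(last_chunk_state: LastChunkState) -> Parser {
    Parser {
        // Collapsed back to an internal bool, much like
        // `Cell::new(last_chunk_state == LastChunkState::Received)` in the diff.
        last_chunk_received: last_chunk_state == LastChunkState::Received,
    }
}

fn main() {
    // `LastChunkState::Received` is unambiguous at the call site,
    // where a bare `true` or `false` was easy to misread.
    let whole_document = new_parser(LastChunkState::Received);
    let streaming = new_parser(LastChunkState::NotReceived);
    assert!(whole_document.last_chunk_received);
    assert!(!streaming.last_chunk_received);
}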
@@ -2,29 +2,46 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+use document_loader::LoadType;
 use dom::bindings::cell::DOMRefCell;
+use dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
+use dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods;
+use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
 use dom::bindings::codegen::Bindings::ServoParserBinding;
 use dom::bindings::inheritance::Castable;
 use dom::bindings::js::{JS, Root};
+use dom::bindings::refcounted::Trusted;
 use dom::bindings::reflector::{Reflector, reflect_dom_object};
+use dom::bindings::str::DOMString;
 use dom::bindings::trace::JSTraceable;
 use dom::document::Document;
 use dom::globalscope::GlobalScope;
+use dom::htmlimageelement::HTMLImageElement;
 use dom::node::Node;
-use dom::window::Window;
+use encoding::all::UTF_8;
+use encoding::types::{DecoderTrap, Encoding};
 use html5ever::tokenizer::Tokenizer as HtmlTokenizer;
 use html5ever::tree_builder::Tracer as HtmlTracer;
 use html5ever::tree_builder::TreeBuilder as HtmlTreeBuilder;
+use hyper::header::ContentType;
+use hyper::mime::{Mime, SubLevel, TopLevel};
+use hyper_serde::Serde;
 use js::jsapi::JSTracer;
 use msg::constellation_msg::PipelineId;
-use parse::Sink;
+use net_traits::{AsyncResponseListener, Metadata, NetworkError};
+use network_listener::PreInvoke;
 use profile_traits::time::{TimerMetadata, TimerMetadataFrameType};
 use profile_traits::time::{TimerMetadataReflowType, ProfilerCategory, profile};
 use script_thread::ScriptThread;
 use std::cell::Cell;
+use url::Url;
+use util::resource_files::read_resource_file;
 use xml5ever::tokenizer::XmlTokenizer;
 use xml5ever::tree_builder::{Tracer as XmlTracer, XmlTreeBuilder};
 
+pub mod html;
+pub mod xml;
+
 #[dom_struct]
 pub struct ServoParser {
     reflector: Reflector,
@@ -43,13 +60,19 @@ pub struct ServoParser {
     suspended: Cell<bool>,
 }
 
+#[derive(PartialEq)]
+enum LastChunkState {
+    Received,
+    NotReceived,
+}
+
 impl ServoParser {
     #[allow(unrooted_must_root)]
     fn new_inherited(
             document: &Document,
             pipeline: Option<PipelineId>,
             tokenizer: Tokenizer,
-            last_chunk_received: bool)
+            last_chunk_state: LastChunkState)
             -> Self {
         ServoParser {
             reflector: Reflector::new(),
@@ -57,20 +80,20 @@ impl ServoParser {
             pipeline: pipeline,
             pending_input: DOMRefCell::new(vec![]),
             tokenizer: DOMRefCell::new(tokenizer),
-            last_chunk_received: Cell::new(last_chunk_received),
+            last_chunk_received: Cell::new(last_chunk_state == LastChunkState::Received),
             suspended: Default::default(),
         }
     }
 
     #[allow(unrooted_must_root)]
-    pub fn new(
+    fn new(
             document: &Document,
             pipeline: Option<PipelineId>,
             tokenizer: Tokenizer,
-            last_chunk_received: bool)
+            last_chunk_state: LastChunkState)
             -> Root<Self> {
         reflect_dom_object(
-            box ServoParser::new_inherited(document, pipeline, tokenizer, last_chunk_received),
+            box ServoParser::new_inherited(document, pipeline, tokenizer, last_chunk_state),
             document.window(),
             ServoParserBinding::Wrap)
     }
@@ -83,15 +106,15 @@ impl ServoParser {
         self.pipeline
     }
 
-    pub fn has_pending_input(&self) -> bool {
+    fn has_pending_input(&self) -> bool {
         !self.pending_input.borrow().is_empty()
     }
 
-    pub fn push_input_chunk(&self, chunk: String) {
+    fn push_input_chunk(&self, chunk: String) {
         self.pending_input.borrow_mut().push(chunk);
     }
 
-    pub fn take_next_input_chunk(&self) -> Option<String> {
+    fn take_next_input_chunk(&self) -> Option<String> {
         let mut pending_input = self.pending_input.borrow_mut();
         if pending_input.is_empty() {
             None
@@ -100,15 +123,15 @@ impl ServoParser {
         }
     }
 
-    pub fn last_chunk_received(&self) -> bool {
+    fn last_chunk_received(&self) -> bool {
         self.last_chunk_received.get()
     }
 
-    pub fn mark_last_chunk_received(&self) {
+    fn mark_last_chunk_received(&self) {
        self.last_chunk_received.set(true)
    }
 
-    pub fn set_plaintext_state(&self) {
+    fn set_plaintext_state(&self) {
         self.tokenizer.borrow_mut().set_plaintext_state()
     }
 
@@ -116,10 +139,6 @@ impl ServoParser {
         self.tokenizer.borrow_mut().end()
     }
 
-    pub fn window(&self) -> &Window {
-        self.document().window()
-    }
-
     pub fn suspend(&self) {
         assert!(!self.suspended.get());
         self.suspended.set(true);
@@ -135,7 +154,7 @@ impl ServoParser {
         self.suspended.get()
     }
 
-    pub fn parse_sync(&self) {
+    fn parse_sync(&self) {
         let metadata = TimerMetadata {
             url: self.document().url().as_str().into(),
             iframe: TimerMetadataFrameType::RootWindow,
@@ -174,7 +193,7 @@ impl ServoParser {
         }
     }
 
-    pub fn parse_chunk(&self, input: String) {
+    fn parse_chunk(&self, input: String) {
         self.document().set_current_parser(Some(self));
         self.push_input_chunk(input);
         if !self.is_suspended() {
@@ -182,7 +201,7 @@ impl ServoParser {
         }
     }
 
-    pub fn finish(&self) {
+    fn finish(&self) {
         assert!(!self.suspended.get());
         assert!(!self.has_pending_input());
 
@@ -199,7 +218,7 @@ impl ServoParser {
 
 #[derive(HeapSizeOf)]
 #[must_root]
-pub enum Tokenizer {
+enum Tokenizer {
     HTML(
         #[ignore_heap_size_of = "Defined in html5ever"]
         HtmlTokenizer<HtmlTreeBuilder<JS<Node>, Sink>>
@@ -210,36 +229,43 @@ pub enum Tokenizer {
     ),
 }
 
+#[derive(JSTraceable, HeapSizeOf)]
+#[must_root]
+struct Sink {
+    pub base_url: Url,
+    pub document: JS<Document>,
+}
+
 impl Tokenizer {
-    pub fn feed(&mut self, input: String) {
+    fn feed(&mut self, input: String) {
         match *self {
             Tokenizer::HTML(ref mut tokenizer) => tokenizer.feed(input.into()),
             Tokenizer::XML(ref mut tokenizer) => tokenizer.feed(input.into()),
         }
     }
 
-    pub fn run(&mut self) {
+    fn run(&mut self) {
         match *self {
             Tokenizer::HTML(ref mut tokenizer) => tokenizer.run(),
             Tokenizer::XML(ref mut tokenizer) => tokenizer.run(),
         }
     }
 
-    pub fn end(&mut self) {
+    fn end(&mut self) {
         match *self {
             Tokenizer::HTML(ref mut tokenizer) => tokenizer.end(),
             Tokenizer::XML(ref mut tokenizer) => tokenizer.end(),
         }
     }
 
-    pub fn set_plaintext_state(&mut self) {
+    fn set_plaintext_state(&mut self) {
         match *self {
             Tokenizer::HTML(ref mut tokenizer) => tokenizer.set_plaintext_state(),
             Tokenizer::XML(_) => { /* todo */ },
         }
     }
 
-    pub fn profiler_category(&self) -> ProfilerCategory {
+    fn profiler_category(&self) -> ProfilerCategory {
         match *self {
             Tokenizer::HTML(_) => ProfilerCategory::ScriptParseHTML,
             Tokenizer::XML(_) => ProfilerCategory::ScriptParseXML,
@@ -280,3 +306,147 @@ impl JSTraceable for Tokenizer {
         }
     }
 }
+
+/// The context required for asynchronously fetching a document
+/// and parsing it progressively.
+pub struct ParserContext {
+    /// The parser that initiated the request.
+    parser: Option<Trusted<ServoParser>>,
+    /// Is this a synthesized document
+    is_synthesized_document: bool,
+    /// The pipeline associated with this document.
+    id: PipelineId,
+    /// The URL for this document.
+    url: Url,
+}
+
+impl ParserContext {
+    pub fn new(id: PipelineId, url: Url) -> ParserContext {
+        ParserContext {
+            parser: None,
+            is_synthesized_document: false,
+            id: id,
+            url: url,
+        }
+    }
+}
+
+impl AsyncResponseListener for ParserContext {
+    fn headers_available(&mut self, meta_result: Result<Metadata, NetworkError>) {
+        let mut ssl_error = None;
+        let metadata = match meta_result {
+            Ok(meta) => Some(meta),
+            Err(NetworkError::SslValidation(url, reason)) => {
+                ssl_error = Some(reason);
+                let mut meta = Metadata::default(url);
+                let mime: Option<Mime> = "text/html".parse().ok();
+                meta.set_content_type(mime.as_ref());
+                Some(meta)
+            },
+            Err(_) => None,
+        };
+        let content_type =
+            metadata.clone().and_then(|meta| meta.content_type).map(Serde::into_inner);
+        let parser = match ScriptThread::page_headers_available(&self.id,
+                                                                metadata) {
+            Some(parser) => parser,
+            None => return,
+        };
+
+        self.parser = Some(Trusted::new(&*parser));
+
+        match content_type {
+            Some(ContentType(Mime(TopLevel::Image, _, _))) => {
+                self.is_synthesized_document = true;
+                let page = "<html><body></body></html>".into();
+                parser.push_input_chunk(page);
+                parser.parse_sync();
+
+                let doc = parser.document();
+                let doc_body = Root::upcast::<Node>(doc.GetBody().unwrap());
+                let img = HTMLImageElement::new(atom!("img"), None, doc);
+                img.SetSrc(DOMString::from(self.url.to_string()));
+                doc_body.AppendChild(&Root::upcast::<Node>(img)).expect("Appending failed");
+
+            },
+            Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) => {
+                // https://html.spec.whatwg.org/multipage/#read-text
+                let page = "<pre>\n".into();
+                parser.push_input_chunk(page);
+                parser.parse_sync();
+                parser.set_plaintext_state();
+            },
+            Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, _))) => { // Handle text/html
+                if let Some(reason) = ssl_error {
+                    self.is_synthesized_document = true;
+                    let page_bytes = read_resource_file("badcert.html").unwrap();
+                    let page = String::from_utf8(page_bytes).unwrap();
+                    let page = page.replace("${reason}", &reason);
+                    parser.push_input_chunk(page);
+                    parser.parse_sync();
+                }
+            },
+            Some(ContentType(Mime(TopLevel::Text, SubLevel::Xml, _))) => {}, // Handle text/xml
+            Some(ContentType(Mime(toplevel, sublevel, _))) => {
+                if toplevel.as_str() == "application" && sublevel.as_str() == "xhtml+xml" {
+                    // Handle xhtml (application/xhtml+xml).
+                    return;
+                }
+
+                // Show warning page for unknown mime types.
+                let page = format!("<html><body><p>Unknown content type ({}/{}).</p></body></html>",
+                    toplevel.as_str(), sublevel.as_str());
+                self.is_synthesized_document = true;
+                parser.push_input_chunk(page);
+                parser.parse_sync();
+            },
+            None => {
+                // No content-type header.
+                // Merge with #4212 when fixed.
+            }
+        }
+    }
+
+    fn data_available(&mut self, payload: Vec<u8>) {
+        if !self.is_synthesized_document {
+            // FIXME: use Vec<u8> (html5ever #34)
+            let data = UTF_8.decode(&payload, DecoderTrap::Replace).unwrap();
+            let parser = match self.parser.as_ref() {
+                Some(parser) => parser.root(),
+                None => return,
+            };
+            parser.parse_chunk(data);
+        }
+    }
+
+    fn response_complete(&mut self, status: Result<(), NetworkError>) {
+        let parser = match self.parser.as_ref() {
+            Some(parser) => parser.root(),
+            None => return,
+        };
+
+        if let Err(NetworkError::Internal(ref reason)) = status {
+            // Show an error page for network errors,
+            // certificate errors are handled earlier.
+            self.is_synthesized_document = true;
+            let page_bytes = read_resource_file("neterror.html").unwrap();
+            let page = String::from_utf8(page_bytes).unwrap();
+            let page = page.replace("${reason}", reason);
+            parser.push_input_chunk(page);
+            parser.parse_sync();
+        } else if let Err(err) = status {
+            // TODO(Savago): we should send a notification to callers #5463.
+            debug!("Failed to load page URL {}, error: {:?}", self.url, err);
+        }
+
+        parser.document()
+            .finish_load(LoadType::PageSource(self.url.clone()));
+
+        parser.mark_last_chunk_received();
+        if !parser.is_suspended() {
+            parser.parse_sync();
+        }
+    }
+}
+
+impl PreInvoke for ParserContext {}
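The now-private Tokenizer enum in the hunks above forwards feed, run, and end to whichever backend it wraps. A stripped-down sketch of that dispatch shape, with toy HtmlTok and XmlTok types standing in for the html5ever and xml5ever tokenizers:

// Sketch only: HtmlTok and XmlTok are toy stand-ins for the real
// html5ever/xml5ever tokenizers wrapped by dom::servoparser::Tokenizer.
struct HtmlTok { buffer: String }
struct XmlTok { buffer: String }

impl HtmlTok {
    fn feed(&mut self, input: String) { self.buffer.push_str(&input); }
    fn end(&mut self) { println!("html tokenizer saw {} bytes", self.buffer.len()); }
}

impl XmlTok {
    fn feed(&mut self, input: String) { self.buffer.push_str(&input); }
    fn end(&mut self) { println!("xml tokenizer saw {} bytes", self.buffer.len()); }
}

enum Tokenizer {
    Html(HtmlTok),
    Xml(XmlTok),
}

impl Tokenizer {
    // Each method matches on `*self` and forwards to the wrapped backend,
    // mirroring the match bodies in the diff.
    fn feed(&mut self, input: String) {
        match *self {
            Tokenizer::Html(ref mut t) => t.feed(input),
            Tokenizer::Xml(ref mut t) => t.feed(input),
        }
    }

    fn end(&mut self) {
        match *self {
            Tokenizer::Html(ref mut t) => t.end(),
            Tokenizer::Xml(ref mut t) => t.end(),
        }
    }
}

fn main() {
    let mut tok = Tokenizer::Html(HtmlTok { buffer: String::new() });
    tok.feed("<p>hello</p>".to_string());
    tok.end();
}

Dropping pub from the enum and its methods keeps callers on the ServoParser surface (parse_html, parse_xml, suspend and resume) instead of reaching into the tokenizer directly.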
@@ -15,13 +15,12 @@ use dom::element::{Element, ElementCreator};
 use dom::htmlscriptelement::HTMLScriptElement;
 use dom::node::Node;
 use dom::processinginstruction::ProcessingInstruction;
-use dom::servoparser::{ServoParser, Tokenizer};
 use dom::text::Text;
 use html5ever;
 use msg::constellation_msg::PipelineId;
-use parse::Sink;
 use std::borrow::Cow;
 use string_cache::{Atom, QualName, Namespace};
+use super::{LastChunkState, ServoParser, Sink, Tokenizer};
 use url::Url;
 use xml5ever::tendril::StrTendril;
 use xml5ever::tokenizer::{Attribute, QName, XmlTokenizer};
@@ -140,7 +139,8 @@ pub fn parse_xml(document: &Document,
             });
             let tok = XmlTokenizer::new(tb, Default::default());
 
-            ServoParser::new(document, owner, Tokenizer::XML(tok), false)
+            ServoParser::new(
+                document, owner, Tokenizer::XML(tok), LastChunkState::NotReceived)
         }
     };
     parser.parse_chunk(String::from(input));
@@ -28,6 +28,8 @@ use dom::globalscope::GlobalScope;
 use dom::headers::is_forbidden_header_name;
 use dom::htmlformelement::{encode_multipart_form_data, generate_boundary};
 use dom::progressevent::ProgressEvent;
+use dom::servoparser::html::{ParseContext, parse_html};
+use dom::servoparser::xml::{self, parse_xml};
 use dom::window::Window;
 use dom::workerglobalscope::WorkerGlobalScope;
 use dom::xmlhttprequesteventtarget::XMLHttpRequestEventTarget;
@@ -53,8 +55,6 @@ use net_traits::CoreResourceMsg::Fetch;
 use net_traits::request::{CredentialsMode, Destination, RequestInit, RequestMode};
 use net_traits::trim_http_whitespace;
 use network_listener::{NetworkListener, PreInvoke};
-use parse::html::{ParseContext, parse_html};
-use parse::xml::{self, parse_xml};
 use script_runtime::ScriptChan;
 use std::ascii::AsciiExt;
 use std::borrow::ToOwned;
@@ -105,7 +105,6 @@ pub mod layout_wrapper;
 mod mem;
 mod network_listener;
 pub mod origin;
-pub mod parse;
 pub mod script_runtime;
 #[allow(unsafe_code)]
 pub mod script_thread;
@@ -1,180 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-use document_loader::LoadType;
-use dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
-use dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods;
-use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
-use dom::bindings::js::{JS, Root};
-use dom::bindings::refcounted::Trusted;
-use dom::bindings::str::DOMString;
-use dom::document::Document;
-use dom::htmlimageelement::HTMLImageElement;
-use dom::node::Node;
-use dom::servoparser::ServoParser;
-use encoding::all::UTF_8;
-use encoding::types::{DecoderTrap, Encoding};
-use hyper::header::ContentType;
-use hyper::mime::{Mime, SubLevel, TopLevel};
-use hyper_serde::Serde;
-use msg::constellation_msg::PipelineId;
-use net_traits::{AsyncResponseListener, Metadata, NetworkError};
-use network_listener::PreInvoke;
-use script_thread::ScriptThread;
-use url::Url;
-use util::resource_files::read_resource_file;
-
-pub mod html;
-pub mod xml;
-
-/// The context required for asynchronously fetching a document
-/// and parsing it progressively.
-pub struct ParserContext {
-    /// The parser that initiated the request.
-    parser: Option<Trusted<ServoParser>>,
-    /// Is this a synthesized document
-    is_synthesized_document: bool,
-    /// The pipeline associated with this document.
-    id: PipelineId,
-    /// The URL for this document.
-    url: Url,
-}
-
-impl ParserContext {
-    pub fn new(id: PipelineId, url: Url) -> ParserContext {
-        ParserContext {
-            parser: None,
-            is_synthesized_document: false,
-            id: id,
-            url: url,
-        }
-    }
-}
-
-impl AsyncResponseListener for ParserContext {
-    fn headers_available(&mut self, meta_result: Result<Metadata, NetworkError>) {
-        let mut ssl_error = None;
-        let metadata = match meta_result {
-            Ok(meta) => Some(meta),
-            Err(NetworkError::SslValidation(url, reason)) => {
-                ssl_error = Some(reason);
-                let mut meta = Metadata::default(url);
-                let mime: Option<Mime> = "text/html".parse().ok();
-                meta.set_content_type(mime.as_ref());
-                Some(meta)
-            },
-            Err(_) => None,
-        };
-        let content_type =
-            metadata.clone().and_then(|meta| meta.content_type).map(Serde::into_inner);
-        let parser = match ScriptThread::page_headers_available(&self.id,
-                                                                metadata) {
-            Some(parser) => parser,
-            None => return,
-        };
-
-        self.parser = Some(Trusted::new(&*parser));
-
-        match content_type {
-            Some(ContentType(Mime(TopLevel::Image, _, _))) => {
-                self.is_synthesized_document = true;
-                let page = "<html><body></body></html>".into();
-                parser.push_input_chunk(page);
-                parser.parse_sync();
-
-                let doc = parser.document();
-                let doc_body = Root::upcast::<Node>(doc.GetBody().unwrap());
-                let img = HTMLImageElement::new(atom!("img"), None, doc);
-                img.SetSrc(DOMString::from(self.url.to_string()));
-                doc_body.AppendChild(&Root::upcast::<Node>(img)).expect("Appending failed");
-
-            },
-            Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) => {
-                // https://html.spec.whatwg.org/multipage/#read-text
-                let page = "<pre>\n".into();
-                parser.push_input_chunk(page);
-                parser.parse_sync();
-                parser.set_plaintext_state();
-            },
-            Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, _))) => { // Handle text/html
-                if let Some(reason) = ssl_error {
-                    self.is_synthesized_document = true;
-                    let page_bytes = read_resource_file("badcert.html").unwrap();
-                    let page = String::from_utf8(page_bytes).unwrap();
-                    let page = page.replace("${reason}", &reason);
-                    parser.push_input_chunk(page);
-                    parser.parse_sync();
-                }
-            },
-            Some(ContentType(Mime(TopLevel::Text, SubLevel::Xml, _))) => {}, // Handle text/xml
-            Some(ContentType(Mime(toplevel, sublevel, _))) => {
-                if toplevel.as_str() == "application" && sublevel.as_str() == "xhtml+xml" {
-                    // Handle xhtml (application/xhtml+xml).
-                    return;
-                }
-
-                // Show warning page for unknown mime types.
-                let page = format!("<html><body><p>Unknown content type ({}/{}).</p></body></html>",
-                    toplevel.as_str(), sublevel.as_str());
-                self.is_synthesized_document = true;
-                parser.push_input_chunk(page);
-                parser.parse_sync();
-            },
-            None => {
-                // No content-type header.
-                // Merge with #4212 when fixed.
-            }
-        }
-    }
-
-    fn data_available(&mut self, payload: Vec<u8>) {
-        if !self.is_synthesized_document {
-            // FIXME: use Vec<u8> (html5ever #34)
-            let data = UTF_8.decode(&payload, DecoderTrap::Replace).unwrap();
-            let parser = match self.parser.as_ref() {
-                Some(parser) => parser.root(),
-                None => return,
-            };
-            parser.parse_chunk(data);
-        }
-    }
-
-    fn response_complete(&mut self, status: Result<(), NetworkError>) {
-        let parser = match self.parser.as_ref() {
-            Some(parser) => parser.root(),
-            None => return,
-        };
-
-        if let Err(NetworkError::Internal(ref reason)) = status {
-            // Show an error page for network errors,
-            // certificate errors are handled earlier.
-            self.is_synthesized_document = true;
-            let page_bytes = read_resource_file("neterror.html").unwrap();
-            let page = String::from_utf8(page_bytes).unwrap();
-            let page = page.replace("${reason}", reason);
-            parser.push_input_chunk(page);
-            parser.parse_sync();
-        } else if let Err(err) = status {
-            // TODO(Savago): we should send a notification to callers #5463.
-            debug!("Failed to load page URL {}, error: {:?}", self.url, err);
-        }
-
-        parser.document()
-            .finish_load(LoadType::PageSource(self.url.clone()));
-
-        parser.mark_last_chunk_received();
-        if !parser.is_suspended() {
-            parser.parse_sync();
-        }
-    }
-}
-
-impl PreInvoke for ParserContext {}
-
-#[derive(JSTraceable, HeapSizeOf)]
-#[must_root]
-pub struct Sink {
-    pub base_url: Url,
-    pub document: JS<Document>,
-}
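ParserContext, which this commit moves from the deleted parse module into dom::servoparser, drives the parser from network callbacks: headers_available may synthesize a document for images, plain text, bad certificates, or unknown MIME types; data_available feeds decoded chunks; response_complete marks the last chunk and resumes parsing. A simplified sketch of that flow, with a hypothetical Parser type and plain string content types in place of the real ServoParser, hyper Mime, and net_traits listener trait:

// Sketch only: `Parser` and this `ParserContext` are simplified stand-ins for
// ServoParser and the AsyncResponseListener implementation shown in the diff.
struct Parser {
    pending: Vec<String>,
    last_chunk_received: bool,
}

impl Parser {
    fn push_input_chunk(&mut self, chunk: String) { self.pending.push(chunk); }
    fn parse_sync(&mut self) {
        // Drain whatever input has arrived so far.
        for chunk in self.pending.drain(..) {
            println!("parsing {} bytes", chunk.len());
        }
    }
    fn mark_last_chunk_received(&mut self) { self.last_chunk_received = true; }
}

struct ParserContext {
    parser: Parser,
    is_synthesized_document: bool,
}

impl ParserContext {
    fn headers_available(&mut self, content_type: Option<&str>) {
        // Unknown content types get a synthesized warning page, as in the diff.
        match content_type {
            Some("text/html") | None => {}
            Some(other) => {
                self.is_synthesized_document = true;
                let page = format!("<html><body><p>Unknown content type ({}).</p></body></html>", other);
                self.parser.push_input_chunk(page);
                self.parser.parse_sync();
            }
        }
    }

    fn data_available(&mut self, payload: Vec<u8>) {
        // Synthesized documents ignore the network body.
        if !self.is_synthesized_document {
            let data = String::from_utf8_lossy(&payload).into_owned();
            self.parser.push_input_chunk(data);
            self.parser.parse_sync();
        }
    }

    fn response_complete(&mut self) {
        self.parser.mark_last_chunk_received();
        self.parser.parse_sync();
    }
}

fn main() {
    let mut ctx = ParserContext {
        parser: Parser { pending: vec![], last_chunk_received: false },
        is_synthesized_document: false,
    };
    ctx.headers_available(Some("text/html"));
    ctx.data_available(b"<p>streamed".to_vec());
    ctx.data_available(b" content</p>".to_vec());
    ctx.response_complete();
    assert!(ctx.parser.last_chunk_received);
}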
@@ -44,7 +44,9 @@ use dom::htmlanchorelement::HTMLAnchorElement;
 use dom::node::{Node, NodeDamage, window_from_node};
 use dom::serviceworker::TrustedServiceWorkerAddress;
 use dom::serviceworkerregistration::ServiceWorkerRegistration;
-use dom::servoparser::ServoParser;
+use dom::servoparser::{ParserContext, ServoParser};
+use dom::servoparser::html::{ParseContext, parse_html};
+use dom::servoparser::xml::{self, parse_xml};
 use dom::uievent::UIEvent;
 use dom::window::{ReflowReason, Window};
 use dom::worker::TrustedWorkerAddress;
@@ -71,9 +73,6 @@ use net_traits::{IpcSend, LoadData as NetLoadData};
 use net_traits::bluetooth_thread::BluetoothMethodMsg;
 use net_traits::image_cache_thread::{ImageCacheChan, ImageCacheResult, ImageCacheThread};
 use network_listener::NetworkListener;
-use parse::ParserContext;
-use parse::html::{ParseContext, parse_html};
-use parse::xml::{self, parse_xml};
 use profile_traits::mem::{self, OpaqueSender, Report, ReportKind, ReportsChan};
 use profile_traits::time::{self, ProfilerCategory, profile};
 use script_layout_interface::message::{self, NewLayoutThreadInfo, ReflowQueryType};