more http cache work

Gregory Terzian 2017-08-03 12:17:29 +02:00
parent f36a4fb6d7
commit 2799b4eac9
39 changed files with 798 additions and 2164 deletions

File diff suppressed because it is too large

@ -13,6 +13,7 @@ use fetch::methods::{Data, DoneChannel, FetchContext, Target};
use fetch::methods::{is_cors_safelisted_request_header, is_cors_safelisted_method, main_fetch};
use flate2::read::{DeflateDecoder, GzDecoder};
use hsts::HstsList;
use http_cache::HttpCache;
use hyper::Error as HttpError;
use hyper::LanguageTag;
use hyper::client::{Pool, Request as HyperRequest, Response as HyperResponse};
@ -22,7 +23,7 @@ use hyper::header::{AccessControlMaxAge, AccessControlRequestHeaders};
use hyper::header::{AccessControlRequestMethod, AcceptEncoding, AcceptLanguage};
use hyper::header::{Authorization, Basic, CacheControl, CacheDirective};
use hyper::header::{ContentEncoding, ContentLength, Encoding, Header, Headers};
use hyper::header::{Host, Origin as HyperOrigin, IfMatch, IfRange};
use hyper::header::{Host, HttpDate, Origin as HyperOrigin, IfMatch, IfRange};
use hyper::header::{IfUnmodifiedSince, IfModifiedSince, IfNoneMatch, Location};
use hyper::header::{Pragma, Quality, QualityItem, Referer, SetCookie};
use hyper::header::{UserAgent, q, qitem};
@ -45,6 +46,7 @@ use std::io::{self, Read, Write};
use std::iter::FromIterator;
use std::mem;
use std::ops::Deref;
use std::str::FromStr;
use std::sync::RwLock;
use std::sync::mpsc::{channel, Sender};
use std::thread;
@ -69,6 +71,7 @@ fn read_block<R: Read>(reader: &mut R) -> Result<Data, ()> {
pub struct HttpState {
pub hsts_list: RwLock<HstsList>,
pub cookie_jar: RwLock<CookieStorage>,
pub http_cache: RwLock<HttpCache>,
pub auth_cache: RwLock<AuthCache>,
pub ssl_client: OpensslClient,
pub connector: Pool<Connector>,
@ -80,6 +83,7 @@ impl HttpState {
hsts_list: RwLock::new(HstsList::new()),
cookie_jar: RwLock::new(CookieStorage::new(150)),
auth_cache: RwLock::new(AuthCache::new()),
http_cache: RwLock::new(HttpCache::new()),
ssl_client: ssl_client.clone(),
connector: create_http_connector(ssl_client),
}
@ -893,34 +897,35 @@ fn http_network_or_cache_fetch(request: &mut Request,
let mut revalidating_flag = false;
// Step 21
// TODO have a HTTP cache to check for a completed response
let complete_http_response_from_cache: Option<Response> = None;
if http_request.cache_mode != CacheMode::NoStore &&
http_request.cache_mode != CacheMode::Reload &&
complete_http_response_from_cache.is_some() {
// TODO Substep 1 and 2. Select a response from HTTP cache.
// Substep 3
if let Some(ref response) = response {
revalidating_flag = response_needs_revalidation(&response);
if let Ok(http_cache) = context.state.http_cache.read() {
if let Some(response_from_cache) = http_cache.construct_response(&http_request) {
let response_headers = response_from_cache.response.headers.clone();
// Substep 1, 2, 3, 4
let (cached_response, needs_revalidation) = match (http_request.cache_mode, &http_request.mode) {
(CacheMode::ForceCache, _) => (Some(response_from_cache.response), false),
(CacheMode::OnlyIfCached, &RequestMode::SameOrigin) => (Some(response_from_cache.response), false),
(CacheMode::OnlyIfCached, _) | (CacheMode::NoStore, _) | (CacheMode::Reload, _) => (None, false),
(_, _) => (Some(response_from_cache.response), response_from_cache.needs_validation)
};
// Substep 4
if http_request.cache_mode == CacheMode::ForceCache ||
http_request.cache_mode == CacheMode::OnlyIfCached {
// TODO pull response from HTTP cache
// response = http_request
}
if revalidating_flag {
if needs_revalidation {
revalidating_flag = true;
// Substep 5
// TODO set If-None-Match and If-Modified-Since according to cached
// response headers.
// TODO: find out why the typed header getters return None from the headers of cached responses.
if let Some(date_slice) = response_headers.get_raw("Last-Modified") {
let date_string = String::from_utf8_lossy(&date_slice[0]);
if let Ok(http_date) = HttpDate::from_str(&date_string) {
http_request.headers.set(IfModifiedSince(http_date));
}
}
if let Some(entity_tag) =
response_headers.get_raw("ETag") {
http_request.headers.set_raw("If-None-Match", entity_tag.to_vec());
}
} else {
// Substep 6
// TODO pull response from HTTP cache
// response = http_request
// response.cache_state = CacheState::Local;
response = cached_response;
}
}
}
@ -931,26 +936,37 @@ fn http_network_or_cache_fetch(request: &mut Request,
return Response::network_error(
NetworkError::Internal("Couldn't find response in cache".into()))
}
}
// More Step 22
if response.is_none() {
// Substep 2
let forward_response = http_network_fetch(http_request, credentials_flag,
done_chan, context);
// Substep 3
if let Some((200...399, _)) = forward_response.raw_status {
if !http_request.method.safe() {
// TODO Invalidate HTTP cache response
if let Ok(mut http_cache) = context.state.http_cache.write() {
http_cache.invalidate(&http_request, &forward_response);
}
}
}
// Substep 4
if revalidating_flag && forward_response.status.map_or(false, |s| s == StatusCode::NotModified) {
// TODO update forward_response headers with cached response headers
if let Ok(mut http_cache) = context.state.http_cache.write() {
response = http_cache.refresh(&http_request, forward_response.clone(), done_chan);
}
}
// Substep 5
if response.is_none() {
if http_request.cache_mode != CacheMode::NoStore {
// Subsubstep 2, doing it first to avoid a clone of forward_response.
if let Ok(mut http_cache) = context.state.http_cache.write() {
http_cache.store(&http_request, &forward_response);
}
}
// Subsubstep 1
response = Some(forward_response);
// Subsubstep 2
// TODO: store http_request and forward_response in cache
}
}
@ -1168,7 +1184,9 @@ fn http_network_fetch(request: &Request,
// Step 14
if !response.is_network_error() && request.cache_mode != CacheMode::NoStore {
// TODO update response in the HTTP cache for request
if let Ok(mut http_cache) = context.state.http_cache.write() {
http_cache.store(&request, &response);
}
}
// TODO this step isn't possible yet
@ -1366,11 +1384,6 @@ fn is_no_store_cache(headers: &Headers) -> bool {
headers.has::<IfRange>()
}
fn response_needs_revalidation(_response: &Response) -> bool {
// TODO this function
false
}
/// <https://fetch.spec.whatwg.org/#redirect-status>
pub fn is_redirect_status(status: StatusCode) -> bool {
match status {

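The hunks above lean on a handful of HttpCache methods — construct_response, refresh, store and invalidate — whose implementation lives in the suppressed diff above (presumably the new http_cache.rs). As a rough guide to the interface being assumed, here is a minimal sketch with signatures inferred from the call sites in this diff; Request, Response and CachedResponse are simplified stand-ins for the real net_traits types, the method bodies are illustrative only, and the real refresh() also threads the fetch's done_chan, omitted here.

```rust
// Minimal sketch of the HttpCache surface the fetch code above relies on.
// Signatures are inferred from the call sites in this diff; all types and
// bodies here are simplified stand-ins, not Servo's actual implementation.
use std::collections::HashMap;

#[derive(Clone, Debug)]
pub struct Request {
    pub url: String,
    pub method: String,
    pub headers: HashMap<String, String>,
}

#[derive(Clone, Debug)]
pub struct Response {
    pub headers: HashMap<String, String>,
    pub body: Vec<u8>,
}

/// A stored response plus a flag saying whether it must be revalidated
/// (stale, or stored with Cache-Control: no-cache) before reuse.
pub struct CachedResponse {
    pub response: Response,
    pub needs_validation: bool,
}

#[derive(Default)]
pub struct HttpCache {
    entries: HashMap<String, (Response, bool)>, // url -> (response, needs_validation)
}

impl HttpCache {
    pub fn new() -> HttpCache {
        HttpCache { entries: HashMap::new() }
    }

    /// Select a stored response matching the request, if any (Step 21 above).
    pub fn construct_response(&self, request: &Request) -> Option<CachedResponse> {
        self.entries.get(&request.url).map(|entry| CachedResponse {
            response: entry.0.clone(),
            needs_validation: entry.1,
        })
    }

    /// Store a network response for later reuse (Step 22 substep 5 and Step 14 above).
    pub fn store(&mut self, request: &Request, response: &Response) {
        if request.method == "GET" {
            self.entries.insert(request.url.clone(), (response.clone(), false));
        }
    }

    /// Freshen the stored entry from a 304 and hand back the cached response
    /// (Step 22 substep 4 above).
    pub fn refresh(&mut self, request: &Request, response_304: Response) -> Option<Response> {
        self.entries.get_mut(&request.url).map(|entry| {
            entry.0.headers.extend(response_304.headers);
            entry.1 = false;
            entry.0.clone()
        })
    }

    /// Drop entries made stale by a successful unsafe request (Step 22 substep 3 above).
    pub fn invalidate(&mut self, request: &Request, _response: &Response) {
        self.entries.remove(&request.url);
    }
}
```
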
@ -1,291 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A task that takes a URL and streams back the binary data.
use about_loader;
use data_loader;
use file_loader;
use http_cache::MemoryCache;
use http_loader;
use sniffer_task;
use std::comm::{channel, Receiver, Sender};
use std::sync::{Arc, Mutex};
use http::headers::content_type::MediaType;
use http::headers::response::HeaderCollection as ResponseHeaderCollection;
use http::headers::request::HeaderCollection as RequestHeaderCollection;
use http::method::{Method, Get};
use url::Url;
use http::status::Ok as StatusOk;
use http::status::Status;
use servo_util::task::spawn_named;
pub enum ControlMsg {
/// Request the data associated with a particular URL
Load(LoadData, Sender<LoadResponse>),
Exit
}
#[deriving(Clone)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: RequestHeaderCollection,
pub data: Option<Vec<u8>>,
pub cors: Option<ResourceCORSData>
}
impl LoadData {
pub fn new(url: Url) -> LoadData {
LoadData {
url: url,
method: Get,
headers: RequestHeaderCollection::new(),
data: None,
cors: None
}
}
}
#[deriving(Clone)]
pub struct ResourceCORSData {
/// CORS Preflight flag
pub preflight: bool,
/// Origin of CORS Request
pub origin: Url
}
/// Metadata about a loaded resource, such as is obtained from HTTP headers.
#[deriving(Clone)]
pub struct Metadata {
/// Final URL after redirects.
pub final_url: Url,
/// MIME type / subtype.
pub content_type: Option<(String, String)>,
/// Character set.
pub charset: Option<String>,
/// Headers
pub headers: Option<ResponseHeaderCollection>,
/// HTTP Status
pub status: Status
}
impl Metadata {
/// Metadata with defaults for everything optional.
pub fn default(url: Url) -> Metadata {
Metadata {
final_url: url,
content_type: None,
charset: None,
headers: None,
status: StatusOk // http://fetch.spec.whatwg.org/#concept-response-status-message
}
}
/// Extract the parts of a MediaType that we care about.
pub fn set_content_type(&mut self, content_type: &Option<MediaType>) {
match *content_type {
None => (),
Some(MediaType { ref type_,
ref subtype,
ref parameters }) => {
self.content_type = Some((type_.clone(), subtype.clone()));
for &(ref k, ref v) in parameters.iter() {
if "charset" == k.as_slice() {
self.charset = Some(v.clone());
}
}
}
}
}
}
/// Message sent in response to `Load`. Contains metadata, and a port
/// for receiving the data.
///
/// Even if loading fails immediately, we send one of these and the
/// progress_port will provide the error.
pub struct LoadResponse {
/// Metadata, such as from HTTP headers.
pub metadata: Metadata,
/// Port for reading data.
pub progress_port: Receiver<ProgressMsg>,
}
/// Messages sent in response to a `Load` message
#[deriving(PartialEq,Show)]
pub enum ProgressMsg {
/// Binary data - there may be multiple of these
Payload(Vec<u8>),
/// Indicates loading is complete, either successfully or not
Done(Result<(), String>)
}
/// For use by loaders in responding to a Load message.
pub fn start_sending(start_chan: Sender<LoadResponse>, metadata: Metadata) -> Sender<ProgressMsg> {
start_sending_opt(start_chan, metadata).ok().unwrap()
}
/// For use by loaders in responding to a Load message.
pub fn start_sending_opt(start_chan: Sender<LoadResponse>, metadata: Metadata) -> Result<Sender<ProgressMsg>, ()> {
let (progress_chan, progress_port) = channel();
let result = start_chan.send_opt(LoadResponse {
metadata: metadata,
progress_port: progress_port,
});
match result {
Ok(_) => Ok(progress_chan),
Err(_) => Err(())
}
}
/// Convenience function for synchronously loading a whole resource.
pub fn load_whole_resource(resource_task: &ResourceTask, url: Url)
-> Result<(Metadata, Vec<u8>), String> {
let (start_chan, start_port) = channel();
resource_task.send(Load(LoadData::new(url), start_chan));
let response = start_port.recv();
let mut buf = vec!();
loop {
match response.progress_port.recv() {
Payload(data) => buf.push_all(data.as_slice()),
Done(Ok(())) => return Ok((response.metadata, buf)),
Done(Err(e)) => return Err(e)
}
}
}
/// Handle to a resource task
pub type ResourceTask = Sender<ControlMsg>;
/// Create a ResourceTask
pub fn new_resource_task(user_agent: Option<String>) -> ResourceTask {
let (setup_chan, setup_port) = channel();
spawn_named("ResourceManager", proc() {
ResourceManager::new(setup_port, user_agent).start();
});
setup_chan
}
struct ResourceManager {
from_client: Receiver<ControlMsg>,
user_agent: Option<String>,
memory_cache: Arc<Mutex<MemoryCache>>,
}
impl ResourceManager {
fn new(from_client: Receiver<ControlMsg>, user_agent: Option<String>) -> ResourceManager {
ResourceManager {
from_client: from_client,
user_agent: user_agent,
memory_cache: Arc::new(Mutex::new(MemoryCache::new())),
}
}
}
impl ResourceManager {
fn start(&self) {
loop {
match self.from_client.recv() {
Load(load_data, start_chan) => {
self.load(load_data, start_chan)
}
Exit => {
break
}
}
}
}
fn load(&self, load_data: LoadData, start_chan: Sender<LoadResponse>) {
let mut load_data = load_data;
load_data.headers.user_agent = self.user_agent.clone();
// Create new communication channel, create new sniffer task,
// send all the data to the new sniffer task with the send
// end of the pipe, receive all the data.
let sniffer_task = sniffer_task::new_sniffer_task(start_chan.clone());
fn from_factory<'a>(factory: fn(LoadData, Sender<LoadResponse>))
-> proc(LoadData, Sender<LoadResponse>): 'a {
proc(load_data: LoadData, start_chan: Sender<LoadResponse>) {
factory(load_data, start_chan)
}
}
let loader = match load_data.url.scheme.as_slice() {
"file" => from_factory(file_loader::factory),
"http" | "https" => http_loader::factory(self.memory_cache.clone()),
"data" => from_factory(data_loader::factory),
"about" => from_factory(about_loader::factory),
_ => {
debug!("resource_task: no loader for scheme {:s}", load_data.url.scheme);
start_sending(start_chan, Metadata::default(load_data.url))
.send(Done(Err("no loader for scheme".to_string())));
return
}
};
debug!("resource_task: loading url: {:s}", load_data.url.serialize());
loader(load_data, sniffer_task);
}
}
/// Load a URL asynchronously and iterate over chunks of bytes from the response.
pub fn load_bytes_iter(resource_task: &ResourceTask, url: Url) -> (Metadata, ProgressMsgPortIterator) {
let (input_chan, input_port) = channel();
resource_task.send(Load(LoadData::new(url), input_chan));
let response = input_port.recv();
let iter = ProgressMsgPortIterator { progress_port: response.progress_port };
(response.metadata, iter)
}
/// Iterator that reads chunks of bytes from a ProgressMsg port
pub struct ProgressMsgPortIterator {
progress_port: Receiver<ProgressMsg>
}
impl Iterator<Vec<u8>> for ProgressMsgPortIterator {
fn next(&mut self) -> Option<Vec<u8>> {
match self.progress_port.recv() {
Payload(data) => Some(data),
Done(Ok(())) => None,
Done(Err(e)) => {
error!("error receiving bytes: {}", e);
None
}
}
}
}
#[test]
fn test_exit() {
let resource_task = new_resource_task(None);
resource_task.send(Exit);
}
#[test]
fn test_bad_scheme() {
let resource_task = new_resource_task(None);
let (start_chan, start) = channel();
let url = Url::parse("bogus://whatever").unwrap();
resource_task.send(Load(LoadData::new(url), start_chan));
let response = start.recv();
match response.progress_port.recv() {
Done(result) => { assert!(result.is_err()) }
_ => panic!("bleh")
}
resource_task.send(Exit);
}

@ -12,6 +12,7 @@ use fetch::cors_cache::CorsCache;
use fetch::methods::{FetchContext, fetch};
use filemanager_thread::{FileManager, TFDProvider};
use hsts::HstsList;
use http_cache::HttpCache;
use http_loader::{HttpState, http_redirect_fetch};
use hyper_serde::Serde;
use ipc_channel::ipc::{self, IpcReceiver, IpcReceiverSet, IpcSender};
@ -91,6 +92,7 @@ struct ResourceChannelManager {
fn create_http_states(config_dir: Option<&Path>) -> (Arc<HttpState>, Arc<HttpState>) {
let mut hsts_list = HstsList::from_servo_preload();
let mut auth_cache = AuthCache::new();
let http_cache = HttpCache::new();
let mut cookie_jar = CookieStorage::new(150);
if let Some(config_dir) = config_dir {
read_json_from_file(&mut auth_cache, config_dir, "auth_cache.json");
@ -109,6 +111,7 @@ fn create_http_states(config_dir: Option<&Path>) -> (Arc<HttpState>, Arc<HttpSta
let http_state = HttpState {
cookie_jar: RwLock::new(cookie_jar),
auth_cache: RwLock::new(auth_cache),
http_cache: RwLock::new(http_cache),
hsts_list: RwLock::new(hsts_list),
ssl_client: ssl_client.clone(),
connector: create_http_connector(ssl_client),

@ -1,268 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::AttrHelpers;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, HTMLScriptElementCast};
use dom::bindings::js::{JS, JSRef, Temporary, OptionalRootable, Root};
use dom::comment::Comment;
use dom::document::{Document, DocumentHelpers};
use dom::documenttype::DocumentType;
use dom::element::{Element, AttributeHandlers, ElementHelpers, ParserCreated};
use dom::htmlscriptelement::HTMLScriptElement;
use dom::htmlscriptelement::HTMLScriptElementHelpers;
use dom::node::{Node, NodeHelpers, TrustedNodeAddress};
use dom::servohtmlparser;
use dom::servohtmlparser::ServoHTMLParser;
use dom::text::Text;
use page::Page;
use parse::Parser;
use encoding::all::UTF_8;
use encoding::types::{Encoding, DecodeReplace};
use servo_net::resource_task::{Load, LoadData, Payload, Done, ResourceTask};
use servo_msg::constellation_msg::LoadData as MsgLoadData;
use servo_util::task_state;
use servo_util::task_state::IN_HTML_PARSER;
use servo_util::time::parse_http_timestamp;
use std::ascii::AsciiExt;
use std::comm::channel;
use std::str::MaybeOwned;
use url::Url;
use http::headers::HeaderEnum;
use html5ever::Attribute;
use html5ever::tree_builder::{TreeSink, QuirksMode, NodeOrText, AppendNode, AppendText};
use string_cache::QualName;
pub enum HTMLInput {
InputString(String),
InputUrl(Url),
}
// Parses an RFC 2616 compliant date/time string, and returns a localized
// date/time string in a format suitable for document.lastModified.
fn parse_last_modified(timestamp: &str) -> String {
parse_http_timestamp(timestamp).map(|t| {
t.to_local().strftime("%m/%d/%Y %H:%M:%S").unwrap()
}).unwrap_or(String::new())
}
trait SinkHelpers {
fn get_or_create(&self, child: NodeOrText<TrustedNodeAddress>) -> Temporary<Node>;
}
impl SinkHelpers for servohtmlparser::Sink {
fn get_or_create(&self, child: NodeOrText<TrustedNodeAddress>) -> Temporary<Node> {
match child {
AppendNode(n) => Temporary::new(unsafe { JS::from_trusted_node_address(n) }),
AppendText(t) => {
let doc = self.document.root();
let text = Text::new(t, *doc);
NodeCast::from_temporary(text)
}
}
}
}
impl<'a> TreeSink<TrustedNodeAddress> for servohtmlparser::Sink {
fn get_document(&mut self) -> TrustedNodeAddress {
let doc = self.document.root();
let node: JSRef<Node> = NodeCast::from_ref(*doc);
node.to_trusted_node_address()
}
fn same_node(&self, x: TrustedNodeAddress, y: TrustedNodeAddress) -> bool {
x == y
}
fn elem_name(&self, target: TrustedNodeAddress) -> QualName {
let node: Root<Node> = unsafe { JS::from_trusted_node_address(target).root() };
let elem: JSRef<Element> = ElementCast::to_ref(*node)
.expect("tried to get name of non-Element in HTML parsing");
QualName {
ns: elem.get_namespace().clone(),
local: elem.get_local_name().clone(),
}
}
fn create_element(&mut self, name: QualName, attrs: Vec<Attribute>)
-> TrustedNodeAddress {
let doc = self.document.root();
let elem = Element::create(name, None, *doc, ParserCreated).root();
for attr in attrs.into_iter() {
elem.set_attribute_from_parser(attr.name, attr.value, None);
}
let node: JSRef<Node> = NodeCast::from_ref(*elem);
node.to_trusted_node_address()
}
fn create_comment(&mut self, text: String) -> TrustedNodeAddress {
let doc = self.document.root();
let comment = Comment::new(text, *doc);
let node: Root<Node> = NodeCast::from_temporary(comment).root();
node.to_trusted_node_address()
}
fn append_before_sibling(&mut self,
sibling: TrustedNodeAddress,
new_node: NodeOrText<TrustedNodeAddress>) -> Result<(), NodeOrText<TrustedNodeAddress>> {
// If there is no parent, return the node to the parser.
let sibling: Root<Node> = unsafe { JS::from_trusted_node_address(sibling).root() };
let parent = match sibling.parent_node() {
Some(p) => p.root(),
None => return Err(new_node),
};
let child = self.get_or_create(new_node).root();
assert!(parent.InsertBefore(*child, Some(*sibling)).is_ok());
Ok(())
}
fn parse_error(&mut self, msg: MaybeOwned<'static>) {
debug!("Parse error: {:s}", msg);
}
fn set_quirks_mode(&mut self, mode: QuirksMode) {
let doc = self.document.root();
doc.set_quirks_mode(mode);
}
fn append(&mut self, parent: TrustedNodeAddress, child: NodeOrText<TrustedNodeAddress>) {
let parent: Root<Node> = unsafe { JS::from_trusted_node_address(parent).root() };
let child = self.get_or_create(child).root();
// FIXME(#3701): Use a simpler algorithm and merge adjacent text nodes
assert!(parent.AppendChild(*child).is_ok());
}
fn append_doctype_to_document(&mut self, name: String, public_id: String, system_id: String) {
let doc = self.document.root();
let doc_node: JSRef<Node> = NodeCast::from_ref(*doc);
let doctype = DocumentType::new(name, Some(public_id), Some(system_id), *doc);
let node: Root<Node> = NodeCast::from_temporary(doctype).root();
assert!(doc_node.AppendChild(*node).is_ok());
}
fn add_attrs_if_missing(&mut self, target: TrustedNodeAddress, attrs: Vec<Attribute>) {
let node: Root<Node> = unsafe { JS::from_trusted_node_address(target).root() };
let elem: JSRef<Element> = ElementCast::to_ref(*node)
.expect("tried to set attrs on non-Element in HTML parsing");
for attr in attrs.into_iter() {
elem.set_attribute_from_parser(attr.name, attr.value, None);
}
}
fn remove_from_parent(&mut self, _target: TrustedNodeAddress) {
error!("remove_from_parent not implemented!");
}
fn mark_script_already_started(&mut self, node: TrustedNodeAddress) {
let node: Root<Node> = unsafe { JS::from_trusted_node_address(node).root() };
let script: Option<JSRef<HTMLScriptElement>> = HTMLScriptElementCast::to_ref(*node);
script.map(|script| script.mark_already_started());
}
fn complete_script(&mut self, node: TrustedNodeAddress) {
let node: Root<Node> = unsafe { JS::from_trusted_node_address(node).root() };
let script: Option<JSRef<HTMLScriptElement>> = HTMLScriptElementCast::to_ref(*node);
script.map(|script| script.prepare());
}
}
// The url from msg_load_data is ignored here
pub fn parse_html(page: &Page,
document: JSRef<Document>,
input: HTMLInput,
resource_task: ResourceTask,
msg_load_data: Option<MsgLoadData>) {
let (base_url, load_response) = match input {
InputUrl(ref url) => {
// Wait for the LoadResponse so that the parser knows the final URL.
let (input_chan, input_port) = channel();
let mut load_data = LoadData::new(url.clone());
msg_load_data.map(|m| {
load_data.headers = m.headers;
load_data.method = m.method;
load_data.data = m.data;
});
resource_task.send(Load(load_data, input_chan));
let load_response = input_port.recv();
load_response.metadata.headers.as_ref().map(|headers| {
let header = headers.iter().find(|h|
h.header_name().as_slice().to_ascii_lower() == "last-modified".to_string()
);
match header {
Some(h) => document.set_last_modified(
parse_last_modified(h.header_value().as_slice())),
None => {},
};
});
let base_url = load_response.metadata.final_url.clone();
{
// Store the final URL before we start parsing, so that DOM routines
// (e.g. HTMLImageElement::update_image) can resolve relative URLs
// correctly.
*page.mut_url() = Some((base_url.clone(), true));
}
(Some(base_url), Some(load_response))
},
InputString(_) => {
match *page.url() {
Some((ref page_url, _)) => (Some(page_url.clone()), None),
None => (None, None),
}
},
};
let parser = ServoHTMLParser::new(base_url.clone(), document).root();
let parser: JSRef<ServoHTMLParser> = *parser;
task_state::enter(IN_HTML_PARSER);
match input {
InputString(s) => {
parser.parse_chunk(s);
}
InputUrl(url) => {
let load_response = load_response.unwrap();
match load_response.metadata.content_type {
Some((ref t, _)) if t.as_slice().eq_ignore_ascii_case("image") => {
let page = format!("<html><body><img src='{:s}' /></body></html>", base_url.as_ref().unwrap().serialize());
parser.parse_chunk(page);
},
_ => {
for msg in load_response.progress_port.iter() {
match msg {
Payload(data) => {
// FIXME: use Vec<u8> (html5ever #34)
let data = UTF_8.decode(data.as_slice(), DecodeReplace).unwrap();
parser.parse_chunk(data);
}
Done(Err(err)) => {
panic!("Failed to load page URL {:s}, error: {:s}", url.serialize(), err);
}
Done(Ok(())) => break,
}
}
}
}
}
}
parser.finish();
task_state::exit(IN_HTML_PARSER);
debug!("finished parsing");
}

@ -1,297 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Timing functions.
use collections::TreeMap;
use std::comm::{Sender, channel, Receiver};
use std::f64;
use std::io::timer::sleep;
use std::iter::AdditiveIterator;
use std::time::duration::Duration;
use std_time::{Tm, precise_time_ns, strptime};
use task::{spawn_named};
use url::Url;
// front-end representation of the profiler used to communicate with the profiler
#[deriving(Clone)]
pub struct TimeProfilerChan(pub Sender<TimeProfilerMsg>);
impl TimeProfilerChan {
pub fn send(&self, msg: TimeProfilerMsg) {
let TimeProfilerChan(ref c) = *self;
c.send(msg);
}
}
#[deriving(PartialEq, Clone, PartialOrd, Eq, Ord)]
pub struct TimerMetadata {
url: String,
iframe: bool,
incremental: bool,
}
pub trait Formatable {
fn format(&self) -> String;
}
impl Formatable for Option<TimerMetadata> {
fn format(&self) -> String {
match self {
// TODO(cgaebel): Center-align in the format strings as soon as rustc supports it.
&Some(ref meta) => {
let url = meta.url.as_slice();
let url = if url.len() > 30 {
url.slice_to(30)
} else {
url
};
let incremental = if meta.incremental { " yes" } else { " no " };
let iframe = if meta.iframe { " yes" } else { " no " };
format!(" {:14} {:9} {:30}", incremental, iframe, url)
},
&None =>
format!(" {:14} {:9} {:30}", " N/A", " N/A", " N/A")
}
}
}
#[deriving(Clone)]
pub enum TimeProfilerMsg {
/// Normal message used for reporting time
TimeMsg((TimeProfilerCategory, Option<TimerMetadata>), f64),
/// Message used to force print the profiling metrics
PrintMsg,
/// Tells the profiler to shut down.
ExitMsg,
}
#[repr(u32)]
#[deriving(PartialEq, Clone, PartialOrd, Eq, Ord)]
pub enum TimeProfilerCategory {
CompositingCategory,
LayoutPerformCategory,
LayoutStyleRecalcCategory,
LayoutRestyleDamagePropagation,
LayoutNonIncrementalReset,
LayoutSelectorMatchCategory,
LayoutTreeBuilderCategory,
LayoutDamagePropagateCategory,
LayoutMainCategory,
LayoutParallelWarmupCategory,
LayoutShapingCategory,
LayoutDispListBuildCategory,
PaintingPerTileCategory,
PaintingPrepBuffCategory,
PaintingCategory,
}
impl Formatable for TimeProfilerCategory {
// some categories are subcategories of LayoutPerformCategory
// and should be printed to indicate this
fn format(&self) -> String {
let padding = match *self {
LayoutStyleRecalcCategory |
LayoutRestyleDamagePropagation |
LayoutNonIncrementalReset |
LayoutMainCategory |
LayoutDispListBuildCategory |
LayoutShapingCategory |
LayoutDamagePropagateCategory |
PaintingPerTileCategory |
PaintingPrepBuffCategory => "+ ",
LayoutParallelWarmupCategory |
LayoutSelectorMatchCategory |
LayoutTreeBuilderCategory => "| + ",
_ => ""
};
let name = match *self {
CompositingCategory => "Compositing",
LayoutPerformCategory => "Layout",
LayoutStyleRecalcCategory => "Style Recalc",
LayoutRestyleDamagePropagation => "Restyle Damage Propagation",
LayoutNonIncrementalReset => "Non-incremental reset (temporary)",
LayoutSelectorMatchCategory => "Selector Matching",
LayoutTreeBuilderCategory => "Tree Building",
LayoutDamagePropagateCategory => "Damage Propagation",
LayoutMainCategory => "Primary Layout Pass",
LayoutParallelWarmupCategory => "Parallel Warmup",
LayoutShapingCategory => "Shaping",
LayoutDispListBuildCategory => "Display List Construction",
PaintingPerTileCategory => "Painting Per Tile",
PaintingPrepBuffCategory => "Buffer Prep",
PaintingCategory => "Painting",
};
format!("{:s}{}", padding, name)
}
}
type TimeProfilerBuckets = TreeMap<(TimeProfilerCategory, Option<TimerMetadata>), Vec<f64>>;
// back end of the profiler that handles data aggregation and performance metrics
pub struct TimeProfiler {
pub port: Receiver<TimeProfilerMsg>,
buckets: TimeProfilerBuckets,
pub last_msg: Option<TimeProfilerMsg>,
}
impl TimeProfiler {
pub fn create(period: Option<f64>) -> TimeProfilerChan {
let (chan, port) = channel();
match period {
Some(period) => {
let period = Duration::milliseconds((period * 1000f64) as i64);
let chan = chan.clone();
spawn_named("Time profiler timer", proc() {
loop {
sleep(period);
if chan.send_opt(PrintMsg).is_err() {
break;
}
}
});
// Spawn the time profiler.
spawn_named("Time profiler", proc() {
let mut profiler = TimeProfiler::new(port);
profiler.start();
});
}
None => {
// No-op to handle messages when the time profiler is inactive.
spawn_named("Time profiler", proc() {
loop {
match port.recv_opt() {
Err(_) | Ok(ExitMsg) => break,
_ => {}
}
}
});
}
}
TimeProfilerChan(chan)
}
pub fn new(port: Receiver<TimeProfilerMsg>) -> TimeProfiler {
TimeProfiler {
port: port,
buckets: TreeMap::new(),
last_msg: None,
}
}
pub fn start(&mut self) {
loop {
let msg = self.port.recv_opt();
match msg {
Ok(msg) => {
if !self.handle_msg(msg) {
break
}
}
_ => break
}
}
}
fn find_or_insert(&mut self, k: (TimeProfilerCategory, Option<TimerMetadata>), t: f64) {
match self.buckets.get_mut(&k) {
None => {},
Some(v) => { v.push(t); return; },
}
self.buckets.insert(k, vec!(t));
}
fn handle_msg(&mut self, msg: TimeProfilerMsg) -> bool {
match msg.clone() {
TimeMsg(k, t) => self.find_or_insert(k, t),
PrintMsg => match self.last_msg {
// only print if more data has arrived since the last printout
Some(TimeMsg(..)) => self.print_buckets(),
_ => ()
},
ExitMsg => return false,
};
self.last_msg = Some(msg);
true
}
fn print_buckets(&mut self) {
println!("{:35s} {:14} {:9} {:30} {:15s} {:15s} {:-15s} {:-15s} {:-15s}",
"_category_", "_incremental?_", "_iframe?_",
" _url_", " _mean (ms)_", " _median (ms)_",
" _min (ms)_", " _max (ms)_", " _events_");
for (&(ref category, ref meta), ref mut data) in self.buckets.iter_mut() {
data.sort_by(|a, b| {
if a < b {
Less
} else {
Greater
}
});
let data_len = data.len();
if data_len > 0 {
let (mean, median, min, max) =
(data.iter().map(|&x|x).sum() / (data_len as f64),
data.as_slice()[data_len / 2],
data.iter().fold(f64::INFINITY, |a, &b| a.min(b)),
data.iter().fold(-f64::INFINITY, |a, &b| a.max(b)));
println!("{:-35s}{} {:15.4f} {:15.4f} {:15.4f} {:15.4f} {:15u}",
category.format(), meta.format(), mean, median, min, max, data_len);
}
}
println!("");
}
}
pub fn profile<T>(category: TimeProfilerCategory,
// url, iframe?, first reflow?
meta: Option<(&Url, bool, bool)>,
time_profiler_chan: TimeProfilerChan,
callback: || -> T)
-> T {
let start_time = precise_time_ns();
let val = callback();
let end_time = precise_time_ns();
let ms = (end_time - start_time) as f64 / 1000000f64;
let meta = meta.map(|(url, iframe, first_reflow)|
TimerMetadata {
url: url.serialize(),
iframe: iframe,
incremental: !first_reflow,
});
time_profiler_chan.send(TimeMsg((category, meta), ms));
return val;
}
pub fn time<T>(msg: &str, callback: || -> T) -> T{
let start_time = precise_time_ns();
let val = callback();
let end_time = precise_time_ns();
let ms = (end_time - start_time) as f64 / 1000000f64;
if ms >= 5f64 {
debug!("{:s} took {} ms", msg, ms);
}
return val;
}
// Parses an RFC 2616 compliant date/time string
pub fn parse_http_timestamp(timestamp: &str) -> Option<Tm> {
// RFC 822, updated by RFC 1123
match strptime(timestamp, "%a, %d %b %Y %T %Z") {
Ok(t) => return Some(t),
Err(_) => ()
}
// RFC 850, obsoleted by RFC 1036
match strptime(timestamp, "%A, %d-%b-%y %T %Z") {
Ok(t) => return Some(t),
Err(_) => ()
}
// ANSI C's asctime() format
strptime(timestamp, "%c").ok()
}

@ -1,106 +0,0 @@
function _oneline(x) {
var i = x.indexOf("\n");
return (i == -1) ? x : (x.slice(0, i) + "...");
}
var _expectations = 0;
var _tests = 0;
function expect(num) {
_expectations = num;
}
function _fail(s, m) {
_tests++;
// string split to avoid problems with tests that end up printing the value of window._fail.
window.alert(_oneline("TEST-UNEXPECTED" + "-FAIL | " + s + ": " + m));
}
function _pass(s, m) {
_tests++;
window.alert(_oneline("TEST-PASS | " + s + ": " + m));
}
function _printer(opstr, op) {
return function (a, b, msg) {
var f = op(a,b) ? _pass : _fail;
if (!msg) msg = "";
f(a + " " + opstr + " " + b, msg);
};
}
var is = _printer("===", function (a,b) { return a === b; });
var is_not = _printer("!==", function (a,b) { return a !== b; });
var is_a = _printer("is a", function (a,b) { return a instanceof b; });
var is_not_a = _printer("is not a", function (a,b) { return !(a instanceof b); });
var is_in = _printer("is in", function (a,b) { return a in b; });
var is_not_in = _printer("is not in", function (a,b) { return !(a in b); });
var as_str_is = _printer("as string is", function (a,b) { return String(a) == b; });
var lt = _printer("<", function (a,b) { return a < b; });
var gt = _printer(">", function (a,b) { return a > b; });
var leq = _printer("<=", function (a,b) { return a <= b; });
var geq = _printer(">=", function (a,b) { return a >= b; });
var starts_with = _printer("starts with", function (a,b) { return a.indexOf(b) == 0; });
function is_function(val, name) {
starts_with(String(val), "function " + name + "(");
}
function should_throw(f) {
try {
f();
_fail("operation should have thrown but did not");
} catch (x) {
_pass("operation successfully threw an exception", x.toString());
}
}
function should_not_throw(f) {
try {
f();
_pass("operation did not throw an exception");
} catch (x) {
_fail("operation should have not thrown", x.toString());
}
}
function check_selector(elem, selector, matches) {
is(elem.matches(selector), matches);
}
function check_disabled_selector(elem, disabled) {
check_selector(elem, ":disabled", disabled);
check_selector(elem, ":enabled", !disabled);
}
var _test_complete = false;
var _test_timeout = 10000; //10 seconds
function finish() {
if (_test_complete) {
_fail('finish called multiple times');
}
if (_expectations > _tests) {
_fail('expected ' + _expectations + ' tests, fulfilled ' + _tests);
}
_test_complete = true;
window.close();
}
function _test_timed_out() {
if (!_test_complete) {
_fail('test timed out (' + _test_timeout/1000 + 's)');
finish();
}
}
setTimeout(_test_timed_out, _test_timeout);
var _needs_finish = false;
function waitForExplicitFinish() {
_needs_finish = true;
}
addEventListener('load', function() {
if (!_needs_finish) {
finish();
}
});

@ -1,25 +0,0 @@
function assert_requests_made(url, n) {
var x = new XMLHttpRequest();
x.open('GET', 'stats?' + url, false);
x.send();
is(parseInt(x.responseText), n, '# of requests for ' + url + ' should be ' + n);
}
function reset_stats() {
var x = new XMLHttpRequest();
x.open('POST', 'reset', false);
x.send();
is(x.status, 200, 'resetting stats should succeed');
}
function fetch(url, headers) {
var x = new XMLHttpRequest();
x.open('GET', url, false);
if (headers) {
for (var i = 0; i < headers.length; i++) {
x.setRequestHeader(headers[i][0], headers[i][1]);
}
}
x.send();
is(x.status, 200, 'fetching ' + url + ' should succeed ');
}

@ -1,2 +0,0 @@
<html>
</html>

@ -1,2 +0,0 @@
<html>
</html>

@ -1,2 +0,0 @@
200
Cache-Control: must-revalidate

@ -1,2 +0,0 @@
<html>
</html>

@ -1,2 +0,0 @@
200
Cache-Control: no-cache

@ -1,14 +0,0 @@
<html>
<head>
<script src="harness.js"></script>
<script src="netharness.js"></script>
</head>
<body>
<script>
reset_stats();
fetch('resources/helper.html');
fetch('resources/helper.html', [['X-User', 'foo']]);
assert_requests_made('resources/helper.html', 2);
</script>
</body>
</html>

@ -1,14 +0,0 @@
<html>
<head>
<script src="harness.js"></script>
<script src="netharness.js"></script>
</head>
<body>
<script>
reset_stats();
fetch('resources/helper.html');
fetch('resources/helper.html');
assert_requests_made('resources/helper.html', 1);
</script>
</body>
</html>

@ -1,30 +0,0 @@
<!doctype html>
<html>
<head>
<title></title>
<script src="harness.js"></script>
<script>
// test1: URL & documentURI
{
is_not(document.URL, null, "test1-0, URL & documentURI");
is_not(document.documentURI, null, "test1-1, URL & documentURI");
is(document.URL, document.documentURI, "test1-2, URL & documentURI");
}
// test2: new document
{
var doc = new Document();
is(doc.URL, "about:blank", "test2-0, new document");
}
// test3: current document
{
var url = document.URL.split("/");
is(url[0], "http:", "test3-0, current document");
is(url[url.length-1], "test_document_url.html", "test3-1, current document");
}
</script>
</head>
<body>
</body>
</html>

@ -1,14 +0,0 @@
<html>
<head>
<script src="harness.js"></script>
<script src="netharness.js"></script>
</head>
<body>
<script>
reset_stats();
fetch('resources/helper_nocache.html');
fetch('resources/helper_nocache.html');
assert_requests_made('resources/helper_nocache.html', 2);
</script>
</body>
</html>

@ -1,14 +0,0 @@
<html>
<head>
<script src="harness.js"></script>
<script src="netharness.js"></script>
</head>
<body>
<script>
reset_stats();
fetch('resources/helper_must_revalidate.html');
fetch('resources/helper_must_revalidate.html');
assert_requests_made('resources/helper_must_revalidate.html', 1);
</script>
</body>
</html>

tests/contenttest.rs

@ -1,194 +0,0 @@
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(unused_imports)]
#![deny(unused_variables)]
extern crate getopts;
extern crate regex;
extern crate test;
use test::{AutoColor, TestOpts, run_tests_console, TestDesc, TestDescAndFn, DynTestFn, DynTestName};
use getopts::{getopts, reqopt};
use std::comm::channel;
use std::from_str::FromStr;
use std::{os, str};
use std::io::fs;
use std::io::Reader;
use std::io::process::{Command, Ignored, CreatePipe, InheritFd, ExitStatus};
use std::task;
use regex::Regex;
#[deriving(Clone)]
struct Config {
source_dir: String,
filter: Option<Regex>
}
fn main() {
let args = os::args();
let config = parse_config(args.into_iter().collect());
let opts = test_options(&config);
let tests = find_tests(&config);
match run_tests_console(&opts, tests) {
Ok(false) => os::set_exit_status(1), // tests failed
Err(_) => os::set_exit_status(2), // I/O-related failure
_ => (),
}
}
enum ServerMsg {
IsAlive(Sender<bool>),
Exit,
}
fn run_http_server(source_dir: String) -> (Sender<ServerMsg>, u16) {
let (tx, rx) = channel();
let (port_sender, port_receiver) = channel();
task::spawn(proc() {
let mut prc = Command::new("python")
.args(["../httpserver.py"])
.stdin(Ignored)
.stdout(CreatePipe(false, true))
.stderr(Ignored)
.cwd(&Path::new(source_dir))
.spawn()
.ok()
.expect("Unable to spawn server.");
let mut bytes = vec!();
loop {
let byte = prc.stdout.as_mut().unwrap().read_byte().unwrap();
if byte == '\n' as u8 {
break;
} else {
bytes.push(byte);
}
}
let mut words = str::from_utf8(bytes.as_slice()).unwrap().split(' ');
let port = FromStr::from_str(words.last().unwrap()).unwrap();
port_sender.send(port);
loop {
match rx.recv() {
IsAlive(reply) => reply.send(prc.signal(0).is_ok()),
Exit => {
let _ = prc.signal_exit();
break;
}
}
}
});
(tx, port_receiver.recv())
}
fn parse_config(args: Vec<String>) -> Config {
let args = args.tail();
let opts = vec!(reqopt("s", "source-dir", "source-dir", "source-dir"));
let matches = match getopts(args, opts.as_slice()) {
Ok(m) => m,
Err(f) => panic!(format!("{}", f))
};
Config {
source_dir: matches.opt_str("source-dir").unwrap(),
filter: matches.free.as_slice().head().map(|s| Regex::new(s.as_slice()).unwrap())
}
}
fn test_options(config: &Config) -> TestOpts {
TestOpts {
filter: config.filter.clone(),
run_ignored: false,
run_tests: true,
run_benchmarks: false,
ratchet_metrics: None,
ratchet_noise_percent: None,
save_metrics: None,
test_shard: None,
logfile: None,
nocapture: false,
color: AutoColor
}
}
fn find_tests(config: &Config) -> Vec<TestDescAndFn> {
let files_res = fs::readdir(&Path::new(config.source_dir.clone()));
let mut files = match files_res {
Ok(files) => files,
_ => panic!("Error reading directory."),
};
files.retain(|file| file.extension_str() == Some("html") );
return files.iter().map(|file| make_test(format!("{}", file.display()),
config.source_dir.clone())).collect();
}
fn make_test(file: String, source_dir: String) -> TestDescAndFn {
TestDescAndFn {
desc: TestDesc {
name: DynTestName(file.clone()),
ignore: false,
should_fail: false
},
testfn: DynTestFn(proc() { run_test(file, source_dir) })
}
}
fn run_test(file: String, source_dir: String) {
let (server, port) = run_http_server(source_dir);
let path = os::make_absolute(&Path::new(file));
// FIXME (#1094): not the right way to transform a path
let infile = format!("http://localhost:{}/{}", port, path.filename_display());
let stdout = CreatePipe(false, true);
let stderr = InheritFd(2);
let args = ["-z", "-f", infile.as_slice()];
let (tx, rx) = channel();
server.send(IsAlive(tx));
assert!(rx.recv(), "HTTP server must be running.");
let mut prc = match Command::new("target/servo")
.args(args)
.stdin(Ignored)
.stdout(stdout)
.stderr(stderr)
.spawn()
{
Ok(p) => p,
_ => panic!("Unable to spawn process."),
};
let mut output = Vec::new();
loop {
let byte = prc.stdout.as_mut().unwrap().read_byte();
match byte {
Ok(byte) => {
print!("{}", byte as char);
output.push(byte);
}
_ => break
}
}
server.send(Exit);
let out = str::from_utf8(output.as_slice());
let lines: Vec<&str> = out.unwrap().split('\n').collect();
for &line in lines.iter() {
if line.contains("TEST-UNEXPECTED-FAIL") {
panic!(line.to_string());
}
}
let retval = prc.wait();
if retval != Ok(ExitStatus(0)) {
panic!("Servo exited with non-zero status {}", retval);
}
}

tests/httpserver.py

@ -1,115 +0,0 @@
from SimpleHTTPServer import SimpleHTTPRequestHandler
import SocketServer
import os
import sys
from collections import defaultdict
PORT = int(sys.argv[1]) if len(sys.argv) > 1 else 0
requests = defaultdict(int)
class CountingRequestHandler(SimpleHTTPRequestHandler):
def __init__(self, req, client_addr, server):
SimpleHTTPRequestHandler.__init__(self, req, client_addr, server)
def do_POST(self):
global requests
parts = self.path.split('/')
if parts[1] == 'reset':
requests = defaultdict(int)
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.send_header('Content-Length', 0)
self.end_headers()
self.wfile.write('')
return
def do_GET(self):
global requests
parts = self.path.split('?')
if parts[0] == '/stats':
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
if len(parts) > 1:
body = str(requests['/' + parts[1]])
else:
body = ''
for key, value in requests.iteritems():
body += key + ': ' + str(value) + '\n'
self.send_header('Content-Length', len(body))
self.end_headers()
self.wfile.write(body)
return
header_list = []
status = None
path = self.translate_path(self.path)
headers = path + '^headers'
if os.path.isfile(headers):
try:
h = open(headers, 'rb')
except IOError:
self.send_error(404, "Header file not found")
return
header_lines = h.readlines()
status = int(header_lines[0])
for header in header_lines[1:]:
parts = map(lambda x: x.strip(), header.split(':'))
header_list += [parts]
if self.headers.get('If-Modified-Since'):
self.send_response(304)
self.end_headers()
return
if not status or status == 200:
requests[self.path] += 1
if status or header_list:
ctype = self.guess_type(path)
try:
# Always read in binary mode. Opening files in text mode may cause
# newline translations, making the actual size of the content
# transmitted *less* than the content-length!
f = open(path, 'rb')
except IOError:
self.send_error(404, "File not found")
return
try:
self.send_response(status or 200)
self.send_header("Content-type", ctype)
fs = os.fstat(f.fileno())
self.send_header("Content-Length", str(fs[6]))
self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
for header in header_list:
self.send_header(header[0], header[1])
self.end_headers()
try:
self.copyfile(f, self.wfile)
finally:
f.close()
except:
f.close()
raise
else:
SimpleHTTPRequestHandler.do_GET(self)
class MyTCPServer(SocketServer.TCPServer):
request_queue_size = 2000
allow_reuse_address = True
httpd = MyTCPServer(("", PORT), CountingRequestHandler)
if not PORT:
ip, PORT = httpd.server_address
print "serving at port", PORT
sys.stdout.flush()
httpd.serve_forever()

@ -524761,7 +524761,7 @@
"support"
],
"fetch/http-cache/cc-request.html": [
"d4417b8fd444362a3f217d1c95d37811a608e1a7",
"2002d341679139428e164cfe916dd39b9b664a3e",
"testharness"
],
"fetch/http-cache/freshness.html": [
@ -524769,7 +524769,7 @@
"testharness"
],
"fetch/http-cache/heuristic.html": [
"5b0d55f891cb2e235456cd65f4e9f63e07999410",
"63837026eb6085fc7d6220c3dcab200b4bcd1eca",
"testharness"
],
"fetch/http-cache/http-cache.js": [
@ -524781,7 +524781,7 @@
"testharness"
],
"fetch/http-cache/partial.html": [
"243e57c39f9e45e3e2acf845b36f3a140e3763bc",
"685057fe8876321a5d42bcf1e7582e6f0b745f85",
"testharness"
],
"fetch/http-cache/resources/http-cache.py": [
@ -524793,7 +524793,7 @@
"testharness"
],
"fetch/http-cache/vary.html": [
"fa9a2e0554671bf2de5826e66ac0ea73de28d530",
"45f337270cfa90932c7469802655e313367ac92f",
"testharness"
],
"fetch/nosniff/image.html": [

@ -1,14 +0,0 @@
[304.htm]
type: testharness
[A 304 response with no CORS headers inherits from the stored response]
expected: FAIL
[A 304 can expand Access-Control-Expose-Headers]
expected: FAIL
[A 304 can contract Access-Control-Expose-Headers]
expected: FAIL
[A 304 can change Access-Control-Allow-Origin]
expected: FAIL

@ -1,3 +0,0 @@
[request-cache-default-conditional.html]
type: testharness
disabled: https://github.com/servo/servo/issues/13441

@ -1,11 +0,0 @@
[request-cache-default.html]
type: testharness
[RequestCache "default" mode checks the cache for previously cached content and avoids going to the network if a fresh response exists with Etag and fresh response]
expected: FAIL
[RequestCache "default" mode checks the cache for previously cached content and avoids going to the network if a fresh response exists with date and fresh response]
expected: FAIL
[RequestCache "default" mode checks the cache for previously cached content and avoids going to the network if a fresh response exists with Last-Modified and fresh response]
expected: FAIL

@ -1,29 +0,0 @@
[request-cache-force-cache.html]
type: testharness
[RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for stale responses with Etag and stale response]
expected: FAIL
[RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for stale responses with date and stale response]
expected: FAIL
[RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for fresh responses with Etag and fresh response]
expected: FAIL
[RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for fresh responses with date and fresh response]
expected: FAIL
[RequestCache "force-cache" stores the response in the cache if it goes to the network with Etag and fresh response]
expected: FAIL
[RequestCache "force-cache" stores the response in the cache if it goes to the network with date and fresh response]
expected: FAIL
[RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for stale responses with Last-Modified and stale response]
expected: FAIL
[RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for fresh responses with Last-Modified and fresh response]
expected: FAIL
[RequestCache "force-cache" stores the response in the cache if it goes to the network with Last-Modified and fresh response]
expected: FAIL

@ -1,65 +0,0 @@
[request-cache-only-if-cached.html]
type: testharness
[RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for stale responses with Etag and stale response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for stale responses with date and stale response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for fresh responses with Etag and fresh response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for fresh responses with date and fresh response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and does not go to the network if a cached response is not found with Etag and fresh response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and does not go to the network if a cached response is not found with date and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content with Etag and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content with date and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content with Etag and stale response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content with date and stale response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects with Etag and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects with date and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects with Etag and stale response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects with date and stale response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for stale responses with Last-Modified and stale response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for fresh responses with Last-Modified and fresh response]
expected: FAIL
[RequestCache "only-if-cached" mode checks the cache for previously cached content and does not go to the network if a cached response is not found with Last-Modified and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content with Last-Modified and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content with Last-Modified and stale response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects with Last-Modified and fresh response]
expected: FAIL
[RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects with Last-Modified and stale response]
expected: FAIL

@ -1,20 +0,0 @@
[request-cache-reload.html]
type: testharness
[RequestCache "reload" mode does store the response in the cache with Etag and fresh response]
expected: FAIL
[RequestCache "reload" mode does store the response in the cache with date and fresh response]
expected: FAIL
[RequestCache "reload" mode does store the response in the cache even if a previous response is already stored with Etag and fresh response]
expected: FAIL
[RequestCache "reload" mode does store the response in the cache even if a previous response is already stored with date and fresh response]
expected: FAIL
[RequestCache "reload" mode does store the response in the cache with Last-Modified and fresh response]
expected: FAIL
[RequestCache "reload" mode does store the response in the cache even if a previous response is already stored with Last-Modified and fresh response]
expected: FAIL

@ -1,14 +0,0 @@
[304-update.html]
type: testharness
[HTTP cache updates returned headers from a Last-Modified 304.]
expected: FAIL
[HTTP cache updates stored headers from a Last-Modified 304.]
expected: FAIL
[HTTP cache updates returned headers from a ETag 304.]
expected: FAIL
[HTTP cache updates stored headers from a ETag 304.]
expected: FAIL

@ -1,11 +1,5 @@
[cc-request.html]
type: testharness
[HTTP cache does use aged stale response when request contains Cache-Control: max-stale that permits its use.]
expected: FAIL
[HTTP cache does reuse stale response with Age header when request contains Cache-Control: max-stale that permits its use.]
expected: FAIL
[HTTP cache generates 504 status code when nothing is in cache and request contains Cache-Control: only-if-cached.]
expected: FAIL

@ -1,20 +0,0 @@
[freshness.html]
type: testharness
[HTTP cache reuses a response with a future Expires.]
expected: FAIL
[HTTP cache reuses a response with positive Cache-Control: max-age.]
expected: FAIL
[HTTP cache reuses a response with positive Cache-Control: max-age and a past Expires.]
expected: FAIL
[HTTP cache reuses a response with positive Cache-Control: max-age and an invalid Expires.]
expected: FAIL
[HTTP cache stores a response with Cache-Control: no-cache, but revalidates upon use.]
expected: FAIL
[HTTP cache stores a response with Cache-Control: no-cache, but revalidates upon use, even with max-age and Expires.]
expected: FAIL

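Dropping these expected-FAIL entries implies the cache now handles basic freshness: reuse under a future Expires or a positive max-age (with max-age winning over a past or invalid Expires), and store-but-revalidate under Cache-Control: no-cache, even when max-age and Expires are also present. A sketch of that decision, following RFC 7234 section 4.2 rather than Servo's actual code:

```rust
// Illustrative freshness check, not Servo's http_cache.rs: max-age takes
// precedence over Expires, and no-cache means the entry may be stored but
// must be revalidated before every reuse.
use std::time::{Duration, SystemTime};

pub struct StoredHeaders {
    pub max_age: Option<Duration>,   // from Cache-Control: max-age=N
    pub expires: Option<SystemTime>, // parsed Expires header, None if invalid
    pub date: SystemTime,            // response Date (or time of storage)
    pub no_cache: bool,              // Cache-Control: no-cache present
}

pub enum Usability {
    Fresh,          // can be served directly from the cache
    MustRevalidate, // serve only after a successful conditional request
}

pub fn usability(stored: &StoredHeaders, now: SystemTime) -> Usability {
    if stored.no_cache {
        return Usability::MustRevalidate;
    }
    let freshness_lifetime = match (stored.max_age, stored.expires) {
        (Some(max_age), _) => Some(max_age),
        (None, Some(expires)) => expires.duration_since(stored.date).ok(),
        (None, None) => None,
    };
    let current_age = now.duration_since(stored.date).unwrap_or(Duration::from_secs(0));
    match freshness_lifetime {
        Some(lifetime) if current_age < lifetime => Usability::Fresh,
        _ => Usability::MustRevalidate,
    }
}
```
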
@ -1,29 +0,0 @@
[heuristic.html]
type: testharness
[HTTP cache reuses an unknown response with Last-Modified based upon heuristic freshness when Cache-Control: public is present.]
expected: FAIL
[HTTP cache reuses a 200 OK response with Last-Modified based upon heuristic freshness.]
expected: FAIL
[HTTP cache reuses a 203 Non-Authoritative Information response with Last-Modified based upon heuristic freshness.]
expected: FAIL
[HTTP cache reuses a 204 No Content response with Last-Modified based upon heuristic freshness.]
expected: FAIL
[HTTP cache reuses a 404 Not Found response with Last-Modified based upon heuristic freshness.]
expected: FAIL
[HTTP cache reuses a 405 Method Not Allowed response with Last-Modified based upon heuristic freshness.]
expected: FAIL
[HTTP cache reuses a 410 Gone response with Last-Modified based upon heuristic freshness.]
expected: FAIL
[HTTP cache reuses a 414 URI Too Long response with Last-Modified based upon heuristic freshness.]
expected: FAIL
[HTTP cache reuses a 501 Not Implemented response with Last-Modified based upon heuristic freshness.]
expected: FAIL

@ -1,20 +1,11 @@
[invalidate.html]
type: testharness
[HTTP cache does not invalidate after a failed response from an unsafe request]
expected: FAIL
[HTTP cache invalidates after a successful response from an unknown method]
expected: FAIL
[HTTP cache does not invalidate Location URL after a failed response from an unsafe request]
expected: FAIL
[HTTP cache invalidates Location URL after a successful response from an unknown method]
expected: FAIL
[HTTP cache does not invalidate Content-Location URL after a failed response from an unsafe request]
expected: FAIL
[HTTP cache invalidates Content-Location URL after a successful response from an unknown method]
expected: FAIL

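The dropped entries above correspond to the http_cache.invalidate() call added in http_loader.rs after a non-error response to an unsafe method; the removed expectations suggest the invalidation now also covers Location and Content-Location URLs, as RFC 7234 section 4.4 asks. A toy illustration of that rule, with simplified types and not Servo's actual code:

```rust
// Toy sketch of unsafe-method invalidation (RFC 7234 section 4.4): drop the
// stored entry for the request URL and for any Location / Content-Location
// URL named by the successful response.
use std::collections::{HashMap, HashSet};

pub fn invalidate(stored_urls: &mut HashSet<String>,
                  request_url: &str,
                  response_headers: &HashMap<String, String>) {
    stored_urls.remove(request_url);
    for name in &["Location", "Content-Location"] {
        if let Some(url) = response_headers.get(*name) {
            stored_urls.remove(url);
        }
    }
}
```
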
@ -1,13 +1,5 @@
[partial.html]
type: testharness
[HTTP cache stores partial content and reuses it.]
expected: FAIL
[HTTP cache stores complete response and serves smaller ranges from it.]
expected: FAIL
[HTTP cache stores partial response and serves smaller ranges from it.]
expected: FAIL
[HTTP cache stores partial content and completes it.]
expected: FAIL

@ -1,41 +0,0 @@
[status.html]
type: testharness
[HTTP cache avoids going to the network if it has a fresh 200 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 203 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 204 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 299 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 400 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 404 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 410 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 499 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 500 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 502 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 503 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 504 response.]
expected: FAIL
[HTTP cache avoids going to the network if it has a fresh 599 response.]
expected: FAIL

@ -1,17 +0,0 @@
[vary.html]
type: testharness
[HTTP cache reuses Vary response when request matches.]
expected: FAIL
[HTTP cache doesn't invalidate existing Vary response.]
expected: FAIL
[HTTP cache doesn't pay attention to headers not listed in Vary.]
expected: FAIL
[HTTP cache reuses two-way Vary response when request matches.]
expected: FAIL
[HTTP cache reuses three-way Vary response when request matches.]
expected: FAIL

@ -201,7 +201,8 @@
request_headers: [
["Cache-Control", "only-if-cached"]
],
expected_status: 504
expected_status: 504,
expected_response_text: ""
}
]
}

@ -26,6 +26,7 @@
},
{
expected_type: "cached",
response_status: [299, "Whatever"],
}
]
},
@ -35,8 +36,7 @@
{
response_status: [299, "Whatever"],
response_headers: [
['Last-Modified', http_date(-3 * 100)],
['Cache-Control', 'public']
['Last-Modified', http_date(-3 * 100)]
],
},
{

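These heuristic.html tweaks add an explicit cached expectation for the 299 case and drop Cache-Control: public from its stored response, leaving only Last-Modified. Such responses carry no explicit max-age or Expires, so any reuse relies on heuristic freshness; the 10% rule below is the one suggested in RFC 7234 section 4.2.2, and whether Servo's cache uses exactly that fraction is an assumption of this sketch.

```rust
// Heuristic freshness sketch: when only Last-Modified is available, assign a
// lifetime of 10% of the time since the resource last changed (RFC 7234 note).
use std::time::{Duration, SystemTime};

pub fn heuristic_freshness_lifetime(date: SystemTime, last_modified: SystemTime) -> Duration {
    date.duration_since(last_modified)
        .map(|age| age / 10)
        .unwrap_or(Duration::from_secs(0))
}
```
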
@ -24,7 +24,7 @@
response_status: [206, "Partial Content"],
response_headers: [
['Cache-Control', 'max-age=3600'],
['Content-Range', 'bytes 0-4/10']
['Content-Range', 'bytes 4-9/10']
],
response_body: "01234",
expected_request_headers: [
@ -36,12 +36,13 @@
['Range', "bytes=-5"]
],
expected_type: "cached",
expected_status: 206
expected_status: 206,
expected_response_text: "01234"
}
]
},
{
name: 'HTTP cache stores complete response and serves smaller ranges from it.',
name: 'HTTP cache stores complete response and serves smaller ranges from it(byte-range-spec).',
requests: [
{
response_headers: [
@ -51,15 +52,54 @@
},
{
request_headers: [
['Range', "bytes=-1"]
['Range', "bytes=0-1"]
],
expected_type: "cached",
expected_status: 206,
expected_response_text: "01"
},
]
},
{
name: 'HTTP cache stores complete response and serves smaller ranges from it(absent last-byte-pos).',
requests: [
{
response_headers: [
['Cache-Control', 'max-age=3600'],
],
response_body: "01234567890",
},
{
request_headers: [
['Range', "bytes=1-"]
],
expected_type: "cached",
expected_status: 206,
expected_response_text: "1234567890"
}
]
},
{
name: 'HTTP cache stores partial response and serves smaller ranges from it.',
name: 'HTTP cache stores complete response and serves smaller ranges from it(suffix-byte-range-spec).',
requests: [
{
response_headers: [
['Cache-Control', 'max-age=3600'],
],
response_body: "0123456789A",
},
{
request_headers: [
['Range', "bytes=-1"]
],
expected_type: "cached",
expected_status: 206,
expected_response_text: "A"
}
]
},
{
name: 'HTTP cache stores partial response and serves smaller ranges from it(byte-range-spec).',
requests: [
{
request_headers: [
@ -68,7 +108,55 @@
response_status: [206, "Partial Content"],
response_headers: [
['Cache-Control', 'max-age=3600'],
['Content-Range', 'bytes 0-4/10']
['Content-Range', 'bytes 4-9/10']
],
response_body: "01234",
},
{
request_headers: [
['Range', "bytes=6-8"]
],
expected_type: "cached",
expected_status: 206,
expected_response_text: "234"
}
]
},
{
name: 'HTTP cache stores partial response and serves smaller ranges from it(absent last-byte-pos).',
requests: [
{
request_headers: [
['Range', "bytes=-5"]
],
response_status: [206, "Partial Content"],
response_headers: [
['Cache-Control', 'max-age=3600'],
['Content-Range', 'bytes 4-9/10']
],
response_body: "01234",
},
{
request_headers: [
['Range', "bytes=6-"]
],
expected_type: "cached",
expected_status: 206,
expected_response_text: "234"
}
]
},
{
name: 'HTTP cache stores partial response and serves smaller ranges from it(suffix-byte-range-spec).',
requests: [
{
request_headers: [
['Range', "bytes=-5"]
],
response_status: [206, "Partial Content"],
response_headers: [
['Cache-Control', 'max-age=3600'],
['Content-Range', 'bytes 4-9/10']
],
response_body: "01234",
},
@ -77,7 +165,8 @@
['Range', "bytes=-1"]
],
expected_type: "cached",
expected_response_text: "01"
expected_status: 206,
expected_response_text: "4"
}
]
},

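The renamed and added cases above pin down the three Range shapes a cache has to serve from a stored response: an explicit byte-range-spec (bytes=0-1), an absent last-byte-pos (bytes=1-) and a suffix-byte-range-spec (bytes=-1), against both complete and partial stored bodies. As a rough illustration of the arithmetic for the complete-body case (the partial case additionally has to offset by the stored Content-Range), and not Servo's actual implementation:

```rust
// Satisfy a single-range Range header from a complete cached body, returning
// the sliced body plus the Content-Range value for the 206 response.
pub fn slice_range(body: &[u8], range: &str) -> Option<(Vec<u8>, String)> {
    if body.is_empty() {
        return None;
    }
    let spec = range.strip_prefix("bytes=")?;
    let (start, end) = match spec.split_once('-')? {
        // suffix-byte-range-spec: the last `n` bytes of the body
        ("", suffix) => {
            let n: usize = suffix.parse().ok()?;
            (body.len().saturating_sub(n), body.len() - 1)
        }
        // absent last-byte-pos: from first-byte-pos to the end
        (first, "") => (first.parse().ok()?, body.len() - 1),
        // byte-range-spec: explicit first and last byte positions
        (first, last) => (first.parse().ok()?, last.parse().ok()?),
    };
    if start > end || end >= body.len() {
        return None;
    }
    let content_range = format!("bytes {}-{}/{}", start, end, body.len());
    Some((body[start..=end].to_vec(), content_range))
}

// e.g. slice_range(b"0123456789A", "bytes=-1") yields body "A" and
// "bytes 10-10/11", which is what the suffix-byte-range-spec case expects.
```
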
@ -103,6 +103,7 @@
request_headers: [
["Foo", "1"]
],
response_body: http_content('foo_1'),
expected_type: "cached"
}
]
@ -245,7 +246,32 @@
]
},
{
name: "HTTP cache doesn't use three-way Vary response when request omits variant header.",
name: "HTTP cache doesn't use three-way Vary response when request doesn't match, regardless of header order.",
requests: [
{
request_headers: [
["Foo", "1"],
["Bar", "abc4"],
["Baz", "789"]
],
response_headers: [
["Expires", http_date(5000)],
["Last-Modified", http_date(-3000)],
["Vary", "Foo, Bar, Baz"]
]
},
{
request_headers: [
["Foo", "1"],
["Bar", "abc"],
["Baz", "789"]
],
expected_type: "not_cached"
}
]
},
{
name: "HTTP cache uses three-way Vary response when both request and the original request omited a variant header.",
requests: [
{
request_headers: [
@ -259,6 +285,33 @@
]
},
{
request_headers: [
["Foo", "1"],
["Baz", "789"]
],
expected_type: "cached"
}
]
},
{
name: "HTTP cache doesn't use Vary response with a field value of '*'.",
requests: [
{
request_headers: [
["Foo", "1"],
["Baz", "789"]
],
response_headers: [
["Expires", http_date(5000)],
["Last-Modified", http_date(-3000)],
["Vary", "*"]
]
},
{
request_headers: [
["*", "1"],
["Baz", "789"]
],
expected_type: "not_cached"
}
]
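
The added cases tighten the Vary handling: a mismatch must be detected regardless of header order, a response stored when both requests omitted a variant header is still reusable, and a response with Vary: * is never reusable. The underlying rule is RFC 7234 section 4.1, sketched below as an illustration rather than taken from Servo's code:

```rust
// A stored response matches only if every header named in its Vary value has
// the same value on the new request as on the original request (both absent
// also matches), and "Vary: *" can never be satisfied by a cache.
use std::collections::HashMap;

type Headers = HashMap<String, String>;

pub fn vary_matches(vary: &str, original: &Headers, new_request: &Headers) -> bool {
    vary.split(',')
        .map(|name| name.trim())
        .filter(|name| !name.is_empty())
        .all(|name| {
            if name == "*" {
                return false;
            }
            original.get(name) == new_request.get(name)
        })
}

// The "regardless of header order" case passes because matching is done per
// header name rather than on the serialized list, and the "omitted variant
// header" case passes because None == None on both sides.
```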