Mirror of https://github.com/servo/servo.git
Update hyper to 0.12
This commit is contained in:
parent 95bfaa0a77
commit 024b40b39d
122 changed files with 3835 additions and 3448 deletions
@@ -13,15 +13,19 @@ test = false
doctest = false

[dependencies]
base64 = "0.6"
brotli = "1.0.6"
cookie = "0.10"
base64 = "0.9"
brotli = "2.5"
bytes = "0.4"
cookie = "0.11"
devtools_traits = {path = "../devtools_traits"}
embedder_traits = { path = "../embedder_traits" }
flate2 = "1"
hyper = "0.10"
hyper_serde = "0.8"
hyper-openssl = "0.2.2"
headers-core = "0.0.1"
headers-ext = "0.0.3"
http = "0.1"
hyper = "0.12"
hyper_serde = "0.9"
hyper-openssl = "0.6"
immeta = "0.4"
ipc-channel = "0.11"
lazy_static = "1"
@@ -29,11 +33,11 @@ log = "0.4"
malloc_size_of = { path = "../malloc_size_of" }
malloc_size_of_derive = { path = "../malloc_size_of_derive" }
matches = "0.1"
mime = "0.2.1"
mime_guess = "1.8.0"
mime = "0.3"
mime_guess = "2.0.0-alpha.6"
msg = {path = "../msg"}
net_traits = {path = "../net_traits"}
openssl = "0.9"
openssl = "0.10"
pixels = {path = "../pixels"}
profile_traits = {path = "../profile_traits"}
serde = "1.0"
@@ -43,9 +47,10 @@ servo_arc = {path = "../servo_arc"}
servo_channel = {path = "../channel"}
servo_config = {path = "../config"}
servo_url = {path = "../url"}
tokio = "0.1"
tokio-timer = "0.2"
threadpool = "1.0"
time = "0.1.17"
unicase = "1.4.0"
url = "1.2"
uuid = {version = "0.6", features = ["v4"]}
webrender_api = {git = "https://github.com/servo/webrender", features = ["ipc"]}
@@ -53,6 +58,8 @@ ws = { version = "0.7", features = ["ssl"] }

[dev-dependencies]
std_test_override = { path = "../std_test_override" }
futures = "0.1"
tokio-openssl = "0.2"

[[test]]
name = "main"

@@ -3,11 +3,13 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use filemanager_thread::FileManager;
use hyper::header::{Charset, ContentLength, ContentType, Headers};
use hyper::header::{ContentDisposition, DispositionParam, DispositionType};
use headers_core::HeaderMapExt;
use headers_ext::{ContentLength, ContentType};
use http::HeaderMap;
use http::header::{self, HeaderValue};
use ipc_channel::ipc;
use mime::{Attr, Mime};
use net_traits::NetworkError;
use mime::{self, Mime};
use net_traits::{http_percent_encode, NetworkError};
use net_traits::blob_url_store::parse_blob_url;
use net_traits::filemanager_thread::ReadFileProgress;
use servo_url::ServoUrl;
@@ -20,7 +22,7 @@ use servo_url::ServoUrl;
pub fn load_blob_sync
(url: ServoUrl,
filemanager: FileManager)
-> Result<(Headers, Vec<u8>), NetworkError> {
-> Result<(HeaderMap, Vec<u8>), NetworkError> {
let (id, origin) = match parse_blob_url(&url) {
Ok((id, origin)) => (id, origin),
Err(()) => {
@@ -43,26 +45,32 @@ pub fn load_blob_sync
}
};

let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime!(Text / Plain));
let charset = content_type.get_param(Attr::Charset);
let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime::TEXT_PLAIN);
let charset = content_type.get_param(mime::CHARSET);

let mut headers = Headers::new();
let mut headers = HeaderMap::new();

if let Some(name) = blob_buf.filename {
let charset = charset.and_then(|c| c.as_str().parse().ok());
headers.set(ContentDisposition {
disposition: DispositionType::Inline,
parameters: vec![
DispositionParam::Filename(charset.unwrap_or(Charset::Us_Ascii),
None, name.as_bytes().to_vec())
]
});
let charset = charset.map(|c| c.as_ref().into()).unwrap_or("us-ascii".to_owned());
// TODO(eijebong): Replace this once the typed header is there
headers.insert(
header::CONTENT_DISPOSITION,
HeaderValue::from_bytes(
format!("inline; {}",
if charset.to_lowercase() == "utf-8" {
format!("filename=\"{}\"", String::from_utf8(name.as_bytes().into()).unwrap())
} else {
format!("filename*=\"{}\"''{}", charset, http_percent_encode(name.as_bytes()))
}
).as_bytes()
).unwrap()
);
}

// Basic fetch, Step 4.
headers.set(ContentLength(blob_buf.size as u64));
headers.typed_insert(ContentLength(blob_buf.size as u64));
// Basic fetch, Step 5.
headers.set(ContentType(content_type.clone()));
headers.typed_insert(ContentType::from(content_type.clone()));

let mut bytes = blob_buf.bytes;
loop {

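The hunk above replaces hyper 0.10's typed `Headers` container with `http::HeaderMap` plus the headers-ext extension trait. The following is a minimal sketch of the resulting pattern (not part of the commit), assuming http 0.1, headers-core 0.0.1 and headers-ext 0.0.3 as pinned in the Cargo.toml hunk; the `build_headers` helper is hypothetical.

// Sketch only -- not part of the commit.
extern crate headers_core;
extern crate headers_ext;
extern crate http;
extern crate mime;

use headers_core::HeaderMapExt;
use headers_ext::{ContentLength, ContentType};
use http::header::{self, HeaderValue};
use http::HeaderMap;

fn build_headers(body_len: u64) -> HeaderMap {
    let mut headers = HeaderMap::new();
    // Typed headers are serialized by the headers-ext crate.
    headers.typed_insert(ContentLength(body_len));
    headers.typed_insert(ContentType::from(mime::TEXT_PLAIN));
    // Headers without a typed representation fall back to raw values.
    headers.insert(header::CONTENT_DISPOSITION, HeaderValue::from_static("inline"));
    headers
}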
@@ -2,60 +2,132 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use flate2::read::GzDecoder;
use hosts::replace_host;
use hyper::client::Pool;
use hyper::error::{Result as HyperResult, Error as HyperError};
use hyper::net::{NetworkConnector, HttpsStream, HttpStream, SslClient};
use hyper_openssl::OpensslClient;
use openssl::ssl::{SSL_OP_NO_COMPRESSION, SSL_OP_NO_SSLV2, SSL_OP_NO_SSLV3};
use openssl::ssl::{SslConnector, SslConnectorBuilder, SslMethod};
use http_loader::Decoder;
use hyper::{Body, Client};
use hyper::body::Payload;
use hyper::client::HttpConnector as HyperHttpConnector;
use hyper::client::connect::{Connect, Destination};
use hyper::rt::Future;
use hyper_openssl::HttpsConnector;
use openssl::ssl::{SslConnector, SslConnectorBuilder, SslMethod, SslOptions};
use openssl::x509;
use std::io;
use std::net::TcpStream;
use std::io::{Cursor, Read};
use tokio::prelude::{Async, Stream};
use tokio::prelude::future::Executor;

pub struct HttpsConnector {
ssl: OpensslClient,
pub const BUF_SIZE: usize = 32768;

pub struct HttpConnector {
inner: HyperHttpConnector
}

impl HttpsConnector {
fn new(ssl: OpensslClient) -> HttpsConnector {
HttpsConnector {
ssl: ssl,
impl HttpConnector {
fn new() -> HttpConnector {
let mut inner = HyperHttpConnector::new(4);
inner.enforce_http(false);
inner.set_happy_eyeballs_timeout(None);
HttpConnector {
inner
}
}
}

impl NetworkConnector for HttpsConnector {
type Stream = HttpsStream<<OpensslClient as SslClient>::Stream>;

fn connect(&self, host: &str, port: u16, scheme: &str) -> HyperResult<Self::Stream> {
if scheme != "http" && scheme != "https" {
return Err(HyperError::Io(io::Error::new(io::ErrorKind::InvalidInput,
"Invalid scheme for Http")));
}
impl Connect for HttpConnector {
type Transport = <HyperHttpConnector as Connect>::Transport;
type Error = <HyperHttpConnector as Connect>::Error;
type Future = <HyperHttpConnector as Connect>::Future;

fn connect(&self, dest: Destination) -> Self::Future {
// Perform host replacement when making the actual TCP connection.
let addr = &(&*replace_host(host), port);
let stream = HttpStream(TcpStream::connect(addr)?);
let mut new_dest = dest.clone();
let addr = replace_host(dest.host());
new_dest.set_host(&*addr).unwrap();
self.inner.connect(new_dest)
}
}

if scheme == "http" {
Ok(HttpsStream::Http(stream))
} else {
// Do not perform host replacement on the host that is used
// for verifying any SSL certificate encountered.
self.ssl.wrap_client(stream, host).map(HttpsStream::Https)
pub type Connector = HttpsConnector<HttpConnector>;
pub struct WrappedBody {
pub body: Body,
pub decoder: Decoder,
}

impl WrappedBody {
pub fn new(body: Body) -> Self {
Self::new_with_decoder(body, Decoder::Plain)
}

pub fn new_with_decoder(body: Body, decoder: Decoder) -> Self {
WrappedBody {
body,
decoder,
}
}
}

pub type Connector = HttpsConnector;
impl Payload for WrappedBody {
type Data = <Body as Payload>::Data;
type Error = <Body as Payload>::Error;
fn poll_data(&mut self) -> Result<Async<Option<Self::Data>>, Self::Error> {
self.body.poll_data()
}
}

pub fn create_ssl_connector(certs: &str) -> SslConnector {
impl Stream for WrappedBody {
type Item = <Body as Stream>::Item;
type Error = <Body as Stream>::Error;
fn poll(&mut self) -> Result<Async<Option<Self::Item>>, Self::Error> {
self.body.poll().map(|res| {
res.map(|maybe_chunk| {
if let Some(chunk) = maybe_chunk {
match self.decoder {
Decoder::Plain => Some(chunk),
Decoder::Gzip(Some(ref mut decoder)) => {
let mut buf = vec![0; BUF_SIZE];
*decoder.get_mut() = Cursor::new(chunk.into_bytes());
let len = decoder.read(&mut buf).ok()?;
buf.truncate(len);
Some(buf.into())
}
Decoder::Gzip(None) => {
let mut buf = vec![0; BUF_SIZE];
let mut decoder = GzDecoder::new(Cursor::new(chunk.into_bytes()));
let len = decoder.read(&mut buf).ok()?;
buf.truncate(len);
self.decoder = Decoder::Gzip(Some(decoder));
Some(buf.into())
}
Decoder::Deflate(ref mut decoder) => {
let mut buf = vec![0; BUF_SIZE];
*decoder.get_mut() = Cursor::new(chunk.into_bytes());
let len = decoder.read(&mut buf).ok()?;
buf.truncate(len);
Some(buf.into())
}
Decoder::Brotli(ref mut decoder) => {
let mut buf = vec![0; BUF_SIZE];
decoder.get_mut().get_mut().extend(&chunk.into_bytes());
let len = decoder.read(&mut buf).ok()?;
buf.truncate(len);
Some(buf.into())
}
}
} else {
None
}
})
})
}
}

pub fn create_ssl_connector_builder(certs: &str) -> SslConnectorBuilder {
// certs include multiple certificates. We could add all of them at once,
// but if any of them were already added, openssl would fail to insert all
// of them.
let mut certs = certs;
let mut ssl_connector_builder = SslConnectorBuilder::new(SslMethod::tls()).unwrap();
let mut ssl_connector_builder = SslConnector::builder(SslMethod::tls()).unwrap();
loop {
let token = "-----END CERTIFICATE-----";
if let Some(index) = certs.find(token) {
@@ -78,18 +150,17 @@ pub fn create_ssl_connector(certs: &str) -> SslConnector {
}
}
ssl_connector_builder.set_cipher_list(DEFAULT_CIPHERS).expect("could not set ciphers");
ssl_connector_builder.set_options(SSL_OP_NO_SSLV2 | SSL_OP_NO_SSLV3 | SSL_OP_NO_COMPRESSION);
ssl_connector_builder.build()
ssl_connector_builder.set_options(SslOptions::NO_SSLV2 | SslOptions::NO_SSLV3 | SslOptions::NO_COMPRESSION);
ssl_connector_builder
}

pub fn create_ssl_client(certs: &str) -> OpensslClient {
let ssl_connector = create_ssl_connector(certs);
OpensslClient::from(ssl_connector)
}

pub fn create_http_connector(ssl_client: OpensslClient) -> Pool<Connector> {
let https_connector = HttpsConnector::new(ssl_client);
Pool::with_connector(Default::default(), https_connector)
pub fn create_http_client<E>(ssl_connector_builder: SslConnectorBuilder, executor: E)
-> Client<Connector, WrappedBody>
where
E: Executor<Box<Future<Error=(), Item=()> + Send + 'static>> + Sync + Send + 'static
{
let connector = HttpsConnector::with_connector(HttpConnector::new(), ssl_connector_builder).unwrap();
Client::builder().http1_title_case_headers(true).executor(executor).build(connector)
}

// The basic logic here is to prefer ciphers with ECDSA certificates, Forward

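For context, a hyper 0.12 client is built from a connector and driven by a futures executor rather than by a connection pool. A hypothetical caller of the new `create_http_client` shape might look like the sketch below (not part of the commit), assuming tokio 0.1, hyper 0.12 and hyper-openssl 0.6 as pinned in the Cargo.toml hunk.

// Sketch only -- not part of the commit.
extern crate hyper;
extern crate hyper_openssl;
extern crate openssl;
extern crate tokio;

use hyper::client::HttpConnector;
use hyper::{Body, Client};
use hyper_openssl::HttpsConnector;
use openssl::ssl::{SslConnector, SslMethod};
use tokio::runtime::Runtime;

fn main() {
    let runtime = Runtime::new().expect("failed to start tokio runtime");
    // TLS is layered over the plain TCP connector, mirroring the patched Connector type.
    let ssl_builder = SslConnector::builder(SslMethod::tls()).unwrap();
    let connector = HttpsConnector::with_connector(HttpConnector::new(4), ssl_builder).unwrap();
    // The runtime's executor drives the client's background connection futures.
    let _client: Client<_, Body> = Client::builder()
        .executor(runtime.executor())
        .build(connector);
}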
@@ -94,14 +94,14 @@ impl Cookie {

// Step 10
if cookie.http_only() && source == CookieSource::NonHTTP {
if cookie.http_only().unwrap_or(false) && source == CookieSource::NonHTTP {
return None;
}

// https://tools.ietf.org/html/draft-west-cookie-prefixes-04#section-4
// Step 1 of cookie prefixes
if (cookie.name().starts_with("__Secure-") || cookie.name().starts_with("__Host-")) &&
!(cookie.secure() && request.is_secure_scheme())
!(cookie.secure().unwrap_or(false) && request.is_secure_scheme())
{
return None;
}
@@ -197,10 +197,10 @@ impl Cookie {
}
}

if self.cookie.secure() && !url.is_secure_scheme() {
if self.cookie.secure().unwrap_or(false) && !url.is_secure_scheme() {
return false;
}
if self.cookie.http_only() && source == CookieSource::NonHTTP {
if self.cookie.http_only().unwrap_or(false) && source == CookieSource::NonHTTP {
return false;
}

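The `.unwrap_or(false)` calls above follow from cookie 0.11, where `secure()` and `http_only()` return `Option<bool>` (None when the attribute was absent) instead of `bool`. A minimal sketch (not part of the commit), assuming cookie 0.11:

// Sketch only -- not part of the commit.
extern crate cookie;

use cookie::Cookie;

fn main() {
    let c = Cookie::parse("id=1; HttpOnly").unwrap();
    // HttpOnly was set explicitly, Secure was not.
    assert_eq!(c.http_only(), Some(true));
    assert_eq!(c.secure(), None);
    // Treat an absent attribute as "not set", mirroring the patched call sites.
    assert!(!c.secure().unwrap_or(false));
}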
@@ -38,7 +38,7 @@ impl CookieStorage {
let cookies = self.cookies_map.entry(domain).or_insert(vec![]);

// https://www.ietf.org/id/draft-ietf-httpbis-cookie-alone-01.txt Step 2
if !cookie.cookie.secure() && !url.is_secure_scheme() {
if !cookie.cookie.secure().unwrap_or(false) && !url.is_secure_scheme() {
let new_domain = cookie.cookie.domain().as_ref().unwrap().to_owned();
let new_path = cookie.cookie.path().as_ref().unwrap().to_owned();

@@ -47,7 +47,7 @@ impl CookieStorage {
let existing_path = c.cookie.path().as_ref().unwrap().to_owned();

c.cookie.name() == cookie.cookie.name() &&
c.cookie.secure() &&
c.cookie.secure().unwrap_or(false) &&
(Cookie::domain_match(new_domain, existing_domain) ||
Cookie::domain_match(existing_domain, new_domain)) &&
Cookie::path_match(new_path, existing_path)

@@ -70,7 +70,7 @@ impl CookieStorage {
let c = cookies.remove(ind);

// http://tools.ietf.org/html/rfc6265#section-5.3 step 11.2
if c.cookie.http_only() && source == CookieSource::NonHTTP {
if c.cookie.http_only().unwrap_or(false) && source == CookieSource::NonHTTP {
// Undo the removal.
cookies.push(c);
Err(())

@@ -85,7 +85,7 @@ impl CookieStorage {
// http://tools.ietf.org/html/rfc6265#section-5.3
pub fn push(&mut self, mut cookie: Cookie, url: &ServoUrl, source: CookieSource) {
// https://www.ietf.org/id/draft-ietf-httpbis-cookie-alone-01.txt Step 1
if cookie.cookie.secure() && !url.is_secure_scheme() {
if cookie.cookie.secure().unwrap_or(false) && !url.is_secure_scheme() {
return;
}

@@ -111,7 +111,7 @@ impl CookieStorage {
let new_len = cookies.len();

// https://www.ietf.org/id/draft-ietf-httpbis-cookie-alone-01.txt
if new_len == old_len && !evict_one_cookie(cookie.cookie.secure(), cookies) {
if new_len == old_len && !evict_one_cookie(cookie.cookie.secure().unwrap_or(false), cookies) {
return;
}
}

@@ -219,7 +219,7 @@ fn evict_one_cookie(is_secure_cookie: bool, cookies: &mut Vec<Cookie>) -> bool {
fn get_oldest_accessed(is_secure_cookie: bool, cookies: &mut Vec<Cookie>) -> Option<(usize, Tm)> {
let mut oldest_accessed: Option<(usize, Tm)> = None;
for (i, c) in cookies.iter().enumerate() {
if (c.cookie.secure() == is_secure_cookie) &&
if (c.cookie.secure().unwrap_or(false) == is_secure_cookie) &&
oldest_accessed.as_ref().map_or(true, |a| c.last_access < a.1) {
oldest_accessed = Some((i, c.last_access));
}

@@ -3,7 +3,7 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use base64;
use hyper::mime::{Attr, Mime, SubLevel, TopLevel, Value};
use mime::Mime;
use servo_url::ServoUrl;
use url::Position;
use url::percent_encoding::percent_decode;

@@ -37,8 +37,7 @@ pub fn decode(url: &ServoUrl) -> Result<DecodeData, DecodeError> {
};

let content_type = ct_str.parse().unwrap_or_else(|_| {
Mime(TopLevel::Text, SubLevel::Plain,
vec![(Attr::Charset, Value::Ext("US-ASCII".to_owned()))])
"text/plain; charset=US-ASCII".parse().unwrap()
});

let mut bytes = percent_decode(parts[1].as_bytes()).collect::<Vec<_>>();

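The mime 0.3 API replaces the old `TopLevel`/`SubLevel`/`Attr` enums with parsed `Mime` values and named constants, which is what the rewritten fallback above relies on. A minimal sketch (not part of the commit), assuming mime 0.3 as pinned above:

// Sketch only -- not part of the commit.
extern crate mime;

use mime::Mime;

fn main() {
    let m: Mime = "text/plain; charset=US-ASCII".parse().unwrap();
    assert_eq!(m.type_(), mime::TEXT);
    assert_eq!(m.subtype(), mime::PLAIN);
    // Parameters are looked up by name instead of matching on an enum.
    let charset = m.get_param(mime::CHARSET).map(|c| c.as_str().to_owned());
    println!("charset = {:?}", charset);
}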
@@ -9,7 +9,8 @@
//! This library will eventually become the core of the Fetch crate
//! with CORSRequest being expanded into FetchRequest (etc)

use hyper::method::Method;
use http::header::HeaderName;
use hyper::Method;
use net_traits::request::{CredentialsMode, Origin, Request};
use servo_url::ServoUrl;
use time::{self, Timespec};

@@ -19,14 +20,14 @@ use time::{self, Timespec};
/// Each entry might pertain to a header or method
#[derive(Clone, Debug)]
pub enum HeaderOrMethod {
HeaderData(String),
HeaderData(HeaderName),
MethodData(Method)
}

impl HeaderOrMethod {
fn match_header(&self, header_name: &str) -> bool {
fn match_header(&self, header_name: &HeaderName) -> bool {
match *self {
HeaderOrMethod::HeaderData(ref s) => (&**s).eq_ignore_ascii_case(header_name),
HeaderOrMethod::HeaderData(ref n) => n == header_name,
_ => false
}
}

@@ -80,7 +81,7 @@ impl CorsCache {
}

fn find_entry_by_header<'a>(&'a mut self, request: &Request,
header_name: &str) -> Option<&'a mut CorsCacheEntry> {
header_name: &HeaderName) -> Option<&'a mut CorsCacheEntry> {
self.cleanup();
self.0.iter_mut().find(|e| match_headers(e, request) && e.header_or_method.match_header(header_name))
}

@@ -113,7 +114,7 @@ impl CorsCache {

/// Returns true if an entry with a
/// [matching header](https://fetch.spec.whatwg.org/#concept-cache-match-header) is found
pub fn match_header(&mut self, request: &Request, header_name: &str) -> bool {
pub fn match_header(&mut self, request: &Request, header_name: &HeaderName) -> bool {
self.find_entry_by_header(&request, header_name).is_some()
}

@@ -122,13 +123,13 @@ impl CorsCache {
///
/// If not, it will insert an equivalent entry
pub fn match_header_and_update(&mut self, request: &Request,
header_name: &str, new_max_age: u32) -> bool {
header_name: &HeaderName, new_max_age: u32) -> bool {
match self.find_entry_by_header(&request, header_name).map(|e| e.max_age = new_max_age) {
Some(_) => true,
None => {
self.insert(CorsCacheEntry::new(request.origin.clone(), request.current_url(), new_max_age,
request.credentials_mode == CredentialsMode::Include,
HeaderOrMethod::HeaderData(header_name.to_owned())));
HeaderOrMethod::HeaderData(header_name.clone())));
false
}
}

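`http::header::HeaderName` stores names in lower case, so the manual `eq_ignore_ascii_case` on the removed line is no longer needed. A small sketch (not part of the commit), assuming http 0.1:

// Sketch only -- not part of the commit.
extern crate http;

use http::header::HeaderName;

fn main() {
    let name: HeaderName = "X-PINGOTHER".parse().unwrap();
    // Parsing normalizes the name, so equality is effectively case-insensitive.
    assert_eq!(name, HeaderName::from_static("x-pingother"));
    assert_eq!(name.as_str(), "x-pingother");
}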
@@ -7,16 +7,15 @@ use data_loader::decode;
use devtools_traits::DevtoolsControlMsg;
use fetch::cors_cache::CorsCache;
use filemanager_thread::FileManager;
use headers_core::HeaderMapExt;
use headers_ext::{AccessControlExposeHeaders, ContentType, Range};
use http::header::{self, HeaderMap, HeaderName, HeaderValue};
use http_loader::{HttpState, determine_request_referrer, http_fetch};
use http_loader::{set_default_accept, set_default_accept_language};
use hyper::{Error, Result as HyperResult};
use hyper::header;
use hyper::header::{Accept, AcceptLanguage, AccessControlExposeHeaders, ContentLanguage, ContentType};
use hyper::header::{Header, HeaderFormat, HeaderView, Headers, Referer as RefererHeader};
use hyper::method::Method;
use hyper::mime::{Mime, SubLevel, TopLevel};
use hyper::status::StatusCode;
use hyper::Method;
use hyper::StatusCode;
use ipc_channel::ipc::IpcReceiver;
use mime::{self, Mime};
use mime_guess::guess_mime_type;
use net_traits::{FetchTaskTarget, NetworkError, ReferrerPolicy};
use net_traits::request::{CredentialsMode, Destination, Referrer, Request, RequestMode};
@@ -25,16 +24,20 @@ use net_traits::response::{Response, ResponseBody, ResponseType};
use servo_channel::{channel, Sender, Receiver};
use servo_url::ServoUrl;
use std::borrow::Cow;
use std::fmt;
use std::fs::File;
use std::io::{BufReader, BufRead, Seek, SeekFrom};
use std::mem;
use std::ops::Bound;
use std::str;
use std::sync::{Arc, Mutex};
use std::sync::atomic::Ordering;
use std::thread;
use subresource_integrity::is_response_integrity_valid;

lazy_static! {
static ref X_CONTENT_TYPE_OPTIONS: HeaderName = HeaderName::from_static("x-content-type-options");
}

const FILE_CHUNK_SIZE: usize = 32768; //32 KB

pub type Target<'a> = &'a mut (FetchTaskTarget + Send);
@@ -173,11 +176,11 @@ pub fn main_fetch(request: &mut Request,
Referrer::Client => {
// FIXME(#14507): We should never get this value here; it should
// already have been handled in the script thread.
request.headers.remove::<RefererHeader>();
request.headers.remove(header::REFERER);
None
},
Referrer::ReferrerUrl(url) => {
request.headers.remove::<RefererHeader>();
request.headers.remove(header::REFERER);
let current_url = request.current_url().clone();
determine_request_referrer(&mut request.headers,
request.referrer_policy.unwrap(),
@@ -238,7 +241,7 @@ pub fn main_fetch(request: &mut Request,
} else if request.use_cors_preflight ||
(request.unsafe_request &&
(!is_cors_safelisted_method(&request.method) ||
request.headers.iter().any(|h| !is_cors_safelisted_request_header(&h)))) {
request.headers.iter().any(|(name, value)| !is_cors_safelisted_request_header(&name, &value)))) {
// Substep 1.
request.response_tainting = ResponseTainting::CorsTainting;
// Substep 2.
@@ -269,18 +272,19 @@ pub fn main_fetch(request: &mut Request,
// Substep 1.
if request.response_tainting == ResponseTainting::CorsTainting {
// Subsubstep 1.
let header_names = response.headers.get::<AccessControlExposeHeaders>();
let header_names: Option<Vec<HeaderName>> = response.headers.typed_get::<AccessControlExposeHeaders>()
.map(|v| v.iter().collect());
match header_names {
// Subsubstep 2.
Some(list) if request.credentials_mode != CredentialsMode::Include => {
Some(ref list) if request.credentials_mode != CredentialsMode::Include => {
if list.len() == 1 && list[0] == "*" {
response.cors_exposed_header_name_list =
response.headers.iter().map(|h| h.name().to_owned()).collect();
response.headers.iter().map(|(name, _)| name.as_str().to_owned()).collect();
}
},
// Subsubstep 3.
Some(list) => {
response.cors_exposed_header_name_list = list.iter().map(|h| (**h).clone()).collect();
response.cors_exposed_header_name_list = list.iter().map(|h| h.as_str().to_owned()).collect();
},
_ => (),
}
@@ -341,7 +345,7 @@ pub fn main_fetch(request: &mut Request,
let not_network_error = !response_is_network_error && !internal_response.is_network_error();
if not_network_error && (is_null_body_status(&internal_response.status) ||
match request.method {
Method::Head | Method::Connect => true,
Method::HEAD | Method::CONNECT => true,
_ => false }) {
// when Fetch is used only asynchronously, we will need to make sure
// that nothing tries to write to the body at this point
@@ -463,7 +467,7 @@ fn scheme_fetch(request: &mut Request,
match url.scheme() {
"about" if url.path() == "blank" => {
let mut response = Response::new(url);
response.headers.set(ContentType(mime!(Text / Html; Charset = Utf8)));
response.headers.typed_insert(ContentType::from(mime::TEXT_HTML_UTF_8));
*response.body.lock().unwrap() = ResponseBody::Done(vec![]);
response
},
@@ -477,7 +481,7 @@ fn scheme_fetch(request: &mut Request,
Ok((mime, bytes)) => {
let mut response = Response::new(url);
*response.body.lock().unwrap() = ResponseBody::Done(bytes);
response.headers.set(ContentType(mime));
response.headers.typed_insert(ContentType::from(mime));
response
},
Err(_) => Response::network_error(NetworkError::Internal("Decoding data URL failed".into()))
@@ -485,7 +489,7 @@ fn scheme_fetch(request: &mut Request,
},

"file" => {
if request.method == Method::Get {
if request.method == Method::GET {
match url.to_file_path() {
Ok(file_path) => {
match File::open(file_path.clone()) {
@@ -493,7 +497,7 @@ fn scheme_fetch(request: &mut Request,
let mime = guess_mime_type(file_path);

let mut response = Response::new(url);
response.headers.set(ContentType(mime));
response.headers.typed_insert(ContentType::from(mime));

let (done_sender, done_receiver) = channel();
*done_chan = Some((done_sender.clone(), done_receiver));
@@ -503,22 +507,22 @@ fn scheme_fetch(request: &mut Request,

let cancellation_listener = context.cancellation_listener.clone();

let range = request.headers.get::<header::Range>();
let (start, end) = if let Some(&header::Range::Bytes(ref range)) = range {
match range.first().unwrap() {
&header::ByteRangeSpec::AllFrom(start) => (start, None),
&header::ByteRangeSpec::FromTo(start, end) => {
let (start, end) = if let Some(ref range) = request.headers.typed_get::<Range>() {
match range.iter().collect::<Vec<(Bound<u64>, Bound<u64>)>>().first() {
Some(&(Bound::Included(start), Bound::Unbounded)) => (start, None),
Some(&(Bound::Included(start), Bound::Included(end))) => {
// `end` should be less or equal to `start`.
(start, Some(u64::max(start, end)))
},
&header::ByteRangeSpec::Last(offset) => {
Some(&(Bound::Unbounded, Bound::Included(offset))) => {
if let Ok(metadata) = file.metadata() {
// `offset` cannot be bigger than the file size.
(metadata.len() - u64::min(metadata.len(), offset), None)
} else {
(0, None)
}
}
},
_ => (0, None)
}
} else {
(0, None)
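The typed `Range` header from headers-ext yields `(Bound<u64>, Bound<u64>)` pairs instead of hyper 0.10's `ByteRangeSpec` enum, which is what the rewritten match above consumes. A minimal sketch (not part of the commit), assuming http 0.1 and headers-ext 0.0.3:

// Sketch only -- not part of the commit.
extern crate headers_core;
extern crate headers_ext;
extern crate http;

use headers_core::HeaderMapExt;
use headers_ext::Range;
use http::header::{self, HeaderValue};
use http::HeaderMap;
use std::ops::Bound;

fn main() {
    let mut headers = HeaderMap::new();
    headers.insert(header::RANGE, HeaderValue::from_static("bytes=0-99"));
    if let Some(range) = headers.typed_get::<Range>() {
        // Each requested range is a pair of bounds on the byte offsets.
        let spec: Vec<(Bound<u64>, Bound<u64>)> = range.iter().collect();
        assert_eq!(spec, vec![(Bound::Included(0), Bound::Included(99))]);
    }
}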
@@ -591,7 +595,7 @@ fn scheme_fetch(request: &mut Request,
"blob" => {
println!("Loading blob {}", url.as_str());
// Step 2.
if request.method != Method::Get {
if request.method != Method::GET {
return Response::network_error(NetworkError::Internal("Unexpected method for blob".into()));
}

@@ -619,33 +623,33 @@ fn scheme_fetch(request: &mut Request,
}

/// <https://fetch.spec.whatwg.org/#cors-safelisted-request-header>
pub fn is_cors_safelisted_request_header(h: &HeaderView) -> bool {
if h.is::<ContentType>() {
match h.value() {
Some(&ContentType(Mime(TopLevel::Text, SubLevel::Plain, _))) |
Some(&ContentType(Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _))) |
Some(&ContentType(Mime(TopLevel::Multipart, SubLevel::FormData, _))) => true,
_ => false

pub fn is_cors_safelisted_request_header(name: &HeaderName, value: &HeaderValue) -> bool {
if name == header::CONTENT_TYPE {
if let Some(m) = value.to_str().ok().and_then(|s| s.parse::<Mime>().ok()) {
m.type_() == mime::TEXT && m.subtype() == mime::PLAIN ||
m.type_() == mime::APPLICATION && m.subtype() == mime::WWW_FORM_URLENCODED ||
m.type_() == mime::MULTIPART && m.subtype() == mime::FORM_DATA
} else {
false
}
} else {
h.is::<Accept>() || h.is::<AcceptLanguage>() || h.is::<ContentLanguage>()
name == header::ACCEPT || name == header::ACCEPT_LANGUAGE || name == header::CONTENT_LANGUAGE
}
}

/// <https://fetch.spec.whatwg.org/#cors-safelisted-method>
pub fn is_cors_safelisted_method(m: &Method) -> bool {
match *m {
Method::Get | Method::Head | Method::Post => true,
Method::GET | Method::HEAD | Method::POST => true,
_ => false
}
}

fn is_null_body_status(status: &Option<StatusCode>) -> bool {
fn is_null_body_status(status: &Option<(StatusCode, String)>) -> bool {
match *status {
Some(status) => match status {
StatusCode::SwitchingProtocols | StatusCode::NoContent |
StatusCode::ResetContent | StatusCode::NotModified => true,
Some((status, _)) => match status {
StatusCode::SWITCHING_PROTOCOLS | StatusCode::NO_CONTENT |
StatusCode::RESET_CONTENT | StatusCode::NOT_MODIFIED => true,
_ => false
},
_ => false
@@ -653,84 +657,56 @@ fn is_null_body_status(status: &Option<StatusCode>) -> bool {
}

/// <https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff?>
pub fn should_be_blocked_due_to_nosniff(destination: Destination, response_headers: &Headers) -> bool {
/// <https://fetch.spec.whatwg.org/#x-content-type-options-header>
/// This is needed to parse `X-Content-Type-Options` according to spec,
/// which requires that we inspect only the first value.
///
/// A [unit-like struct](https://doc.rust-lang.org/book/structs.html#unit-like-structs)
/// is sufficient since a valid header implies that we use `nosniff`.
#[derive(Clone, Copy, Debug)]
struct XContentTypeOptions;

impl Header for XContentTypeOptions {
fn header_name() -> &'static str {
"X-Content-Type-Options"
}

/// https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-nosniff%3F #2
fn parse_header(raw: &[Vec<u8>]) -> HyperResult<Self> {
raw.first()
.and_then(|v| str::from_utf8(v).ok())
.and_then(|s| if s.trim().eq_ignore_ascii_case("nosniff") {
Some(XContentTypeOptions)
} else {
None
})
.ok_or(Error::Header)
}
}

impl HeaderFormat for XContentTypeOptions {
fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("nosniff")
}
}

pub fn should_be_blocked_due_to_nosniff(destination: Destination, response_headers: &HeaderMap) -> bool {
// Steps 1-3.
if response_headers.get::<XContentTypeOptions>().is_none() {
// TODO(eijebong): Replace this once typed headers allow custom ones...
if response_headers.get("x-content-type-options")
.map_or(true, |val| val.to_str().unwrap_or("").to_lowercase() != "nosniff")
{
return false;
}

// Step 4
// Note: an invalid MIME type will produce a `None`.
let content_type_header = response_headers.get::<ContentType>();
let content_type_header = response_headers.typed_get::<ContentType>();

/// <https://html.spec.whatwg.org/multipage/#scriptingLanguages>
#[inline]
fn is_javascript_mime_type(mime_type: &Mime) -> bool {
let javascript_mime_types: [Mime; 16] = [
mime!(Application / ("ecmascript")),
mime!(Application / ("javascript")),
mime!(Application / ("x-ecmascript")),
mime!(Application / ("x-javascript")),
mime!(Text / ("ecmascript")),
mime!(Text / ("javascript")),
mime!(Text / ("javascript1.0")),
mime!(Text / ("javascript1.1")),
mime!(Text / ("javascript1.2")),
mime!(Text / ("javascript1.3")),
mime!(Text / ("javascript1.4")),
mime!(Text / ("javascript1.5")),
mime!(Text / ("jscript")),
mime!(Text / ("livescript")),
mime!(Text / ("x-ecmascript")),
mime!(Text / ("x-javascript")),
"application/ecmascript".parse().unwrap(),
"application/javascript".parse().unwrap(),
"application/x-ecmascript".parse().unwrap(),
"application/x-javascript".parse().unwrap(),
"text/ecmascript".parse().unwrap(),
"text/javascript".parse().unwrap(),
"text/javascript1.0".parse().unwrap(),
"text/javascript1.1".parse().unwrap(),
"text/javascript1.2".parse().unwrap(),
"text/javascript1.3".parse().unwrap(),
"text/javascript1.4".parse().unwrap(),
"text/javascript1.5".parse().unwrap(),
"text/jscript".parse().unwrap(),
"text/livescript".parse().unwrap(),
"text/x-ecmascript".parse().unwrap(),
"text/x-javascript".parse().unwrap(),
];

javascript_mime_types.iter()
.any(|mime| mime.0 == mime_type.0 && mime.1 == mime_type.1)
.any(|mime| mime.type_() == mime_type.type_() && mime.subtype() == mime_type.subtype())
}

// Assumes str::starts_with is equivalent to mime::TopLevel
match content_type_header {
// Step 6
Some(&ContentType(ref mime_type)) if destination.is_script_like()
=> !is_javascript_mime_type(mime_type),
Some(ref ct) if destination.is_script_like()
=> !is_javascript_mime_type(&ct.clone().into()),

// Step 7
Some(&ContentType(Mime(ref tl, ref sl, _))) if destination == Destination::Style
=> *tl != TopLevel::Text && *sl != SubLevel::Css,
Some(ref ct) if destination == Destination::Style
=> {
let m: mime::Mime = ct.clone().into();
m.type_() != mime::TEXT && m.subtype() != mime::CSS
},

None if destination == Destination::Style || destination.is_script_like() => true,
// Step 8
@@ -739,23 +715,23 @@ pub fn should_be_blocked_due_to_nosniff(destination: Destination, response_heade
}

/// <https://fetch.spec.whatwg.org/#should-response-to-request-be-blocked-due-to-mime-type?>
fn should_be_blocked_due_to_mime_type(destination: Destination, response_headers: &Headers) -> bool {
fn should_be_blocked_due_to_mime_type(destination: Destination, response_headers: &HeaderMap) -> bool {
// Step 1
let mime_type = match response_headers.get::<ContentType>() {
Some(header) => header,
let mime_type: mime::Mime = match response_headers.typed_get::<ContentType>() {
Some(header) => header.into(),
None => return false,
};

// Step 2-3
destination.is_script_like() && match *mime_type {
ContentType(Mime(TopLevel::Audio, _, _)) |
ContentType(Mime(TopLevel::Video, _, _)) |
ContentType(Mime(TopLevel::Image, _, _)) => true,
ContentType(Mime(TopLevel::Text, SubLevel::Ext(ref ext), _)) => ext == "csv",

// Step 4
_ => false,
}
destination.is_script_like() &&
match mime_type.type_() {
mime::AUDIO |
mime::VIDEO |
mime::IMAGE => true,
mime::TEXT if mime_type.subtype() == mime::CSV => true,
// Step 4
_ => false
}
}

/// <https://fetch.spec.whatwg.org/#block-bad-port>

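Both blocking checks above follow the same recipe: read the typed `ContentType` header, convert it into a `mime::Mime`, and inspect its parts. A condensed sketch (not part of the commit), assuming the same headers-ext and mime crates as elsewhere in this diff; `is_css` is a hypothetical helper:

// Sketch only -- not part of the commit.
extern crate headers_core;
extern crate headers_ext;
extern crate http;
extern crate mime;

use headers_core::HeaderMapExt;
use headers_ext::ContentType;
use http::HeaderMap;

fn is_css(response_headers: &HeaderMap) -> bool {
    match response_headers.typed_get::<ContentType>() {
        Some(ct) => {
            // ContentType converts into a mime::Mime for inspection.
            let m: mime::Mime = ct.into();
            m.type_() == mime::TEXT && m.subtype() == mime::CSS
        }
        None => false,
    }
}

fn main() {
    let mut headers = HeaderMap::new();
    headers.typed_insert(ContentType::from(mime::TEXT_CSS));
    assert!(is_css(&headers));
}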
@@ -8,12 +8,11 @@
//! and <http://tools.ietf.org/html/rfc7232>.

use fetch::methods::{Data, DoneChannel};
use hyper::header;
use hyper::header::ContentType;
use hyper::header::Headers;
use hyper::method::Method;
use hyper::status::StatusCode;
use hyper_serde::Serde;
use headers_core::HeaderMapExt;
use headers_ext::{CacheControl, ContentRange, Expires, LastModified, Pragma, Range, Vary};
use http::{header, HeaderMap};
use http::header::HeaderValue;
use hyper::{Method, StatusCode};
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps, MallocUnconditionalSizeOf, MallocUnconditionalShallowSizeOf};
use malloc_size_of::Measurable;
use net_traits::{Metadata, FetchMetadata};
@@ -24,11 +23,12 @@ use servo_channel::{Sender, channel};
use servo_config::prefs::PREFS;
use servo_url::ServoUrl;
use std::collections::HashMap;
use std::str;
use std::ops::Bound;
use std::sync::Mutex;
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::SystemTime;
use time;
use time::{Duration, Tm};
use time::{Duration, Timespec, Tm};

/// The key used to differentiate requests in the cache.
@@ -59,7 +59,7 @@ impl CacheKey {
/// A complete cached resource.
#[derive(Clone)]
struct CachedResource {
request_headers: Arc<Mutex<Headers>>,
request_headers: Arc<Mutex<HeaderMap>>,
body: Arc<Mutex<ResponseBody>>,
aborted: Arc<AtomicBool>,
awaiting_body: Arc<Mutex<Vec<Sender<Data>>>>,
@@ -71,7 +71,7 @@ struct MeasurableCachedResource {
metadata: CachedMetadata,
location_url: Option<Result<ServoUrl, String>>,
https_state: HttpsState,
status: Option<StatusCode>,
status: Option<(StatusCode, String)>,
raw_status: Option<(u16, Vec<u8>)>,
url_list: Vec<ServoUrl>,
expires: Duration,
@@ -80,7 +80,7 @@ struct MeasurableCachedResource {

impl MallocSizeOf for CachedResource {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.request_headers.unconditional_size_of(ops) +
// TODO: self.request_headers.unconditional_size_of(ops) +
self.body.unconditional_size_of(ops) +
self.aborted.unconditional_size_of(ops) +
self.awaiting_body.unconditional_size_of(ops) +
@@ -92,7 +92,7 @@ impl MallocSizeOf for CachedResource {
#[derive(Clone)]
struct CachedMetadata {
/// Headers
pub headers: Arc<Mutex<Headers>>,
pub headers: Arc<Mutex<HeaderMap>>,
/// Fields that implement MallocSizeOf
pub data: Measurable<MeasurableCachedMetadata>
}
@@ -102,7 +102,7 @@ struct MeasurableCachedMetadata {
/// Final URL after redirects.
pub final_url: ServoUrl,
/// MIME type / subtype.
pub content_type: Option<Serde<ContentType>>,
pub content_type: Option<String>,
/// Character set.
pub charset: Option<String>,
/// HTTP Status
@@ -112,7 +112,7 @@ struct MeasurableCachedMetadata {
impl MallocSizeOf for CachedMetadata {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.headers.unconditional_shallow_size_of(ops) +
self.headers.size_of(ops) +
// TODO: self.headers.size_of(ops) +
self.data.size_of(ops)
}
}
@@ -141,23 +141,25 @@ fn response_is_cacheable(metadata: &Metadata) -> bool {
// 2. check for absence of the Authorization header field.
let mut is_cacheable = false;
let headers = metadata.headers.as_ref().unwrap();
if headers.has::<header::Expires>() ||
headers.has::<header::LastModified>() ||
headers.has::<header::ETag>() {
if headers.contains_key(header::EXPIRES) ||
headers.contains_key(header::LAST_MODIFIED) ||
headers.contains_key(header::ETAG) {
is_cacheable = true;
}
if let Some(&header::CacheControl(ref directive)) = headers.get::<header::CacheControl>() {
for directive in directive.iter() {
match *directive {
header::CacheDirective::NoStore => return false,
header::CacheDirective::Public | header::CacheDirective::SMaxAge(_)
| header::CacheDirective::MaxAge(_) | header::CacheDirective::NoCache => is_cacheable = true,
_ => {},
}
if let Some(ref directive) = headers.typed_get::<CacheControl>() {
if directive.no_store() {
return false
}
if directive.public() || directive.s_max_age().is_some() ||
directive.max_age().is_some() || directive.no_cache()
{
is_cacheable = true;
}
}
if let Some(&header::Pragma::NoCache) = headers.get::<header::Pragma>() {
return false;
if let Some(pragma) = headers.typed_get::<Pragma>() {
if pragma.is_no_cache() {
return false;
}
}
is_cacheable
}
@@ -166,10 +168,11 @@ fn response_is_cacheable(metadata: &Metadata) -> bool {
/// <https://tools.ietf.org/html/rfc7234#section-4.2.3>
fn calculate_response_age(response: &Response) -> Duration {
// TODO: follow the spec more closely (Date headers, request/response lag, ...)
if let Some(secs) = response.headers.get_raw("Age") {
let seconds_string = String::from_utf8_lossy(&secs[0]);
if let Ok(secs) = seconds_string.parse::<i64>() {
return Duration::seconds(secs);
if let Some(secs) = response.headers.get(header::AGE) {
if let Ok(seconds_string) = secs.to_str() {
if let Ok(secs) = seconds_string.parse::<i64>() {
return Duration::seconds(secs);
}
}
}
Duration::seconds(0i64)
@@ -180,42 +183,37 @@ fn calculate_response_age(response: &Response) -> Duration {
fn get_response_expiry(response: &Response) -> Duration {
// Calculating Freshness Lifetime <https://tools.ietf.org/html/rfc7234#section-4.2.1>
let age = calculate_response_age(&response);
if let Some(&header::CacheControl(ref directives)) = response.headers.get::<header::CacheControl>() {
let has_no_cache_directive = directives.iter().any(|directive| {
header::CacheDirective::NoCache == *directive
});
if has_no_cache_directive {
if let Some(directives) = response.headers.typed_get::<CacheControl>() {
if directives.no_cache() {
// Requires validation on first use.
return Duration::seconds(0i64);
} else {
for directive in directives {
match *directive {
header::CacheDirective::SMaxAge(secs) | header::CacheDirective::MaxAge(secs) => {
let max_age = Duration::seconds(secs as i64);
if max_age < age {
return Duration::seconds(0i64);
}
return max_age - age;
},
_ => (),
if let Some(secs) = directives.max_age().or(directives.s_max_age()) {
let max_age = Duration::from_std(secs).unwrap();
if max_age < age {
return Duration::seconds(0i64);
}
return max_age - age;
}
}
}
if let Some(&header::Expires(header::HttpDate(t))) = response.headers.get::<header::Expires>() {
// store the period of time from now until expiry
let desired = t.to_timespec();
let current = time::now().to_timespec();
if desired > current {
return desired - current;
} else {
return Duration::seconds(0i64);
}
} else {
if let Some(_) = response.headers.get_raw("Expires") {
// Malformed Expires header, shouldn't be used to construct a valid response.
return Duration::seconds(0i64);
}
match response.headers.typed_get::<Expires>() {
Some(t) => {
// store the period of time from now until expiry
let t: SystemTime = t.into();
let t = t.duration_since(SystemTime::UNIX_EPOCH).unwrap();
let desired = Timespec::new(t.as_secs() as i64, 0);
let current = time::now().to_timespec();

if desired > current {
return desired - current;
} else {
return Duration::seconds(0i64);
}
},
// Malformed Expires header, shouldn't be used to construct a valid response.
None if response.headers.contains_key(header::EXPIRES) => return Duration::seconds(0i64),
_ => {},
}
// Calculating Heuristic Freshness
// <https://tools.ietf.org/html/rfc7234#section-4.2.2>
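The cache code now reads `Cache-Control` through the typed `CacheControl` header, whose accessors (`no_cache()`, `no_store()`, `public()`, `max_age()`, `s_max_age()`) replace the old `CacheDirective` enum matching. A minimal sketch (not part of the commit), assuming headers-ext 0.0.3 and http 0.1:

// Sketch only -- not part of the commit.
extern crate headers_core;
extern crate headers_ext;
extern crate http;

use headers_core::HeaderMapExt;
use headers_ext::CacheControl;
use http::header::{self, HeaderValue};
use http::HeaderMap;

fn main() {
    let mut headers = HeaderMap::new();
    headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=3600"));
    if let Some(directives) = headers.typed_get::<CacheControl>() {
        assert!(directives.public());
        assert!(!directives.no_store());
        // max_age() yields a std::time::Duration, hence Duration::from_std in the patch.
        assert_eq!(directives.max_age().map(|d| d.as_secs()), Some(3600));
    }
}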
@@ -224,13 +222,15 @@ fn get_response_expiry(response: &Response) -> Duration {
// Since presently we do not generate a Warning header field with a 113 warn-code,
// 24 hours minus response age is the max for heuristic calculation.
let max_heuristic = Duration::hours(24) - age;
let heuristic_freshness = if let Some(&header::LastModified(header::HttpDate(t))) =
let heuristic_freshness = if let Some(last_modified) =
// If the response has a Last-Modified header field,
// caches are encouraged to use a heuristic expiration value
// that is no more than some fraction of the interval since that time.
response.headers.get::<header::LastModified>() {
let last_modified = t.to_timespec();
response.headers.typed_get::<LastModified>() {
let current = time::now().to_timespec();
let last_modified: SystemTime = last_modified.into();
let last_modified = last_modified.duration_since(SystemTime::UNIX_EPOCH).unwrap();
let last_modified = Timespec::new(last_modified.as_secs() as i64, 0);
// A typical setting of this fraction might be 10%.
let raw_heuristic_calc = (current - last_modified) / 10;
let result = if raw_heuristic_calc < max_heuristic {
@@ -249,11 +249,9 @@ fn get_response_expiry(response: &Response) -> Duration {
},
_ => {
// Other status codes can only use heuristic freshness if the public cache directive is present.
if let Some(&header::CacheControl(ref directives)) = response.headers.get::<header::CacheControl>() {
let has_public_directive = directives.iter().any(|directive| {
header::CacheDirective::Public == *directive
});
if has_public_directive {
if let Some(ref directives) = response.headers.typed_get::<CacheControl>()
{
if directives.public() {
return heuristic_freshness;
}
}
@@ -267,48 +265,39 @@ fn get_response_expiry(response: &Response) -> Duration {
/// Request Cache-Control Directives
/// <https://tools.ietf.org/html/rfc7234#section-5.2.1>
fn get_expiry_adjustment_from_request_headers(request: &Request, expires: Duration) -> Duration {
let directive_data = match request.headers.get_raw("cache-control") {
let directive = match request.headers.typed_get::<CacheControl>() {
Some(data) => data,
None => return expires,
};
let directives_string = String::from_utf8_lossy(&directive_data[0]);
for directive in directives_string.split(",") {
let mut directive_info = directive.split("=");
match (directive_info.next(), directive_info.next()) {
(Some("max-stale"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
return expires + Duration::seconds(secs);
}
},
(Some("max-age"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
let max_age = Duration::seconds(secs);
if expires > max_age {
return Duration::min_value();
}
return expires - max_age;
}
},
(Some("min-fresh"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
let min_fresh = Duration::seconds(secs);
if expires < min_fresh {
return Duration::min_value();
}
return expires - min_fresh;
}
},
(Some("no-cache"), _) | (Some("no-store"), _) => return Duration::min_value(),
_ => {}
}

if let Some(max_age) = directive.max_stale() {
return expires + Duration::from_std(max_age).unwrap();
}
if let Some(max_age) = directive.max_age() {
let max_age = Duration::from_std(max_age).unwrap();
if expires > max_age {
return Duration::min_value();
}
return expires - max_age;
}
if let Some(min_fresh) = directive.min_fresh() {
let min_fresh = Duration::from_std(min_fresh).unwrap();
if expires < min_fresh {
return Duration::min_value();
}
return expires - min_fresh;
}
if directive.no_cache() || directive.no_store() {
return Duration::min_value()
}

expires
}

/// Create a CachedResponse from a request and a CachedResource.
fn create_cached_response(request: &Request,
cached_resource: &CachedResource,
cached_headers: &Headers,
cached_headers: &HeaderMap,
done_chan: &mut DoneChannel)
-> CachedResponse {
let mut response = Response::new(cached_resource.data.metadata.data.final_url.clone());
@@ -353,7 +342,7 @@ fn create_resource_with_bytes_from_resource(bytes: &[u8], resource: &CachedResou
metadata: resource.data.metadata.clone(),
location_url: resource.data.location_url.clone(),
https_state: resource.data.https_state.clone(),
status: Some(StatusCode::PartialContent),
status: Some((StatusCode::PARTIAL_CONTENT, "Partial Content".into())),
raw_status: Some((206, b"Partial Content".to_vec())),
url_list: resource.data.url_list.clone(),
expires: resource.data.expires.clone(),
@@ -365,7 +354,7 @@ fn create_resource_with_bytes_from_resource(bytes: &[u8], resource: &CachedResou
/// Support for range requests <https://tools.ietf.org/html/rfc7233>.
fn handle_range_request(request: &Request,
candidates: Vec<&CachedResource>,
range_spec: &[header::ByteRangeSpec],
range_spec: Vec<(Bound<u64>, Bound<u64>)>,
done_chan: &mut DoneChannel)
-> Option<CachedResponse> {
let mut complete_cached_resources = candidates.iter().filter(|resource| {
@@ -389,7 +378,7 @@ fn handle_range_request(request: &Request,
// see <https://tools.ietf.org/html/rfc7233#section-4.3>.
// TODO: add support for complete and partial resources,
// whose body is in the ResponseBody::Receiving state.
(&header::ByteRangeSpec::FromTo(beginning, end), Some(ref complete_resource)) => {
(&(Bound::Included(beginning), Bound::Included(end)), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let e = end as usize + 1;
@@ -402,14 +391,18 @@ fn handle_range_request(request: &Request,
}
}
},
(&header::ByteRangeSpec::FromTo(beginning, end), None) => {
(&(Bound::Included(beginning), Bound::Included(end)), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.data.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)), .. })) => (res_beginning, res_end),
Some(range) => {
if let Some(bytes_range) = range.bytes_range() {
bytes_range
} else {
continue
}
}
_ => continue,
};
if res_beginning - 1 < beginning && res_end + 1 > end {
@@ -430,7 +423,7 @@ fn handle_range_request(request: &Request,
}
}
},
(&header::ByteRangeSpec::AllFrom(beginning), Some(ref complete_resource)) => {
(&(Bound::Included(beginning), Bound::Unbounded), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let requested = body.get(b..);
@@ -442,16 +435,17 @@ fn handle_range_request(request: &Request,
}
}
},
(&header::ByteRangeSpec::AllFrom(beginning), None) => {
(&(Bound::Included(beginning), Bound::Unbounded), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.data.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end, total) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)),
instance_length: Some(total) })) => (res_beginning, res_end, total),
_ => continue,
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total),
_ => continue,
}
} else {
continue;
};
if res_beginning < beginning && res_end == total - 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
@@ -470,7 +464,7 @@ fn handle_range_request(request: &Request,
}
}
},
(&header::ByteRangeSpec::Last(offset), Some(ref complete_resource)) => {
(&(Bound::Unbounded, Bound::Included(offset)), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let from_byte = body.len() - offset as usize;
let requested = body.get(from_byte..);
@@ -482,16 +476,17 @@ fn handle_range_request(request: &Request,
}
}
},
(&header::ByteRangeSpec::Last(offset), None) => {
(&(Bound::Unbounded, Bound::Included(offset)), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.data.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end, total) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)),
instance_length: Some(total) })) => (res_beginning, res_end, total),
_ => continue,
let content_range = headers.typed_get::<ContentRange>();
let (res_beginning, res_end, total) = if let Some(range) = content_range {
match (range.bytes_range(), range.bytes_len()) {
(Some(bytes_range), Some(total)) => (bytes_range.0, bytes_range.1, total),
_ => continue,
}
} else {
continue;
};
if (total - res_beginning) > (offset - 1 ) && (total - res_end) < offset + 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
@@ -509,7 +504,9 @@ fn handle_range_request(request: &Request,
}
}
}
}
},
// All the cases with Bound::Excluded should be unreachable anyway
_ => return None
}
None
}
@@ -527,7 +524,7 @@ impl HttpCache {
/// <https://tools.ietf.org/html/rfc7234#section-4>
pub fn construct_response(&self, request: &Request, done_chan: &mut DoneChannel) -> Option<CachedResponse> {
// TODO: generate warning headers as appropriate <https://tools.ietf.org/html/rfc7234#section-5.5>
if request.method != Method::Get {
if request.method != Method::GET {
// Only Get requests are cached, avoid a url based match for others.
return None;
}
@@ -538,41 +535,35 @@ impl HttpCache {
let mut can_be_constructed = true;
let cached_headers = cached_resource.data.metadata.headers.lock().unwrap();
let original_request_headers = cached_resource.request_headers.lock().unwrap();
if let Some(vary_data) = cached_headers.get_raw("Vary") {
// Calculating Secondary Keys with Vary <https://tools.ietf.org/html/rfc7234#section-4.1>
let vary_data_string = String::from_utf8_lossy(&vary_data[0]);
let vary_values = vary_data_string.split(",").map(|val| val.trim());
for vary_val in vary_values {
if let Some(vary_value) = cached_headers.typed_get::<Vary>() {
if vary_value.is_any() {
can_be_constructed = false
} else {
// For every header name found in the Vary header of the stored response.
if vary_val == "*" {
// A Vary header field-value of "*" always fails to match.
can_be_constructed = false;
break;
}
match request.headers.get_raw(vary_val) {
Some(header_data) => {
// If the header is present in the request.
let request_header_data_string = String::from_utf8_lossy(&header_data[0]);
if let Some(original_header_data) = original_request_headers.get_raw(vary_val) {
// Check that the value of the nominated header field,
// in the original request, matches the value in the current request.
let original_request_header_data_string =
String::from_utf8_lossy(&original_header_data[0]);
if original_request_header_data_string != request_header_data_string {
can_be_constructed = false;
break;
// Calculating Secondary Keys with Vary <https://tools.ietf.org/html/rfc7234#section-4.1>
for vary_val in vary_value.iter_strs() {
match request.headers.get(vary_val) {
Some(header_data) => {
// If the header is present in the request.
if let Some(original_header_data) = original_request_headers.get(vary_val) {
// Check that the value of the nominated header field,
// in the original request, matches the value in the current request.
if original_header_data != header_data {
can_be_constructed = false;
break;
}
}
},
None => {
// If a header field is absent from a request,
// it can only match a stored response if those headers,
// were also absent in the original request.
can_be_constructed = original_request_headers.get_raw(vary_val).is_none();
},
}
if !can_be_constructed {
break;
},
None => {
// If a header field is absent from a request,
// it can only match a stored response if those headers,
// were also absent in the original request.
can_be_constructed = original_request_headers.get(vary_val).is_none();
},
}
if !can_be_constructed {
break;
}
}
}
}
@@ -581,8 +572,8 @@ impl HttpCache {
}
}
// Support for range requests
if let Some(&header::Range::Bytes(ref range_spec)) = request.headers.get::<header::Range>() {
return handle_range_request(request, candidates, &range_spec, done_chan);
if let Some(range_spec) = request.headers.typed_get::<Range>() {
return handle_range_request(request, candidates, range_spec.iter().collect(), done_chan);
} else {
// Not a Range request.
if let Some(ref cached_resource) = candidates.first() {
@@ -620,7 +611,7 @@ impl HttpCache {
/// Freshening Stored Responses upon Validation.
/// <https://tools.ietf.org/html/rfc7234#section-4.3.4>
pub fn refresh(&mut self, request: &Request, response: Response, done_chan: &mut DoneChannel) -> Option<Response> {
assert_eq!(response.status, Some(StatusCode::NotModified));
assert_eq!(response.status.map(|s| s.0), Some(StatusCode::NOT_MODIFIED));
let entry_key = CacheKey::new(request.clone());
if let Some(cached_resources) = self.entries.get_mut(&entry_key) {
|
||||
for cached_resource in cached_resources.iter_mut() {
|
||||
|
@ -654,7 +645,7 @@ impl HttpCache {
|
|||
constructed_response.url_list = cached_resource.data.url_list.clone();
|
||||
cached_resource.data.expires = get_response_expiry(&constructed_response);
|
||||
let mut stored_headers = cached_resource.data.metadata.headers.lock().unwrap();
|
||||
stored_headers.extend(response.headers.iter());
|
||||
stored_headers.extend(response.headers);
|
||||
constructed_response.headers = stored_headers.clone();
|
||||
return Some(constructed_response);
|
||||
}
|
||||
|
@ -674,17 +665,16 @@ impl HttpCache {
|
|||
/// Invalidation.
|
||||
/// <https://tools.ietf.org/html/rfc7234#section-4.4>
|
||||
pub fn invalidate(&mut self, request: &Request, response: &Response) {
|
||||
if let Some(&header::Location(ref location)) = response.headers.get::<header::Location>() {
|
||||
// TODO(eijebong): Once headers support typed_get, update this to use them
|
||||
if let Some(Ok(location)) = response.headers.get(header::LOCATION).map(HeaderValue::to_str) {
|
||||
if let Ok(url) = request.current_url().join(location) {
|
||||
self.invalidate_for_url(&url);
|
||||
}
|
||||
}
|
||||
// TODO: update hyper to use typed getter.
|
||||
if let Some(url_data) = response.headers.get_raw("Content-Location") {
|
||||
if let Ok(content_location) = str::from_utf8(&url_data[0]) {
|
||||
if let Ok(url) = request.current_url().join(content_location) {
|
||||
self.invalidate_for_url(&url);
|
||||
}
|
||||
if let Some(Ok(ref content_location)) = response.headers.get(header::CONTENT_LOCATION).map(HeaderValue::to_str)
|
||||
{
|
||||
if let Ok(url) = request.current_url().join(&content_location) {
|
||||
self.invalidate_for_url(&url);
|
||||
}
|
||||
}
|
||||
self.invalidate_for_url(&request.url());
|
||||
|
@ -696,7 +686,7 @@ impl HttpCache {
|
|||
if PREFS.get("network.http-cache.disabled").as_boolean().unwrap_or(false) {
|
||||
return
|
||||
}
|
||||
if request.method != Method::Get {
|
||||
if request.method != Method::GET {
|
||||
// Only Get requests are cached.
|
||||
return
|
||||
}
|
||||
|
@ -716,7 +706,7 @@ impl HttpCache {
|
|||
headers: Arc::new(Mutex::new(response.headers.clone())),
|
||||
data: Measurable(MeasurableCachedMetadata {
|
||||
final_url: metadata.final_url,
|
||||
content_type: metadata.content_type,
|
||||
content_type: metadata.content_type.map(|v| v.0.to_string()),
|
||||
charset: metadata.charset,
|
||||
status: metadata.status
|
||||
})
|
||||
|
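For illustration only (not part of the commit), a minimal sketch of the typed-header API the cache code above moves onto, assuming the headers-core / headers-ext crates this patch depends on; the body value is an arbitrary example:

// Sketch: writing and reading typed headers on an http::HeaderMap,
// mirroring the typed_insert/typed_get calls used in http_cache.rs above.
extern crate headers_core;
extern crate headers_ext;
extern crate http;
extern crate mime;

use headers_core::HeaderMapExt;
use headers_ext::{ContentLength, ContentType};
use http::HeaderMap;

fn main() {
    let body = b"hello world";
    let mut headers = HeaderMap::new();
    // typed_insert serializes the typed value into the raw header map.
    headers.typed_insert(ContentType::from(mime::TEXT_PLAIN));
    headers.typed_insert(ContentLength(body.len() as u64));
    // typed_get parses the raw value back into its typed representation.
    let len = headers.typed_get::<ContentLength>().map(|l| l.0);
    assert_eq!(len, Some(body.len() as u64));
}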
File diff suppressed because it is too large
@@ -6,10 +6,14 @@

extern crate base64;
extern crate brotli;
extern crate bytes;
extern crate cookie as cookie_rs;
extern crate devtools_traits;
extern crate embedder_traits;
extern crate flate2;
extern crate headers_core;
extern crate headers_ext;
extern crate http;
extern crate hyper;
extern crate hyper_openssl;
extern crate hyper_serde;

@@ -21,7 +25,6 @@ extern crate lazy_static;
extern crate malloc_size_of;
#[macro_use] extern crate malloc_size_of_derive;
#[macro_use] #[no_link] extern crate matches;
#[macro_use]
extern crate mime;
extern crate mime_guess;
extern crate msg;

@@ -38,7 +41,7 @@ extern crate servo_channel;
extern crate servo_config;
extern crate servo_url;
extern crate time;
extern crate unicase;
extern crate tokio;
extern crate url;
extern crate uuid;
extern crate webrender_api;
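The new tokio and http crates appear here because hyper 0.12 is futures-based. A self-contained sketch of driving a request with the 0.12 client API, purely illustrative and unrelated to this crate's own code (the URL is an arbitrary example):

extern crate hyper;

use hyper::Client;
use hyper::rt::{self, Future};

fn main() {
    // hyper 0.12 returns a Future for each request; hyper::rt::run spins up
    // a tokio runtime to drive it to completion.
    let url: hyper::Uri = "http://example.com".parse().unwrap();
    rt::run(fetch(url));
}

fn fetch(url: hyper::Uri) -> impl Future<Item = (), Error = ()> {
    let client = Client::new();
    client
        .get(url)
        .map(|res| println!("status: {}", res.status()))
        .map_err(|err| eprintln!("request error: {}", err))
}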
@@ -2,9 +2,8 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use hyper::mime::TopLevel;
use mime::{self, Mime};
use net_traits::LoadContext;
use std::borrow::ToOwned;

pub struct MimeClassifier {
image_classifier: GroupedClassifier,

@@ -49,34 +48,28 @@ pub enum NoSniffFlag {
Off
}

pub type MimeType = (TopLevel, String);


impl MimeClassifier {
//Performs MIME Type Sniffing Algorithm (sections 7 and 8)
pub fn classify(&self,
pub fn classify<'a>(&'a self,
context: LoadContext,
no_sniff_flag: NoSniffFlag,
apache_bug_flag: ApacheBugFlag,
supplied_type: &Option<MimeType>,
data: &[u8]) -> MimeType {
let supplied_type_or_octet_stream = supplied_type.clone()
.unwrap_or((TopLevel::Application,
"octet-stream".to_owned()));
supplied_type: &Option<Mime>,
data: &'a [u8]) -> Mime {
let supplied_type_or_octet_stream = supplied_type.clone().unwrap_or(mime::APPLICATION_OCTET_STREAM);
match context {
LoadContext::Browsing => match *supplied_type {
None => self.sniff_unknown_type(no_sniff_flag, data),
Some(ref supplied_type) => {
let &(ref media_type, ref media_subtype) = supplied_type;
if MimeClassifier::is_explicit_unknown(media_type, media_subtype) {
if MimeClassifier::is_explicit_unknown(supplied_type) {
self.sniff_unknown_type(no_sniff_flag, data)
} else {
match no_sniff_flag {
NoSniffFlag::On => supplied_type.clone(),
NoSniffFlag::Off => match apache_bug_flag {
ApacheBugFlag::On => self.sniff_text_or_data(data),
ApacheBugFlag::Off => match MimeClassifier::get_media_type(media_type,
media_subtype) {
ApacheBugFlag::Off => match MimeClassifier::get_media_type(supplied_type) {
Some(MediaType::Html) => self.feeds_classifier.classify(data),
Some(MediaType::Image) => self.image_classifier.classify(data),
Some(MediaType::AudioVideo) => self.audio_video_classifier.classify(data),
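For reference, a small standalone sketch (not from the patch) of how the mime 0.3 types used by the classifier above behave compared to the old (TopLevel, String) pairs:

extern crate mime;

use mime::Mime;

fn main() {
    // mime 0.3 replaces (TopLevel, SubLevel) tuples with a single parsed Mime value.
    let parsed: Mime = "text/html; charset=utf-8".parse().unwrap();
    assert_eq!(parsed.type_(), mime::TEXT);
    assert_eq!(parsed.subtype(), mime::HTML);
    assert_eq!(parsed.get_param(mime::CHARSET), Some(mime::UTF_8));

    // Common types are provided as constants, as the classifier now uses directly.
    assert_eq!(mime::APPLICATION_OCTET_STREAM.type_(), mime::APPLICATION);
    assert_eq!(mime::TEXT_PLAIN.subtype(), mime::PLAIN);
}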
@ -107,7 +100,7 @@ impl MimeClassifier {
|
|||
// This section was *not* finalized in the specs at the time
|
||||
// of this implementation.
|
||||
match *supplied_type {
|
||||
None => (TopLevel::Application, "octet-stream".to_owned()),
|
||||
None => mime::APPLICATION_OCTET_STREAM,
|
||||
_ => supplied_type_or_octet_stream,
|
||||
}
|
||||
},
|
||||
|
@ -117,7 +110,7 @@ impl MimeClassifier {
|
|||
// This section was *not* finalized in the specs at the time
|
||||
// of this implementation.
|
||||
match *supplied_type {
|
||||
None => (TopLevel::Text, "css".to_owned()),
|
||||
None => mime::TEXT_CSS,
|
||||
_ => supplied_type_or_octet_stream,
|
||||
}
|
||||
},
|
||||
|
@ -127,7 +120,7 @@ impl MimeClassifier {
|
|||
// This section was *not* finalized in the specs at the time
|
||||
// of this implementation.
|
||||
match *supplied_type {
|
||||
None => (TopLevel::Text, "javascript".to_owned()),
|
||||
None => mime::TEXT_JAVASCRIPT,
|
||||
_ => supplied_type_or_octet_stream,
|
||||
}
|
||||
},
|
||||
|
@ -143,14 +136,14 @@ impl MimeClassifier {
|
|||
//
|
||||
// This section was *not* finalized in the specs at the time
|
||||
// of this implementation.
|
||||
(TopLevel::Text, "vtt".to_owned())
|
||||
"text/vtt".parse().unwrap()
|
||||
},
|
||||
LoadContext::CacheManifest => {
|
||||
// 8.9 Sniffing in a cache manifest context
|
||||
//
|
||||
// This section was *not* finalized in the specs at the time
|
||||
// of this implementation.
|
||||
(TopLevel::Text, "cache-manifest".to_owned())
|
||||
"text/cache-manifest".parse().unwrap()
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -181,7 +174,7 @@ impl MimeClassifier {
|
|||
}
|
||||
|
||||
//some sort of iterator over the classifiers might be better?
|
||||
fn sniff_unknown_type(&self, no_sniff_flag: NoSniffFlag, data: &[u8]) -> MimeType {
|
||||
fn sniff_unknown_type(&self, no_sniff_flag: NoSniffFlag, data: &[u8]) -> Mime {
|
||||
let should_sniff_scriptable = no_sniff_flag == NoSniffFlag::Off;
|
||||
let sniffed = if should_sniff_scriptable {
|
||||
self.scriptable_classifier.classify(data)
|
||||
|
@ -197,72 +190,60 @@ impl MimeClassifier {
|
|||
.expect("BinaryOrPlaintextClassifier always succeeds")
|
||||
}
|
||||
|
||||
fn sniff_text_or_data(&self, data: &[u8]) -> MimeType {
|
||||
fn sniff_text_or_data<'a>(&'a self, data: &'a [u8]) -> Mime {
|
||||
self.binary_or_plaintext.classify(data).expect("BinaryOrPlaintextClassifier always succeeds")
|
||||
}
|
||||
|
||||
fn is_xml(tp: &TopLevel, sub_tp: &str) -> bool {
|
||||
sub_tp.ends_with("+xml") ||
|
||||
match (tp, sub_tp) {
|
||||
(&TopLevel::Application, "xml") | (&TopLevel::Text, "xml") => true,
|
||||
_ => false
|
||||
}
|
||||
fn is_xml(mt: &Mime) -> bool {
|
||||
mt.suffix() == Some(mime::XML) ||
|
||||
(mt.type_() == mime::APPLICATION && mt.subtype() == mime::XML) ||
|
||||
(mt.type_() == mime::TEXT && mt.subtype() == mime::XML)
|
||||
}
|
||||
|
||||
fn is_html(tp: &TopLevel, sub_tp: &str) -> bool {
|
||||
*tp == TopLevel::Text && sub_tp == "html"
|
||||
fn is_html(mt: &Mime) -> bool {
|
||||
mt.type_() == mime::TEXT && mt.subtype() == mime::HTML
|
||||
}
|
||||
|
||||
fn is_image(tp: &TopLevel) -> bool {
|
||||
*tp == TopLevel::Image
|
||||
fn is_image(mt: &Mime) -> bool {
|
||||
mt.type_() == mime::IMAGE
|
||||
}
|
||||
|
||||
fn is_audio_video(tp: &TopLevel, sub_tp: &str) -> bool {
|
||||
*tp == TopLevel::Audio ||
|
||||
*tp == TopLevel::Video ||
|
||||
(*tp == TopLevel::Application && sub_tp == "ogg")
|
||||
fn is_audio_video(mt: &Mime) -> bool {
|
||||
mt.type_() == mime::AUDIO ||
|
||||
mt.type_() == mime::VIDEO ||
|
||||
mt.type_() == mime::APPLICATION && mt.subtype() == mime::OGG
|
||||
}
|
||||
|
||||
fn is_explicit_unknown(tp: &TopLevel, sub_tp: &str) -> bool {
|
||||
if let TopLevel::Ext(ref e) = *tp {
|
||||
return e == "unknown" && sub_tp == "unknown";
|
||||
}
|
||||
match (tp, sub_tp) {
|
||||
(&TopLevel::Application, "unknown") |
|
||||
(&TopLevel::Star, "*") => true,
|
||||
_ => false
|
||||
}
|
||||
fn is_explicit_unknown(mt: &Mime) -> bool {
|
||||
mt.type_().as_str() == "unknown" && mt.subtype().as_str() == "unknown" ||
|
||||
mt.type_() == mime::APPLICATION && mt.subtype().as_str() == "unknown" ||
|
||||
mt.type_() == mime::STAR && mt.subtype() == mime::STAR
|
||||
}
|
||||
|
||||
fn get_media_type(media_type: &TopLevel,
|
||||
media_subtype: &str) -> Option<MediaType> {
|
||||
if MimeClassifier::is_xml(media_type, media_subtype) {
|
||||
fn get_media_type(mime: &Mime) -> Option<MediaType> {
|
||||
if MimeClassifier::is_xml(&mime) {
|
||||
Some(MediaType::Xml)
|
||||
} else if MimeClassifier::is_html(media_type, media_subtype) {
|
||||
} else if MimeClassifier::is_html(&mime) {
|
||||
Some(MediaType::Html)
|
||||
} else if MimeClassifier::is_image(media_type) {
|
||||
} else if MimeClassifier::is_image(&mime) {
|
||||
Some(MediaType::Image)
|
||||
} else if MimeClassifier::is_audio_video(media_type, media_subtype) {
|
||||
} else if MimeClassifier::is_audio_video(&mime) {
|
||||
Some(MediaType::AudioVideo)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn maybe_get_media_type(supplied_type: &Option<MimeType>) -> Option<MediaType> {
|
||||
supplied_type.as_ref().and_then(|&(ref media_type, ref media_subtype)| {
|
||||
MimeClassifier::get_media_type(media_type, media_subtype)
|
||||
fn maybe_get_media_type(supplied_type: &Option<Mime>) -> Option<MediaType> {
|
||||
supplied_type.as_ref().and_then(|ref mime| {
|
||||
MimeClassifier::get_media_type(mime)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_string_option(tup: Option<(TopLevel, &'static str)>) -> Option<MimeType> {
|
||||
tup.map(|(a, b)| (a.to_owned(), b.to_owned()))
|
||||
}
|
||||
|
||||
//Interface used for composite types
|
||||
trait MIMEChecker {
|
||||
fn classify(&self, data: &[u8]) -> Option<MimeType>;
|
||||
fn classify(&self, data: &[u8]) -> Option<Mime>;
|
||||
/// Validate the MIME checker configuration
|
||||
fn validate(&self) -> Result<(), String>;
|
||||
}
|
||||
|
@ -303,7 +284,7 @@ struct ByteMatcher {
|
|||
pattern: &'static [u8],
|
||||
mask: &'static [u8],
|
||||
leading_ignore: &'static [u8],
|
||||
content_type: (TopLevel, &'static str)
|
||||
content_type: Mime,
|
||||
}
|
||||
|
||||
impl ByteMatcher {
|
||||
|
@ -328,31 +309,31 @@ impl ByteMatcher {
|
|||
}
|
||||
|
||||
impl MIMEChecker for ByteMatcher {
|
||||
fn classify(&self, data: &[u8]) -> Option<MimeType> {
|
||||
fn classify(&self, data: &[u8]) -> Option<Mime> {
|
||||
self.matches(data).map(|_| {
|
||||
(self.content_type.0.to_owned(), self.content_type.1.to_owned())
|
||||
self.content_type.clone()
|
||||
})
|
||||
}
|
||||
|
||||
fn validate(&self) -> Result<(), String> {
|
||||
if self.pattern.len() == 0 {
|
||||
return Err(format!(
|
||||
"Zero length pattern for {}/{}",
|
||||
self.content_type.0, self.content_type.1
|
||||
"Zero length pattern for {:?}",
|
||||
self.content_type
|
||||
))
|
||||
}
|
||||
if self.pattern.len() != self.mask.len() {
|
||||
return Err(format!(
|
||||
"Unequal pattern and mask length for {}/{}",
|
||||
self.content_type.0, self.content_type.1
|
||||
"Unequal pattern and mask length for {:?}",
|
||||
self.content_type
|
||||
))
|
||||
}
|
||||
if self.pattern.iter().zip(self.mask.iter()).any(
|
||||
|(&pattern, &mask)| pattern & mask != pattern
|
||||
) {
|
||||
return Err(format!(
|
||||
"Pattern not pre-masked for {}/{}",
|
||||
self.content_type.0, self.content_type.1
|
||||
"Pattern not pre-masked for {:?}",
|
||||
self.content_type
|
||||
))
|
||||
}
|
||||
Ok(())
|
||||
|
@ -364,11 +345,10 @@ struct TagTerminatedByteMatcher {
|
|||
}
|
||||
|
||||
impl MIMEChecker for TagTerminatedByteMatcher {
|
||||
fn classify(&self, data: &[u8]) -> Option<MimeType> {
|
||||
fn classify(&self, data: &[u8]) -> Option<Mime> {
|
||||
self.matcher.matches(data).and_then(|j|
|
||||
if j < data.len() && (data[j] == b' ' || data[j] == b'>') {
|
||||
Some((self.matcher.content_type.0.to_owned(),
|
||||
self.matcher.content_type.1.to_owned()))
|
||||
Some(self.matcher.content_type.clone())
|
||||
} else {
|
||||
None
|
||||
})
|
||||
|
@ -405,9 +385,9 @@ impl Mp4Matcher {
|
|||
|
||||
}
|
||||
impl MIMEChecker for Mp4Matcher {
|
||||
fn classify(&self, data: &[u8]) -> Option<MimeType> {
|
||||
fn classify(&self, data: &[u8]) -> Option<Mime> {
|
||||
if self.matches(data) {
|
||||
Some((TopLevel::Video, "mp4".to_owned()))
|
||||
Some("video/mp4".parse().unwrap())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -421,25 +401,25 @@ impl MIMEChecker for Mp4Matcher {
|
|||
struct BinaryOrPlaintextClassifier;
|
||||
|
||||
impl BinaryOrPlaintextClassifier {
|
||||
fn classify_impl(&self, data: &[u8]) -> (TopLevel, &'static str) {
|
||||
fn classify_impl(&self, data: &[u8]) -> Mime {
|
||||
if data.starts_with(&[0xFFu8, 0xFEu8]) ||
|
||||
data.starts_with(&[0xFEu8, 0xFFu8]) ||
|
||||
data.starts_with(&[0xEFu8, 0xBBu8, 0xBFu8])
|
||||
{
|
||||
(TopLevel::Text, "plain")
|
||||
mime::TEXT_PLAIN
|
||||
} else if data.iter().any(|&x| x <= 0x08u8 ||
|
||||
x == 0x0Bu8 ||
|
||||
(x >= 0x0Eu8 && x <= 0x1Au8) ||
|
||||
(x >= 0x1Cu8 && x <= 0x1Fu8)) {
|
||||
(TopLevel::Application, "octet-stream")
|
||||
mime::APPLICATION_OCTET_STREAM
|
||||
} else {
|
||||
(TopLevel::Text, "plain")
|
||||
mime::TEXT_PLAIN
|
||||
}
|
||||
}
|
||||
}
|
||||
impl MIMEChecker for BinaryOrPlaintextClassifier {
|
||||
fn classify(&self, data: &[u8]) -> Option<MimeType> {
|
||||
as_string_option(Some(self.classify_impl(data)))
|
||||
fn classify(&self, data: &[u8]) -> Option<Mime> {
|
||||
Some(self.classify_impl(data))
|
||||
}
|
||||
|
||||
fn validate(&self) -> Result<(), String> {
|
||||
|
@ -538,7 +518,7 @@ impl GroupedClassifier {
|
|||
}
|
||||
}
|
||||
impl MIMEChecker for GroupedClassifier {
|
||||
fn classify(&self, data: &[u8]) -> Option<MimeType> {
|
||||
fn classify(&self, data: &[u8]) -> Option<Mime> {
|
||||
self.byte_matchers
|
||||
.iter()
|
||||
.filter_map(|matcher| matcher.classify(data))
|
||||
|
@ -591,7 +571,7 @@ where T: Iterator<Item=&'a u8> + Clone {
|
|||
struct FeedsClassifier;
|
||||
impl FeedsClassifier {
|
||||
// Implements sniffing for mislabeled feeds (https://mimesniff.spec.whatwg.org/#sniffing-a-mislabeled-feed)
|
||||
fn classify_impl(&self, data: &[u8]) -> Option<(TopLevel, &'static str)> {
|
||||
fn classify_impl(&self, data: &[u8]) -> Option<Mime> {
|
||||
// Step 4: can not be feed unless length is > 3
|
||||
if data.len() < 3 {
|
||||
return None;
|
||||
|
@ -622,11 +602,11 @@ impl FeedsClassifier {
|
|||
|
||||
// Step 5.2.5
|
||||
if matcher.matches(b"rss") {
|
||||
return Some((TopLevel::Application, "rss+xml"));
|
||||
return Some("application/rss+xml".parse().unwrap());
|
||||
}
|
||||
// Step 5.2.6
|
||||
if matcher.matches(b"feed") {
|
||||
return Some((TopLevel::Application, "atom+xml"));
|
||||
return Some("application/atom+xml".parse().unwrap());
|
||||
}
|
||||
// Step 5.2.7
|
||||
if matcher.matches(b"rdf:RDF") {
|
||||
|
@ -637,7 +617,7 @@ impl FeedsClassifier {
|
|||
.chain(|| eats_until(&mut matcher,
|
||||
b"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
|
||||
b"http://purl.org/rss/1.0/")) {
|
||||
Match::StartAndEnd => return Some((TopLevel::Application, "rss+xml")),
|
||||
Match::StartAndEnd => return Some("application/rss+xml".parse().unwrap()),
|
||||
Match::DidNotMatch => {},
|
||||
Match::Start => return None
|
||||
}
|
||||
|
@ -649,8 +629,8 @@ impl FeedsClassifier {
|
|||
}
|
||||
|
||||
impl MIMEChecker for FeedsClassifier {
|
||||
fn classify(&self, data: &[u8]) -> Option<MimeType> {
|
||||
as_string_option(self.classify_impl(data))
|
||||
fn classify(&self, data: &[u8]) -> Option<Mime> {
|
||||
self.classify_impl(data)
|
||||
}
|
||||
|
||||
fn validate(&self) -> Result<(), String> {
|
||||
|
@ -666,7 +646,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\x00\x00\x01\x00",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "x-icon"),
|
||||
content_type: "image/x-icon".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -675,7 +655,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\x00\x00\x02\x00",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "x-icon"),
|
||||
content_type: "image/x-icon".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -684,7 +664,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"BM",
|
||||
mask: b"\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "bmp"),
|
||||
content_type: mime::IMAGE_BMP,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -693,7 +673,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"GIF89a",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "gif"),
|
||||
content_type: mime::IMAGE_GIF,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -702,7 +682,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"GIF87a",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "gif"),
|
||||
content_type: mime::IMAGE_GIF,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -711,7 +691,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"RIFF\x00\x00\x00\x00WEBPVP",
|
||||
mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "webp"),
|
||||
content_type: "image/webp".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -721,7 +701,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\x89PNG\r\n\x1A\n",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "png"),
|
||||
content_type: mime::IMAGE_PNG,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -730,7 +710,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\xFF\xD8\xFF",
|
||||
mask: b"\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Image, "jpeg"),
|
||||
content_type: mime::IMAGE_JPEG,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -739,7 +719,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\x1A\x45\xDF\xA3",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Video, "webm"),
|
||||
content_type: "video/webm".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -748,7 +728,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b".snd",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Audio, "basic"),
|
||||
content_type: "audio/basic".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -757,7 +737,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"FORM\x00\x00\x00\x00AIFF",
|
||||
mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Audio, "aiff"),
|
||||
content_type: "audio/aiff".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -766,7 +746,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"ID3",
|
||||
mask: b"\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Audio, "mpeg"),
|
||||
content_type: "audio/mpeg".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -775,7 +755,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"OggS\x00",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "ogg"),
|
||||
content_type: "application/ogg".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -785,7 +765,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"MThd\x00\x00\x00\x06",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Audio, "midi"),
|
||||
content_type: "audio/midi".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -794,7 +774,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"RIFF\x00\x00\x00\x00AVI ",
|
||||
mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Video, "avi"),
|
||||
content_type: "video/avi".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -803,7 +783,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"RIFF\x00\x00\x00\x00WAVE",
|
||||
mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Audio, "wave"),
|
||||
content_type: "audio/wave".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -813,7 +793,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<!DOCTYPE HTML",
|
||||
mask: b"\xFF\xFF\xDF\xDF\xDF\xDF\xDF\xDF\xDF\xFF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -825,7 +805,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<HTML",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -837,7 +817,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<HEAD",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -849,7 +829,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<SCRIPT",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -861,7 +841,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<IFRAME",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -873,7 +853,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<H1",
|
||||
mask: b"\xFF\xDF\xFF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -885,7 +865,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<DIV",
|
||||
mask: b"\xFF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -897,7 +877,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<FONT",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -909,7 +889,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<TABLE",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -921,7 +901,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<A",
|
||||
mask: b"\xFF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -933,7 +913,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<STYLE",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -945,7 +925,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<TITLE",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -957,7 +937,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<B",
|
||||
mask: b"\xFF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -969,7 +949,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<BODY",
|
||||
mask: b"\xFF\xDF\xDF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -981,7 +961,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<BR",
|
||||
mask: b"\xFF\xDF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -993,7 +973,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<P",
|
||||
mask: b"\xFF\xDF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -1005,7 +985,7 @@ impl ByteMatcher {
|
|||
matcher: ByteMatcher {
|
||||
pattern: b"<!--",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Text, "html"),
|
||||
content_type: mime::TEXT_HTML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -1016,7 +996,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"<?xml",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Text, "xml"),
|
||||
content_type: mime::TEXT_XML,
|
||||
leading_ignore: b"\t\n\x0C\r "
|
||||
}
|
||||
}
|
||||
|
@ -1025,7 +1005,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"%PDF-",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "pdf"),
|
||||
content_type: mime::APPLICATION_PDF,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1038,7 +1018,7 @@ impl ByteMatcher {
|
|||
mask: b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
|
||||
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
|
||||
\x00\x00\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "vnd.ms-fontobject"),
|
||||
content_type: "application/vnd.ms-fontobject".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1047,7 +1027,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\x00\x01\x00\x00",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "font-sfnt"),
|
||||
content_type: "application/font-sfnt".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1056,7 +1036,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"OTTO",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "font-sfnt"),
|
||||
content_type: "application/font-sfnt".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1065,7 +1045,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"ttcf",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "font-sfnt"),
|
||||
content_type: "application/font-sfnt".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1074,7 +1054,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"wOFF",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "font-woff"),
|
||||
content_type: "application/font-woff".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1083,7 +1063,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\x1F\x8B\x08",
|
||||
mask: b"\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "x-gzip"),
|
||||
content_type: "application/x-gzip".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1092,7 +1072,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"PK\x03\x04",
|
||||
mask: b"\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "zip"),
|
||||
content_type: "application/zip".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1101,7 +1081,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"Rar \x1A\x07\x00",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "x-rar-compressed"),
|
||||
content_type: "application/x-rar-compressed".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1110,7 +1090,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"%!PS-Adobe-",
|
||||
mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
|
||||
content_type: (TopLevel::Application, "postscript"),
|
||||
content_type: "application/postscript".parse().unwrap(),
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1119,7 +1099,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\xFE\xFF\x00\x00",
|
||||
mask: b"\xFF\xFF\x00\x00",
|
||||
content_type: (TopLevel::Text, "plain"),
|
||||
content_type: mime::TEXT_PLAIN,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1128,7 +1108,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\xFF\xFE\x00\x00",
|
||||
mask: b"\xFF\xFF\x00\x00",
|
||||
content_type: (TopLevel::Text, "plain"),
|
||||
content_type: mime::TEXT_PLAIN,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
@ -1137,7 +1117,7 @@ impl ByteMatcher {
|
|||
ByteMatcher {
|
||||
pattern: b"\xEF\xBB\xBF\x00",
|
||||
mask: b"\xFF\xFF\xFF\x00",
|
||||
content_type: (TopLevel::Text, "plain"),
|
||||
content_type: mime::TEXT_PLAIN,
|
||||
leading_ignore: &[]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
//! A thread that takes a URL and streams back the binary data.
|
||||
use connector::{create_http_connector, create_ssl_client};
|
||||
use connector::{create_http_client, create_ssl_connector_builder};
|
||||
use cookie;
|
||||
use cookie_rs;
|
||||
use cookie_storage::CookieStorage;
|
||||
|
@ -15,7 +15,7 @@ use fetch::methods::{CancellationListener, FetchContext, fetch};
|
|||
use filemanager_thread::FileManager;
|
||||
use hsts::HstsList;
|
||||
use http_cache::HttpCache;
|
||||
use http_loader::{HttpState, http_redirect_fetch};
|
||||
use http_loader::{HANDLE, HttpState, http_redirect_fetch};
|
||||
use hyper_serde::Serde;
|
||||
use ipc_channel::ipc::{self, IpcReceiver, IpcReceiverSet, IpcSender};
|
||||
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
|
||||
|
@ -129,18 +129,17 @@ fn create_http_states(config_dir: Option<&Path>) -> (Arc<HttpState>, Arc<HttpSta
|
|||
},
|
||||
};
|
||||
|
||||
let ssl_client = create_ssl_client(&certs);
|
||||
let ssl_connector_builder = create_ssl_connector_builder(&certs);
|
||||
let http_state = HttpState {
|
||||
cookie_jar: RwLock::new(cookie_jar),
|
||||
auth_cache: RwLock::new(auth_cache),
|
||||
http_cache: RwLock::new(http_cache),
|
||||
hsts_list: RwLock::new(hsts_list),
|
||||
history_states: RwLock::new(HashMap::new()),
|
||||
ssl_client: ssl_client.clone(),
|
||||
connector: create_http_connector(ssl_client),
|
||||
client: create_http_client(ssl_connector_builder, HANDLE.lock().unwrap().executor()),
|
||||
};
|
||||
|
||||
let private_ssl_client = create_ssl_client(&certs);
|
||||
let private_ssl_client = create_ssl_connector_builder(&certs);
|
||||
let private_http_state = HttpState::new(private_ssl_client);
|
||||
|
||||
(Arc::new(http_state), Arc::new(private_http_state))
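A rough sketch of what building the TLS side looks like with openssl 0.10 (illustrative only; the real create_ssl_connector_builder lives in connector.rs and loads the bundled certificate list, and the CA path below is just a placeholder):

extern crate openssl;

use openssl::ssl::{SslConnector, SslMethod};

fn main() {
    // openssl 0.10 configures connectors through a builder type instead of
    // mutating a finished context, which is why the thread now passes a
    // builder into create_http_client rather than a ready-made client.
    let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
    builder.set_ca_file("/etc/ssl/certs/ca-certificates.crt").unwrap();
    let _connector = builder.build();
}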
|
||||
|
|
|
@ -4,10 +4,11 @@
|
|||
|
||||
use base64;
|
||||
use net_traits::response::{Response, ResponseBody, ResponseType};
|
||||
use openssl::hash::{MessageDigest, hash2};
|
||||
use openssl::hash::{MessageDigest, hash};
|
||||
use std::iter::Filter;
|
||||
use std::str::Split;
|
||||
use std::sync::MutexGuard;
|
||||
|
||||
const SUPPORTED_ALGORITHM: &'static [&'static str] = &[
|
||||
"sha256",
|
||||
"sha384",
|
||||
|
@ -119,7 +120,7 @@ fn apply_algorithm_to_response(body: MutexGuard<ResponseBody>,
|
|||
message_digest: MessageDigest)
|
||||
-> String {
|
||||
if let ResponseBody::Done(ref vec) = *body {
|
||||
let response_digest = hash2(message_digest, vec).unwrap(); //Now hash2
|
||||
let response_digest = hash(message_digest, vec).unwrap(); //Now hash
|
||||
base64::encode(&response_digest)
|
||||
} else {
|
||||
unreachable!("Tried to calculate digest of incomplete response body")
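The hash2 to hash rename is the only functional change in this file; a standalone sketch of the openssl 0.10 call (the digest algorithm and input are arbitrary examples, not taken from the patch):

extern crate base64;
extern crate openssl;

use openssl::hash::{hash, MessageDigest};

fn main() {
    // openssl 0.10 renamed hash2 to hash; it still returns the raw digest bytes,
    // which subresource integrity then base64-encodes for comparison.
    let body = b"example response body";
    let digest = hash(MessageDigest::sha256(), body).unwrap();
    println!("sha256-{}", base64::encode(&digest));
}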
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use cookie_rs;
|
||||
use hyper::header::{Header, SetCookie};
|
||||
use net::cookie::Cookie;
|
||||
use net::cookie_storage::CookieStorage;
|
||||
use net_traits::CookieSource;
|
||||
|
@ -90,7 +89,7 @@ fn fn_cookie_constructor() {
|
|||
let cookie = Cookie::new_wrapped(cookie, url, CookieSource::HTTP).unwrap();
|
||||
assert_eq!(cookie.cookie.value(), "bar");
|
||||
assert_eq!(cookie.cookie.name(), "baz");
|
||||
assert!(cookie.cookie.secure());
|
||||
assert!(cookie.cookie.secure().unwrap_or(false));
|
||||
assert_eq!(&cookie.cookie.path().as_ref().unwrap()[..], "/foo/bar/");
|
||||
assert_eq!(&cookie.cookie.domain().as_ref().unwrap()[..], "example.com");
|
||||
assert!(cookie.host_only);
|
||||
|
@ -324,13 +323,8 @@ fn add_retrieve_cookies(set_location: &str,
|
|||
|
||||
// Add all cookies to the store
|
||||
for str_cookie in set_cookies {
|
||||
let bytes = str_cookie.to_string().into_bytes();
|
||||
let header = Header::parse_header(&[bytes]).unwrap();
|
||||
let SetCookie(cookies) = header;
|
||||
for bare_cookie in cookies {
|
||||
let cookie = Cookie::from_cookie_string(bare_cookie, &url, source).unwrap();
|
||||
storage.push(cookie, &url, source);
|
||||
}
|
||||
let cookie = Cookie::from_cookie_string(str_cookie.to_owned(), &url, source).unwrap();
|
||||
storage.push(cookie, &url, source);
|
||||
}
|
||||
|
||||
// Get cookies for the test location
|
||||
|
|
|
@ -2,13 +2,11 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use hyper::header::{Header, SetCookie};
|
||||
use net::cookie::Cookie;
|
||||
use net::cookie_storage::CookieStorage;
|
||||
use net_traits::CookieSource;
|
||||
use servo_url::ServoUrl;
|
||||
|
||||
|
||||
fn run(set_location: &str, set_cookies: &[&str], final_location: &str) -> String {
|
||||
let mut storage = CookieStorage::new(150);
|
||||
let url = ServoUrl::parse(set_location).unwrap();
|
||||
|
@ -16,14 +14,8 @@ fn run(set_location: &str, set_cookies: &[&str], final_location: &str) -> String
|
|||
|
||||
// Add all cookies to the store
|
||||
for str_cookie in set_cookies {
|
||||
let bytes = str_cookie.to_string().into_bytes();
|
||||
let header = Header::parse_header(&[bytes]);
|
||||
if let Ok(SetCookie(cookies)) = header {
|
||||
for bare_cookie in cookies {
|
||||
if let Some(cookie) = Cookie::from_cookie_string(bare_cookie, &url, source) {
|
||||
storage.push(cookie, &url, source);
|
||||
}
|
||||
}
|
||||
if let Some(cookie) = Cookie::from_cookie_string(str_cookie.to_owned().into(), &url, source) {
|
||||
storage.push(cookie, &url, source);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -3,9 +3,10 @@
|
|||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use fetch;
|
||||
use hyper::header::ContentType;
|
||||
use hyper::mime::{Attr, Mime, SubLevel, TopLevel, Value};
|
||||
use headers_core::HeaderMapExt;
|
||||
use headers_ext::ContentType;
|
||||
use hyper_serde::Serde;
|
||||
use mime::{self, Mime};
|
||||
use net_traits::{FetchMetadata, FilteredMetadata, NetworkError};
|
||||
use net_traits::request::{Origin, Request};
|
||||
use net_traits::response::ResponseBody;
|
||||
|
@ -28,8 +29,8 @@ fn assert_parse(url: &'static str,
|
|||
assert!(!response.is_network_error());
|
||||
assert_eq!(response.headers.len(), 1);
|
||||
|
||||
let header_content_type = response.headers.get::<ContentType>();
|
||||
assert_eq!(header_content_type, content_type.as_ref());
|
||||
let header_content_type = response.headers.typed_get::<ContentType>();
|
||||
assert_eq!(header_content_type, content_type);
|
||||
|
||||
let metadata = match response.metadata() {
|
||||
Ok(FetchMetadata::Filtered { filtered: FilteredMetadata::Basic(m), .. }) => m,
|
||||
|
@ -62,9 +63,8 @@ fn empty_invalid() {
|
|||
fn plain() {
|
||||
assert_parse(
|
||||
"data:,hello%20world",
|
||||
Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain,
|
||||
vec!((Attr::Charset, Value::Ext("US-ASCII".to_owned())))))),
|
||||
Some("US-ASCII"),
|
||||
Some(ContentType::from("text/plain; charset=US-ASCII".parse::<Mime>().unwrap())),
|
||||
Some("us-ascii"),
|
||||
Some(b"hello world"));
|
||||
}
|
||||
|
||||
|
@ -72,7 +72,7 @@ fn plain() {
|
|||
fn plain_ct() {
|
||||
assert_parse(
|
||||
"data:text/plain,hello",
|
||||
Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec!()))),
|
||||
Some(ContentType::from(mime::TEXT_PLAIN)),
|
||||
None,
|
||||
Some(b"hello"));
|
||||
}
|
||||
|
@ -81,7 +81,7 @@ fn plain_ct() {
|
|||
fn plain_html() {
|
||||
assert_parse(
|
||||
"data:text/html,<p>Servo</p>",
|
||||
Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, vec!()))),
|
||||
Some(ContentType::from(mime::TEXT_HTML)),
|
||||
None,
|
||||
Some(b"<p>Servo</p>"));
|
||||
}
|
||||
|
@ -90,9 +90,7 @@ fn plain_html() {
|
|||
fn plain_charset() {
|
||||
assert_parse(
|
||||
"data:text/plain;charset=latin1,hello",
|
||||
Some(ContentType(Mime(TopLevel::Text,
|
||||
SubLevel::Plain,
|
||||
vec!((Attr::Charset, Value::Ext("latin1".to_owned())))))),
|
||||
Some(ContentType::from("text/plain; charset=latin1".parse::<Mime>().unwrap())),
|
||||
Some("latin1"),
|
||||
Some(b"hello"));
|
||||
}
|
||||
|
@ -101,9 +99,7 @@ fn plain_charset() {
|
|||
fn plain_only_charset() {
|
||||
assert_parse(
|
||||
"data:;charset=utf-8,hello",
|
||||
Some(ContentType(Mime(TopLevel::Text,
|
||||
SubLevel::Plain,
|
||||
vec!((Attr::Charset, Value::Utf8))))),
|
||||
Some(ContentType::from(mime::TEXT_PLAIN_UTF_8)),
|
||||
Some("utf-8"),
|
||||
Some(b"hello"));
|
||||
}
|
||||
|
@ -112,10 +108,8 @@ fn plain_only_charset() {
|
|||
fn base64() {
|
||||
assert_parse(
|
||||
"data:;base64,C62+7w==",
|
||||
Some(ContentType(Mime(TopLevel::Text,
|
||||
SubLevel::Plain,
|
||||
vec!((Attr::Charset, Value::Ext("US-ASCII".to_owned())))))),
|
||||
Some("US-ASCII"),
|
||||
Some(ContentType::from("text/plain; charset=US-ASCII".parse::<Mime>().unwrap())),
|
||||
Some("us-ascii"),
|
||||
Some(&[0x0B, 0xAD, 0xBE, 0xEF]));
|
||||
}
|
||||
|
||||
|
@ -123,7 +117,7 @@ fn base64() {
|
|||
fn base64_ct() {
|
||||
assert_parse(
|
||||
"data:application/octet-stream;base64,C62+7w==",
|
||||
Some(ContentType(Mime(TopLevel::Application, SubLevel::Ext("octet-stream".to_owned()), vec!()))),
|
||||
Some(ContentType::from(mime::APPLICATION_OCTET_STREAM)),
|
||||
None,
|
||||
Some(&[0x0B, 0xAD, 0xBE, 0xEF]));
|
||||
}
|
||||
|
@ -132,8 +126,7 @@ fn base64_ct() {
|
|||
fn base64_charset() {
|
||||
assert_parse(
|
||||
"data:text/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==",
|
||||
Some(ContentType(Mime(TopLevel::Text, SubLevel::Plain,
|
||||
vec!((Attr::Charset, Value::Ext("koi8-r".to_owned())))))),
|
||||
Some(ContentType::from("text/plain; charset=koi8-r".parse::<Mime>().unwrap())),
|
||||
Some("koi8-r"),
|
||||
Some(&[0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4]));
|
||||
}
|
||||
|
|
|
@ -2,27 +2,24 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use {DEFAULT_USER_AGENT, new_fetch_context, create_embedder_proxy, fetch, make_server};
|
||||
use {DEFAULT_USER_AGENT, new_fetch_context, create_embedder_proxy, fetch, make_server, make_ssl_server};
|
||||
use devtools_traits::HttpRequest as DevtoolsHttpRequest;
|
||||
use devtools_traits::HttpResponse as DevtoolsHttpResponse;
|
||||
use fetch_with_context;
|
||||
use fetch_with_cors_cache;
|
||||
use headers_core::HeaderMapExt;
|
||||
use headers_ext::{AccessControlAllowCredentials, AccessControlAllowHeaders, AccessControlAllowOrigin};
|
||||
use headers_ext::{AccessControlAllowMethods, AccessControlMaxAge};
|
||||
use headers_ext::{CacheControl, ContentLength, ContentType, Expires, Host, LastModified, Pragma, UserAgent};
|
||||
use http::{Method, StatusCode};
|
||||
use http::header::{self, HeaderMap, HeaderName, HeaderValue};
|
||||
use http::uri::Authority;
|
||||
use http_loader::{expect_devtools_http_request, expect_devtools_http_response};
|
||||
use hyper::LanguageTag;
|
||||
use hyper::header::{Accept, AccessControlAllowCredentials, AccessControlAllowHeaders, AccessControlAllowOrigin};
|
||||
use hyper::header::{AcceptEncoding, AcceptLanguage, AccessControlAllowMethods, AccessControlMaxAge};
|
||||
use hyper::header::{AccessControlRequestHeaders, AccessControlRequestMethod, Date, UserAgent};
|
||||
use hyper::header::{CacheControl, ContentLanguage, ContentLength, ContentType, Expires, LastModified};
|
||||
use hyper::header::{Encoding, Location, Pragma, Quality, QualityItem, SetCookie, qitem};
|
||||
use hyper::header::{Headers, Host, HttpDate, Referer as HyperReferer};
|
||||
use hyper::method::Method;
|
||||
use hyper::mime::{Mime, SubLevel, TopLevel};
|
||||
use hyper::server::{Request as HyperRequest, Response as HyperResponse, Server};
|
||||
use hyper::status::StatusCode;
|
||||
use hyper::uri::RequestUri;
|
||||
use hyper_openssl;
|
||||
use hyper::{Request as HyperRequest, Response as HyperResponse};
|
||||
use hyper::body::Body;
|
||||
use mime::{self, Mime};
|
||||
use msg::constellation_msg::TEST_PIPELINE_ID;
|
||||
use net::connector::create_ssl_client;
|
||||
use net::connector::create_ssl_connector_builder;
|
||||
use net::fetch::cors_cache::CorsCache;
|
||||
use net::fetch::methods::{CancellationListener, FetchContext};
|
||||
use net::filemanager_thread::FileManager;
|
||||
|
@ -37,21 +34,21 @@ use servo_channel::{channel, Sender};
|
|||
use servo_url::{ImmutableOrigin, ServoUrl};
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::iter::FromIterator;
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use time::{self, Duration};
|
||||
use unicase::UniCase;
|
||||
use std::time::{SystemTime, Duration};
|
||||
|
||||
// TODO write a struct that impls Handler for storing test values
|
||||
|
||||
#[test]
|
||||
fn test_fetch_response_is_not_network_error() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -79,10 +76,10 @@ fn test_fetch_on_bad_port_is_network_error() {
|
|||
#[test]
|
||||
fn test_fetch_response_body_matches_const_message() {
|
||||
static MESSAGE: &'static [u8] = b"Hello World!";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -142,11 +139,11 @@ fn test_fetch_blob() {
|
|||
|
||||
assert_eq!(fetch_response.headers.len(), 2);
|
||||
|
||||
let content_type: &ContentType = fetch_response.headers.get().unwrap();
|
||||
assert_eq!(**content_type, Mime(TopLevel::Text, SubLevel::Plain, vec![]));
|
||||
let content_type: Mime = fetch_response.headers.typed_get::<ContentType>().unwrap().into();
|
||||
assert_eq!(content_type, mime::TEXT_PLAIN);
|
||||
|
||||
let content_length: &ContentLength = fetch_response.headers.get().unwrap();
|
||||
assert_eq!(**content_length, bytes.len() as u64);
|
||||
let content_length: ContentLength = fetch_response.headers.typed_get().unwrap();
|
||||
assert_eq!(content_length.0, bytes.len() as u64);
|
||||
|
||||
assert_eq!(*fetch_response.body.lock().unwrap(),
|
||||
ResponseBody::Done(bytes.to_vec()));
|
||||
|
@ -162,8 +159,8 @@ fn test_fetch_file() {
|
|||
let fetch_response = fetch(&mut request, None);
|
||||
assert!(!fetch_response.is_network_error());
|
||||
assert_eq!(fetch_response.headers.len(), 1);
|
||||
let content_type: &ContentType = fetch_response.headers.get().unwrap();
|
||||
assert_eq!(**content_type, Mime(TopLevel::Text, SubLevel::Css, vec![]));
|
||||
let content_type: Mime = fetch_response.headers.typed_get::<ContentType>().unwrap().into();
|
||||
assert_eq!(content_type, mime::TEXT_CSS);
|
||||
|
||||
let resp_body = fetch_response.body.lock().unwrap();
|
||||
let mut file = File::open(path).unwrap();
|
||||
|
@ -202,20 +199,20 @@ fn test_fetch_bogus_scheme() {
|
|||
fn test_cors_preflight_fetch() {
|
||||
static ACK: &'static [u8] = b"ACK";
|
||||
let state = Arc::new(AtomicUsize::new(0));
|
||||
let handler = move |request: HyperRequest, mut response: HyperResponse| {
|
||||
if request.method == Method::Options && state.clone().fetch_add(1, Ordering::SeqCst) == 0 {
|
||||
assert!(request.headers.has::<AccessControlRequestMethod>());
|
||||
assert!(!request.headers.has::<AccessControlRequestHeaders>());
|
||||
assert!(!request.headers.get::<HyperReferer>().unwrap().contains("a.html"));
|
||||
response.headers_mut().set(AccessControlAllowOrigin::Any);
|
||||
response.headers_mut().set(AccessControlAllowCredentials);
|
||||
response.headers_mut().set(AccessControlAllowMethods(vec![Method::Get]));
|
||||
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0 {
|
||||
assert!(request.headers().contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
|
||||
assert!(!request.headers().contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
|
||||
assert!(!request.headers().get(header::REFERER).unwrap().to_str().unwrap().contains("a.html"));
|
||||
response.headers_mut().typed_insert(AccessControlAllowOrigin::ANY);
|
||||
response.headers_mut().typed_insert(AccessControlAllowCredentials);
|
||||
response.headers_mut().typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
|
||||
} else {
|
||||
response.headers_mut().set(AccessControlAllowOrigin::Any);
|
||||
response.send(ACK).unwrap();
|
||||
response.headers_mut().typed_insert(AccessControlAllowOrigin::ANY);
|
||||
*response.body_mut() = ACK.to_vec().into();
|
||||
}
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let target_url = url.clone().join("a.html").unwrap();
|
||||
|
||||
|
@ -241,20 +238,20 @@ fn test_cors_preflight_cache_fetch() {
|
|||
let state = Arc::new(AtomicUsize::new(0));
|
||||
let counter = state.clone();
|
||||
let mut cache = CorsCache::new();
|
||||
let handler = move |request: HyperRequest, mut response: HyperResponse| {
|
||||
if request.method == Method::Options && state.clone().fetch_add(1, Ordering::SeqCst) == 0 {
|
||||
assert!(request.headers.has::<AccessControlRequestMethod>());
|
||||
assert!(!request.headers.has::<AccessControlRequestHeaders>());
|
||||
response.headers_mut().set(AccessControlAllowOrigin::Any);
|
||||
response.headers_mut().set(AccessControlAllowCredentials);
|
||||
response.headers_mut().set(AccessControlAllowMethods(vec![Method::Get]));
|
||||
response.headers_mut().set(AccessControlMaxAge(6000));
|
||||
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0 {
|
||||
assert!(request.headers().contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
|
||||
assert!(!request.headers().contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
|
||||
response.headers_mut().typed_insert(AccessControlAllowOrigin::ANY);
|
||||
response.headers_mut().typed_insert(AccessControlAllowCredentials);
|
||||
response.headers_mut().typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
|
||||
response.headers_mut().typed_insert(AccessControlMaxAge::from(Duration::new(6000, 0)));
|
||||
} else {
|
||||
response.headers_mut().set(AccessControlAllowOrigin::Any);
|
||||
response.send(ACK).unwrap();
|
||||
response.headers_mut().typed_insert(AccessControlAllowOrigin::ANY);
|
||||
*response.body_mut() = ACK.to_vec().into();
|
||||
}
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(ImmutableOrigin::new_opaque());
|
||||
let mut request = Request::new(url.clone(), Some(origin.clone()), None);
|
||||
|
@ -274,8 +271,8 @@ fn test_cors_preflight_cache_fetch() {
|
|||
assert_eq!(1, counter.load(Ordering::SeqCst));
|
||||
|
||||
// The entry exists in the CORS-preflight cache
|
||||
assert_eq!(true, cache.match_method(&wrapped_request0, Method::Get));
|
||||
assert_eq!(true, cache.match_method(&wrapped_request1, Method::Get));
|
||||
assert_eq!(true, cache.match_method(&wrapped_request0, Method::GET));
|
||||
assert_eq!(true, cache.match_method(&wrapped_request1, Method::GET));
|
||||
|
||||
match *fetch_response0.body.lock().unwrap() {
|
||||
ResponseBody::Done(ref body) => assert_eq!(&**body, ACK),
|
||||
|
@ -291,23 +288,23 @@ fn test_cors_preflight_cache_fetch() {
|
|||
fn test_cors_preflight_fetch_network_error() {
|
||||
static ACK: &'static [u8] = b"ACK";
|
||||
let state = Arc::new(AtomicUsize::new(0));
|
||||
let handler = move |request: HyperRequest, mut response: HyperResponse| {
|
||||
if request.method == Method::Options && state.clone().fetch_add(1, Ordering::SeqCst) == 0 {
|
||||
assert!(request.headers.has::<AccessControlRequestMethod>());
|
||||
assert!(!request.headers.has::<AccessControlRequestHeaders>());
|
||||
response.headers_mut().set(AccessControlAllowOrigin::Any);
|
||||
response.headers_mut().set(AccessControlAllowCredentials);
|
||||
response.headers_mut().set(AccessControlAllowMethods(vec![Method::Get]));
|
||||
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
if request.method() == Method::OPTIONS && state.clone().fetch_add(1, Ordering::SeqCst) == 0 {
|
||||
assert!(request.headers().contains_key(header::ACCESS_CONTROL_REQUEST_METHOD));
|
||||
assert!(!request.headers().contains_key(header::ACCESS_CONTROL_REQUEST_HEADERS));
|
||||
response.headers_mut().typed_insert(AccessControlAllowOrigin::ANY);
|
||||
response.headers_mut().typed_insert(AccessControlAllowCredentials);
|
||||
response.headers_mut().typed_insert(AccessControlAllowMethods::from_iter(vec![Method::GET]));
|
||||
} else {
|
||||
response.headers_mut().set(AccessControlAllowOrigin::Any);
|
||||
response.send(ACK).unwrap();
|
||||
response.headers_mut().typed_insert(AccessControlAllowOrigin::ANY);
|
||||
*response.body_mut() = ACK.to_vec().into();
|
||||
}
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(ImmutableOrigin::new_opaque());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
request.method = Method::Extension("CHICKEN".to_owned());
|
||||
request.method = Method::from_bytes(b"CHICKEN").unwrap();
|
||||
request.referrer = Referrer::NoReferrer;
|
||||
request.use_cors_preflight = true;
|
||||
request.mode = RequestMode::CorsMode;
|
||||
|
@ -320,14 +317,17 @@ fn test_cors_preflight_fetch_network_error() {
|
|||
#[test]
|
||||
fn test_fetch_response_is_basic_filtered() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |_: HyperRequest, mut response: HyperResponse| {
|
||||
response.headers_mut().set(SetCookie(vec![]));
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
response.headers_mut().insert(header::SET_COOKIE, HeaderValue::from_static(""));
|
||||
// this header is obsolete, so hyper doesn't implement it, but it's still covered by the spec
|
||||
response.headers_mut().set_raw("Set-Cookie2", vec![]);
|
||||
response.headers_mut().insert(
|
||||
HeaderName::from_static("set-cookie2"),
|
||||
HeaderValue::from_bytes(&vec![]).unwrap()
|
||||
);
|
||||
|
||||
response.send(MESSAGE).unwrap();
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
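For orientation, hyper 0.10's typed `Headers::set` calls are replaced throughout these tests by two styles on `http::HeaderMap`, both visible in the handler above: a typed insert via `headers_core::HeaderMapExt` plus `headers_ext`, and a raw insert with `HeaderName`/`HeaderValue`. A minimal standalone sketch (the helper function is illustrative, not part of the diff):

use headers_core::HeaderMapExt;
use headers_ext::ContentType;
use http::HeaderMap;
use http::header::{self, HeaderName, HeaderValue};

// Illustrative helper: builds a header map the two ways used in these tests.
fn build_headers() -> HeaderMap {
    let mut map = HeaderMap::new();
    // typed insert: headers_ext serializes the value for us
    map.typed_insert(ContentType::from(mime::TEXT_HTML));
    // raw insert with a well-known name constant from the http crate
    map.insert(header::SET_COOKIE, HeaderValue::from_static(""));
    // raw insert with a non-standard name (from_static expects lowercase)
    map.insert(HeaderName::from_static("set-cookie2"), HeaderValue::from_static(""));
    map
}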
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -339,39 +339,42 @@ fn test_fetch_response_is_basic_filtered() {
|
|||
assert_eq!(fetch_response.response_type, ResponseType::Basic);
|
||||
|
||||
let headers = fetch_response.headers;
|
||||
assert!(!headers.has::<SetCookie>());
|
||||
assert!(headers.get_raw("Set-Cookie2").is_none());
|
||||
assert!(!headers.contains_key(header::SET_COOKIE));
|
||||
assert!(headers.get(HeaderName::from_static("set-cookie2")).is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_response_is_cors_filtered() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |_: HyperRequest, mut response: HyperResponse| {
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
// this is mandatory for the CORS check to pass
|
||||
// TODO: test using different URL encodings with this value, i.e. punycode
|
||||
response.headers_mut().set(AccessControlAllowOrigin::Any);
|
||||
response.headers_mut().typed_insert(AccessControlAllowOrigin::ANY);
|
||||
|
||||
// these are the headers that should be kept after filtering
|
||||
response.headers_mut().set(CacheControl(vec![]));
|
||||
response.headers_mut().set(ContentLanguage(vec![]));
|
||||
response.headers_mut().set(ContentType::html());
|
||||
response.headers_mut().set(Expires(HttpDate(time::now() + Duration::days(1))));
|
||||
response.headers_mut().set(LastModified(HttpDate(time::now())));
|
||||
response.headers_mut().set(Pragma::NoCache);
|
||||
response.headers_mut().typed_insert(CacheControl::new());
|
||||
response.headers_mut().insert(header::CONTENT_LANGUAGE, HeaderValue::from_bytes(&vec![]).unwrap());
|
||||
response.headers_mut().typed_insert(ContentType::from(mime::TEXT_HTML));
|
||||
response.headers_mut().typed_insert(Expires::from(SystemTime::now() + Duration::new(86400, 0)));
|
||||
response.headers_mut().typed_insert(LastModified::from(SystemTime::now()));
|
||||
response.headers_mut().typed_insert(Pragma::no_cache());
|
||||
|
||||
// these headers should not be kept after filtering, even though they are given a pass
|
||||
response.headers_mut().set(SetCookie(vec![]));
|
||||
response.headers_mut().set_raw("Set-Cookie2", vec![]);
|
||||
response.headers_mut().set(
|
||||
AccessControlAllowHeaders(vec![
|
||||
UniCase("set-cookie".to_owned()),
|
||||
UniCase("set-cookie2".to_owned())
|
||||
response.headers_mut().insert(header::SET_COOKIE, HeaderValue::from_static(""));
|
||||
response.headers_mut().insert(
|
||||
HeaderName::from_static("set-cookie2"),
|
||||
HeaderValue::from_bytes(&vec![]).unwrap()
|
||||
);
|
||||
response.headers_mut().typed_insert(
|
||||
AccessControlAllowHeaders::from_iter(vec![
|
||||
HeaderName::from_static("set-cookie"),
|
||||
HeaderName::from_static("set-cookie2")
|
||||
])
|
||||
);
|
||||
|
||||
response.send(MESSAGE).unwrap();
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
// an origin mismatch will stop it from defaulting to a basic filtered response
|
||||
let origin = Origin::Origin(ImmutableOrigin::new_opaque());
|
||||
|
@ -385,25 +388,25 @@ fn test_fetch_response_is_cors_filtered() {
|
|||
assert_eq!(fetch_response.response_type, ResponseType::Cors);
|
||||
|
||||
let headers = fetch_response.headers;
|
||||
assert!(headers.has::<CacheControl>());
|
||||
assert!(headers.has::<ContentLanguage>());
|
||||
assert!(headers.has::<ContentType>());
|
||||
assert!(headers.has::<Expires>());
|
||||
assert!(headers.has::<LastModified>());
|
||||
assert!(headers.has::<Pragma>());
|
||||
assert!(headers.contains_key(header::CACHE_CONTROL));
|
||||
assert!(headers.contains_key(header::CONTENT_LANGUAGE));
|
||||
assert!(headers.contains_key(header::CONTENT_TYPE));
|
||||
assert!(headers.contains_key(header::EXPIRES));
|
||||
assert!(headers.contains_key(header::LAST_MODIFIED));
|
||||
assert!(headers.contains_key(header::PRAGMA));
|
||||
|
||||
assert!(!headers.has::<AccessControlAllowOrigin>());
|
||||
assert!(!headers.has::<SetCookie>());
|
||||
assert!(headers.get_raw("Set-Cookie2").is_none());
|
||||
assert!(!headers.contains_key(header::ACCESS_CONTROL_ALLOW_ORIGIN));
|
||||
assert!(!headers.contains_key(header::SET_COOKIE));
|
||||
assert!(headers.get(HeaderName::from_static("set-cookie2")).is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_response_is_opaque_filtered() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
// an origin mismatch will fall through to an Opaque filtered response
|
||||
let origin = Origin::Origin(ImmutableOrigin::new_opaque());
|
||||
|
@ -419,7 +422,7 @@ fn test_fetch_response_is_opaque_filtered() {
|
|||
assert!(fetch_response.url_list.is_empty());
|
||||
// this also asserts that the status message is "the empty byte sequence"
|
||||
assert!(fetch_response.status.is_none());
|
||||
assert_eq!(fetch_response.headers, Headers::new());
|
||||
assert_eq!(fetch_response.headers, HeaderMap::new());
|
||||
match *fetch_response.body.lock().unwrap() {
|
||||
ResponseBody::Empty => { },
|
||||
_ => panic!()
|
||||
|
@ -433,25 +436,18 @@ fn test_fetch_response_is_opaque_filtered() {
|
|||
#[test]
|
||||
fn test_fetch_response_is_opaque_redirect_filtered() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |request: HyperRequest, mut response: HyperResponse| {
|
||||
let redirects = match request.uri {
|
||||
RequestUri::AbsolutePath(url) =>
|
||||
url.split("/").collect::<String>().parse::<u32>().unwrap_or(0),
|
||||
RequestUri::AbsoluteUri(url) =>
|
||||
url.path_segments().unwrap().next_back().unwrap().parse::<u32>().unwrap_or(0),
|
||||
_ => panic!()
|
||||
};
|
||||
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
let redirects = request.uri().path().split("/").collect::<String>().parse::<u32>().unwrap_or(0);
|
||||
|
||||
if redirects == 1 {
|
||||
response.send(MESSAGE).unwrap();
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
} else {
|
||||
*response.status_mut() = StatusCode::Found;
|
||||
let url = format!("{}", 1);
|
||||
response.headers_mut().set(Location(url.to_owned()));
|
||||
*response.status_mut() = StatusCode::FOUND;
|
||||
response.headers_mut().insert(header::LOCATION, HeaderValue::from_static("1"));
|
||||
}
|
||||
};
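The redirect handlers in this file all derive a redirect counter from the request path; with hyper 0.12 the old `RequestUri` match collapses into the single `uri().path()` expression above. A standalone sketch of that parsing (the helper and test names are hypothetical):

// Hypothetical helper showing what the one-liner above computes.
fn redirect_count(path: &str) -> u32 {
    // "/3" splits into ["", "3"], which concatenates to "3"; non-numeric paths fall back to 0
    path.split("/").collect::<String>().parse::<u32>().unwrap_or(0)
}

#[test]
fn redirect_count_examples() {
    assert_eq!(redirect_count("/1"), 1);
    assert_eq!(redirect_count("/"), 0);
}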
|
||||
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -465,7 +461,7 @@ fn test_fetch_response_is_opaque_redirect_filtered() {
|
|||
|
||||
// this also asserts that the status message is "the empty byte sequence"
|
||||
assert!(fetch_response.status.is_none());
|
||||
assert_eq!(fetch_response.headers, Headers::new());
|
||||
assert_eq!(fetch_response.headers, HeaderMap::new());
|
||||
match *fetch_response.body.lock().unwrap() {
|
||||
ResponseBody::Empty => { },
|
||||
_ => panic!()
|
||||
|
@ -481,10 +477,10 @@ fn test_fetch_with_local_urls_only() {
|
|||
// If flag `local_urls_only` is set, fetching a non-local URL must result in a network error.
|
||||
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, server_url) = make_server(handler);
|
||||
let (server, server_url) = make_server(handler);
|
||||
|
||||
let do_fetch = |url: ServoUrl| {
|
||||
let origin = Origin::Origin(url.origin());
|
||||
|
@ -506,7 +502,6 @@ fn test_fetch_with_local_urls_only() {
|
|||
assert!(!local_response.is_network_error());
|
||||
assert!(server_response.is_network_error());
|
||||
}
|
||||
|
||||
// NOTE(emilio): If this test starts failing:
|
||||
//
|
||||
// openssl req -x509 -nodes -days 3650 -newkey rsa:2048 \
|
||||
|
@ -517,22 +512,17 @@ fn test_fetch_with_local_urls_only() {
|
|||
#[test]
|
||||
fn test_fetch_with_hsts() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
|
||||
let cert_path = Path::new("../../resources/self_signed_certificate_for_testing.crt").canonicalize().unwrap();
|
||||
let key_path = Path::new("../../resources/privatekey_for_testing.key").canonicalize().unwrap();
|
||||
|
||||
let ssl = hyper_openssl::OpensslServer::from_files(key_path, cert_path.clone())
|
||||
.unwrap();
|
||||
|
||||
//takes an address and something that implements hyper::net::Ssl
|
||||
let mut server = Server::https("0.0.0.0:0", ssl).unwrap().handle_threads(handler, 1).unwrap();
|
||||
let (server, url) = make_ssl_server(handler, cert_path.clone(), key_path.clone());
|
||||
|
||||
let mut ca_content = String::new();
|
||||
File::open(cert_path).unwrap().read_to_string(&mut ca_content).unwrap();
|
||||
let ssl_client = create_ssl_client(&ca_content);
|
||||
let ssl_client = create_ssl_connector_builder(&ca_content);
|
||||
|
||||
let context = FetchContext {
|
||||
state: Arc::new(HttpState::new(ssl_client)),
|
||||
|
@ -547,15 +537,13 @@ fn test_fetch_with_hsts() {
|
|||
list.push(HstsEntry::new("localhost".to_owned(), IncludeSubdomains::NotIncluded, None)
|
||||
.unwrap());
|
||||
}
|
||||
let url_string = format!("http://localhost:{}", server.socket.port());
|
||||
let url = ServoUrl::parse(&url_string).unwrap();
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
request.referrer = Referrer::NoReferrer;
|
||||
// Set the flag.
|
||||
request.local_urls_only = false;
|
||||
let response = fetch_with_context(&mut request, &context);
|
||||
let _ = server.close();
|
||||
server.close();
|
||||
assert_eq!(response.internal_response.unwrap().url().unwrap().scheme(),
|
||||
"https");
|
||||
}
|
||||
|
@ -563,10 +551,10 @@ fn test_fetch_with_hsts() {
|
|||
#[test]
|
||||
fn test_fetch_with_sri_network_error() {
|
||||
static MESSAGE: &'static [u8] = b"alert('Hello, Network Error');";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -587,10 +575,10 @@ fn test_fetch_with_sri_network_error() {
|
|||
#[test]
|
||||
fn test_fetch_with_sri_sucess() {
|
||||
static MESSAGE: &'static [u8] = b"alert('Hello, world.');";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -616,20 +604,20 @@ fn test_fetch_blocked_nosniff() {
|
|||
mime: Mime,
|
||||
should_error: bool) {
|
||||
const MESSAGE: &'static [u8] = b"";
|
||||
const HEADER: &'static str = "X-Content-Type-Options";
|
||||
const HEADER: &'static str = "x-content-type-options";
|
||||
const VALUE: &'static [u8] = b"nosniff";
|
||||
|
||||
let handler = move |_: HyperRequest, mut response: HyperResponse| {
|
||||
let mime_header = ContentType(mime.clone());
|
||||
response.headers_mut().set(mime_header);
|
||||
assert!(response.headers().has::<ContentType>());
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
let mime_header = ContentType::from(mime.clone());
|
||||
response.headers_mut().typed_insert(mime_header);
|
||||
assert!(response.headers().contains_key(header::CONTENT_TYPE));
|
||||
// Add the nosniff header
|
||||
response.headers_mut().set_raw(HEADER, vec![VALUE.to_vec()]);
|
||||
response.headers_mut().insert(HeaderName::from_static(HEADER), HeaderValue::from_bytes(VALUE).unwrap());
|
||||
|
||||
response.send(MESSAGE).unwrap();
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
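One detail worth calling out: the `HEADER` constant above changed from "X-Content-Type-Options" to "x-content-type-options". The sketch below shows the assumed reason, namely that http 0.1's `HeaderName::from_static` only accepts the canonical lowercase form while `from_bytes` normalizes case; this is background, not something stated in the diff.

use http::header::HeaderName;

// Illustrative: both spellings name the same header, but only the lowercase literal is
// valid for from_static (an uppercase literal would panic at construction time).
fn nosniff_header_name() -> HeaderName {
    let from_static = HeaderName::from_static("x-content-type-options");
    let from_bytes = HeaderName::from_bytes(b"X-Content-Type-Options").unwrap();
    assert_eq!(from_static, from_bytes);
    from_static
}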
|
||||
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -641,9 +629,9 @@ fn test_fetch_blocked_nosniff() {
|
|||
}
|
||||
|
||||
let tests = vec![
|
||||
(Destination::Script, Mime(TopLevel::Text, SubLevel::Javascript, vec![]), false),
|
||||
(Destination::Script, Mime(TopLevel::Text, SubLevel::Css, vec![]), true),
|
||||
(Destination::Style, Mime(TopLevel::Text, SubLevel::Css, vec![]), false),
|
||||
(Destination::Script, mime::TEXT_JAVASCRIPT, false),
|
||||
(Destination::Script, mime::TEXT_CSS, true),
|
||||
(Destination::Style, mime::TEXT_CSS, false),
|
||||
];
|
||||
|
||||
for test in tests {
|
||||
|
@ -653,25 +641,19 @@ fn test_fetch_blocked_nosniff() {
|
|||
}
|
||||
|
||||
fn setup_server_and_fetch(message: &'static [u8], redirect_cap: u32) -> Response {
|
||||
let handler = move |request: HyperRequest, mut response: HyperResponse| {
|
||||
let redirects = match request.uri {
|
||||
RequestUri::AbsolutePath(url) =>
|
||||
url.split("/").collect::<String>().parse::<u32>().unwrap_or(0),
|
||||
RequestUri::AbsoluteUri(url) =>
|
||||
url.path_segments().unwrap().next_back().unwrap().parse::<u32>().unwrap_or(0),
|
||||
_ => panic!()
|
||||
};
|
||||
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
let redirects = request.uri().path().split("/").collect::<String>().parse::<u32>().unwrap_or(0);
|
||||
|
||||
if redirects >= redirect_cap {
|
||||
response.send(message).unwrap();
|
||||
*response.body_mut() = message.to_vec().into();
|
||||
} else {
|
||||
*response.status_mut() = StatusCode::Found;
|
||||
*response.status_mut() = StatusCode::FOUND;
|
||||
let url = format!("{redirects}", redirects = redirects + 1);
|
||||
response.headers_mut().set(Location(url.to_owned()));
|
||||
response.headers_mut().insert(header::LOCATION, HeaderValue::from_str(&url).unwrap());
|
||||
}
|
||||
};
|
||||
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -720,30 +702,24 @@ fn test_fetch_redirect_updates_method_runner(tx: Sender<bool>, status_code: Stat
|
|||
let handler_method = method.clone();
|
||||
let handler_tx = Arc::new(Mutex::new(tx));
|
||||
|
||||
let handler = move |request: HyperRequest, mut response: HyperResponse| {
|
||||
let redirects = match request.uri {
|
||||
RequestUri::AbsolutePath(url) =>
|
||||
url.split("/").collect::<String>().parse::<u32>().unwrap_or(0),
|
||||
RequestUri::AbsoluteUri(url) =>
|
||||
url.path_segments().unwrap().next_back().unwrap().parse::<u32>().unwrap_or(0),
|
||||
_ => panic!()
|
||||
};
|
||||
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
let redirects = request.uri().path().split("/").collect::<String>().parse::<u32>().unwrap_or(0);
|
||||
|
||||
let mut test_pass = true;
|
||||
|
||||
if redirects == 0 {
|
||||
*response.status_mut() = StatusCode::TemporaryRedirect;
|
||||
response.headers_mut().set(Location("1".to_owned()));
|
||||
*response.status_mut() = StatusCode::TEMPORARY_REDIRECT;
|
||||
response.headers_mut().insert(header::LOCATION, HeaderValue::from_static("1"));
|
||||
|
||||
} else if redirects == 1 {
|
||||
// this makes sure that the request method doesn't change for the wrong status code
|
||||
if handler_method != Method::Get && request.method == Method::Get {
|
||||
if handler_method != Method::GET && request.method() == Method::GET {
|
||||
test_pass = false;
|
||||
}
|
||||
*response.status_mut() = status_code;
|
||||
response.headers_mut().set(Location("2".to_owned()));
|
||||
response.headers_mut().insert(header::LOCATION, HeaderValue::from_static("2"));
|
||||
|
||||
} else if request.method != Method::Get {
|
||||
} else if request.method() != Method::GET {
|
||||
test_pass = false;
|
||||
}
|
||||
|
||||
|
@ -754,7 +730,7 @@ fn test_fetch_redirect_updates_method_runner(tx: Sender<bool>, status_code: Stat
|
|||
|
||||
};
|
||||
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -769,36 +745,36 @@ fn test_fetch_redirect_updates_method_runner(tx: Sender<bool>, status_code: Stat
|
|||
fn test_fetch_redirect_updates_method() {
|
||||
let (tx, rx) = channel();
|
||||
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::MovedPermanently, Method::Post);
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::MOVED_PERMANENTLY, Method::POST);
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
// make sure the test doesn't send more data than expected
|
||||
assert_eq!(rx.try_recv().is_none(), true);
|
||||
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::Found, Method::Post);
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::FOUND, Method::POST);
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
assert_eq!(rx.try_recv().is_none(), true);
|
||||
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::SeeOther, Method::Get);
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::SEE_OTHER, Method::GET);
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
assert_eq!(rx.try_recv().is_none(), true);
|
||||
|
||||
let extension = Method::Extension("FOO".to_owned());
|
||||
let extension = Method::from_bytes(b"FOO").unwrap();
|
||||
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::MovedPermanently, extension.clone());
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::MOVED_PERMANENTLY, extension.clone());
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
// for MovedPermanently and Found, Method should only be changed if it was Post
|
||||
assert_eq!(rx.recv().unwrap(), false);
|
||||
assert_eq!(rx.try_recv().is_none(), true);
|
||||
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::Found, extension.clone());
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::FOUND, extension.clone());
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
assert_eq!(rx.recv().unwrap(), false);
|
||||
assert_eq!(rx.try_recv().is_none(), true);
|
||||
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::SeeOther, extension.clone());
|
||||
test_fetch_redirect_updates_method_runner(tx.clone(), StatusCode::SEE_OTHER, extension.clone());
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
// for SeeOther, Method should always be changed, so this should be true
|
||||
assert_eq!(rx.recv().unwrap(), true);
|
||||
|
@ -826,10 +802,10 @@ fn response_is_done(response: &Response) -> bool {
|
|||
#[test]
|
||||
fn test_fetch_async_returns_complete_response() {
|
||||
static MESSAGE: &'static [u8] = b"this message should be retrieved in full";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -844,10 +820,10 @@ fn test_fetch_async_returns_complete_response() {
|
|||
#[test]
|
||||
fn test_opaque_filtered_fetch_async_returns_complete_response() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
// an origin mismatch will fall through to an Opaque filtered response
|
||||
let origin = Origin::Origin(ImmutableOrigin::new_opaque());
|
||||
|
@ -865,25 +841,18 @@ fn test_opaque_filtered_fetch_async_returns_complete_response() {
|
|||
#[test]
|
||||
fn test_opaque_redirect_filtered_fetch_async_returns_complete_response() {
|
||||
static MESSAGE: &'static [u8] = b"";
|
||||
let handler = move |request: HyperRequest, mut response: HyperResponse| {
|
||||
let redirects = match request.uri {
|
||||
RequestUri::AbsolutePath(url) =>
|
||||
url.split("/").collect::<String>().parse::<u32>().unwrap_or(0),
|
||||
RequestUri::AbsoluteUri(url) =>
|
||||
url.path_segments().unwrap().last().unwrap().parse::<u32>().unwrap_or(0),
|
||||
_ => panic!()
|
||||
};
|
||||
let handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
let redirects = request.uri().path().split("/").collect::<String>().parse::<u32>().unwrap_or(0);
|
||||
|
||||
if redirects == 1 {
|
||||
response.send(MESSAGE).unwrap();
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
} else {
|
||||
*response.status_mut() = StatusCode::Found;
|
||||
let url = format!("{}", 1);
|
||||
response.headers_mut().set(Location(url.to_owned()));
|
||||
*response.status_mut() = StatusCode::FOUND;
|
||||
response.headers_mut().insert(header::LOCATION, HeaderValue::from_static("1"));
|
||||
}
|
||||
};
|
||||
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url, Some(origin), None);
|
||||
|
@ -901,11 +870,11 @@ fn test_opaque_redirect_filtered_fetch_async_returns_complete_response() {
|
|||
#[test]
|
||||
fn test_fetch_with_devtools() {
|
||||
static MESSAGE: &'static [u8] = b"Yay!";
|
||||
let handler = move |_: HyperRequest, response: HyperResponse| {
|
||||
response.send(MESSAGE).unwrap();
|
||||
let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
|
||||
*response.body_mut() = MESSAGE.to_vec().into();
|
||||
};
|
||||
|
||||
let (mut server, url) = make_server(handler);
|
||||
let (server, url) = make_server(handler);
|
||||
|
||||
let origin = Origin::Origin(url.origin());
|
||||
let mut request = Request::new(url.clone(), Some(origin), Some(TEST_PIPELINE_ID));
|
||||
|
@ -921,36 +890,23 @@ fn test_fetch_with_devtools() {
|
|||
let mut devhttpresponse = expect_devtools_http_response(&devtools_port);
|
||||
|
||||
// Create default headers for the request
|
||||
let mut headers = Headers::new();
|
||||
let mut headers = HeaderMap::new();
|
||||
|
||||
headers.set(AcceptEncoding(vec![
|
||||
qitem(Encoding::Gzip),
|
||||
qitem(Encoding::Deflate),
|
||||
qitem(Encoding::EncodingExt("br".to_owned()))
|
||||
]));
|
||||
headers.insert(header::ACCEPT_ENCODING, HeaderValue::from_static("gzip, deflate, br"));
|
||||
headers.typed_insert(
|
||||
Host::from(format!("{}:{}", url.host_str().unwrap(), url.port().unwrap()).parse::<Authority>().unwrap()));
|
||||
|
||||
headers.set(Host { hostname: url.host_str().unwrap().to_owned() , port: url.port().to_owned() });
|
||||
headers.insert(header::ACCEPT, HeaderValue::from_static("*/*"));
|
||||
|
||||
let accept = Accept(vec![qitem(Mime(TopLevel::Star, SubLevel::Star, vec![]))]);
|
||||
headers.set(accept);
|
||||
headers.insert(header::ACCEPT_LANGUAGE, HeaderValue::from_static("en-US, en; q=0.5"));
|
||||
|
||||
let mut en_us: LanguageTag = Default::default();
|
||||
en_us.language = Some("en".to_owned());
|
||||
en_us.region = Some("US".to_owned());
|
||||
let mut en: LanguageTag = Default::default();
|
||||
en.language = Some("en".to_owned());
|
||||
headers.set(AcceptLanguage(vec![
|
||||
qitem(en_us),
|
||||
QualityItem::new(en, Quality(500)),
|
||||
]));
|
||||
|
||||
headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned()));
|
||||
headers.typed_insert::<UserAgent>(DEFAULT_USER_AGENT.parse().unwrap());
|
||||
|
||||
let httprequest = DevtoolsHttpRequest {
|
||||
url: url,
|
||||
method: Method::Get,
|
||||
method: Method::GET,
|
||||
headers: headers,
|
||||
body: None,
|
||||
body: Some(vec![]),
|
||||
pipeline_id: TEST_PIPELINE_ID,
|
||||
startedDateTime: devhttprequest.startedDateTime,
|
||||
timeStamp: devhttprequest.timeStamp,
|
||||
|
@ -960,9 +916,9 @@ fn test_fetch_with_devtools() {
|
|||
};
|
||||
|
||||
let content = "Yay!";
|
||||
let mut response_headers = Headers::new();
|
||||
response_headers.set(ContentLength(content.len() as u64));
|
||||
devhttpresponse.headers.as_mut().unwrap().remove::<Date>();
|
||||
let mut response_headers = HeaderMap::new();
|
||||
response_headers.typed_insert(ContentLength(content.len() as u64));
|
||||
devhttpresponse.headers.as_mut().unwrap().remove(header::DATE);
|
||||
|
||||
let httpresponse = DevtoolsHttpResponse {
|
||||
headers: Some(response_headers),
|
||||
|
|
File diff suppressed because it is too large
|
@ -8,19 +8,27 @@ extern crate cookie as cookie_rs;
|
|||
extern crate devtools_traits;
|
||||
extern crate embedder_traits;
|
||||
extern crate flate2;
|
||||
extern crate futures;
|
||||
extern crate headers_core;
|
||||
extern crate headers_ext;
|
||||
extern crate http;
|
||||
extern crate hyper;
|
||||
extern crate hyper_openssl;
|
||||
extern crate hyper_serde;
|
||||
extern crate ipc_channel;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
extern crate mime;
|
||||
extern crate msg;
|
||||
extern crate net;
|
||||
extern crate net_traits;
|
||||
extern crate openssl;
|
||||
extern crate profile_traits;
|
||||
extern crate servo_channel;
|
||||
extern crate servo_config;
|
||||
extern crate servo_url;
|
||||
extern crate time;
|
||||
extern crate unicase;
|
||||
extern crate tokio;
|
||||
extern crate tokio_openssl;
|
||||
extern crate url;
|
||||
|
||||
mod cookie;
|
||||
|
@ -38,8 +46,12 @@ mod subresource_integrity;
|
|||
use devtools_traits::DevtoolsControlMsg;
|
||||
use embedder_traits::{EmbedderProxy, EventLoopWaker};
|
||||
use embedder_traits::resources::{self, Resource};
|
||||
use hyper::server::{Handler, Listening, Server};
|
||||
use net::connector::create_ssl_client;
|
||||
use futures::{Future, Stream};
|
||||
use hyper::{Body, Request as HyperRequest, Response as HyperResponse};
|
||||
use hyper::server::Server as HyperServer;
|
||||
use hyper::server::conn::Http;
|
||||
use hyper::service::service_fn_ok;
|
||||
use net::connector::create_ssl_connector_builder;
|
||||
use net::fetch::cors_cache::CorsCache;
|
||||
use net::fetch::methods::{self, CancellationListener, FetchContext};
|
||||
use net::filemanager_thread::FileManager;
|
||||
|
@ -47,9 +59,21 @@ use net::test::HttpState;
|
|||
use net_traits::FetchTaskTarget;
|
||||
use net_traits::request::Request;
|
||||
use net_traits::response::Response;
|
||||
use openssl::ssl::{SslAcceptor, SslFiletype, SslMethod};
|
||||
use servo_channel::{channel, Sender};
|
||||
use servo_url::ServoUrl;
|
||||
use std::net::TcpListener as StdTcpListener;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use tokio::net::TcpListener;
|
||||
use tokio::runtime::Runtime;
|
||||
use tokio_openssl::SslAcceptorExt;
|
||||
|
||||
lazy_static! {
|
||||
pub static ref HANDLE: Mutex<Runtime> = {
|
||||
Mutex::new(Runtime::new().unwrap())
|
||||
};
|
||||
}
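The tests now share a single tokio 0.1 runtime behind a `Mutex` instead of letting hyper 0.10 spawn its own handler threads. A minimal sketch of how the helpers below use it (the wrapper function is hypothetical):

// Hypothetical wrapper: each test server future is queued on the shared background
// executor, so the lock is only held for the duration of the spawn call.
fn spawn_on_shared_runtime<F>(server_future: F)
where
    F: futures::Future<Item = (), Error = ()> + Send + 'static,
{
    HANDLE.lock().unwrap().spawn(server_future);
}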
|
||||
|
||||
const DEFAULT_USER_AGENT: &'static str = "Such Browser. Very Layout. Wow.";
|
||||
|
||||
|
@ -84,10 +108,10 @@ fn create_embedder_proxy() -> EmbedderProxy {
|
|||
}
|
||||
|
||||
fn new_fetch_context(dc: Option<Sender<DevtoolsControlMsg>>, fc: Option<EmbedderProxy>) -> FetchContext {
|
||||
let ssl_client = create_ssl_client(&resources::read_string(Resource::SSLCertificates));
|
||||
let ssl_connector = create_ssl_connector_builder(&resources::read_string(Resource::SSLCertificates));
|
||||
let sender = fc.unwrap_or_else(|| create_embedder_proxy());
|
||||
FetchContext {
|
||||
state: Arc::new(HttpState::new(ssl_client)),
|
||||
state: Arc::new(HttpState::new(ssl_connector)),
|
||||
user_agent: DEFAULT_USER_AGENT.into(),
|
||||
devtools_chan: dc,
|
||||
filemanager: FileManager::new(sender),
|
||||
|
@ -131,10 +155,78 @@ fn fetch_with_cors_cache(request: &mut Request, cache: &mut CorsCache) -> Respon
|
|||
receiver.recv().unwrap()
|
||||
}
|
||||
|
||||
fn make_server<H: Handler + 'static>(handler: H) -> (Listening, ServoUrl) {
|
||||
// this is a Listening server because of handle_threads()
|
||||
let server = Server::http("0.0.0.0:0").unwrap().handle_threads(handler, 2).unwrap();
|
||||
let url_string = format!("http://localhost:{}", server.socket.port());
|
||||
pub(crate) struct Server {
|
||||
pub close_channel: futures::sync::oneshot::Sender<()>,
|
||||
}
|
||||
|
||||
impl Server {
|
||||
fn close(self) {
|
||||
self.close_channel.send(()).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
fn make_server<H>(handler: H) -> (Server, ServoUrl)
|
||||
where
|
||||
H: Fn(HyperRequest<Body>, &mut HyperResponse<Body>) + Send + Sync + 'static,
|
||||
{
|
||||
let handler = Arc::new(handler);
|
||||
let listener = StdTcpListener::bind("0.0.0.0:0").unwrap();
|
||||
let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
|
||||
let url = ServoUrl::parse(&url_string).unwrap();
|
||||
let (tx, rx) = futures::sync::oneshot::channel::<()>();
|
||||
let server = HyperServer::from_tcp(listener).unwrap().serve(
|
||||
move || {
|
||||
let handler = handler.clone();
|
||||
service_fn_ok(move |req: HyperRequest<Body>| {
|
||||
let mut response = HyperResponse::new(Vec::<u8>::new().into());
|
||||
handler(req, &mut response);
|
||||
response
|
||||
})
|
||||
}
|
||||
)
|
||||
.with_graceful_shutdown(rx)
|
||||
.map_err(|_|());
|
||||
|
||||
HANDLE.lock().unwrap().spawn(server);
|
||||
let server = Server { close_channel: tx };
|
||||
(server, url)
|
||||
}
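For comparison with the old `Listening`-based servers, a hypothetical test using the new helper looks like this (illustrative only; the real tests above follow the same shape):

#[test]
fn example_make_server_usage() {
    static MESSAGE: &'static [u8] = b"hello";
    let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
        *response.body_mut() = MESSAGE.to_vec().into();
    };
    let (server, url) = make_server(handler);
    // ... drive a fetch against `url` with one of the fetch helpers above ...
    server.close();
}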
|
||||
|
||||
fn make_ssl_server<H>(handler: H, cert_path: PathBuf, key_path: PathBuf) -> (Server, ServoUrl)
|
||||
where
|
||||
H: Fn(HyperRequest<Body>, &mut HyperResponse<Body>) + Send + Sync + 'static,
|
||||
{
|
||||
let handler = Arc::new(handler);
|
||||
let listener = StdTcpListener::bind("[::0]:0").unwrap();
|
||||
let listener = TcpListener::from_std(listener, &HANDLE.lock().unwrap().reactor()).unwrap();
|
||||
let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
|
||||
let url = ServoUrl::parse(&url_string).unwrap();
|
||||
|
||||
let server = listener.incoming()
|
||||
.map_err(|_| ())
|
||||
.for_each(move |sock| {
|
||||
let mut ssl_builder = SslAcceptor::mozilla_modern(SslMethod::tls()).unwrap();
|
||||
ssl_builder.set_certificate_file(&cert_path, SslFiletype::PEM).unwrap();
|
||||
ssl_builder.set_private_key_file(&key_path, SslFiletype::PEM).unwrap();
|
||||
|
||||
let handler = handler.clone();
|
||||
ssl_builder.build().accept_async(sock).map_err(|_| ()).and_then(move |ssl| {
|
||||
Http::new().serve_connection(ssl,
|
||||
service_fn_ok(move |req: HyperRequest<Body>| {
|
||||
let mut response = HyperResponse::new(Vec::<u8>::new().into());
|
||||
handler(req, &mut response);
|
||||
response
|
||||
})
|
||||
)
|
||||
.map_err(|_|())
|
||||
})
|
||||
});
|
||||
|
||||
let (tx, rx) = futures::sync::oneshot::channel::<()>();
|
||||
let server = server.select(rx.map_err(|_| ())).map(|_| ()).map_err(|_| ());
|
||||
|
||||
HANDLE.lock().unwrap().spawn(server);
|
||||
|
||||
let server = Server { close_channel: tx };
|
||||
(server, url)
|
||||
}
|
||||
|
|
|
@ -2,8 +2,8 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use mime::{self, Mime};
|
||||
use net::mime_classifier::{ApacheBugFlag, MimeClassifier, Mp4Matcher, NoSniffFlag};
|
||||
use net::mime_classifier::as_string_option;
|
||||
use net_traits::LoadContext;
|
||||
use std::env;
|
||||
use std::fs::File;
|
||||
|
@ -58,11 +58,10 @@ fn test_validate_classifier() {
|
|||
|
||||
#[cfg(test)]
|
||||
fn test_sniff_with_flags(filename_orig: &path::Path,
|
||||
type_string: &str,
|
||||
subtype_string: &str,
|
||||
supplied_type: Option<(&'static str, &'static str)>,
|
||||
no_sniff_flag: NoSniffFlag,
|
||||
apache_bug_flag: ApacheBugFlag) {
|
||||
expected_mime: Mime,
|
||||
supplied_type: Option<Mime>,
|
||||
no_sniff_flag: NoSniffFlag,
|
||||
apache_bug_flag: ApacheBugFlag) {
|
||||
let current_working_directory = env::current_dir().unwrap();
|
||||
println!("The current directory is {}", current_working_directory.display());
|
||||
|
||||
|
@ -75,17 +74,15 @@ fn test_sniff_with_flags(filename_orig: &path::Path,
|
|||
|
||||
match read_result {
|
||||
Ok(data) => {
|
||||
let supplied_type = supplied_type.map(|(x, y)| (x.parse().unwrap(), y));
|
||||
let (parsed_type, parsed_subtp) = classifier.classify(LoadContext::Browsing,
|
||||
no_sniff_flag,
|
||||
apache_bug_flag,
|
||||
&as_string_option(supplied_type),
|
||||
&data);
|
||||
if (&parsed_type[..] != type_string) ||
|
||||
(&parsed_subtp[..] != subtype_string) {
|
||||
panic!("File {:?} parsed incorrectly should be {}/{}, parsed as {}/{}",
|
||||
filename, type_string, subtype_string,
|
||||
parsed_type, parsed_subtp);
|
||||
let parsed_mime = classifier.classify(LoadContext::Browsing,
|
||||
no_sniff_flag,
|
||||
apache_bug_flag,
|
||||
&supplied_type,
|
||||
&data);
|
||||
if (parsed_mime.type_() != expected_mime.type_()) ||
|
||||
(parsed_mime.subtype() != expected_mime.subtype()) {
|
||||
panic!("File {:?} parsed incorrectly should be {:?}, parsed as {:?}",
|
||||
filename, expected_mime, parsed_mime);
|
||||
}
|
||||
}
|
||||
Err(e) => panic!("Couldn't read from file {:?} with error {}",
|
||||
|
@ -94,407 +91,407 @@ fn test_sniff_with_flags(filename_orig: &path::Path,
|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn test_sniff_full(filename_orig: &path::Path, type_string: &str, subtype_string: &str,
|
||||
supplied_type: Option<(&'static str, &'static str)>) {
|
||||
fn test_sniff_full(filename_orig: &path::Path, expected_mime: Mime,
|
||||
supplied_type: Option<Mime>) {
|
||||
test_sniff_with_flags(filename_orig,
|
||||
type_string,
|
||||
subtype_string,
|
||||
expected_mime,
|
||||
supplied_type,
|
||||
NoSniffFlag::Off,
|
||||
ApacheBugFlag::Off)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn test_sniff_classification(file: &str, type_string: &str, subtype_string: &str,
|
||||
supplied_type: Option<(&'static str, &'static str)>) {
|
||||
fn test_sniff_classification(file: &str, expected_mime: Mime, supplied_type: Option<Mime>) {
|
||||
let mut x = PathBuf::from("./");
|
||||
x.push(type_string);
|
||||
x.push(subtype_string);
|
||||
x.push(expected_mime.type_().as_str());
|
||||
x.push(expected_mime.subtype().as_str());
|
||||
x.push(file);
|
||||
test_sniff_full(&x, type_string, subtype_string, supplied_type);
|
||||
test_sniff_full(&x, expected_mime, supplied_type);
|
||||
}
|
||||
#[cfg(test)]
|
||||
fn test_sniff_classification_sup(file: &str, type_string: &'static str, subtype_string: &str) {
|
||||
test_sniff_classification(file, type_string, subtype_string, None);
|
||||
let class_type = Some((type_string, ""));
|
||||
test_sniff_classification(file, type_string, subtype_string, class_type);
|
||||
fn test_sniff_classification_sup(file: &str, expected_mime: Mime) {
|
||||
test_sniff_classification(file, expected_mime.clone(), None);
|
||||
let no_sub = format!("{}/", expected_mime.type_()).parse().unwrap();
|
||||
test_sniff_classification(file, expected_mime, Some(no_sub));
|
||||
}
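Since the classifier now works with `Mime` values instead of `(type, subtype)` string pairs, the helpers above either reuse mime 0.3 constants or parse from the string form. A small sketch of that API (the function is illustrative):

use mime::Mime;

// Illustrative: well-known types have constants; anything else parses from its string form.
fn mime_examples() {
    let ico: Mime = "image/x-icon".parse().unwrap();
    assert_eq!(ico.type_(), mime::IMAGE);
    assert_eq!(ico.subtype().as_str(), "x-icon");
    assert_eq!(mime::TEXT_HTML.type_(), mime::TEXT);
}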
|
||||
|
||||
#[test]
|
||||
fn test_sniff_x_icon() {
|
||||
test_sniff_classification_sup("test.ico", "image", "x-icon");
|
||||
test_sniff_classification_sup("test.ico", "image/x-icon".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_x_icon_cursor() {
|
||||
test_sniff_classification_sup("test_cursor.ico", "image", "x-icon");
|
||||
test_sniff_classification_sup("test_cursor.ico", "image/x-icon".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_bmp() {
|
||||
test_sniff_classification_sup("test.bmp", "image", "bmp");
|
||||
test_sniff_classification_sup("test.bmp", mime::IMAGE_BMP);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_gif87a() {
|
||||
test_sniff_classification_sup("test87a", "image", "gif");
|
||||
test_sniff_classification_sup("test87a", mime::IMAGE_GIF);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_gif89a() {
|
||||
test_sniff_classification_sup("test89a.gif", "image", "gif");
|
||||
test_sniff_classification_sup("test89a.gif", mime::IMAGE_GIF);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_webp() {
|
||||
test_sniff_classification_sup("test.webp", "image", "webp");
|
||||
test_sniff_classification_sup("test.webp", "image/webp".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_png() {
|
||||
test_sniff_classification_sup("test.png", "image", "png");
|
||||
test_sniff_classification_sup("test.png", mime::IMAGE_PNG);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_jpg() {
|
||||
test_sniff_classification_sup("test.jpg", "image", "jpeg");
|
||||
test_sniff_classification_sup("test.jpg", mime::IMAGE_JPEG);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_webm() {
|
||||
test_sniff_classification_sup("test.webm", "video", "webm");
|
||||
test_sniff_classification_sup("test.webm", "video/webm".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_mp4() {
|
||||
test_sniff_classification_sup("test.mp4", "video", "mp4");
|
||||
test_sniff_classification_sup("test.mp4", "video/mp4".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_avi() {
|
||||
test_sniff_classification_sup("test.avi", "video", "avi");
|
||||
test_sniff_classification_sup("test.avi", "video/avi".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_basic() {
|
||||
test_sniff_classification_sup("test.au", "audio", "basic");
|
||||
test_sniff_classification_sup("test.au", "audio/basic".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_aiff() {
|
||||
test_sniff_classification_sup("test.aif", "audio", "aiff");
|
||||
test_sniff_classification_sup("test.aif", "audio/aiff".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_mpeg() {
|
||||
test_sniff_classification_sup("test.mp3", "audio", "mpeg");
|
||||
test_sniff_classification_sup("test.mp3", "audio/mpeg".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_midi() {
|
||||
test_sniff_classification_sup("test.mid", "audio", "midi");
|
||||
test_sniff_classification_sup("test.mid", "audio/midi".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_wave() {
|
||||
test_sniff_classification_sup("test.wav", "audio", "wave");
|
||||
test_sniff_classification_sup("test.wav", "audio/wave".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_ogg() {
|
||||
test_sniff_classification("small.ogg", "application", "ogg", None);
|
||||
test_sniff_classification("small.ogg", "application", "ogg", Some(("audio", "")));
|
||||
test_sniff_classification("small.ogg", "application/ogg".parse().unwrap(), None);
|
||||
test_sniff_classification("small.ogg", "application/ogg".parse().unwrap(), Some("audio/".parse().unwrap()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_sniff_vsn_ms_fontobject() {
|
||||
test_sniff_classification_sup("vnd.ms-fontobject", "application", "vnd.ms-fontobject");
|
||||
test_sniff_classification_sup("vnd.ms-fontobject", "application/vnd.ms-fontobject".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_sniff_true_type() {
|
||||
test_sniff_full(&PathBuf::from("unknown/true_type.ttf"), "(TrueType)", "", None);
|
||||
test_sniff_full(&PathBuf::from("unknown/true_type.ttf"), "(TrueType)/".parse().unwrap(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_sniff_open_type() {
|
||||
test_sniff_full(&PathBuf::from("unknown/open_type"), "(OpenType)", "", None);
|
||||
test_sniff_full(&PathBuf::from("unknown/open_type"), "(OpenType)/".parse().unwrap(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_sniff_true_type_collection() {
|
||||
test_sniff_full(&PathBuf::from("unknown/true_type_collection.ttc"), "(TrueType Collection)", "", None);
|
||||
test_sniff_full(&PathBuf::from("unknown/true_type_collection.ttc"), "(TrueType Collection)/".parse().unwrap(),
|
||||
None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_sniff_woff() {
|
||||
test_sniff_classification_sup("test.wof", "application", "font-woff");
|
||||
test_sniff_classification_sup("test.wof", "application/font-woff".parse().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_gzip() {
|
||||
test_sniff_classification("test.gz", "application", "x-gzip", None);
|
||||
test_sniff_classification("test.gz", "application/x-gzip".parse().unwrap(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_zip() {
|
||||
test_sniff_classification("test.zip", "application", "zip", None);
|
||||
test_sniff_classification("test.zip", "application/zip".parse().unwrap(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_rar() {
|
||||
test_sniff_classification("test.rar", "application", "x-rar-compressed", None);
|
||||
test_sniff_classification("test.rar", "application/x-rar-compressed".parse().unwrap(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_doctype_20() {
|
||||
test_sniff_classification("text_html_doctype_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_doctype_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_doctype_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_doctype_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_doctype_3e() {
|
||||
test_sniff_classification("text_html_doctype_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_doctype_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_doctype_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_doctype_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_page_20() {
|
||||
test_sniff_classification("text_html_page_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_page_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_page_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_page_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_page_3e() {
|
||||
test_sniff_classification("text_html_page_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_page_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_page_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_page_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_head_20() {
|
||||
test_sniff_classification("text_html_head_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_head_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_head_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_head_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_head_3e() {
|
||||
test_sniff_classification("text_html_head_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_head_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_head_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_head_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_script_20() {
|
||||
test_sniff_classification("text_html_script_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_script_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_script_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_script_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_script_3e() {
|
||||
test_sniff_classification("text_html_script_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_script_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_script_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_script_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_iframe_20() {
|
||||
test_sniff_classification("text_html_iframe_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_iframe_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_iframe_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_iframe_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_iframe_3e() {
|
||||
test_sniff_classification("text_html_iframe_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_iframe_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_iframe_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_iframe_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_h1_20() {
|
||||
test_sniff_classification("text_html_h1_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_h1_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_h1_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_h1_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_h1_3e() {
|
||||
test_sniff_classification("text_html_h1_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_h1_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_h1_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_h1_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_div_20() {
|
||||
test_sniff_classification("text_html_div_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_div_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_div_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_div_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_div_3e() {
|
||||
test_sniff_classification("text_html_div_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_div_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_div_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_div_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_font_20() {
|
||||
test_sniff_classification("text_html_font_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_font_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_font_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_font_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_font_3e() {
|
||||
test_sniff_classification("text_html_font_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_font_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_font_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_font_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_table_20() {
|
||||
test_sniff_classification("text_html_table_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_table_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_table_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_table_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_table_3e() {
|
||||
test_sniff_classification("text_html_table_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_table_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_table_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_table_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_a_20() {
|
||||
test_sniff_classification("text_html_a_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_a_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_a_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_a_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_a_3e() {
|
||||
test_sniff_classification("text_html_a_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_a_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_a_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_a_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_style_20() {
|
||||
test_sniff_classification("text_html_style_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_style_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_style_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_style_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_style_3e() {
|
||||
test_sniff_classification("text_html_style_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_style_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_style_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_style_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_title_20() {
|
||||
test_sniff_classification("text_html_title_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_title_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_title_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_title_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_title_3e() {
|
||||
test_sniff_classification("text_html_title_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_title_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_title_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_title_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_b_20() {
|
||||
test_sniff_classification("text_html_b_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_b_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_b_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_b_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_b_3e() {
|
||||
test_sniff_classification("text_html_b_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_b_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_b_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_b_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_body_20() {
|
||||
test_sniff_classification("text_html_body_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_body_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_body_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_body_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_body_3e() {
|
||||
test_sniff_classification("text_html_body_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_body_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_body_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_body_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_br_20() {
|
||||
test_sniff_classification("text_html_br_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_br_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_br_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_br_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_br_3e() {
|
||||
test_sniff_classification("text_html_br_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_br_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_br_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_br_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_p_20() {
|
||||
test_sniff_classification("text_html_p_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_p_20_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_p_20.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_p_20_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
#[test]
|
||||
fn test_sniff_text_html_p_3e() {
|
||||
test_sniff_classification("text_html_p_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_p_3e_u.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_p_3e.html", mime::TEXT_HTML, None);
|
||||
test_sniff_classification("text_html_p_3e_u.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_comment_20() {
|
||||
test_sniff_classification("text_html_comment_20.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_comment_20.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_text_html_comment_3e() {
|
||||
test_sniff_classification("text_html_comment_3e.html", "text", "html", None);
|
||||
test_sniff_classification("text_html_comment_3e.html", mime::TEXT_HTML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_xml() {
|
||||
test_sniff_classification("test.xml", "text", "xml", None);
|
||||
test_sniff_classification("test.xml", mime::TEXT_XML, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_pdf() {
|
||||
test_sniff_classification("test.pdf", "application", "pdf", None);
|
||||
test_sniff_classification("test.pdf", mime::APPLICATION_PDF, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_postscript() {
|
||||
test_sniff_classification("test.ps", "application", "postscript", None);
|
||||
test_sniff_classification("test.ps", "application/postscript".parse().unwrap(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_utf_16be_bom() {
|
||||
test_sniff_classification("utf16bebom.txt", "text", "plain", None);
|
||||
test_sniff_classification("utf16bebom.txt", mime::TEXT_PLAIN, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_utf_16le_bom() {
|
||||
test_sniff_classification("utf16lebom.txt", "text", "plain", None);
|
||||
test_sniff_classification("utf16lebom.txt", mime::TEXT_PLAIN, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sniff_utf_8_bom() {
|
||||
test_sniff_classification("utf8bom.txt", "text", "plain", None);
|
||||
test_sniff_classification("utf8bom.txt", mime::TEXT_PLAIN, None);
|
||||
}
|
||||
|
||||
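The classification tests above keep both forms of each call: the pre-upgrade helper took a ("type", "subtype") string pair, while with mime 0.3 it takes a single Mime value, using the predefined constants (mime::TEXT_HTML, mime::TEXT_PLAIN, and so on) where they exist and str::parse otherwise. A minimal standalone sketch of that pattern, with a hypothetical check_classification helper that is not part of this patch:

use mime::Mime;

// Hypothetical stand-in for the updated test helper: the expected
// classification is a single `Mime`, not a ("type", "subtype") pair.
fn check_classification(sniffed: &Mime, expected: &Mime) {
    assert_eq!(sniffed, expected);
}

fn main() {
    // Common types have constants in mime 0.3 ...
    check_classification(&mime::TEXT_HTML, &mime::TEXT_HTML);
    // ... and anything else is parsed from a string at the call site.
    let ps: Mime = "application/postscript".parse().unwrap();
    check_classification(&ps, &"application/postscript".parse().unwrap());
}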
#[test]
fn test_sniff_rss_feed() {
    // RSS feeds
    test_sniff_full(&PathBuf::from("text/xml/feed.rss"), "application", "rss+xml", Some(("text", "html")));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss.xml"), "application", "rss+xml", Some(("text", "html")));
    test_sniff_full(&PathBuf::from("text/xml/feed.rss"), "application/rss+xml".parse().unwrap(), Some(mime::TEXT_HTML));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss.xml"), "application/rss+xml".parse().unwrap(),
                    Some(mime::TEXT_HTML));
    // Not RSS feeds
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_1.xml"), "text", "html", Some(("text", "html")));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_2.xml"), "text", "html", Some(("text", "html")));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_3.xml"), "text", "html", Some(("text", "html")));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_4.xml"), "text", "html", Some(("text", "html")));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_1.xml"), mime::TEXT_HTML, Some(mime::TEXT_HTML));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_2.xml"), mime::TEXT_HTML, Some(mime::TEXT_HTML));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_3.xml"), mime::TEXT_HTML, Some(mime::TEXT_HTML));
    test_sniff_full(&PathBuf::from("text/xml/rdf_rss_ko_4.xml"), mime::TEXT_HTML, Some(mime::TEXT_HTML));
}

#[test]
fn test_sniff_atom_feed() {
    test_sniff_full(&PathBuf::from("text/xml/feed.atom"), "application", "atom+xml", Some(("text", "html")));
    test_sniff_full(&PathBuf::from("text/xml/feed.atom"), "application/atom+xml".parse().unwrap(),
                    Some(mime::TEXT_HTML));
}

#[test]
fn test_sniff_binary_file() {
    test_sniff_full(&PathBuf::from("unknown/binary_file"), "application", "octet-stream", None);
    test_sniff_full(&PathBuf::from("unknown/binary_file"), mime::APPLICATION_OCTET_STREAM, None);
}

#[test]
fn test_sniff_atom_feed_with_no_sniff_flag_on() {
    test_sniff_with_flags(&PathBuf::from("text/xml/feed.atom"),
                          "text",
                          "html",
                          Some(("text", "html")),
                          mime::TEXT_HTML,
                          Some(mime::TEXT_HTML),
                          NoSniffFlag::On,
                          ApacheBugFlag::Off);
}

@ -502,9 +499,8 @@ fn test_sniff_atom_feed_with_no_sniff_flag_on() {
#[test]
fn test_sniff_with_no_sniff_flag_on_and_apache_flag_on() {
    test_sniff_with_flags(&PathBuf::from("text/xml/feed.atom"),
                          "text",
                          "html",
                          Some(("text", "html")),
                          mime::TEXT_HTML,
                          Some(mime::TEXT_HTML),
                          NoSniffFlag::On,
                          ApacheBugFlag::On);
}

@ -512,9 +508,8 @@ fn test_sniff_with_no_sniff_flag_on_and_apache_flag_on() {
#[test]
fn test_sniff_utf_8_bom_with_apache_flag_on() {
    test_sniff_with_flags(&PathBuf::from("text/plain/utf8bom.txt"),
                          "text",
                          "plain",
                          Some(("dummy", "text")),
                          mime::TEXT_PLAIN,
                          Some("dummy/text".parse().unwrap()),
                          NoSniffFlag::Off,
                          ApacheBugFlag::On);
}

@ -522,9 +517,8 @@ fn test_sniff_utf_8_bom_with_apache_flag_on() {
#[test]
fn test_sniff_utf_16be_bom_with_apache_flag_on() {
    test_sniff_with_flags(&PathBuf::from("text/plain/utf16bebom.txt"),
                          "text",
                          "plain",
                          Some(("dummy", "text")),
                          mime::TEXT_PLAIN,
                          Some("dummy/text".parse().unwrap()),
                          NoSniffFlag::Off,
                          ApacheBugFlag::On);
}

@ -532,9 +526,8 @@ fn test_sniff_utf_16be_bom_with_apache_flag_on() {
#[test]
fn test_sniff_utf_16le_bom_with_apache_flag_on() {
    test_sniff_with_flags(&PathBuf::from("text/plain/utf16lebom.txt"),
                          "text",
                          "plain",
                          Some(("dummy", "text")),
                          mime::TEXT_PLAIN,
                          Some("dummy/text".parse().unwrap()),
                          NoSniffFlag::Off,
                          ApacheBugFlag::On);
}

@ -542,9 +535,8 @@ fn test_sniff_utf_16le_bom_with_apache_flag_on() {
#[test]
fn test_sniff_octet_stream_apache_flag_on() {
    test_sniff_with_flags(&PathBuf::from("unknown/binary_file"),
                          "application",
                          "octet-stream",
                          Some(("dummy", "binary")),
                          mime::APPLICATION_OCTET_STREAM,
                          Some("dummy/binary".parse().unwrap()),
                          NoSniffFlag::Off,
                          ApacheBugFlag::On);
}

@ -552,9 +544,8 @@ fn test_sniff_octet_stream_apache_flag_on() {
#[test]
fn test_sniff_mp4_video_apache_flag_on() {
    test_sniff_with_flags(&PathBuf::from("video/mp4/test.mp4"),
                          "application",
                          "octet-stream",
                          Some(("video", "mp4")),
                          mime::APPLICATION_OCTET_STREAM,
                          Some("video/mp4".parse().unwrap()),
                          NoSniffFlag::Off,
                          ApacheBugFlag::On);
}
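Where a caller still needs the individual components that the old string pair exposed (the flag tests above pass values such as "dummy/text".parse().unwrap() for types with no constant), mime 0.3 offers type_() and subtype() accessors that compare against the name constants. A small sketch, independent of the test harness:

use mime::Mime;

// In mime 0.3 the top-level type and subtype are inspected through
// `type_()`/`subtype()`, which compare against name constants such as
// `mime::TEXT` and `mime::HTML`; parameters like charset are ignored here.
fn is_html(m: &Mime) -> bool {
    m.type_() == mime::TEXT && m.subtype() == mime::HTML
}

fn main() {
    let with_charset: Mime = "text/html; charset=utf-8".parse().unwrap();
    assert!(is_html(&with_charset));
    assert!(is_html(&mime::TEXT_HTML));
    assert!(!is_html(&mime::APPLICATION_OCTET_STREAM));
}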
@ -2,13 +2,15 @@
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use connector::create_ssl_connector;
use connector::create_ssl_connector_builder;
use cookie::Cookie;
use embedder_traits::resources::{self, Resource};
use fetch::methods::should_be_blocked_due_to_bad_port;
use headers_ext::Host;
use hosts::replace_host;
use http::header::{self, HeaderMap, HeaderName, HeaderValue};
use http::uri::Authority;
use http_loader::HttpState;
use hyper::header::{Headers, Host, SetCookie};
use ipc_channel::ipc::{IpcReceiver, IpcSender};
use net_traits::{CookieSource, MessageData};
use net_traits::{WebSocketDomAction, WebSocketNetworkEvent};
@ -69,17 +71,19 @@ impl<'a> Handler for Client<'a> {
    }

    fn on_open(&mut self, shake: Handshake) -> WebSocketResult<()> {
        let mut headers = Headers::new();
        let mut headers = HeaderMap::new();
        for &(ref name, ref value) in shake.response.headers().iter() {
            headers.set_raw(name.clone(), vec![value.clone()]);
            let name = HeaderName::from_bytes(name.as_bytes()).unwrap();
            let value = HeaderValue::from_bytes(&value).unwrap();

            headers.insert(name, value);
        }

        if let Some(cookies) = headers.get::<SetCookie>() {
            let mut jar = self.http_state.cookie_jar.write().unwrap();
            for cookie in &**cookies {
                if let Some(cookie) =
                    Cookie::from_cookie_string(cookie.clone(), self.resource_url, CookieSource::HTTP)
                {
        let mut jar = self.http_state.cookie_jar.write().unwrap();
        // TODO(eijebong): Replace this once typed headers settle on a cookie impl
        for cookie in headers.get_all(header::SET_COOKIE) {
            if let Ok(s) = cookie.to_str() {
                if let Some(cookie) = Cookie::from_cookie_string(s.into(), self.resource_url, CookieSource::HTTP) {
                    jar.push(cookie, self.resource_url, CookieSource::HTTP);
                }
            }
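The on_open change above swaps hyper 0.10's Headers for http's HeaderMap: raw (name, value) pairs from the handshake are converted through HeaderName::from_bytes / HeaderValue::from_bytes, and Set-Cookie values are read back with get_all, since a HeaderMap can hold several values for one name. A self-contained sketch of the same two operations (the sample cookie strings are placeholders):

use http::header::{self, HeaderMap, HeaderName, HeaderValue};

fn main() {
    // Raw header pairs as they might arrive from a handshake response.
    let raw: Vec<(String, Vec<u8>)> = vec![
        ("set-cookie".to_string(), b"a=1; Path=/".to_vec()),
        ("set-cookie".to_string(), b"b=2; Secure".to_vec()),
    ];

    let mut headers = HeaderMap::new();
    for (name, value) in &raw {
        // Both conversions are fallible on malformed input; the loader above
        // unwraps, a stricter caller would propagate the error instead.
        let name = HeaderName::from_bytes(name.as_bytes()).unwrap();
        let value = HeaderValue::from_bytes(value).unwrap();
        // `append` keeps earlier values for the same name; `insert` (as used
        // in the patch) replaces them.
        headers.append(name, value);
    }

    // `get_all` yields every Set-Cookie value, one HeaderValue per header line.
    for cookie in headers.get_all(header::SET_COOKIE) {
        if let Ok(s) = cookie.to_str() {
            println!("cookie header: {}", s);
        }
    }
}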
@ -144,7 +148,7 @@ impl<'a> Handler for Client<'a> {
            WebSocketErrorKind::Protocol,
            format!("Unable to parse domain from {}. Needed for SSL.", url),
        ))?;
        let connector = create_ssl_connector(&certs);
        let connector = create_ssl_connector_builder(&certs).build();
        connector.connect(domain, stream).map_err(WebSocketError::from)
    }
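create_ssl_connector_builder is Servo's own wrapper; the underlying change is openssl 0.10's split between a builder and the finished connector. Roughly, and only as a sketch of that API shape (the certificate handling inside Servo's helper may differ):

use openssl::ssl::{SslConnector, SslMethod};
use openssl::x509::X509;

// Sketch of the openssl 0.10 pattern: configure an SslConnectorBuilder,
// then `build()` it into the reusable SslConnector used for `connect()`.
fn connector_with_extra_cert(cert_pem: &[u8]) -> Result<SslConnector, Box<dyn std::error::Error>> {
    let mut builder = SslConnector::builder(SslMethod::tls())?;
    // Adding one extra trusted certificate, as an illustration only.
    let cert = X509::from_pem(cert_pem)?;
    builder.cert_store_mut().add_cert(cert)?;
    Ok(builder.build())
}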
@ -180,10 +184,11 @@ pub fn init(
    let mut net_url = req_init.url.clone().into_url();
    net_url.set_host(Some(&host)).unwrap();

    let host = Host {
        hostname: req_init.url.host_str().unwrap().to_owned(),
        port: req_init.url.port_or_known_default(),
    };
    let host = Host::from(
        format!("{}{}", req_init.url.host_str().unwrap(),
            req_init.url.port_or_known_default().map(|v| format!(":{}", v)).unwrap_or("".into())
        ).parse::<Authority>().unwrap()
    );

    let client = Client {
        origin: &req_init.origin.ascii_serialization(),
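The typed Host header from headers-ext is built from an http::uri::Authority rather than from separate hostname/port fields, which is why the new code formats "host[:port]" and parses it. The same construction in isolation, with placeholder host and port values:

use headers_ext::Host;
use http::uri::Authority;

fn main() {
    // Placeholder values standing in for the request URL's host and port.
    let host_str = "example.org";
    let port: Option<u16> = Some(8443);

    // Join into "host" or "host:port", then parse as an Authority.
    let authority: Authority = format!(
        "{}{}",
        host_str,
        port.map(|p| format!(":{}", p)).unwrap_or_default(),
    )
    .parse()
    .unwrap();

    // The typed header is constructed straight from the Authority.
    let _host = Host::from(authority);
}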