From 610ef40105e944f4e83e3b42042784b4d7d17331 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 2 Aug 2015 09:14:50 +1000 Subject: [PATCH 01/43] Refactors http_loader::load to be synchronous w/ an async wrapper This simplifies the arguments that are passed in and should make testing errors/responses easier once they're mocked servo/servo#6727 --- components/net/http_loader.rs | 144 ++++++++++++++++++++-------------- 1 file changed, 85 insertions(+), 59 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index e59ad1521bf..0e013f536a3 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -11,6 +11,9 @@ use net_traits::{ControlMsg, CookieSource, LoadData, Metadata, LoadConsumer, Inc use resource_task::{start_sending_opt, start_sending_sniffed_opt}; use file_loader; +use ipc_channel::ipc::{self, IpcSender}; +use log; +use std::collections::HashSet; use flate2::read::{DeflateDecoder, GzDecoder}; use hyper::Error as HttpError; use hyper::client::Request; @@ -37,14 +40,15 @@ use util::task::spawn_named; use std::borrow::ToOwned; use std::boxed::FnBox; use uuid; +use std::fs::File; pub fn factory(resource_mgr_chan: IpcSender, devtools_chan: Option>, hsts_list: Arc>) -> Box) + Send> { - box move |load_data: LoadData, senders, classifier| { + box move |load_data, senders, classifier| { spawn_named(format!("http_loader for {}", load_data.url.serialize()), - move || load(load_data, senders, classifier, resource_mgr_chan, devtools_chan, hsts_list)) + move || load_for_consumer(load_data, senders, classifier, resource_mgr_chan, devtools_chan, hsts_list)) } } @@ -85,12 +89,54 @@ fn request_must_be_secured(hsts_list: &HSTSList, url: &Url) -> bool { } } -fn load(mut load_data: LoadData, +fn inner_url(url: &Url) -> Url { + let inner_url = url.non_relative_scheme_data().unwrap(); + Url::parse(inner_url).unwrap() +} + +fn load_for_consumer(load_data: LoadData, start_chan: LoadConsumer, classifier: Arc, resource_mgr_chan: IpcSender, devtools_chan: Option>, hsts_list: Arc>) { + match load(load_data, resource_mgr_chan, devtools_chan, hsts_list) { + Err(LoadError::UnsupportedScheme(url)) => { + let s = format!("{} request, but we don't support that scheme", &*url.scheme); + send_error(url, s, start_chan) + } + Err(LoadError::Client(url, e)) => { + send_error(url, e, start_chan) + } + Err(LoadError::MaxRedirects(url)) => { + send_error(url, "too many redirects".to_string(), start_chan) + } + Err(LoadError::Cors(url, msg)) | + Err(LoadError::InvalidRedirect(url, msg)) | + Err(LoadError::Decoding(url, msg)) | + Err(LoadError::InvalidFile(url, msg)) => { + send_error(url, msg, start_chan) + } + Ok((mut response_reader, metadata)) => { + send_data(&mut response_reader, start_chan, metadata, classifier); + } + } +} + +enum LoadError { + UnsupportedScheme(Url), + Client(Url, String), + Cors(Url, String), + InvalidRedirect(Url, String), + Decoding(Url, String), + InvalidFile(Url, String), + MaxRedirects(Url) +} + +fn load(mut load_data: LoadData, + resource_mgr_chan: IpcSender, + devtools_chan: Option>, + hsts_list: Arc>) -> Result<(Box, Metadata), LoadError> { // FIXME: At the time of writing this FIXME, servo didn't have any central // location for configuration. If you're reading this and such a // repository DOES exist, please update this constant to use it. @@ -109,17 +155,13 @@ fn load(mut load_data: LoadData, // the source rather than rendering the contents of the URL. 
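// e.g. (illustrative): for "view-source:http://example.com/" we fetch
// http://example.com/ itself, but the Content-Type is forced to text/plain
// further down, so the markup is shown as source rather than rendered.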
let viewing_source = url.scheme == "view-source"; if viewing_source { - let inner_url = load_data.url.non_relative_scheme_data().unwrap(); - doc_url = Url::parse(inner_url).unwrap(); - url = replace_hosts(&doc_url); - match &*url.scheme { - "http" | "https" => {} - _ => { - let s = format!("The {} scheme with view-source is not supported", url.scheme); - send_error(url, s, start_chan); - return; - } - }; + let inner_url = replace_hosts(&inner_url(&load_data.url)); + doc_url = inner_url.clone(); + if &*inner_url.scheme != "http" && &*inner_url.scheme != "https" { + return Err(LoadError::UnsupportedScheme(inner_url)); + } else { + url = inner_url; + } } // Loop to handle redirects. @@ -132,21 +174,16 @@ fn load(mut load_data: LoadData, } if iters > max_redirects { - send_error(url, "too many redirects".to_string(), start_chan); - return; + return Err(LoadError::MaxRedirects(url)); } - match &*url.scheme { - "http" | "https" => {} - _ => { - let s = format!("{} request, but we don't support that scheme", url.scheme); - send_error(url, s, start_chan); - return; - } + if &*url.scheme != "http" && &*url.scheme != "https" { + return Err(LoadError::UnsupportedScheme(url)); } info!("requesting {}", url.serialize()); + // TODO - Is no ssl still needed? let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ function: \"SSL3_GET_SERVER_CERTIFICATE\", \ reason: \"certificate verify failed\" }]))"; @@ -171,14 +208,15 @@ reason: \"certificate verify failed\" }]))"; ) => { let mut image = resources_dir_path(); image.push("badcert.html"); - let load_data = LoadData::new(Url::from_file_path(&*image).unwrap(), None); - file_loader::factory(load_data, start_chan, classifier); - return; + let file_url = Url::from_file_path(&*image).unwrap(); + + match File::open(image.clone()) { + Ok(f) => return Ok((Box::new(f), Metadata::default(file_url))), + Err(_) => return Err(LoadError::InvalidFile(file_url, image.to_str().unwrap().to_string())) + } }, Err(e) => { - println!("{:?}", e); - send_error(url, e.description().to_string(), start_chan); - return; + return Err(LoadError::Client(url, e.description().to_string())); } }; @@ -237,17 +275,17 @@ reason: \"certificate verify failed\" }]))"; let writer = match load_data.data { Some(ref data) if iters == 1 => { req.headers_mut().set(ContentLength(data.len() as u64)); + let mut writer = match req.start() { Ok(w) => w, Err(e) => { - send_error(url, e.description().to_string(), start_chan); - return; + return Err(LoadError::Client(url, e.description().to_string())); } }; + match writer.write_all(&*data) { Err(e) => { - send_error(url, e.description().to_string(), start_chan); - return; + return Err(LoadError::Client(url, e.description().to_string())); } _ => {} }; @@ -261,8 +299,7 @@ reason: \"certificate verify failed\" }]))"; match req.start() { Ok(w) => w, Err(e) => { - send_error(url, e.description().to_string(), start_chan); - return; + return Err(LoadError::Client(url, e.description().to_string())); } } } @@ -281,11 +318,10 @@ reason: \"certificate verify failed\" }]))"; net_event))).unwrap(); } - let mut response = match writer.send() { + let response = match writer.send() { Ok(r) => r, Err(e) => { - send_error(url, e.description().to_string(), start_chan); - return; + return Err(LoadError::Client(url, e.description().to_string())); } }; @@ -337,11 +373,7 @@ reason: \"certificate verify failed\" }]))"; match load_data.cors { Some(ref c) => { if c.preflight { - // The preflight lied - send_error(url, - "Preflight fetch inconsistent with 
main fetch".to_string(), - start_chan); - return; + return Err(LoadError::Cors(url, "Preflight fetch inconsistent with main fetch".to_string())); } else { // XXXManishearth There are some CORS-related steps here, // but they don't seem necessary until credentials are implemented @@ -352,8 +384,7 @@ reason: \"certificate verify failed\" }]))"; let new_doc_url = match UrlParser::new().base_url(&doc_url).parse(&new_url) { Ok(u) => u, Err(e) => { - send_error(doc_url, e.to_string(), start_chan); - return; + return Err(LoadError::InvalidRedirect(doc_url, e.to_string())); } }; info!("redirecting to {}", new_doc_url); @@ -368,9 +399,8 @@ reason: \"certificate verify failed\" }]))"; load_data.method = Method::Get; } - if redirected_to.contains(&doc_url) { - send_error(doc_url, "redirect loop".to_string(), start_chan); - return; + if redirected_to.contains(&url) { + return Err(LoadError::InvalidRedirect(doc_url, "redirect loop".to_string())); } redirected_to.insert(doc_url.clone()); @@ -384,7 +414,7 @@ reason: \"certificate verify failed\" }]))"; if viewing_source { adjusted_headers.set(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec![]))); } - let mut metadata: Metadata = Metadata::default(doc_url); + let mut metadata: Metadata = Metadata::default(doc_url.clone()); metadata.set_content_type(match adjusted_headers.get() { Some(&ContentType(ref mime)) => Some(mime), None => None @@ -422,26 +452,22 @@ reason: \"certificate verify failed\" }]))"; if encoding == "gzip" { let result = GzDecoder::new(response); match result { - Ok(mut response_decoding) => { - send_data(&mut response_decoding, start_chan, metadata, classifier); + Ok(response_decoding) => { + return Ok((Box::new(response_decoding), metadata)); } Err(err) => { - send_error(metadata.final_url, err.to_string(), start_chan); - return; + return Err(LoadError::Decoding(metadata.final_url, err.to_string())); } } } else if encoding == "deflate" { - let mut response_decoding = DeflateDecoder::new(response); - send_data(&mut response_decoding, start_chan, metadata, classifier); + let response_decoding = DeflateDecoder::new(response); + return Ok((Box::new(response_decoding), metadata)); } }, None => { - send_data(&mut response, start_chan, metadata, classifier); + return Ok((Box::new(response), metadata)); } } - - // We didn't get redirected. 
- break; } } From c8c36f44901a3c1fa0b8049c86f2d8161526cae0 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 2 Aug 2015 11:44:13 +1000 Subject: [PATCH 02/43] Injects the network connector as a dependency into http_loader::load servo/servo#6727 --- components/net/http_loader.rs | 37 ++++++++++++++++++++++------------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 0e013f536a3..c488fab23ea 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -21,7 +21,7 @@ use hyper::header::StrictTransportSecurity; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, Quality, QualityItem}; use hyper::method::Method; use hyper::mime::{Mime, TopLevel, SubLevel}; -use hyper::net::{HttpConnector, HttpsConnector, Openssl}; +use hyper::net::{HttpStream, HttpConnector, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; use hyper::status::{StatusCode, StatusClass}; use ipc_channel::ipc::{self, IpcSender}; use log; @@ -100,7 +100,21 @@ fn load_for_consumer(load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, hsts_list: Arc>) { - match load(load_data, resource_mgr_chan, devtools_chan, hsts_list) { + + let connector = { + // TODO: Is this still necessary? The type system is making it really hard to support both + // connectors. SSL is working, so it's not clear to me if the original rationale still + // stands + // if opts::get().nossl { + // &HttpConnector + let mut context = SslContext::new(SslMethod::Sslv23).unwrap(); + context.set_verify(SSL_VERIFY_PEER, None); + context.set_CA_file(&resources_dir_path().join("certs")).unwrap(); + + &HttpsConnector::new(Openssl { context: Arc::new(context) }) + }; + + match load(load_data, resource_mgr_chan, devtools_chan, hsts_list, connector) { Err(LoadError::UnsupportedScheme(url)) => { let s = format!("{} request, but we don't support that scheme", &*url.scheme); send_error(url, s, start_chan) @@ -133,10 +147,14 @@ enum LoadError { MaxRedirects(Url) } -fn load(mut load_data: LoadData, +fn load(mut load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, - hsts_list: Arc>) -> Result<(Box, Metadata), LoadError> { + hsts_list: Arc>, + connector: &C) -> Result<(Box, Metadata), LoadError> where + C: NetworkConnector, + S: Into> { + // FIXME: At the time of writing this FIXME, servo didn't have any central // location for configuration. If you're reading this and such a // repository DOES exist, please update this constant to use it. @@ -183,20 +201,11 @@ fn load(mut load_data: LoadData, info!("requesting {}", url.serialize()); - // TODO - Is no ssl still needed? 
let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ function: \"SSL3_GET_SERVER_CERTIFICATE\", \ reason: \"certificate verify failed\" }]))"; - let req = if opts::get().nossl { - Request::with_connector(load_data.method.clone(), url.clone(), &HttpConnector) - } else { - let mut context = SslContext::new(SslMethod::Sslv23).unwrap(); - context.set_verify(SSL_VERIFY_PEER, None); - context.set_CA_file(&resources_dir_path().join("certs")).unwrap(); - Request::with_connector(load_data.method.clone(), url.clone(), - &HttpsConnector::new(Openssl { context: Arc::new(context) })) - }; + let req = Request::with_connector(load_data.method.clone(), url.clone(), connector); let mut req = match req { Ok(req) => req, From 9ac250c62c8c8dd97b03c77ea00f5f819e5731d4 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 2 Aug 2015 13:53:39 +1000 Subject: [PATCH 03/43] Abstracts out initial client connection and SSL errors (which I don't think are working?) servo/servo#6727 --- components/net/http_loader.rs | 99 +++++++++++++++++++++-------------- 1 file changed, 60 insertions(+), 39 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index c488fab23ea..177d27382d1 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -9,6 +9,8 @@ use net_traits::ProgressMsg::{Payload, Done}; use net_traits::hosts::replace_hosts; use net_traits::{ControlMsg, CookieSource, LoadData, Metadata, LoadConsumer, IncludeSubdomains}; use resource_task::{start_sending_opt, start_sending_sniffed_opt}; +use hsts::{HSTSList, secure_url}; +use file_loader; use file_loader; use ipc_channel::ipc::{self, IpcSender}; @@ -21,7 +23,7 @@ use hyper::header::StrictTransportSecurity; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, Quality, QualityItem}; use hyper::method::Method; use hyper::mime::{Mime, TopLevel, SubLevel}; -use hyper::net::{HttpStream, HttpConnector, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; +use hyper::net::{Fresh, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; use hyper::status::{StatusCode, StatusClass}; use ipc_channel::ipc::{self, IpcSender}; use log; @@ -32,6 +34,8 @@ use std::io::{self, Read, Write}; use std::sync::Arc; use std::sync::Mutex; use std::sync::mpsc::{Sender, channel}; +use util::task::spawn_named; +use util::resource_files::resources_dir_path; use url::{Url, UrlParser}; use util::opts; use util::resource_files::resources_dir_path; @@ -104,7 +108,7 @@ fn load_for_consumer(load_data: LoadData, let connector = { // TODO: Is this still necessary? The type system is making it really hard to support both // connectors. SSL is working, so it's not clear to me if the original rationale still - // stands + // stands? 
// if opts::get().nossl { // &HttpConnector let mut context = SslContext::new(SslMethod::Sslv23).unwrap(); @@ -119,7 +123,7 @@ fn load_for_consumer(load_data: LoadData, let s = format!("{} request, but we don't support that scheme", &*url.scheme); send_error(url, s, start_chan) } - Err(LoadError::Client(url, e)) => { + Err(LoadError::Connection(url, e)) => { send_error(url, e, start_chan) } Err(LoadError::MaxRedirects(url)) => { @@ -127,23 +131,66 @@ fn load_for_consumer(load_data: LoadData, } Err(LoadError::Cors(url, msg)) | Err(LoadError::InvalidRedirect(url, msg)) | - Err(LoadError::Decoding(url, msg)) | - Err(LoadError::InvalidFile(url, msg)) => { + Err(LoadError::Decoding(url, msg)) => { send_error(url, msg, start_chan) } + Err(LoadError::Ssl(url, msg)) => { + info!("ssl validation error {}, '{}'", url.serialize(), msg); + + let mut image = resources_dir_path(); + image.push("badcert.html"); + let load_data = LoadData::new(Url::from_file_path(&*image).unwrap(), None); + + file_loader::factory(load_data, start_chan, classifier) + + } Ok((mut response_reader, metadata)) => { - send_data(&mut response_reader, start_chan, metadata, classifier); + send_data(&mut response_reader, start_chan, metadata, classifier) + } + } +} + +#[inline(always)] +fn connect(url: Url, + method: Method, + connector: &C) -> Result, LoadError> where + C: NetworkConnector, + S: Into> { + let connection = Request::with_connector(method, url.clone(), connector); + + let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ +function: \"SSL3_GET_SERVER_CERTIFICATE\", \ +reason: \"certificate verify failed\" }]))"; + + match connection { + Ok(req) => Ok(req), + + Err(HttpError::Io(ref io_error)) if ( + io_error.kind() == io::ErrorKind::Other && + io_error.description() == "Error in OpenSSL" && + // FIXME: This incredibly hacky. Make it more robust, and at least test it. + format!("{:?}", io_error.cause()) == ssl_err_string + ) => { + Err( + LoadError::Ssl( + url.clone(), + format!("ssl error {:?}: {:?} {:?}", io_error.kind(), io_error.description(), io_error.cause()) + ) + ) + }, + Err(e) => { + Err(LoadError::Connection(url, e.description().to_string())) } } } enum LoadError { UnsupportedScheme(Url), - Client(Url, String), + Connection(Url, String), Cors(Url, String), + Ssl(Url, String), InvalidRedirect(Url, String), Decoding(Url, String), - InvalidFile(Url, String), MaxRedirects(Url) } @@ -201,33 +248,7 @@ fn load(mut load_data: LoadData, info!("requesting {}", url.serialize()); - let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ -function: \"SSL3_GET_SERVER_CERTIFICATE\", \ -reason: \"certificate verify failed\" }]))"; - - let req = Request::with_connector(load_data.method.clone(), url.clone(), connector); - - let mut req = match req { - Ok(req) => req, - Err(HttpError::Io(ref io_error)) if ( - io_error.kind() == io::ErrorKind::Other && - io_error.description() == "Error in OpenSSL" && - // FIXME: This incredibly hacky. Make it more robust, and at least test it. 
- format!("{:?}", io_error.cause()) == ssl_err_string - ) => { - let mut image = resources_dir_path(); - image.push("badcert.html"); - let file_url = Url::from_file_path(&*image).unwrap(); - - match File::open(image.clone()) { - Ok(f) => return Ok((Box::new(f), Metadata::default(file_url))), - Err(_) => return Err(LoadError::InvalidFile(file_url, image.to_str().unwrap().to_string())) - } - }, - Err(e) => { - return Err(LoadError::Client(url, e.description().to_string())); - } - }; + let mut req = try!(connect(url.clone(), load_data.method.clone(), connector)); //Ensure that the host header is set from the original url let host = Host { @@ -288,13 +309,13 @@ reason: \"certificate verify failed\" }]))"; let mut writer = match req.start() { Ok(w) => w, Err(e) => { - return Err(LoadError::Client(url, e.description().to_string())); + return Err(LoadError::Connection(url, e.description().to_string())); } }; match writer.write_all(&*data) { Err(e) => { - return Err(LoadError::Client(url, e.description().to_string())); + return Err(LoadError::Connection(url, e.description().to_string())); } _ => {} }; @@ -308,7 +329,7 @@ reason: \"certificate verify failed\" }]))"; match req.start() { Ok(w) => w, Err(e) => { - return Err(LoadError::Client(url, e.description().to_string())); + return Err(LoadError::Connection(url, e.description().to_string())); } } } @@ -330,7 +351,7 @@ reason: \"certificate verify failed\" }]))"; let response = match writer.send() { Ok(r) => r, Err(e) => { - return Err(LoadError::Client(url, e.description().to_string())); + return Err(LoadError::Connection(url, e.description().to_string())); } }; From 67aa11323b0da458551a4c8b0ad71f894ec7e2ed Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 2 Aug 2015 14:35:27 +1000 Subject: [PATCH 04/43] Adds simple POC unit test for load servo/servo#6727 --- components/net/http_loader.rs | 28 ++++-- tests/unit/net/http_loader.rs | 179 ++++++++++++++++++++++++++++++++++ tests/unit/net/lib.rs | 2 + 3 files changed, 200 insertions(+), 9 deletions(-) create mode 100644 tests/unit/net/http_loader.rs diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 177d27382d1..95578ba2774 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -184,7 +184,7 @@ reason: \"certificate verify failed\" }]))"; } } -enum LoadError { +pub enum LoadError { UnsupportedScheme(Url), Connection(Url, String), Cors(Url, String), @@ -220,13 +220,8 @@ fn load(mut load_data: LoadData, // the source rather than rendering the contents of the URL. let viewing_source = url.scheme == "view-source"; if viewing_source { - let inner_url = replace_hosts(&inner_url(&load_data.url)); - doc_url = inner_url.clone(); - if &*inner_url.scheme != "http" && &*inner_url.scheme != "https" { - return Err(LoadError::UnsupportedScheme(inner_url)); - } else { - url = inner_url; - } + url = inner_url(&load_data.url); + doc_url = url.clone(); } // Loop to handle redirects. 
@@ -270,6 +265,8 @@ fn load(mut load_data: LoadData, req.headers_mut().set(host); + + // --- Set default accept header if !req.headers().has::() { let accept = Accept(vec![ qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])), @@ -280,6 +277,7 @@ fn load(mut load_data: LoadData, req.headers_mut().set(accept); } + // --- Fetch cookies let (tx, rx) = ipc::channel().unwrap(); resource_mgr_chan.send(ControlMsg::GetCookiesForUrl(doc_url.clone(), tx, @@ -290,9 +288,11 @@ fn load(mut load_data: LoadData, req.headers_mut().set_raw("Cookie".to_owned(), v); } + // --- Set default accept encoding if !req.headers().has::() { req.headers_mut().set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); } + if log_enabled!(log::LogLevel::Info) { info!("{}", load_data.method); for header in req.headers().iter() { @@ -301,6 +301,7 @@ fn load(mut load_data: LoadData, info!("{:?}", load_data.data); } + // --- Start sending the request // Avoid automatically sending request body if a redirect has occurred. let writer = match load_data.data { Some(ref data) if iters == 1 => { @@ -335,6 +336,7 @@ fn load(mut load_data: LoadData, } }; + // --- Tell devtools we've made a request // Send an HttpRequest message to devtools with a unique request_id // TODO: Do this only if load_data has some pipeline_id, and send the pipeline_id in the message let request_id = uuid::Uuid::new_v4().to_simple_string(); @@ -348,6 +350,7 @@ fn load(mut load_data: LoadData, net_event))).unwrap(); } + // --- Finish writing the request and read the response let response = match writer.send() { Ok(r) => r, Err(e) => { @@ -363,6 +366,7 @@ fn load(mut load_data: LoadData, } } + // --- Update the resource manager that we've gotten a cookie if let Some(cookies) = response.headers.get_raw("set-cookie") { for cookie in cookies { if let Ok(cookies) = String::from_utf8(cookie.clone()) { @@ -395,7 +399,7 @@ fn load(mut load_data: LoadData, } } - + // --- Loop if there's a redirect if response.status.class() == StatusClass::Redirection { match response.headers.get::() { Some(&Location(ref new_url)) => { @@ -441,10 +445,12 @@ fn load(mut load_data: LoadData, } let mut adjusted_headers = response.headers.clone(); + if viewing_source { adjusted_headers.set(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec![]))); } let mut metadata: Metadata = Metadata::default(doc_url.clone()); + metadata.set_content_type(match adjusted_headers.get() { Some(&ContentType(ref mime)) => Some(mime), None => None @@ -453,6 +459,8 @@ fn load(mut load_data: LoadData, metadata.status = Some(response.status_raw().clone()); let mut encoding_str: Option = None; + + //TODO: This is now in hyper, just need to implement //FIXME: Implement Content-Encoding Header https://github.com/hyperium/hyper/issues/391 if let Some(encodings) = response.headers.get_raw("content-encoding") { for encoding in encodings { @@ -465,6 +473,7 @@ fn load(mut load_data: LoadData, } } + // --- Tell devtools that we got a response // Send an HttpResponse message to devtools with the corresponding request_id // TODO: Send this message only if load_data has a pipeline_id that is not None if let Some(ref chan) = devtools_chan { @@ -477,6 +486,7 @@ fn load(mut load_data: LoadData, net_event_response))).unwrap(); } + // --- Stream the results depending on the encoding type. 
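// (Only gzip and deflate are recognised above, so this match either wraps the
// response in the corresponding flate2 decoder or, when no supported
// Content-Encoding was found, returns the raw response body as-is.)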
match encoding_str { Some(encoding) => { if encoding == "gzip" { diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs new file mode 100644 index 00000000000..9e27c47d73c --- /dev/null +++ b/tests/unit/net/http_loader.rs @@ -0,0 +1,179 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use net::http_loader::{load, LoadError}; +use hyper::net::{NetworkStream, NetworkConnector}; +use hyper; +use url::Url; +use std::io::{self, Read, Write, Cursor}; +use std::fmt; +use std::net::SocketAddr; +use std::sync::{Arc, Mutex}; +use ipc_channel::ipc; +use net_traits::LoadData; +use net::hsts::HSTSList; +use net::hsts::HSTSEntry; + +#[derive(Clone)] +pub struct MockStream { + pub read: Cursor>, + pub write: Vec, + #[cfg(feature = "timeouts")] + pub read_timeout: Cell>, + #[cfg(feature = "timeouts")] + pub write_timeout: Cell> +} + +impl fmt::Debug for MockStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "MockStream {{ read: {:?}, write: {:?} }}", self.read.get_ref(), self.write) + } +} + +impl PartialEq for MockStream { + fn eq(&self, other: &MockStream) -> bool { + self.read.get_ref() == other.read.get_ref() && self.write == other.write + } +} + +impl MockStream { + pub fn new() -> MockStream { + MockStream::with_input(b"") + } + + #[cfg(not(feature = "timeouts"))] + pub fn with_input(input: &[u8]) -> MockStream { + MockStream { + read: Cursor::new(input.to_vec()), + write: vec![] + } + } + + #[cfg(feature = "timeouts")] + pub fn with_input(input: &[u8]) -> MockStream { + MockStream { + read: Cursor::new(input.to_vec()), + write: vec![], + read_timeout: Cell::new(None), + write_timeout: Cell::new(None), + } + } +} + +impl Read for MockStream { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.read.read(buf) + } +} + +impl Write for MockStream { + fn write(&mut self, msg: &[u8]) -> io::Result { + Write::write(&mut self.write, msg) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +impl NetworkStream for MockStream { + fn peer_addr(&mut self) -> io::Result { + Ok("127.0.0.1:1337".parse().unwrap()) + } + + #[cfg(feature = "timeouts")] + fn set_read_timeout(&self, dur: Option) -> io::Result<()> { + self.read_timeout.set(dur); + Ok(()) + } + + #[cfg(feature = "timeouts")] + fn set_write_timeout(&self, dur: Option) -> io::Result<()> { + self.write_timeout.set(dur); + Ok(()) + } +} + +/// A wrapper around a `MockStream` that allows one to clone it and keep an independent copy to the +/// same underlying stream. 
+#[derive(Clone)] +pub struct CloneableMockStream { + pub inner: Arc>, +} + +impl Write for CloneableMockStream { + fn write(&mut self, msg: &[u8]) -> io::Result { + self.inner.lock().unwrap().write(msg) + } + + fn flush(&mut self) -> io::Result<()> { + self.inner.lock().unwrap().flush() + } +} + +impl Read for CloneableMockStream { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.inner.lock().unwrap().read(buf) + } +} + +impl NetworkStream for CloneableMockStream { + fn peer_addr(&mut self) -> io::Result { + self.inner.lock().unwrap().peer_addr() + } + + #[cfg(feature = "timeouts")] + fn set_read_timeout(&self, dur: Option) -> io::Result<()> { + self.inner.lock().unwrap().set_read_timeout(dur) + } + + #[cfg(feature = "timeouts")] + fn set_write_timeout(&self, dur: Option) -> io::Result<()> { + self.inner.lock().unwrap().set_write_timeout(dur) + } +} + +impl CloneableMockStream { + pub fn with_stream(stream: MockStream) -> CloneableMockStream { + CloneableMockStream { + inner: Arc::new(Mutex::new(stream)), + } + } +} + +pub struct MockConnector; + +impl NetworkConnector for MockConnector { + type Stream = MockStream; + + fn connect(&self, _host: &str, _port: u16, _scheme: &str) -> hyper::Result { + Ok(MockStream::new()) + } +} + +#[test] +fn test_load_errors_when_scheme_is_not_http_or_https() { + let url = Url::parse("ftp://not-supported").unwrap(); + let (cookies_chan, _) = ipc::channel().unwrap(); + let load_data = LoadData::new(url, None); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + match load(load_data, cookies_chan, None, hsts_list, &MockConnector) { + Err(LoadError::UnsupportedScheme(_)) => {} + _ => panic!("expected ftp scheme to be unsupported") + } +} + +#[test] +fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_https() { + let url = Url::parse("view-source:ftp://not-supported").unwrap(); + let (cookies_chan, _) = ipc::channel().unwrap(); + let load_data = LoadData::new(url, None); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + match load(load_data, cookies_chan, None, hsts_list, &MockConnector) { + Err(LoadError::UnsupportedScheme(_)) => {} + _ => panic!("expected ftp scheme to be unsupported") + } +} diff --git a/tests/unit/net/lib.rs b/tests/unit/net/lib.rs index 3f737fc1783..203a554a5eb 100644 --- a/tests/unit/net/lib.rs +++ b/tests/unit/net/lib.rs @@ -10,9 +10,11 @@ extern crate net_traits; extern crate url; extern crate util; extern crate time; +extern crate hyper; #[cfg(test)] mod cookie; #[cfg(test)] mod data_loader; #[cfg(test)] mod mime_classifier; #[cfg(test)] mod resource_task; #[cfg(test)] mod hsts; +#[cfg(test)] mod http_loader; From 6cba33a50b19b0f1a3e7a41a53ed1fcaf0493da8 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 7 Aug 2015 11:10:41 +1000 Subject: [PATCH 05/43] Abstracts the hyper connection to a servo HttpRequester trait servo/servo#6727 --- components/net/http_loader.rs | 118 ++++++++++++++----------- tests/unit/net/http_loader.rs | 156 +++------------------------------- 2 files changed, 82 insertions(+), 192 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 95578ba2774..5f3a1ebe330 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -21,6 +21,10 @@ use hyper::Error as HttpError; use hyper::client::Request; use hyper::header::StrictTransportSecurity; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, Quality, QualityItem}; +use 
hyper::client::{Request, Response}; +use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, StrictTransportSecurity}; +use hyper::header::{Quality, QualityItem}; +use hyper::Error as HttpError; use hyper::method::Method; use hyper::mime::{Mime, TopLevel, SubLevel}; use hyper::net::{Fresh, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; @@ -105,20 +109,9 @@ fn load_for_consumer(load_data: LoadData, devtools_chan: Option>, hsts_list: Arc>) { - let connector = { - // TODO: Is this still necessary? The type system is making it really hard to support both - // connectors. SSL is working, so it's not clear to me if the original rationale still - // stands? - // if opts::get().nossl { - // &HttpConnector - let mut context = SslContext::new(SslMethod::Sslv23).unwrap(); - context.set_verify(SSL_VERIFY_PEER, None); - context.set_CA_file(&resources_dir_path().join("certs")).unwrap(); + let requester = NetworkHttpRequester::new(); - &HttpsConnector::new(Openssl { context: Arc::new(context) }) - }; - - match load(load_data, resource_mgr_chan, devtools_chan, hsts_list, connector) { + match load(load_data, resource_mgr_chan, devtools_chan, hsts_list, &requester) { Err(LoadError::UnsupportedScheme(url)) => { let s = format!("{} request, but we don't support that scheme", &*url.scheme); send_error(url, s, start_chan) @@ -150,40 +143,67 @@ fn load_for_consumer(load_data: LoadData, } } -#[inline(always)] -fn connect(url: Url, - method: Method, - connector: &C) -> Result, LoadError> where - C: NetworkConnector, - S: Into> { - let connection = Request::with_connector(method, url.clone(), connector); +struct NetworkHttpRequester { + connector: HttpsConnector +} - let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ -function: \"SSL3_GET_SERVER_CERTIFICATE\", \ -reason: \"certificate verify failed\" }]))"; +impl NetworkHttpRequester { + fn new() -> NetworkHttpRequester { + // TODO: Is this still necessary? The type system is making it really hard to support both + // connectors. SSL is working, so it's not clear to me if the original rationale still + // stands? + // if opts::get().nossl { + // &HttpConnector + let mut context = SslContext::new(SslMethod::Sslv23).unwrap(); + context.set_verify(SSL_VERIFY_PEER, None); + context.set_CA_file(&resources_dir_path().join("certs")).unwrap(); - match connection { - Ok(req) => Ok(req), - - Err(HttpError::Io(ref io_error)) if ( - io_error.kind() == io::ErrorKind::Other && - io_error.description() == "Error in OpenSSL" && - // FIXME: This incredibly hacky. Make it more robust, and at least test it. 
- format!("{:?}", io_error.cause()) == ssl_err_string - ) => { - Err( - LoadError::Ssl( - url.clone(), - format!("ssl error {:?}: {:?} {:?}", io_error.kind(), io_error.description(), io_error.cause()) - ) - ) - }, - Err(e) => { - Err(LoadError::Connection(url, e.description().to_string())) + NetworkHttpRequester { + connector: HttpsConnector::new(Openssl { context: Arc::new(context) }) } } } +impl HttpRequester for NetworkHttpRequester { + fn send(&self, request: Request) -> Result { + unimplemented!() + } + + fn build(&self, url: Url, method: Method) -> Result, LoadError> { + let connection = Request::with_connector(method, url.clone(), &self.connector); + + let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ + function: \"SSL3_GET_SERVER_CERTIFICATE\", \ + reason: \"certificate verify failed\" }]))"; + + match connection { + Ok(req) => Ok(req), + + Err(HttpError::Io(ref io_error)) if ( + io_error.kind() == io::ErrorKind::Other && + io_error.description() == "Error in OpenSSL" && + // FIXME: This incredibly hacky. Make it more robust, and at least test it. + format!("{:?}", io_error.cause()) == ssl_err_string + ) => { + Err( + LoadError::Ssl( + url.clone(), + format!("ssl error {:?}: {:?} {:?}", io_error.kind(), io_error.description(), io_error.cause()) + ) + ) + }, + Err(e) => { + Err(LoadError::Connection(url, e.description().to_string())) + } + } + } +} + +pub trait HttpRequester { + fn build(&self, url: Url, method: Method) -> Result, LoadError>; + fn send(&self, request: Request) -> Result; +} + pub enum LoadError { UnsupportedScheme(Url), Connection(Url, String), @@ -194,14 +214,12 @@ pub enum LoadError { MaxRedirects(Url) } -fn load(mut load_data: LoadData, - resource_mgr_chan: IpcSender, - devtools_chan: Option>, - hsts_list: Arc>, - connector: &C) -> Result<(Box, Metadata), LoadError> where - C: NetworkConnector, - S: Into> { - +pub fn load(mut load_data: LoadData, + resource_mgr_chan: IpcSender, + devtools_chan: Option>, + hsts_list: Arc>, + requester: &HttpRequester) + -> Result<(Box, Metadata), LoadError> { // FIXME: At the time of writing this FIXME, servo didn't have any central // location for configuration. If you're reading this and such a // repository DOES exist, please update this constant to use it. @@ -243,7 +261,7 @@ fn load(mut load_data: LoadData, info!("requesting {}", url.serialize()); - let mut req = try!(connect(url.clone(), load_data.method.clone(), connector)); + let mut req = try!(requester.build(url.clone(), load_data.method.clone())); //Ensure that the host header is set from the original url let host = Host { diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 9e27c47d73c..2c05aadb52d 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -2,153 +2,25 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ -use net::http_loader::{load, LoadError}; -use hyper::net::{NetworkStream, NetworkConnector}; -use hyper; +use net::http_loader::{load, LoadError, HttpRequester}; use url::Url; -use std::io::{self, Read, Write, Cursor}; -use std::fmt; -use std::net::SocketAddr; use std::sync::{Arc, Mutex}; use ipc_channel::ipc; use net_traits::LoadData; use net::hsts::HSTSList; -use net::hsts::HSTSEntry; +use hyper::client::{Request, Response}; +use hyper::net::Fresh; +use hyper::method::Method; -#[derive(Clone)] -pub struct MockStream { - pub read: Cursor>, - pub write: Vec, - #[cfg(feature = "timeouts")] - pub read_timeout: Cell>, - #[cfg(feature = "timeouts")] - pub write_timeout: Cell> -} +struct MockHttpRequester; -impl fmt::Debug for MockStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "MockStream {{ read: {:?}, write: {:?} }}", self.read.get_ref(), self.write) - } -} - -impl PartialEq for MockStream { - fn eq(&self, other: &MockStream) -> bool { - self.read.get_ref() == other.read.get_ref() && self.write == other.write - } -} - -impl MockStream { - pub fn new() -> MockStream { - MockStream::with_input(b"") +impl HttpRequester for MockHttpRequester { + fn build(&self, url: Url, _: Method) -> Result, LoadError> { + Err(LoadError::Connection(url.clone(), "shouldn't connect".to_string())) } - #[cfg(not(feature = "timeouts"))] - pub fn with_input(input: &[u8]) -> MockStream { - MockStream { - read: Cursor::new(input.to_vec()), - write: vec![] - } - } - - #[cfg(feature = "timeouts")] - pub fn with_input(input: &[u8]) -> MockStream { - MockStream { - read: Cursor::new(input.to_vec()), - write: vec![], - read_timeout: Cell::new(None), - write_timeout: Cell::new(None), - } - } -} - -impl Read for MockStream { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - self.read.read(buf) - } -} - -impl Write for MockStream { - fn write(&mut self, msg: &[u8]) -> io::Result { - Write::write(&mut self.write, msg) - } - - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } -} - -impl NetworkStream for MockStream { - fn peer_addr(&mut self) -> io::Result { - Ok("127.0.0.1:1337".parse().unwrap()) - } - - #[cfg(feature = "timeouts")] - fn set_read_timeout(&self, dur: Option) -> io::Result<()> { - self.read_timeout.set(dur); - Ok(()) - } - - #[cfg(feature = "timeouts")] - fn set_write_timeout(&self, dur: Option) -> io::Result<()> { - self.write_timeout.set(dur); - Ok(()) - } -} - -/// A wrapper around a `MockStream` that allows one to clone it and keep an independent copy to the -/// same underlying stream. 
-#[derive(Clone)] -pub struct CloneableMockStream { - pub inner: Arc>, -} - -impl Write for CloneableMockStream { - fn write(&mut self, msg: &[u8]) -> io::Result { - self.inner.lock().unwrap().write(msg) - } - - fn flush(&mut self) -> io::Result<()> { - self.inner.lock().unwrap().flush() - } -} - -impl Read for CloneableMockStream { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - self.inner.lock().unwrap().read(buf) - } -} - -impl NetworkStream for CloneableMockStream { - fn peer_addr(&mut self) -> io::Result { - self.inner.lock().unwrap().peer_addr() - } - - #[cfg(feature = "timeouts")] - fn set_read_timeout(&self, dur: Option) -> io::Result<()> { - self.inner.lock().unwrap().set_read_timeout(dur) - } - - #[cfg(feature = "timeouts")] - fn set_write_timeout(&self, dur: Option) -> io::Result<()> { - self.inner.lock().unwrap().set_write_timeout(dur) - } -} - -impl CloneableMockStream { - pub fn with_stream(stream: MockStream) -> CloneableMockStream { - CloneableMockStream { - inner: Arc::new(Mutex::new(stream)), - } - } -} - -pub struct MockConnector; - -impl NetworkConnector for MockConnector { - type Stream = MockStream; - - fn connect(&self, _host: &str, _port: u16, _scheme: &str) -> hyper::Result { - Ok(MockStream::new()) + fn send(&self, _: Request) -> Result { + Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), "shouldn't connect".to_string())) } } @@ -156,10 +28,10 @@ impl NetworkConnector for MockConnector { fn test_load_errors_when_scheme_is_not_http_or_https() { let url = Url::parse("ftp://not-supported").unwrap(); let (cookies_chan, _) = ipc::channel().unwrap(); - let load_data = LoadData::new(url, None); + let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, cookies_chan, None, hsts_list, &MockConnector) { + match load(load_data, cookies_chan, None, hsts_list, &MockHttpRequester) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } @@ -169,10 +41,10 @@ fn test_load_errors_when_scheme_is_not_http_or_https() { fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_https() { let url = Url::parse("view-source:ftp://not-supported").unwrap(); let (cookies_chan, _) = ipc::channel().unwrap(); - let load_data = LoadData::new(url, None); + let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, cookies_chan, None, hsts_list, &MockConnector) { + match load(load_data, cookies_chan, None, hsts_list, &MockHttpRequester) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } From 7633cd54c2c5d75109043a305c1ce325fdb74a16 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 9 Aug 2015 14:42:31 +1000 Subject: [PATCH 06/43] Abstract everything but the response from hyper Because we're using unsized types not for requesting, there's not a satisfactory way of doing this without boxing the request... Once unsized stuff lands in rust 1.2/1.3(???) then this should be implemented with Rc's instead of Box's. For the time being I'm not sure what else to do. 
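A rough sketch of the boxed trait-object shape this settles on (the names below are
made up for illustration only; the real trait in the diff is HttpRequest, whose send
likewise takes self: Box<Self>):

    trait Sendable {
        // Consuming the boxed value keeps the trait object-safe while still
        // letting send take ownership of the request.
        fn send(self: Box<Self>) -> Result<String, String>;
    }

    struct FakeRequest {
        body: Vec<u8>,
    }

    impl Sendable for FakeRequest {
        fn send(self: Box<Self>) -> Result<String, String> {
            Ok(format!("sent {} bytes", self.body.len()))
        }
    }

    fn dispatch(req: Box<Sendable>) -> Result<String, String> {
        // The Box itself is moved into the trait method; no Sized bound needed.
        req.send()
    }

    fn main() {
        let req = Box::new(FakeRequest { body: vec![1, 2, 3] });
        assert_eq!(dispatch(req).unwrap(), "sent 3 bytes");
    }
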
servo/servo#6727 --- components/net/http_loader.rs | 92 ++++++++++++++++++++--------------- tests/unit/net/http_loader.rs | 9 ++-- 2 files changed, 57 insertions(+), 44 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 5f3a1ebe330..190ae0f98a7 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -23,11 +23,11 @@ use hyper::header::StrictTransportSecurity; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, Quality, QualityItem}; use hyper::client::{Request, Response}; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, StrictTransportSecurity}; -use hyper::header::{Quality, QualityItem}; +use hyper::header::{Quality, QualityItem, Headers}; use hyper::Error as HttpError; use hyper::method::Method; use hyper::mime::{Mime, TopLevel, SubLevel}; -use hyper::net::{Fresh, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; +use hyper::net::{Fresh, Streaming, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; use hyper::status::{StatusCode, StatusClass}; use ipc_channel::ipc::{self, IpcSender}; use log; @@ -164,20 +164,52 @@ impl NetworkHttpRequester { } } -impl HttpRequester for NetworkHttpRequester { - fn send(&self, request: Request) -> Result { - unimplemented!() +pub trait HttpRequest { + fn headers(&self) -> &Headers; + fn headers_mut(&mut self) -> &mut Headers; + fn send(self: Box) -> Result; +} + +struct NetworkHttpRequest { + fresh: Request +} + +impl HttpRequest for NetworkHttpRequest { + fn headers(&self) -> &Headers { + self.fresh.headers() } - fn build(&self, url: Url, method: Method) -> Result, LoadError> { + fn headers_mut(&mut self) -> &mut Headers { + self.fresh.headers_mut() + } + + fn send(self: Box) -> Result { + let connected = match self.fresh.start() { + Ok(streaming) => streaming, + Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + }; + + match connected.send() { + Ok(w) => Ok(w), + Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + } + } +} + +impl HttpRequester for NetworkHttpRequester { + fn send(&self, request: Box) -> Result { + request.send() + } + + fn build(&self, url: Url, method: Method) -> Result, LoadError> { let connection = Request::with_connector(method, url.clone(), &self.connector); let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ function: \"SSL3_GET_SERVER_CERTIFICATE\", \ reason: \"certificate verify failed\" }]))"; - match connection { - Ok(req) => Ok(req), + let request = match connection { + Ok(req) => req, Err(HttpError::Io(ref io_error)) if ( io_error.kind() == io::ErrorKind::Other && @@ -185,7 +217,7 @@ impl HttpRequester for NetworkHttpRequester { // FIXME: This incredibly hacky. Make it more robust, and at least test it. 
format!("{:?}", io_error.cause()) == ssl_err_string ) => { - Err( + return Err( LoadError::Ssl( url.clone(), format!("ssl error {:?}: {:?} {:?}", io_error.kind(), io_error.description(), io_error.cause()) @@ -193,15 +225,17 @@ impl HttpRequester for NetworkHttpRequester { ) }, Err(e) => { - Err(LoadError::Connection(url, e.description().to_string())) + return Err(LoadError::Connection(url, e.description().to_string())) } - } + }; + + Ok(Box::new(NetworkHttpRequest { fresh: request })) } } pub trait HttpRequester { - fn build(&self, url: Url, method: Method) -> Result, LoadError>; - fn send(&self, request: Request) -> Result; + fn build(&self, url: Url, method: Method) -> Result, LoadError>; + fn send(&self, request: Box) -> Result; } pub enum LoadError { @@ -321,35 +355,23 @@ pub fn load(mut load_data: LoadData, // --- Start sending the request // Avoid automatically sending request body if a redirect has occurred. - let writer = match load_data.data { + let response = match load_data.data { Some(ref data) if iters == 1 => { req.headers_mut().set(ContentLength(data.len() as u64)); - let mut writer = match req.start() { + match requester.send(req) { Ok(w) => w, - Err(e) => { - return Err(LoadError::Connection(url, e.description().to_string())); - } - }; - - match writer.write_all(&*data) { - Err(e) => { - return Err(LoadError::Connection(url, e.description().to_string())); - } - _ => {} - }; - writer + Err(e) => return Err(e) + } }, _ => { match load_data.method { Method::Get | Method::Head => (), _ => req.headers_mut().set(ContentLength(0)) } - match req.start() { + match requester.send(req) { Ok(w) => w, - Err(e) => { - return Err(LoadError::Connection(url, e.description().to_string())); - } + Err(e) => return Err(e) } } }; @@ -368,14 +390,6 @@ pub fn load(mut load_data: LoadData, net_event))).unwrap(); } - // --- Finish writing the request and read the response - let response = match writer.send() { - Ok(r) => r, - Err(e) => { - return Err(LoadError::Connection(url, e.description().to_string())); - } - }; - // Dump headers, but only do the iteration if info!() is enabled. info!("got HTTP response {}, headers:", response.status); if log_enabled!(log::LogLevel::Info) { diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 2c05aadb52d..863700eef53 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -2,24 +2,23 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ -use net::http_loader::{load, LoadError, HttpRequester}; +use net::http_loader::{load, LoadError, HttpRequester, HttpRequest}; use url::Url; use std::sync::{Arc, Mutex}; use ipc_channel::ipc; use net_traits::LoadData; use net::hsts::HSTSList; -use hyper::client::{Request, Response}; -use hyper::net::Fresh; +use hyper::client::Response; use hyper::method::Method; struct MockHttpRequester; impl HttpRequester for MockHttpRequester { - fn build(&self, url: Url, _: Method) -> Result, LoadError> { + fn build(&self, url: Url, _: Method) -> Result, LoadError> { Err(LoadError::Connection(url.clone(), "shouldn't connect".to_string())) } - fn send(&self, _: Request) -> Result { + fn send(&self, _: Box) -> Result { Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), "shouldn't connect".to_string())) } } From 81fe5938bf4dc5f3566012cffddfae1bbe87b6e2 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 9 Aug 2015 23:33:43 +1200 Subject: [PATCH 07/43] Removes HttpRequest, adds HttpResponse wrapper The HttpRequest trait doesn't make sense, on further reflection. Rather, just modify the method signature on the requester. The hyper request was only being used to mutate it's headers anyway. servo/servo#6727 --- components/net/http_loader.rs | 167 ++++++++++++++++++---------------- tests/unit/net/http_loader.rs | 9 +- 2 files changed, 91 insertions(+), 85 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 190ae0f98a7..f9c0d934502 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -26,6 +26,7 @@ use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Lo use hyper::header::{Quality, QualityItem, Headers}; use hyper::Error as HttpError; use hyper::method::Method; +use hyper::http::RawStatus; use hyper::mime::{Mime, TopLevel, SubLevel}; use hyper::net::{Fresh, Streaming, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; use hyper::status::{StatusCode, StatusClass}; @@ -164,52 +165,47 @@ impl NetworkHttpRequester { } } -pub trait HttpRequest { +pub trait HttpResponse: Read { fn headers(&self) -> &Headers; - fn headers_mut(&mut self) -> &mut Headers; - fn send(self: Box) -> Result; + fn status(&self) -> StatusCode; + fn status_raw(&self) -> &RawStatus; } -struct NetworkHttpRequest { - fresh: Request +struct WrappedHttpResponse { + response: Response } -impl HttpRequest for NetworkHttpRequest { +impl Read for WrappedHttpResponse { + #[inline] + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.response.read(buf) + } +} + +impl HttpResponse for WrappedHttpResponse { fn headers(&self) -> &Headers { - self.fresh.headers() + &self.response.headers } - fn headers_mut(&mut self) -> &mut Headers { - self.fresh.headers_mut() + fn status(&self) -> StatusCode { + self.response.status } - fn send(self: Box) -> Result { - let connected = match self.fresh.start() { - Ok(streaming) => streaming, - Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) - }; - - match connected.send() { - Ok(w) => Ok(w), - Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) - } + fn status_raw(&self) -> &RawStatus { + self.response.status_raw() } } -impl HttpRequester for NetworkHttpRequester { - fn send(&self, request: Box) -> Result { - request.send() - } - - fn build(&self, url: Url, method: Method) -> Result, LoadError> { - let connection = Request::with_connector(method, url.clone(), 
&self.connector); +impl NetworkHttpRequester { + fn start_connection(&self, url: &Url, method: &Method) -> Result, LoadError> { + let connection = Request::with_connector(method.clone(), url.clone(), &self.connector); let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ function: \"SSL3_GET_SERVER_CERTIFICATE\", \ reason: \"certificate verify failed\" }]))"; - let request = match connection { - Ok(req) => req, + match connection { + Ok(req) => Ok(req), Err(HttpError::Io(ref io_error)) if ( io_error.kind() == io::ErrorKind::Other && @@ -225,17 +221,49 @@ impl HttpRequester for NetworkHttpRequester { ) }, Err(e) => { - return Err(LoadError::Connection(url, e.description().to_string())) + return Err(LoadError::Connection(url.clone(), e.description().to_string())) } - }; - - Ok(Box::new(NetworkHttpRequest { fresh: request })) + } } } +impl HttpRequester for NetworkHttpRequester { + fn send(&self, url: &Url, method: &Method, headers: &Headers, body: &Option>) -> Result, LoadError> { + let mut request = try!(self.start_connection(url, method)); + + *request.headers_mut() = headers.clone(); + + // TODO: fix HEAD method (don't write the body, and force content-length to 0) + if let Some(ref data) = *body { + request.headers_mut().set(ContentLength(data.len() as u64)); + } + + let mut request_writer = match request.start() { + Ok(streaming) => streaming, + Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + }; + + if let Some(ref data) = *body { + match request_writer.write_all(&*data) { + Err(e) => { + return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + } + _ => {} + } + } + + let response = match request_writer.send() { + Ok(w) => w, + Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + }; + + Ok(Box::new(WrappedHttpResponse { response: response })) + } + +} + pub trait HttpRequester { - fn build(&self, url: Url, method: Method) -> Result, LoadError>; - fn send(&self, request: Box) -> Result; + fn send(&self, url: &Url, method: &Method, headers: &Headers, body: &Option>) -> Result, LoadError>; } pub enum LoadError { @@ -295,7 +323,7 @@ pub fn load(mut load_data: LoadData, info!("requesting {}", url.serialize()); - let mut req = try!(requester.build(url.clone(), load_data.method.clone())); + // let mut req = try!(requester.build(url.clone(), load_data.method.clone())); //Ensure that the host header is set from the original url let host = Host { @@ -307,26 +335,26 @@ pub fn load(mut load_data: LoadData, // See https://bugzilla.mozilla.org/show_bug.cgi?id=401564 and // https://bugzilla.mozilla.org/show_bug.cgi?id=216828 . // Only preserve ones which have been explicitly marked as such. 
- if iters == 1 { + let mut request_headers = if iters == 1 { let mut combined_headers = load_data.headers.clone(); combined_headers.extend(load_data.preserved_headers.iter()); - *req.headers_mut() = combined_headers; + combined_headers } else { - *req.headers_mut() = load_data.preserved_headers.clone(); - } + load_data.preserved_headers.clone() + }; - req.headers_mut().set(host); + request_headers.set(host); // --- Set default accept header - if !req.headers().has::() { + if !request_headers.has::() { let accept = Accept(vec![ qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])), qitem(Mime(TopLevel::Application, SubLevel::Ext("xhtml+xml".to_string()), vec![])), QualityItem::new(Mime(TopLevel::Application, SubLevel::Xml, vec![]), Quality(900u16)), QualityItem::new(Mime(TopLevel::Star, SubLevel::Star, vec![]), Quality(800u16)), ]); - req.headers_mut().set(accept); + request_headers.set(accept); } // --- Fetch cookies @@ -337,44 +365,25 @@ pub fn load(mut load_data: LoadData, if let Some(cookie_list) = rx.recv().unwrap() { let mut v = Vec::new(); v.push(cookie_list.into_bytes()); - req.headers_mut().set_raw("Cookie".to_owned(), v); + request_headers.set_raw("Cookie".to_owned(), v); } // --- Set default accept encoding - if !req.headers().has::() { - req.headers_mut().set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); + if !request_headers.has::() { + request_headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); } if log_enabled!(log::LogLevel::Info) { info!("{}", load_data.method); - for header in req.headers().iter() { + for header in request_headers.iter() { info!(" - {}", header); } info!("{:?}", load_data.data); } // --- Start sending the request - // Avoid automatically sending request body if a redirect has occurred. - let response = match load_data.data { - Some(ref data) if iters == 1 => { - req.headers_mut().set(ContentLength(data.len() as u64)); - - match requester.send(req) { - Ok(w) => w, - Err(e) => return Err(e) - } - }, - _ => { - match load_data.method { - Method::Get | Method::Head => (), - _ => req.headers_mut().set(ContentLength(0)) - } - match requester.send(req) { - Ok(w) => w, - Err(e) => return Err(e) - } - } - }; + // TODO: Avoid automatically sending request body if a redirect has occurred. + let response = try!(requester.send(&url, &load_data.method, &request_headers, &load_data.data)); // --- Tell devtools we've made a request // Send an HttpRequest message to devtools with a unique request_id @@ -391,16 +400,16 @@ pub fn load(mut load_data: LoadData, } // Dump headers, but only do the iteration if info!() is enabled. 
- info!("got HTTP response {}, headers:", response.status); + info!("got HTTP response {}, headers:", (*response).status()); if log_enabled!(log::LogLevel::Info) { - for header in response.headers.iter() { + for header in response.headers().iter() { info!(" - {}", header); } } // --- Update the resource manager that we've gotten a cookie - if let Some(cookies) = response.headers.get_raw("set-cookie") { - for cookie in cookies { + if let Some(cookies) = response.headers().get_raw("set-cookie") { + for cookie in cookies.iter() { if let Ok(cookies) = String::from_utf8(cookie.clone()) { resource_mgr_chan.send(ControlMsg::SetCookiesForUrl(doc_url.clone(), cookies, @@ -410,7 +419,7 @@ pub fn load(mut load_data: LoadData, } if url.scheme == "https" { - if let Some(header) = response.headers.get::() { + if let Some(header) = response.headers().get::() { if let Some(host) = url.domain() { info!("adding host {} to the strict transport security list", host); info!("- max-age {}", header.max_age); @@ -432,8 +441,8 @@ pub fn load(mut load_data: LoadData, } // --- Loop if there's a redirect - if response.status.class() == StatusClass::Redirection { - match response.headers.get::() { + if response.status().class() == StatusClass::Redirection { + match response.headers().get::() { Some(&Location(ref new_url)) => { // CORS (https://fetch.spec.whatwg.org/#http-fetch, status section, point 9, 10) match load_data.cors { @@ -460,8 +469,8 @@ pub fn load(mut load_data: LoadData, // According to https://tools.ietf.org/html/rfc7231#section-6.4.2, // historically UAs have rewritten POST->GET on 301 and 302 responses. if load_data.method == Method::Post && - (response.status == StatusCode::MovedPermanently || - response.status == StatusCode::Found) { + (response.status() == StatusCode::MovedPermanently || + response.status() == StatusCode::Found) { load_data.method = Method::Get; } @@ -476,7 +485,7 @@ pub fn load(mut load_data: LoadData, } } - let mut adjusted_headers = response.headers.clone(); + let mut adjusted_headers = response.headers().clone(); if viewing_source { adjusted_headers.set(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec![]))); @@ -494,8 +503,8 @@ pub fn load(mut load_data: LoadData, //TODO: This is now in hyper, just need to implement //FIXME: Implement Content-Encoding Header https://github.com/hyperium/hyper/issues/391 - if let Some(encodings) = response.headers.get_raw("content-encoding") { - for encoding in encodings { + if let Some(encodings) = response.headers().get_raw("content-encoding") { + for encoding in encodings.iter() { if let Ok(encodings) = String::from_utf8(encoding.clone()) { if encodings == "gzip" || encodings == "deflate" { encoding_str = Some(encodings); diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 863700eef53..40c921d2f7d 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -2,7 +2,7 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ -use net::http_loader::{load, LoadError, HttpRequester, HttpRequest}; +use net::http_loader::{load, LoadError, HttpRequester, HttpResponse}; use url::Url; use std::sync::{Arc, Mutex}; use ipc_channel::ipc; @@ -10,15 +10,12 @@ use net_traits::LoadData; use net::hsts::HSTSList; use hyper::client::Response; use hyper::method::Method; +use hyper::header::Headers; struct MockHttpRequester; impl HttpRequester for MockHttpRequester { - fn build(&self, url: Url, _: Method) -> Result, LoadError> { - Err(LoadError::Connection(url.clone(), "shouldn't connect".to_string())) - } - - fn send(&self, _: Box) -> Result { + fn send(&self, _: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), "shouldn't connect".to_string())) } } From 06f09f6cdbfaf5326d1895e2d4c54563bf02cd81 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Mon, 10 Aug 2015 21:13:01 +1200 Subject: [PATCH 08/43] Adds tests for redirecting servo/servo#6727 --- components/net/http_loader.rs | 1 + tests/unit/net/http_loader.rs | 180 +++++++++++++++++++++++++++++++++- 2 files changed, 176 insertions(+), 5 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index f9c0d934502..e6c4a482cd8 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -266,6 +266,7 @@ pub trait HttpRequester { fn send(&self, url: &Url, method: &Method, headers: &Headers, body: &Option>) -> Result, LoadError>; } +#[derive(Debug)] pub enum LoadError { UnsupportedScheme(Url), Connection(Url, String), diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 40c921d2f7d..067ed89f83d 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -3,6 +3,7 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use net::http_loader::{load, LoadError, HttpRequester, HttpResponse}; +use net::resource_task::new_resource_task; use url::Url; use std::sync::{Arc, Mutex}; use ipc_channel::ipc; @@ -10,11 +11,180 @@ use net_traits::LoadData; use net::hsts::HSTSList; use hyper::client::Response; use hyper::method::Method; -use hyper::header::Headers; +use hyper::http::RawStatus; +use hyper::status::StatusCode; +use hyper::header::{Headers, Location, ContentLength}; +use std::io::{self, Read}; +use std::cmp::{self}; +use std::mem::{self}; +use std::borrow::Cow; -struct MockHttpRequester; +fn redirect_to(host: &str) -> Box { + let mut headers = Headers::new(); + headers.set(Location(host.to_string())); -impl HttpRequester for MockHttpRequester { + struct Redirect { + h: Headers, + sr: RawStatus + } + + impl Read for Redirect { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + Ok(0) + } + } + + impl HttpResponse for Redirect { + fn headers(&self) -> &Headers { &self.h } + fn status(&self) -> StatusCode { StatusCode::MovedPermanently } + fn status_raw(&self) -> &RawStatus { &self.sr } + } + + Box::new( + Redirect { + h: headers, + sr: RawStatus(301, Cow::Borrowed("Moved Permanently")) + } + ) +} + +fn respond_with(body: &str) -> Box { + let mut headers = Headers::new(); + headers.set(ContentLength(body.len() as u64)); + + struct TextResponse { + h: Headers, + body: Vec, + sr: RawStatus + } + + impl Read for TextResponse { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let buf_len = buf.len(); + for (a, b) in buf.iter_mut().zip(&self.body[0 .. 
cmp::min(buf_len, self.body.len())]) { + *a = *b + } + + Ok(cmp::min(buf.len(), self.body.len())) + } + } + + impl HttpResponse for TextResponse { + fn headers(&self) -> &Headers { &self.h } + fn status(&self) -> StatusCode { StatusCode::Ok } + fn status_raw(&self) -> &RawStatus { &self.sr } + } + + Box::new( + TextResponse { + h: headers, + body: <[_]>::to_vec(body.as_bytes()), + sr: RawStatus(200, Cow::Borrowed("Ok")) + } + ) +} + +fn read_response(reader: &mut Read) -> String { + let mut buf = vec![0; 1024]; + match reader.read(&mut buf) { + Ok(len) if len > 0 => { + unsafe { buf.set_len(len); } + String::from_utf8(buf).unwrap() + }, + Ok(_) => "".to_string(), + _ => panic!("problem reading response") + } +} + +#[test] +fn test_load_errors_when_there_a_redirect_loop() { + struct Redirector; + + impl HttpRequester for Redirector { + fn send(&self, url: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { + if url.domain().unwrap() == "mozilla.com" { + Ok(redirect_to("http://mozilla.org")) + } else if url.domain().unwrap() == "mozilla.org" { + Ok(redirect_to("http://mozilla.com")) + } else { + panic!("unexpected host {:?}", url) + } + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let load_data = LoadData::new(url.clone(), None); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + match load(load_data, resource_mgr, None, hsts_list, &Redirector) { + Err(LoadError::InvalidRedirect(_, msg)) => { + assert_eq!(msg, "redirect loop"); + }, + _ => panic!("expected max redirects to fail") + } +} + +#[test] +fn test_load_errors_when_there_is_too_many_redirects() { + struct Redirector; + + impl HttpRequester for Redirector { + fn send(&self, url: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { + if url.domain().unwrap() == "mozilla.com" { + Ok(redirect_to(&*format!("{}/1", url.serialize()))) + } else { + panic!("unexpected host {:?}", url) + } + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let load_data = LoadData::new(url.clone(), None); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + match load(load_data, resource_mgr, None, hsts_list, &Redirector) { + Err(LoadError::MaxRedirects(url)) => { + assert_eq!(url.domain().unwrap(), "mozilla.com") + }, + _ => panic!("expected max redirects to fail") + } +} + +#[test] +fn test_load_follows_a_redirect() { + struct Redirector; + + impl HttpRequester for Redirector { + fn send(&self, url: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { + if url.domain().unwrap() == "mozilla.com" { + Ok(redirect_to("http://mozilla.org")) + } else if url.domain().unwrap() == "mozilla.org" { + Ok(respond_with("Yay!")) + } else { + panic!("unexpected host") + } + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let load_data = LoadData::new(url.clone(), None); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + match load(load_data, resource_mgr, None, hsts_list, &Redirector) { + Err(_) => panic!("expected to follow a redirect"), + Ok((mut r, m)) => { + let response = read_response(&mut *r); + assert_eq!(response, "Yay!".to_string()); + } + } +} + +struct DontConnectHttpRequester; + +impl HttpRequester for DontConnectHttpRequester { fn send(&self, _: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { 
Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), "shouldn't connect".to_string())) } @@ -27,7 +197,7 @@ fn test_load_errors_when_scheme_is_not_http_or_https() { let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, cookies_chan, None, hsts_list, &MockHttpRequester) { + match load(load_data, cookies_chan, None, hsts_list, &DontConnectHttpRequester) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } @@ -40,7 +210,7 @@ fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_http let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, cookies_chan, None, hsts_list, &MockHttpRequester) { + match load(load_data, cookies_chan, None, hsts_list, &DontConnectHttpRequester) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } From 7a09c2d924c7f0d9260c4733d5a426d5c8d9a85f Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 13:39:28 +1000 Subject: [PATCH 09/43] Shifts to a series of traits/associated types instead of having a requester --- components/net/http_loader.rs | 114 ++++++++++--------- tests/unit/net/http_loader.rs | 209 ++++++++++++++++++++-------------- 2 files changed, 181 insertions(+), 142 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index e6c4a482cd8..9cf6f15c4d2 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -28,7 +28,7 @@ use hyper::Error as HttpError; use hyper::method::Method; use hyper::http::RawStatus; use hyper::mime::{Mime, TopLevel, SubLevel}; -use hyper::net::{Fresh, Streaming, HttpsConnector, Openssl, NetworkConnector, NetworkStream}; +use hyper::net::{Fresh, HttpsConnector, Openssl}; use hyper::status::{StatusCode, StatusClass}; use ipc_channel::ipc::{self, IpcSender}; use log; @@ -110,9 +110,7 @@ fn load_for_consumer(load_data: LoadData, devtools_chan: Option>, hsts_list: Arc>) { - let requester = NetworkHttpRequester::new(); - - match load(load_data, resource_mgr_chan, devtools_chan, hsts_list, &requester) { + match load::(load_data, resource_mgr_chan, devtools_chan, hsts_list, &NetworkHttpRequestFactory) { Err(LoadError::UnsupportedScheme(url)) => { let s = format!("{} request, but we don't support that scheme", &*url.scheme); send_error(url, s, start_chan) @@ -144,27 +142,6 @@ fn load_for_consumer(load_data: LoadData, } } -struct NetworkHttpRequester { - connector: HttpsConnector -} - -impl NetworkHttpRequester { - fn new() -> NetworkHttpRequester { - // TODO: Is this still necessary? The type system is making it really hard to support both - // connectors. SSL is working, so it's not clear to me if the original rationale still - // stands? 
- // if opts::get().nossl { - // &HttpConnector - let mut context = SslContext::new(SslMethod::Sslv23).unwrap(); - context.set_verify(SSL_VERIFY_PEER, None); - context.set_CA_file(&resources_dir_path().join("certs")).unwrap(); - - NetworkHttpRequester { - connector: HttpsConnector::new(Openssl { context: Arc::new(context) }) - } - } -} - pub trait HttpResponse: Read { fn headers(&self) -> &Headers; fn status(&self) -> StatusCode; @@ -196,16 +173,31 @@ impl HttpResponse for WrappedHttpResponse { } } -impl NetworkHttpRequester { - fn start_connection(&self, url: &Url, method: &Method) -> Result, LoadError> { - let connection = Request::with_connector(method.clone(), url.clone(), &self.connector); +pub trait HttpRequestFactory { + type R: HttpRequest; + + fn create(&self, url: Url, method: Method) -> Result; +} + +struct NetworkHttpRequestFactory; + +impl HttpRequestFactory for NetworkHttpRequestFactory { + type R = WrappedHttpRequest; + + fn create(&self, url: Url, method: Method) -> Result { + let mut context = SslContext::new(SslMethod::Sslv23).unwrap(); + context.set_verify(SSL_VERIFY_PEER, None); + context.set_CA_file(&resources_dir_path().join("certs")).unwrap(); + + let connector = HttpsConnector::new(Openssl { context: Arc::new(context) }); + let connection = Request::with_connector(method.clone(), url.clone(), &connector); let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \ function: \"SSL3_GET_SERVER_CERTIFICATE\", \ reason: \"certificate verify failed\" }]))"; - match connection { - Ok(req) => Ok(req), + let request = match connection { + Ok(req) => req, Err(HttpError::Io(ref io_error)) if ( io_error.kind() == io::ErrorKind::Other && @@ -223,28 +215,38 @@ impl NetworkHttpRequester { Err(e) => { return Err(LoadError::Connection(url.clone(), e.description().to_string())) } - } + }; + + Ok(WrappedHttpRequest { request: request }) } } -impl HttpRequester for NetworkHttpRequester { - fn send(&self, url: &Url, method: &Method, headers: &Headers, body: &Option>) -> Result, LoadError> { - let mut request = try!(self.start_connection(url, method)); +pub trait HttpRequest { + type R: HttpResponse + 'static; - *request.headers_mut() = headers.clone(); + fn headers_mut(&mut self) -> &mut Headers; + fn send(self, body: &Option>) -> Result; +} - // TODO: fix HEAD method (don't write the body, and force content-length to 0) - if let Some(ref data) = *body { - request.headers_mut().set(ContentLength(data.len() as u64)); - } +struct WrappedHttpRequest { + request: Request +} - let mut request_writer = match request.start() { +impl HttpRequest for WrappedHttpRequest { + type R = WrappedHttpResponse; + + fn headers_mut(&mut self) -> &mut Headers { + self.request.headers_mut() + } + + fn send(self, body: &Option>) -> Result { + let mut request_writer = match self.request.start() { Ok(streaming) => streaming, Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) }; if let Some(ref data) = *body { - match request_writer.write_all(&*data) { + match request_writer.write_all(&data) { Err(e) => { return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) } @@ -257,13 +259,8 @@ impl HttpRequester for NetworkHttpRequester { Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) }; - Ok(Box::new(WrappedHttpResponse { response: response })) + Ok(WrappedHttpResponse { response: response }) } - -} - -pub trait 
HttpRequester { - fn send(&self, url: &Url, method: &Method, headers: &Headers, body: &Option>) -> Result, LoadError>; } #[derive(Debug)] @@ -277,12 +274,12 @@ pub enum LoadError { MaxRedirects(Url) } -pub fn load(mut load_data: LoadData, +pub fn load(mut load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, hsts_list: Arc>, - requester: &HttpRequester) - -> Result<(Box, Metadata), LoadError> { + request_factory: &HttpRequestFactory) + -> Result<(Box, Metadata), LoadError> where A: HttpRequest + 'static { // FIXME: At the time of writing this FIXME, servo didn't have any central // location for configuration. If you're reading this and such a // repository DOES exist, please update this constant to use it. @@ -374,17 +371,24 @@ pub fn load(mut load_data: LoadData, request_headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); } + // --- Start sending the request + // TODO: Avoid automatically sending request body if a redirect has occurred. + let mut req = try!(request_factory.create(url.clone(), load_data.method.clone())); + *req.headers_mut() = request_headers; + if log_enabled!(log::LogLevel::Info) { info!("{}", load_data.method); - for header in request_headers.iter() { + for header in req.headers_mut().iter() { info!(" - {}", header); } info!("{:?}", load_data.data); } - // --- Start sending the request - // TODO: Avoid automatically sending request body if a redirect has occurred. - let response = try!(requester.send(&url, &load_data.method, &request_headers, &load_data.data)); + if let Some(ref data) = load_data.data { + req.headers_mut().set(ContentLength(data.len() as u64)); + } + + let response = try!(req.send(&load_data.data)); // --- Tell devtools we've made a request // Send an HttpRequest message to devtools with a unique request_id @@ -401,7 +405,7 @@ pub fn load(mut load_data: LoadData, } // Dump headers, but only do the iteration if info!() is enabled. - info!("got HTTP response {}, headers:", (*response).status()); + info!("got HTTP response {}, headers:", response.status()); if log_enabled!(log::LogLevel::Info) { for header in response.headers().iter() { info!(" - {}", header); diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 067ed89f83d..876aed61d81 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -2,85 +2,30 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ -use net::http_loader::{load, LoadError, HttpRequester, HttpResponse}; +use net::http_loader::{load, LoadError, HttpRequestFactory, HttpRequest, HttpResponse}; use net::resource_task::new_resource_task; use url::Url; use std::sync::{Arc, Mutex}; use ipc_channel::ipc; use net_traits::LoadData; use net::hsts::HSTSList; -use hyper::client::Response; use hyper::method::Method; use hyper::http::RawStatus; use hyper::status::StatusCode; use hyper::header::{Headers, Location, ContentLength}; use std::io::{self, Read}; use std::cmp::{self}; -use std::mem::{self}; use std::borrow::Cow; -fn redirect_to(host: &str) -> Box { - let mut headers = Headers::new(); - headers.set(Location(host.to_string())); - - struct Redirect { - h: Headers, - sr: RawStatus - } - - impl Read for Redirect { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - Ok(0) - } - } - - impl HttpResponse for Redirect { - fn headers(&self) -> &Headers { &self.h } - fn status(&self) -> StatusCode { StatusCode::MovedPermanently } - fn status_raw(&self) -> &RawStatus { &self.sr } - } - - Box::new( - Redirect { - h: headers, - sr: RawStatus(301, Cow::Borrowed("Moved Permanently")) - } - ) -} - -fn respond_with(body: &str) -> Box { +fn respond_with(body: Vec) -> MockResponse { let mut headers = Headers::new(); headers.set(ContentLength(body.len() as u64)); - struct TextResponse { - h: Headers, - body: Vec, - sr: RawStatus - } - - impl Read for TextResponse { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - let buf_len = buf.len(); - for (a, b) in buf.iter_mut().zip(&self.body[0 .. cmp::min(buf_len, self.body.len())]) { - *a = *b - } - - Ok(cmp::min(buf.len(), self.body.len())) - } - } - - impl HttpResponse for TextResponse { - fn headers(&self) -> &Headers { &self.h } - fn status(&self) -> StatusCode { StatusCode::Ok } - fn status_raw(&self) -> &RawStatus { &self.sr } - } - - Box::new( - TextResponse { - h: headers, - body: <[_]>::to_vec(body.as_bytes()), - sr: RawStatus(200, Cow::Borrowed("Ok")) - } + MockResponse::new( + headers, + StatusCode::Ok, + RawStatus(200, Cow::Borrowed("Ok")), + body ) } @@ -96,16 +41,94 @@ fn read_response(reader: &mut Read) -> String { } } +struct MockResponse { + h: Headers, + sc: StatusCode, + sr: RawStatus, + msg: Vec +} + +impl MockResponse { + fn new(h: Headers, sc: StatusCode, sr: RawStatus, msg: Vec) -> MockResponse { + MockResponse { h: h, sc: sc, sr: sr, msg: msg } + } +} + +impl Read for MockResponse { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let buf_len = buf.len(); + for (a, b) in buf.iter_mut().zip(&self.msg[0 .. 
cmp::min(buf_len, self.msg.len())]) { + *a = *b + } + + Ok(cmp::min(buf.len(), self.msg.len())) + } +} + +impl HttpResponse for MockResponse { + fn headers(&self) -> &Headers { &self.h } + fn status(&self) -> StatusCode { self.sc } + fn status_raw(&self) -> &RawStatus { &self.sr } +} + +fn redirect_to(host: String) -> MockResponse { + let mut headers = Headers::new(); + headers.set(Location(host.to_string())); + + MockResponse::new( + headers, + StatusCode::MovedPermanently, + RawStatus(301, Cow::Borrowed("Moved Permanently")), + <[_]>::to_vec("".as_bytes()) + ) +} + + +enum RequestType { + Redirect(String), + Text(Vec) +} + +struct MockRequest { + headers: Headers, + t: RequestType +} + +impl MockRequest { + fn new(t: RequestType) -> MockRequest { + MockRequest { headers: Headers::new(), t: t } + } +} + +impl HttpRequest for MockRequest { + type R=MockResponse; + + fn headers_mut(&mut self) -> &mut Headers { &mut self.headers } + + fn send(self, _: &Option>) -> Result { + match self.t { + RequestType::Redirect(location) => { + Ok(redirect_to(location)) + }, + RequestType::Text(b) => { + Ok(respond_with(b)) + } + } + } +} + #[test] fn test_load_errors_when_there_a_redirect_loop() { - struct Redirector; + struct Factory; - impl HttpRequester for Redirector { - fn send(&self, url: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { - Ok(redirect_to("http://mozilla.org")) + Ok(MockRequest::new(RequestType::Redirect("http://mozilla.org".to_string()))) } else if url.domain().unwrap() == "mozilla.org" { - Ok(redirect_to("http://mozilla.com")) + Ok(MockRequest::new(RequestType::Redirect("http://mozilla.com".to_string()))) } else { panic!("unexpected host {:?}", url) } @@ -117,7 +140,7 @@ fn test_load_errors_when_there_a_redirect_loop() { let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, resource_mgr, None, hsts_list, &Redirector) { + match load::(load_data, resource_mgr, None, hsts_list, &Factory) { Err(LoadError::InvalidRedirect(_, msg)) => { assert_eq!(msg, "redirect loop"); }, @@ -127,12 +150,14 @@ fn test_load_errors_when_there_a_redirect_loop() { #[test] fn test_load_errors_when_there_is_too_many_redirects() { - struct Redirector; + struct Factory; - impl HttpRequester for Redirector { - fn send(&self, url: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { - Ok(redirect_to(&*format!("{}/1", url.serialize()))) + Ok(MockRequest::new(RequestType::Redirect(format!("{}/1", url.serialize())))) } else { panic!("unexpected host {:?}", url) } @@ -144,7 +169,7 @@ fn test_load_errors_when_there_is_too_many_redirects() { let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, resource_mgr, None, hsts_list, &Redirector) { + match load::(load_data, resource_mgr, None, hsts_list, &Factory) { Err(LoadError::MaxRedirects(url)) => { assert_eq!(url.domain().unwrap(), "mozilla.com") }, @@ -154,16 +179,24 @@ fn test_load_errors_when_there_is_too_many_redirects() { #[test] fn test_load_follows_a_redirect() { - struct Redirector; + struct Factory; - impl HttpRequester for Redirector { 
- fn send(&self, url: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { - Ok(redirect_to("http://mozilla.org")) + Ok(MockRequest::new(RequestType::Redirect("http://mozilla.org".to_string()))) } else if url.domain().unwrap() == "mozilla.org" { - Ok(respond_with("Yay!")) + Ok( + MockRequest::new( + RequestType::Text( + <[_]>::to_vec("Yay!".as_bytes()) + ) + ) + ) } else { - panic!("unexpected host") + panic!("unexpected host {:?}", url) } } } @@ -173,20 +206,22 @@ fn test_load_follows_a_redirect() { let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, resource_mgr, None, hsts_list, &Redirector) { + match load::(load_data, resource_mgr, None, hsts_list, &Factory) { Err(_) => panic!("expected to follow a redirect"), - Ok((mut r, m)) => { + Ok((mut r, _)) => { let response = read_response(&mut *r); assert_eq!(response, "Yay!".to_string()); } } } -struct DontConnectHttpRequester; +struct DontConnectFactory; -impl HttpRequester for DontConnectHttpRequester { - fn send(&self, _: &Url, _: &Method, _: &Headers, _: &Option>) -> Result, LoadError> { - Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), "shouldn't connect".to_string())) +impl HttpRequestFactory for DontConnectFactory { + type R=MockRequest; + + fn create(&self, url: Url, _: Method) -> Result { + Err(LoadError::Connection(url, "should not have connected".to_string())) } } @@ -197,7 +232,7 @@ fn test_load_errors_when_scheme_is_not_http_or_https() { let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, cookies_chan, None, hsts_list, &DontConnectHttpRequester) { + match load::(load_data, cookies_chan, None, hsts_list, &DontConnectFactory) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } @@ -210,7 +245,7 @@ fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_http let load_data = LoadData::new(url.clone(), None); let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load(load_data, cookies_chan, None, hsts_list, &DontConnectHttpRequester) { + match load::(load_data, cookies_chan, None, hsts_list, &DontConnectFactory) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } From 26a6e058e7ddb17ccb553a806260879627d2247f Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 15:04:23 +1000 Subject: [PATCH 10/43] Adds more tests for checking request headers --- components/net/http_loader.rs | 46 +++++++------ tests/unit/net/http_loader.rs | 122 +++++++++++++++++++++++++++++++--- 2 files changed, 139 insertions(+), 29 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 9cf6f15c4d2..7625393c582 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -274,6 +274,26 @@ pub enum LoadError { MaxRedirects(Url) } +#[inline(always)] +fn set_default_accept_encoding(headers: &mut Headers) { + if !headers.has::() { + headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); + } +} + +#[inline(always)] +fn set_default_accept(headers: &mut Headers) { + if !headers.has::() { + let accept = Accept(vec![ + qitem(Mime(TopLevel::Text, SubLevel::Html, 
vec![])), + qitem(Mime(TopLevel::Application, SubLevel::Ext("xhtml+xml".to_string()), vec![])), + QualityItem::new(Mime(TopLevel::Application, SubLevel::Xml, vec![]), Quality(900u16)), + QualityItem::new(Mime(TopLevel::Star, SubLevel::Star, vec![]), Quality(800u16)), + ]); + headers.set(accept); + } +} + pub fn load(mut load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, @@ -321,9 +341,7 @@ pub fn load(mut load_data: LoadData, info!("requesting {}", url.serialize()); - // let mut req = try!(requester.build(url.clone(), load_data.method.clone())); - - //Ensure that the host header is set from the original url + // Ensure that the host header is set from the original url let host = Host { hostname: doc_url.serialize_host().unwrap(), port: doc_url.port_or_default() @@ -343,17 +361,8 @@ pub fn load(mut load_data: LoadData, request_headers.set(host); - - // --- Set default accept header - if !request_headers.has::() { - let accept = Accept(vec![ - qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])), - qitem(Mime(TopLevel::Application, SubLevel::Ext("xhtml+xml".to_string()), vec![])), - QualityItem::new(Mime(TopLevel::Application, SubLevel::Xml, vec![]), Quality(900u16)), - QualityItem::new(Mime(TopLevel::Star, SubLevel::Star, vec![]), Quality(800u16)), - ]); - request_headers.set(accept); - } + set_default_accept(&mut request_headers); + set_default_accept_encoding(&mut request_headers); // --- Fetch cookies let (tx, rx) = ipc::channel().unwrap(); @@ -366,13 +375,7 @@ pub fn load(mut load_data: LoadData, request_headers.set_raw("Cookie".to_owned(), v); } - // --- Set default accept encoding - if !request_headers.has::() { - request_headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); - } - - // --- Start sending the request - // TODO: Avoid automatically sending request body if a redirect has occurred. + // --- Send the request let mut req = try!(request_factory.create(url.clone(), load_data.method.clone())); *req.headers_mut() = request_headers; @@ -384,6 +387,7 @@ pub fn load(mut load_data: LoadData, info!("{:?}", load_data.data); } + // TODO: Avoid automatically sending request body if a redirect has occurred. 
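// (Not part of the patch.) Background for the TODO above: the code this series replaced
// only attached a body on the first pass through the redirect loop
// (`Some(ref data) if iters == 1`), so a POST that got redirected would not re-send its
// payload. The lines below set Content-Length and hand load_data.data to the request on
// every iteration; a guard along these lines would restore the old behaviour:
//
//     let body = if iters == 1 { load_data.data.clone() } else { None };
//     if let Some(ref data) = body {
//         req.headers_mut().set(ContentLength(data.len() as u64));
//     }
//     let response = try!(req.send(&body));
//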
if let Some(ref data) = load_data.data { req.headers_mut().set(ContentLength(data.len() as u64)); } diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 876aed61d81..e8f9a2a7b4a 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -100,23 +100,129 @@ impl MockRequest { } } +fn response_for_request_type(t: RequestType) -> Result { + match t { + RequestType::Redirect(location) => { + Ok(redirect_to(location)) + }, + RequestType::Text(b) => { + Ok(respond_with(b)) + } + } +} + impl HttpRequest for MockRequest { type R=MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.headers } fn send(self, _: &Option>) -> Result { - match self.t { - RequestType::Redirect(location) => { - Ok(redirect_to(location)) - }, - RequestType::Text(b) => { - Ok(respond_with(b)) - } - } + response_for_request_type(self.t) } } +struct AssertMustHaveHeadersRequest { + expected_headers: Headers, + request_headers: Headers, + t: RequestType +} + +impl AssertMustHaveHeadersRequest { + fn new(t: RequestType, expected_headers: Headers) -> Self { + AssertMustHaveHeadersRequest { expected_headers: expected_headers, request_headers: Headers::new(), t: t } + } +} + +impl HttpRequest for AssertMustHaveHeadersRequest { + type R=MockResponse; + + fn headers_mut(&mut self) -> &mut Headers { &mut self.request_headers } + + fn send(self, _: &Option>) -> Result { + for header in self.expected_headers.iter() { + assert!(self.request_headers.get_raw(header.name()).is_some()); + assert_eq!( + self.request_headers.get_raw(header.name()).unwrap(), + self.expected_headers.get_raw(header.name()).unwrap() + ) + } + + response_for_request_type(self.t) + } +} + +struct AssertMustHaveHeadersRequestFactory { + expected_headers: Headers, + body: Vec +} + +impl HttpRequestFactory for AssertMustHaveHeadersRequestFactory { + type R=AssertMustHaveHeadersRequest; + + fn create(&self, _: Url, _: Method) -> Result { + Ok( + AssertMustHaveHeadersRequest::new( + RequestType::Text(self.body.clone()), + self.expected_headers.clone() + ) + ) + } +} + +#[test] +fn test_load_sets_content_length_to_length_of_request_body() { + let content = "This is a request body"; + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = Some(<[_]>::to_vec(content.as_bytes())); + + let mut content_len_headers= Headers::new(); + content_len_headers.set_raw("Content-Length".to_owned(), vec![<[_]>::to_vec(&*format!("{}", content.len()).as_bytes())]); + + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + let _ = load::(load_data.clone(), resource_mgr, None, hsts_list, &AssertMustHaveHeadersRequestFactory { + expected_headers: content_len_headers, + body: <[_]>::to_vec(&*load_data.data.unwrap()) + }); +} + +#[test] +fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { + let mut accept_headers = Headers::new(); + accept_headers.set_raw("Accept".to_owned(), vec![b"text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8".to_vec()]); + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + let _ = load::(load_data, resource_mgr, None, hsts_list, &AssertMustHaveHeadersRequestFactory { + 
expected_headers: accept_headers, + body: <[_]>::to_vec("Yay!".as_bytes()) + }); +} + +#[test] +fn test_load_sets_default_accept_encoding_to_gzip_and_deflate() { + let mut accept_encoding_headers = Headers::new(); + accept_encoding_headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + let _ = load::(load_data, resource_mgr, None, hsts_list, &AssertMustHaveHeadersRequestFactory { + expected_headers: accept_encoding_headers, + body: <[_]>::to_vec("Yay!".as_bytes()) + }); +} + #[test] fn test_load_errors_when_there_a_redirect_loop() { struct Factory; From ffc3877debce48704cef0dc98436d97786cd863a Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 15:56:17 +1000 Subject: [PATCH 11/43] Adds tests for setting/getting cookies from load --- components/net/http_loader.rs | 49 +++++++++++++---------- tests/unit/net/http_loader.rs | 74 ++++++++++++++++++++++++++++++++++- 2 files changed, 100 insertions(+), 23 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 7625393c582..8bd03e86e54 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -294,6 +294,32 @@ fn set_default_accept(headers: &mut Headers) { } } +#[inline(always)] +fn set_request_cookies(url: Url, headers: &mut Headers, resource_mgr_chan: &IpcSender) { + let (tx, rx) = ipc::channel().unwrap(); + resource_mgr_chan.send(ControlMsg::GetCookiesForUrl(url.clone(), + tx, + CookieSource::HTTP)).unwrap(); + if let Some(cookie_list) = rx.recv().unwrap() { + let mut v = Vec::new(); + v.push(cookie_list.into_bytes()); + headers.set_raw("Cookie".to_owned(), v); + } +} + +#[inline(always)] +fn set_cookies_from_response(url: Url, response: &HttpResponse, resource_mgr_chan: &IpcSender) { + if let Some(cookies) = response.headers().get_raw("set-cookie") { + for cookie in cookies.iter() { + if let Ok(cookies) = String::from_utf8(cookie.clone()) { + resource_mgr_chan.send(ControlMsg::SetCookiesForUrl(url.clone(), + cookies, + CookieSource::HTTP)).unwrap(); + } + } + } +} + pub fn load(mut load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, @@ -363,17 +389,7 @@ pub fn load(mut load_data: LoadData, set_default_accept(&mut request_headers); set_default_accept_encoding(&mut request_headers); - - // --- Fetch cookies - let (tx, rx) = ipc::channel().unwrap(); - resource_mgr_chan.send(ControlMsg::GetCookiesForUrl(doc_url.clone(), - tx, - CookieSource::HTTP)).unwrap(); - if let Some(cookie_list) = rx.recv().unwrap() { - let mut v = Vec::new(); - v.push(cookie_list.into_bytes()); - request_headers.set_raw("Cookie".to_owned(), v); - } + set_request_cookies(doc_url.clone(), &mut request_headers, &resource_mgr_chan); // --- Send the request let mut req = try!(request_factory.create(url.clone(), load_data.method.clone())); @@ -416,16 +432,7 @@ pub fn load(mut load_data: LoadData, } } - // --- Update the resource manager that we've gotten a cookie - if let Some(cookies) = response.headers().get_raw("set-cookie") { - for cookie in cookies.iter() { - if let Ok(cookies) = String::from_utf8(cookie.clone()) { - resource_mgr_chan.send(ControlMsg::SetCookiesForUrl(doc_url.clone(), - cookies, - CookieSource::HTTP)).unwrap(); - } - } - } + 
set_cookies_from_response(doc_url.clone(), &response, &resource_mgr_chan); if url.scheme == "https" { if let Some(header) = response.headers().get::() { diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index e8f9a2a7b4a..25662619cb8 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -4,6 +4,7 @@ use net::http_loader::{load, LoadError, HttpRequestFactory, HttpRequest, HttpResponse}; use net::resource_task::new_resource_task; +use net_traits::{ResourceTask, ControlMsg, CookieSource}; use url::Url; use std::sync::{Arc, Mutex}; use ipc_channel::ipc; @@ -21,8 +22,12 @@ fn respond_with(body: Vec) -> MockResponse { let mut headers = Headers::new(); headers.set(ContentLength(body.len() as u64)); + respond_with_headers(body, headers) +} + +fn respond_with_headers(body: Vec, headers: Headers) -> MockResponse { MockResponse::new( - headers, + headers.clone(), StatusCode::Ok, RawStatus(200, Cow::Borrowed("Ok")), body @@ -86,7 +91,8 @@ fn redirect_to(host: String) -> MockResponse { enum RequestType { Redirect(String), - Text(Vec) + Text(Vec), + WithHeaders(Vec, Headers) } struct MockRequest { @@ -107,6 +113,9 @@ fn response_for_request_type(t: RequestType) -> Result }, RequestType::Text(b) => { Ok(respond_with(b)) + }, + RequestType::WithHeaders(b, h) => { + Ok(respond_with_headers(b, h)) } } } @@ -169,6 +178,67 @@ impl HttpRequestFactory for AssertMustHaveHeadersRequestFactory { } } +fn assert_cookie_for_domain(resource_mgr: &ResourceTask, domain: &str, cookie: &str) { + let (tx, rx) = ipc::channel().unwrap(); + resource_mgr.send(ControlMsg::GetCookiesForUrl(Url::parse(&*domain).unwrap(), + tx, + CookieSource::HTTP)).unwrap(); + if let Some(cookie_list) = rx.recv().unwrap() { + assert_eq!(cookie.to_string(), cookie_list); + } else { + assert_eq!(cookie.len(), 0); + } +} + +#[test] +fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_in_response() { + struct Factory; + + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, _: Url, _: Method) -> Result { + let content = <[_]>::to_vec("Yay!".as_bytes()); + let mut headers = Headers::new(); + headers.set_raw("set-cookie", vec![b"mozillaIs=theBest".to_vec()]); + Ok(MockRequest::new(RequestType::WithHeaders(content, headers))) + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + assert_cookie_for_domain(&resource_mgr, "http://mozilla.com", ""); + + let load_data = LoadData::new(url.clone(), None); + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + let _ = load::(load_data, resource_mgr.clone(), None, hsts_list, &Factory); + + assert_cookie_for_domain(&resource_mgr, "http://mozilla.com", "mozillaIs=theBest"); +} + +#[test] +fn test_load_sets_requests_cookies_header_for_url_by_getting_cookies_from_the_resource_manager() { + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + resource_mgr.send(ControlMsg::SetCookiesForUrl(Url::parse("http://mozilla.com").unwrap(), + "mozillaIs=theBest".to_string(), + CookieSource::HTTP)).unwrap(); + + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); + + let mut cookie = Headers::new(); + cookie.set_raw("Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIs=theBest".as_bytes())]); + + let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); + + let _ = load::(load_data.clone(), resource_mgr, None, 
hsts_list, &AssertMustHaveHeadersRequestFactory { + expected_headers: cookie, + body: <[_]>::to_vec(&*load_data.data.unwrap()) + }); +} + #[test] fn test_load_sets_content_length_to_length_of_request_body() { let content = "This is a request body"; From c093ce8a5a73ea405745c8eb1c0b3554175cf645 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 17:22:45 +1000 Subject: [PATCH 12/43] Only use the resource manager's HSTS list. Simplifies a bunch of stuff. --- components/net/http_loader.rs | 34 ++++++++--------- components/net/resource_task.rs | 48 ++++++++++++------------ components/net_traits/lib.rs | 1 + tests/unit/net/http_loader.rs | 66 ++++++++++++++++++++------------- 4 files changed, 81 insertions(+), 68 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 8bd03e86e54..d79f90e1d06 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -9,7 +9,7 @@ use net_traits::ProgressMsg::{Payload, Done}; use net_traits::hosts::replace_hosts; use net_traits::{ControlMsg, CookieSource, LoadData, Metadata, LoadConsumer, IncludeSubdomains}; use resource_task::{start_sending_opt, start_sending_sniffed_opt}; -use hsts::{HSTSList, secure_url}; +use hsts::secure_url; use file_loader; use file_loader; @@ -37,7 +37,6 @@ use std::collections::HashSet; use std::error::Error; use std::io::{self, Read, Write}; use std::sync::Arc; -use std::sync::Mutex; use std::sync::mpsc::{Sender, channel}; use util::task::spawn_named; use util::resource_files::resources_dir_path; @@ -52,12 +51,11 @@ use uuid; use std::fs::File; pub fn factory(resource_mgr_chan: IpcSender, - devtools_chan: Option>, - hsts_list: Arc>) + devtools_chan: Option>) -> Box) + Send> { box move |load_data, senders, classifier| { spawn_named(format!("http_loader for {}", load_data.url.serialize()), - move || load_for_consumer(load_data, senders, classifier, resource_mgr_chan, devtools_chan, hsts_list)) + move || load_for_consumer(load_data, senders, classifier, resource_mgr_chan, devtools_chan)) } } @@ -89,15 +87,6 @@ fn read_block(reader: &mut R) -> Result { } } -fn request_must_be_secured(hsts_list: &HSTSList, url: &Url) -> bool { - match url.domain() { - Some(ref h) => { - hsts_list.is_host_secure(h) - }, - _ => false - } -} - fn inner_url(url: &Url) -> Url { let inner_url = url.non_relative_scheme_data().unwrap(); Url::parse(inner_url).unwrap() @@ -107,10 +96,9 @@ fn load_for_consumer(load_data: LoadData, start_chan: LoadConsumer, classifier: Arc, resource_mgr_chan: IpcSender, - devtools_chan: Option>, - hsts_list: Arc>) { + devtools_chan: Option>) { - match load::(load_data, resource_mgr_chan, devtools_chan, hsts_list, &NetworkHttpRequestFactory) { + match load::(load_data, resource_mgr_chan, devtools_chan, &NetworkHttpRequestFactory) { Err(LoadError::UnsupportedScheme(url)) => { let s = format!("{} request, but we don't support that scheme", &*url.scheme); send_error(url, s, start_chan) @@ -320,10 +308,18 @@ fn set_cookies_from_response(url: Url, response: &HttpResponse, resource_mgr_cha } } +fn request_must_be_secured(url: &Url, resource_mgr_chan: &IpcSender) -> bool { + let (tx, rx) = ipc::channel().unwrap(); + resource_mgr_chan.send( + ControlMsg::GetHostMustBeSecured(url.domain().unwrap().to_string(), tx) + ).unwrap(); + + rx.recv().unwrap() +} + pub fn load(mut load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, - hsts_list: Arc>, request_factory: &HttpRequestFactory) -> Result<(Box, Metadata), LoadError> where A: HttpRequest + 'static { // 
FIXME: At the time of writing this FIXME, servo didn't have any central @@ -352,7 +348,7 @@ pub fn load(mut load_data: LoadData, loop { iters = iters + 1; - if &*url.scheme != "https" && request_must_be_secured(&hsts_list.lock().unwrap(), &url) { + if &*url.scheme != "https" && request_must_be_secured(&url, &resource_mgr_chan) { info!("{} is in the strict transport security list, requesting secure host", url); url = secure_url(&url); } diff --git a/components/net/resource_task.rs b/components/net/resource_task.rs index 62566f71c70..d232db51bff 100644 --- a/components/net/resource_task.rs +++ b/components/net/resource_task.rs @@ -29,7 +29,6 @@ use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; use std::borrow::ToOwned; use std::boxed::FnBox; use std::sync::Arc; -use std::sync::Mutex; use std::sync::mpsc::{channel, Sender}; pub enum ProgressSender { @@ -161,23 +160,26 @@ impl ResourceChannelManager { fn start(&mut self) { loop { match self.from_client.recv().unwrap() { - ControlMsg::Load(load_data, consumer) => { - self.resource_manager.load(load_data, consumer) - } - ControlMsg::SetCookiesForUrl(request, cookie_list, source) => { - self.resource_manager.set_cookies_for_url(request, cookie_list, source) - } - ControlMsg::GetCookiesForUrl(url, consumer, source) => { - consumer.send(self.resource_manager.cookie_storage.cookies_for_url(&url, source)).unwrap(); - } - ControlMsg::SetHSTSEntryForHost(host, include_subdomains, max_age) => { - if let Some(entry) = HSTSEntry::new(host, include_subdomains, Some(max_age)) { - self.resource_manager.add_hsts_entry(entry) - } - } - ControlMsg::Exit => { - break - } + ControlMsg::Load(load_data, consumer) => { + self.resource_manager.load(load_data, consumer) + } + ControlMsg::SetCookiesForUrl(request, cookie_list, source) => { + self.resource_manager.set_cookies_for_url(request, cookie_list, source) + } + ControlMsg::GetCookiesForUrl(url, consumer, source) => { + consumer.send(self.resource_manager.cookie_storage.cookies_for_url(&url, source)).unwrap(); + } + ControlMsg::SetHSTSEntryForHost(host, include_subdomains, max_age) => { + if let Some(entry) = HSTSEntry::new(host, include_subdomains, Some(max_age)) { + self.resource_manager.add_hsts_entry(entry) + } + } + ControlMsg::GetHostMustBeSecured(host, consumer) => { + consumer.send(self.resource_manager.is_host_sts(&*host)).unwrap(); + } + ControlMsg::Exit => { + break + } } } } @@ -189,7 +191,7 @@ pub struct ResourceManager { resource_task: IpcSender, mime_classifier: Arc, devtools_chan: Option>, - hsts_list: Arc> + hsts_list: HSTSList } impl ResourceManager { @@ -203,7 +205,7 @@ impl ResourceManager { resource_task: resource_task, mime_classifier: Arc::new(MIMEClassifier::new()), devtools_chan: devtools_channel, - hsts_list: Arc::new(Mutex::new(hsts_list)) + hsts_list: hsts_list } } } @@ -221,11 +223,11 @@ impl ResourceManager { } pub fn add_hsts_entry(&mut self, entry: HSTSEntry) { - self.hsts_list.lock().unwrap().push(entry); + self.hsts_list.push(entry); } pub fn is_host_sts(&self, host: &str) -> bool { - self.hsts_list.lock().unwrap().is_host_secure(host) + self.hsts_list.is_host_secure(host) } fn load(&mut self, mut load_data: LoadData, consumer: LoadConsumer) { @@ -241,7 +243,7 @@ impl ResourceManager { let loader = match &*load_data.url.scheme { "file" => from_factory(file_loader::factory), "http" | "https" | "view-source" => - http_loader::factory(self.resource_task.clone(), self.devtools_chan.clone(), self.hsts_list.clone()), + http_loader::factory(self.resource_task.clone(), 
self.devtools_chan.clone()), "data" => from_factory(data_loader::factory), "about" => from_factory(about_loader::factory), _ => { diff --git a/components/net_traits/lib.rs b/components/net_traits/lib.rs index e4a93f56673..3f9db2a7ebc 100644 --- a/components/net_traits/lib.rs +++ b/components/net_traits/lib.rs @@ -161,6 +161,7 @@ pub enum ControlMsg { GetCookiesForUrl(Url, IpcSender>, CookieSource), /// Store a domain's STS information SetHSTSEntryForHost(String, IncludeSubdomains, u64), + GetHostMustBeSecured(String, IpcSender), Exit } diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 25662619cb8..a787f46e3f5 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -6,10 +6,8 @@ use net::http_loader::{load, LoadError, HttpRequestFactory, HttpRequest, HttpRes use net::resource_task::new_resource_task; use net_traits::{ResourceTask, ControlMsg, CookieSource}; use url::Url; -use std::sync::{Arc, Mutex}; use ipc_channel::ipc; use net_traits::LoadData; -use net::hsts::HSTSList; use hyper::method::Method; use hyper::http::RawStatus; use hyper::status::StatusCode; @@ -190,6 +188,34 @@ fn assert_cookie_for_domain(resource_mgr: &ResourceTask, domain: &str, cookie: & } } +#[test] +fn test_load_adds_host_to_sts_list_when_url_is_https_and_sts_headers_are_present() { + struct Factory; + + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, _: Url, _: Method) -> Result { + let content = <[_]>::to_vec("Yay!".as_bytes()); + let mut headers = Headers::new(); + headers.set_raw("Strict-Transport-Security", vec![b"max-age=31536000".to_vec()]); + Ok(MockRequest::new(RequestType::WithHeaders(content, headers))) + } + } + + let url = Url::parse("https://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + + let load_data = LoadData::new(url.clone(), None); + + let _ = load::(load_data, resource_mgr.clone(), None, &Factory); + + let (tx, rx) = ipc::channel().unwrap(); + resource_mgr.send(ControlMsg::GetHostMustBeSecured("mozilla.com".to_string(), tx)).unwrap(); + + assert!(rx.recv().unwrap()); +} + #[test] fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_in_response() { struct Factory; @@ -210,9 +236,8 @@ fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_ assert_cookie_for_domain(&resource_mgr, "http://mozilla.com", ""); let load_data = LoadData::new(url.clone(), None); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - let _ = load::(load_data, resource_mgr.clone(), None, hsts_list, &Factory); + let _ = load::(load_data, resource_mgr.clone(), None, &Factory); assert_cookie_for_domain(&resource_mgr, "http://mozilla.com", "mozillaIs=theBest"); } @@ -231,9 +256,7 @@ fn test_load_sets_requests_cookies_header_for_url_by_getting_cookies_from_the_re let mut cookie = Headers::new(); cookie.set_raw("Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIs=theBest".as_bytes())]); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - - let _ = load::(load_data.clone(), resource_mgr, None, hsts_list, &AssertMustHaveHeadersRequestFactory { + let _ = load::(load_data.clone(), resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: cookie, body: <[_]>::to_vec(&*load_data.data.unwrap()) }); @@ -251,9 +274,7 @@ fn test_load_sets_content_length_to_length_of_request_body() { let mut content_len_headers= Headers::new(); content_len_headers.set_raw("Content-Length".to_owned(), 
vec![<[_]>::to_vec(&*format!("{}", content.len()).as_bytes())]); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - - let _ = load::(load_data.clone(), resource_mgr, None, hsts_list, &AssertMustHaveHeadersRequestFactory { + let _ = load::(load_data.clone(), resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: content_len_headers, body: <[_]>::to_vec(&*load_data.data.unwrap()) }); @@ -268,9 +289,8 @@ fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { let resource_mgr = new_resource_task(None, None); let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - let _ = load::(load_data, resource_mgr, None, hsts_list, &AssertMustHaveHeadersRequestFactory { + let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: accept_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }); @@ -285,9 +305,8 @@ fn test_load_sets_default_accept_encoding_to_gzip_and_deflate() { let resource_mgr = new_resource_task(None, None); let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - let _ = load::(load_data, resource_mgr, None, hsts_list, &AssertMustHaveHeadersRequestFactory { + let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: accept_encoding_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }); @@ -314,9 +333,8 @@ fn test_load_errors_when_there_a_redirect_loop() { let url = Url::parse("http://mozilla.com").unwrap(); let resource_mgr = new_resource_task(None, None); let load_data = LoadData::new(url.clone(), None); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load::(load_data, resource_mgr, None, hsts_list, &Factory) { + match load::(load_data, resource_mgr, None, &Factory) { Err(LoadError::InvalidRedirect(_, msg)) => { assert_eq!(msg, "redirect loop"); }, @@ -343,9 +361,8 @@ fn test_load_errors_when_there_is_too_many_redirects() { let url = Url::parse("http://mozilla.com").unwrap(); let resource_mgr = new_resource_task(None, None); let load_data = LoadData::new(url.clone(), None); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load::(load_data, resource_mgr, None, hsts_list, &Factory) { + match load::(load_data, resource_mgr, None, &Factory) { Err(LoadError::MaxRedirects(url)) => { assert_eq!(url.domain().unwrap(), "mozilla.com") }, @@ -380,9 +397,8 @@ fn test_load_follows_a_redirect() { let url = Url::parse("http://mozilla.com").unwrap(); let resource_mgr = new_resource_task(None, None); let load_data = LoadData::new(url.clone(), None); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load::(load_data, resource_mgr, None, hsts_list, &Factory) { + match load::(load_data, resource_mgr, None, &Factory) { Err(_) => panic!("expected to follow a redirect"), Ok((mut r, _)) => { let response = read_response(&mut *r); @@ -404,11 +420,10 @@ impl HttpRequestFactory for DontConnectFactory { #[test] fn test_load_errors_when_scheme_is_not_http_or_https() { let url = Url::parse("ftp://not-supported").unwrap(); - let (cookies_chan, _) = ipc::channel().unwrap(); + let resource_mgr = new_resource_task(None, None); let load_data = LoadData::new(url.clone(), None); - let hsts_list = 
Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load::(load_data, cookies_chan, None, hsts_list, &DontConnectFactory) { + match load::(load_data, resource_mgr, None, &DontConnectFactory) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } @@ -417,11 +432,10 @@ fn test_load_errors_when_scheme_is_not_http_or_https() { #[test] fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_https() { let url = Url::parse("view-source:ftp://not-supported").unwrap(); - let (cookies_chan, _) = ipc::channel().unwrap(); + let resource_mgr = new_resource_task(None, None); let load_data = LoadData::new(url.clone(), None); - let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() })); - match load::(load_data, cookies_chan, None, hsts_list, &DontConnectFactory) { + match load::(load_data, resource_mgr, None, &DontConnectFactory) { Err(LoadError::UnsupportedScheme(_)) => {} _ => panic!("expected ftp scheme to be unsupported") } From 5a60fdf4ca9218137667af91629b34d44c36d909 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 17:32:01 +1000 Subject: [PATCH 13/43] Moves STS update code to a function --- components/net/http_loader.rs | 49 +++++++++++++++++++---------------- tests/unit/net/http_loader.rs | 28 ++++++++++++++++++++ 2 files changed, 54 insertions(+), 23 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index d79f90e1d06..c6546dabba5 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -317,6 +317,31 @@ fn request_must_be_secured(url: &Url, resource_mgr_chan: &IpcSender) rx.recv().unwrap() } +#[inline(always)] +fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mgr_chan: &IpcSender) { + if url.scheme == "https" { + if let Some(header) = response.headers().get::() { + if let Some(host) = url.domain() { + info!("adding host {} to the strict transport security list", host); + info!("- max-age {}", header.max_age); + + let include_subdomains = if header.include_subdomains { + info!("- includeSubdomains"); + IncludeSubdomains::Included + } else { + IncludeSubdomains::NotIncluded + }; + + resource_mgr_chan.send( + ControlMsg::SetHSTSEntryForHost( + host.to_string(), include_subdomains, header.max_age + ) + ).unwrap(); + } + } + } +} + pub fn load(mut load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, @@ -420,7 +445,6 @@ pub fn load(mut load_data: LoadData, net_event))).unwrap(); } - // Dump headers, but only do the iteration if info!() is enabled. 
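// A small sketch, not part of the patch: update_sts_list_from_response above only
// records an entry when the response was fetched over https, so a plain-http response
// cannot force an upgrade; once an entry exists, the next pass through the redirect
// loop asks request_must_be_secured and rewrites the URL via secure_url. The tests in
// this series inspect the list through the same control message; "mozilla.com" is just
// the host those tests happen to use.
fn host_is_now_upgraded(resource_mgr_chan: &IpcSender<ControlMsg>) -> bool {
    let (tx, rx) = ipc::channel().unwrap();
    resource_mgr_chan.send(ControlMsg::GetHostMustBeSecured("mozilla.com".to_string(), tx)).unwrap();
    rx.recv().unwrap()
}
// End of sketch.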
info!("got HTTP response {}, headers:", response.status()); if log_enabled!(log::LogLevel::Info) { for header in response.headers().iter() { @@ -429,28 +453,7 @@ pub fn load(mut load_data: LoadData, } set_cookies_from_response(doc_url.clone(), &response, &resource_mgr_chan); - - if url.scheme == "https" { - if let Some(header) = response.headers().get::() { - if let Some(host) = url.domain() { - info!("adding host {} to the strict transport security list", host); - info!("- max-age {}", header.max_age); - - let include_subdomains = if header.include_subdomains { - info!("- includeSubdomains"); - IncludeSubdomains::Included - } else { - IncludeSubdomains::NotIncluded - }; - - resource_mgr_chan.send( - ControlMsg::SetHSTSEntryForHost( - host.to_string(), include_subdomains, header.max_age - ) - ).unwrap(); - } - } - } + update_sts_list_from_response(&url, &response, &resource_mgr_chan); // --- Loop if there's a redirect if response.status().class() == StatusClass::Redirection { diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index a787f46e3f5..79f5c29a8ba 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -188,6 +188,34 @@ fn assert_cookie_for_domain(resource_mgr: &ResourceTask, domain: &str, cookie: & } } +#[test] +fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_are_present() { + struct Factory; + + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, _: Url, _: Method) -> Result { + let content = <[_]>::to_vec("Yay!".as_bytes()); + let mut headers = Headers::new(); + headers.set_raw("Strict-Transport-Security", vec![b"max-age=31536000".to_vec()]); + Ok(MockRequest::new(RequestType::WithHeaders(content, headers))) + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + + let load_data = LoadData::new(url.clone(), None); + + let _ = load::(load_data, resource_mgr.clone(), None, &Factory); + + let (tx, rx) = ipc::channel().unwrap(); + resource_mgr.send(ControlMsg::GetHostMustBeSecured("mozilla.com".to_string(), tx)).unwrap(); + + assert!(!rx.recv().unwrap()); +} + #[test] fn test_load_adds_host_to_sts_list_when_url_is_https_and_sts_headers_are_present() { struct Factory; From d6e1fab2786189ae7d1bd8fde405592b31baac46 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 21:37:59 +1000 Subject: [PATCH 14/43] Uses hyper's ContentEncoding instead of get_raw --- components/net/http_loader.rs | 71 ++++++++++++++++++----------------- 1 file changed, 37 insertions(+), 34 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index c6546dabba5..fa0d4b1b617 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -23,7 +23,7 @@ use hyper::header::StrictTransportSecurity; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, Quality, QualityItem}; use hyper::client::{Request, Response}; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, StrictTransportSecurity}; -use hyper::header::{Quality, QualityItem, Headers}; +use hyper::header::{Quality, QualityItem, Headers, ContentEncoding, Encoding}; use hyper::Error as HttpError; use hyper::method::Method; use hyper::http::RawStatus; @@ -109,6 +109,9 @@ fn load_for_consumer(load_data: LoadData, Err(LoadError::MaxRedirects(url)) => { send_error(url, "too many redirects".to_string(), start_chan) } + 
Err(LoadError::UnsupportedContentEncodings(url)) => { + send_error(url, "no valid content encodings supported".to_string(), start_chan) + } Err(LoadError::Cors(url, msg)) | Err(LoadError::InvalidRedirect(url, msg)) | Err(LoadError::Decoding(url, msg)) => { @@ -259,7 +262,8 @@ pub enum LoadError { Ssl(Url, String), InvalidRedirect(Url, String), Decoding(Url, String), - MaxRedirects(Url) + MaxRedirects(Url), + UnsupportedContentEncodings(Url) } #[inline(always)] @@ -514,21 +518,6 @@ pub fn load(mut load_data: LoadData, metadata.headers = Some(adjusted_headers); metadata.status = Some(response.status_raw().clone()); - let mut encoding_str: Option = None; - - //TODO: This is now in hyper, just need to implement - //FIXME: Implement Content-Encoding Header https://github.com/hyperium/hyper/issues/391 - if let Some(encodings) = response.headers().get_raw("content-encoding") { - for encoding in encodings.iter() { - if let Ok(encodings) = String::from_utf8(encoding.clone()) { - if encodings == "gzip" || encodings == "deflate" { - encoding_str = Some(encodings); - break; - } - } - } - } - // --- Tell devtools that we got a response // Send an HttpResponse message to devtools with the corresponding request_id // TODO: Send this message only if load_data has a pipeline_id that is not None @@ -542,27 +531,41 @@ pub fn load(mut load_data: LoadData, net_event_response))).unwrap(); } - // --- Stream the results depending on the encoding type. - match encoding_str { - Some(encoding) => { - if encoding == "gzip" { - let result = GzDecoder::new(response); - match result { - Ok(response_decoding) => { - return Ok((Box::new(response_decoding), metadata)); - } - Err(err) => { - return Err(LoadError::Decoding(metadata.final_url, err.to_string())); - } - } - } else if encoding == "deflate" { - let response_decoding = DeflateDecoder::new(response); - return Ok((Box::new(response_decoding), metadata)); + let selected_content_encoding = match response.headers().get::() { + Some(&ContentEncoding(ref encodings)) => { + if encodings.contains(&Encoding::Gzip) { + Some(Encoding::Gzip) + } else if encodings.contains(&Encoding::Deflate) { + Some(Encoding::Deflate) + } else { + return Err(LoadError::UnsupportedContentEncodings(url.clone())) } - }, + } + None => { + None + } + }; + + match selected_content_encoding { + Some(Encoding::Gzip) => { + let result = GzDecoder::new(response); + match result { + Ok(response_decoding) => { + return Ok((Box::new(response_decoding), metadata)); + } + Err(err) => { + return Err(LoadError::Decoding(metadata.final_url, err.to_string())); + } + } + } + Some(Encoding::Deflate) => { + let response_decoding = DeflateDecoder::new(response); + return Ok((Box::new(response_decoding), metadata)); + } None => { return Ok((Box::new(response), metadata)); } + _ => return Err(LoadError::UnsupportedContentEncodings(url.clone())) } } } From ea2d7f4dd5a00097bee8727c3a165c5663ca92a3 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 21:40:41 +1000 Subject: [PATCH 15/43] Uses the correct url when reporting http errors --- components/net/http_loader.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index fa0d4b1b617..15fe660b62e 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -231,15 +231,16 @@ impl HttpRequest for WrappedHttpRequest { } fn send(self, body: &Option>) -> Result { + let url = self.request.url.clone(); let mut request_writer = match self.request.start() { 
Ok(streaming) => streaming, - Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + Err(e) => return Err(LoadError::Connection(url, e.description().to_string())) }; if let Some(ref data) = *body { match request_writer.write_all(&data) { Err(e) => { - return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + return Err(LoadError::Connection(url, e.description().to_string())) } _ => {} } @@ -247,7 +248,7 @@ impl HttpRequest for WrappedHttpRequest { let response = match request_writer.send() { Ok(w) => w, - Err(e) => return Err(LoadError::Connection(Url::parse("http://example.com").unwrap(), e.description().to_string())) + Err(e) => return Err(LoadError::Connection(url, e.description().to_string())) }; Ok(WrappedHttpResponse { response: response }) From 15d82091c5851abdcb1e76347706d38a48924447 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 22:04:14 +1000 Subject: [PATCH 16/43] Fixes tidy overlong lines --- components/net/http_loader.rs | 13 ++++++++++--- tests/unit/net/http_loader.rs | 28 ++++++++++++++++++---------- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 15fe660b62e..3bda8705cc5 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -22,7 +22,8 @@ use hyper::client::Request; use hyper::header::StrictTransportSecurity; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, Quality, QualityItem}; use hyper::client::{Request, Response}; -use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, StrictTransportSecurity}; +use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host}; +use hyper::header::{Location, qitem, StrictTransportSecurity}; use hyper::header::{Quality, QualityItem, Headers, ContentEncoding, Encoding}; use hyper::Error as HttpError; use hyper::method::Method; @@ -199,7 +200,10 @@ impl HttpRequestFactory for NetworkHttpRequestFactory { return Err( LoadError::Ssl( url.clone(), - format!("ssl error {:?}: {:?} {:?}", io_error.kind(), io_error.description(), io_error.cause()) + format!("ssl error {:?}: {:?} {:?}", + io_error.kind(), + io_error.description(), + io_error.cause()) ) ) }, @@ -468,7 +472,10 @@ pub fn load(mut load_data: LoadData, match load_data.cors { Some(ref c) => { if c.preflight { - return Err(LoadError::Cors(url, "Preflight fetch inconsistent with main fetch".to_string())); + return Err( + LoadError::Cors( + url, + "Preflight fetch inconsistent with main fetch".to_string())); } else { // XXXManishearth There are some CORS-related steps here, // but they don't seem necessary until credentials are implemented diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 79f5c29a8ba..1ffab3fdd37 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -284,10 +284,12 @@ fn test_load_sets_requests_cookies_header_for_url_by_getting_cookies_from_the_re let mut cookie = Headers::new(); cookie.set_raw("Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIs=theBest".as_bytes())]); - let _ = load::(load_data.clone(), resource_mgr, None, &AssertMustHaveHeadersRequestFactory { - expected_headers: cookie, - body: <[_]>::to_vec(&*load_data.data.unwrap()) - }); + let _ = load::( + load_data.clone(), resource_mgr, None, + &AssertMustHaveHeadersRequestFactory { + expected_headers: cookie, + body: 
<[_]>::to_vec(&*load_data.data.unwrap()) + }); } #[test] @@ -300,18 +302,24 @@ fn test_load_sets_content_length_to_length_of_request_body() { load_data.data = Some(<[_]>::to_vec(content.as_bytes())); let mut content_len_headers= Headers::new(); - content_len_headers.set_raw("Content-Length".to_owned(), vec![<[_]>::to_vec(&*format!("{}", content.len()).as_bytes())]); + content_len_headers.set_raw( + "Content-Length".to_owned(), vec![<[_]>::to_vec(&*format!("{}", content.len()).as_bytes())] + ); - let _ = load::(load_data.clone(), resource_mgr, None, &AssertMustHaveHeadersRequestFactory { - expected_headers: content_len_headers, - body: <[_]>::to_vec(&*load_data.data.unwrap()) - }); + let _ = load::( + load_data.clone(), resource_mgr, None, + &AssertMustHaveHeadersRequestFactory { + expected_headers: content_len_headers, + body: <[_]>::to_vec(&*load_data.data.unwrap()) + }); } #[test] fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { let mut accept_headers = Headers::new(); - accept_headers.set_raw("Accept".to_owned(), vec![b"text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8".to_vec()]); + accept_headers.set_raw( + "Accept".to_owned(), vec![b"text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8".to_vec()] + ); let url = Url::parse("http://mozilla.com").unwrap(); let resource_mgr = new_resource_task(None, None); From 3c756d254b7a4410ecad9126dc23e496df7771ff Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sat, 15 Aug 2015 22:10:52 +1000 Subject: [PATCH 17/43] Avoids sending a request body on a redirect --- components/net/http_loader.rs | 22 +++++++++---- tests/unit/net/http_loader.rs | 59 +++++++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+), 6 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 3bda8705cc5..1fbe2ec26e8 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -433,12 +433,22 @@ pub fn load(mut load_data: LoadData, info!("{:?}", load_data.data); } - // TODO: Avoid automatically sending request body if a redirect has occurred. - if let Some(ref data) = load_data.data { - req.headers_mut().set(ContentLength(data.len() as u64)); - } - - let response = try!(req.send(&load_data.data)); + // Avoid automatically sending request body if a redirect has occurred. + // + // TODO - This is the wrong behaviour according to the RFC. However, I'm not + // sure how much "correctness" vs. real-world is important in this case. 
+ // + // http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html + let is_redirected_request = iters != 1; + let response = match load_data.data { + Some(ref data) if !is_redirected_request => { + req.headers_mut().set(ContentLength(data.len() as u64)); + try!(req.send(&load_data.data)) + } + _ => { + try!(req.send(&None)) + } + }; // --- Tell devtools we've made a request // Send an HttpRequest message to devtools with a unique request_id diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 1ffab3fdd37..564b87de79f 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -188,6 +188,65 @@ fn assert_cookie_for_domain(resource_mgr: &ResourceTask, domain: &str, cookie: & } } +struct AssertMustHaveBodyRequest { + expected_body: Option>, + headers: Headers, + t: RequestType +} + +impl AssertMustHaveBodyRequest { + fn new(t: RequestType, expected_body: Option>) -> Self { + AssertMustHaveBodyRequest { expected_body: expected_body, headers: Headers::new(), t: t } + } +} + +impl HttpRequest for AssertMustHaveBodyRequest { + type R=MockResponse; + + fn headers_mut(&mut self) -> &mut Headers { &mut self.headers} + + fn send(self, body: &Option>) -> Result { + assert_eq!(self.expected_body, *body); + + response_for_request_type(self.t) + } +} + +#[test] +fn test_load_doesnt_send_request_body_on_any_redirect() { + struct Factory; + + impl HttpRequestFactory for Factory { + type R=AssertMustHaveBodyRequest; + + fn create(&self, url: Url, _: Method) -> Result { + if url.domain().unwrap() == "mozilla.com" { + Ok( + AssertMustHaveBodyRequest::new( + RequestType::Redirect("http://mozilla.org".to_string()), + Some(<[_]>::to_vec("Body on POST!".as_bytes())) + ) + ) + } else { + Ok( + AssertMustHaveBodyRequest::new( + RequestType::Text(<[_]>::to_vec("Yay!".as_bytes())), + None + ) + ) + } + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = Some(<[_]>::to_vec("Body on POST!".as_bytes())); + + + let _ = load::(load_data, resource_mgr, None, &Factory); +} + #[test] fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_are_present() { struct Factory; From 879b058be20ef0e115dfb3ac1c0d651a07daadad Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 16 Aug 2015 12:41:35 +1000 Subject: [PATCH 18/43] Returns LoadResponse struct instead of a tuple Still boxes the reader, but at least is a step in the right direction --- components/net/http_loader.rs | 23 +++++++++++++++++------ tests/unit/net/http_loader.rs | 4 ++-- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 1fbe2ec26e8..4f60ddbc548 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -128,8 +128,8 @@ fn load_for_consumer(load_data: LoadData, file_loader::factory(load_data, start_chan, classifier) } - Ok((mut response_reader, metadata)) => { - send_data(&mut response_reader, start_chan, metadata, classifier) + Ok(mut load_response) => { + send_data(&mut load_response.reader, start_chan, load_response.metadata, classifier) } } } @@ -351,11 +351,22 @@ fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mg } } +pub struct LoadResponse { + pub reader: Box, + pub metadata: Metadata +} + +impl LoadResponse { + fn new(r: Box, m: Metadata) -> LoadResponse { + LoadResponse { reader: r, metadata: m } + } +} + pub fn load(mut 
load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, request_factory: &HttpRequestFactory) - -> Result<(Box, Metadata), LoadError> where A: HttpRequest + 'static { + -> Result where A: HttpRequest + 'static { // FIXME: At the time of writing this FIXME, servo didn't have any central // location for configuration. If you're reading this and such a // repository DOES exist, please update this constant to use it. @@ -569,7 +580,7 @@ pub fn load(mut load_data: LoadData, let result = GzDecoder::new(response); match result { Ok(response_decoding) => { - return Ok((Box::new(response_decoding), metadata)); + return Ok(LoadResponse::new(Box::new(response_decoding), metadata)); } Err(err) => { return Err(LoadError::Decoding(metadata.final_url, err.to_string())); @@ -578,10 +589,10 @@ pub fn load(mut load_data: LoadData, } Some(Encoding::Deflate) => { let response_decoding = DeflateDecoder::new(response); - return Ok((Box::new(response_decoding), metadata)); + return Ok(LoadResponse::new(Box::new(response_decoding), metadata)); } None => { - return Ok((Box::new(response), metadata)); + return Ok(LoadResponse::new(Box::new(response), metadata)); } _ => return Err(LoadError::UnsupportedContentEncodings(url.clone())) } diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 564b87de79f..b820ddd8b37 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -495,8 +495,8 @@ fn test_load_follows_a_redirect() { match load::(load_data, resource_mgr, None, &Factory) { Err(_) => panic!("expected to follow a redirect"), - Ok((mut r, _)) => { - let response = read_response(&mut *r); + Ok(mut lr) => { + let response = read_response(&mut *lr.reader); assert_eq!(response, "Yay!".to_string()); } } From 6064ee875687b0d61a76ff5e1c725649a2212260 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 16 Aug 2015 14:44:53 +1000 Subject: [PATCH 19/43] Eliminates need to box response reader I don't know how idiomatic this is for rust, but the only way I could think of to do this is with a union enum and generics. As the number of decoders should never be more than a few, this shouldn't really be a problem. 
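As a condensed sketch of the shape this introduces (the real patch implements Read on
LoadResponse, which pairs the enum with the response Metadata; the standalone version
below inlines that and spells out the generic parameters, using flate2's read-side
decoders):

    use std::io::{self, Read};
    use flate2::read::{DeflateDecoder, GzDecoder};

    // One variant per supported Content-Encoding; Plain wraps the undecoded
    // response reader.
    enum Decoders<R: Read> {
        Gzip(GzDecoder<R>),
        Deflate(DeflateDecoder<R>),
        Plain(R)
    }

    impl<R: Read> Read for Decoders<R> {
        fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
            // Matching on the variant is static dispatch, so the response body
            // no longer needs a Box<Read> allocation.
            match *self {
                Decoders::Gzip(ref mut r) => r.read(buf),
                Decoders::Deflate(ref mut r) => r.read(buf),
                Decoders::Plain(ref mut r) => r.read(buf)
            }
        }
    }

Since the set of encodings is small and fixed, adding a variant stays cheap and the
reader type remains concrete.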
--- components/net/http_loader.rs | 38 ++++++++++++++++++++++++++--------- tests/unit/net/http_loader.rs | 2 +- 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 4f60ddbc548..ee93ef2acdb 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -129,7 +129,8 @@ fn load_for_consumer(load_data: LoadData, } Ok(mut load_response) => { - send_data(&mut load_response.reader, start_chan, load_response.metadata, classifier) + let metadata = load_response.metadata.clone(); + send_data(&mut load_response, start_chan, metadata, classifier) } } } @@ -351,22 +352,39 @@ fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mg } } -pub struct LoadResponse { - pub reader: Box, +pub struct LoadResponse { + decoder: Decoders, pub metadata: Metadata } -impl LoadResponse { - fn new(r: Box, m: Metadata) -> LoadResponse { - LoadResponse { reader: r, metadata: m } +impl Read for LoadResponse { + #[inline] + fn read(&mut self, buf: &mut [u8]) -> io::Result { + match self.decoder { + Decoders::Gzip(ref mut d) => d.read(buf), + Decoders::Deflate(ref mut d) => d.read(buf), + Decoders::Plain(ref mut d) => d.read(buf) + } } } +impl LoadResponse { + fn new(m: Metadata, d: Decoders) -> LoadResponse { + LoadResponse { metadata: m, decoder: d } + } +} + +enum Decoders { + Gzip(GzDecoder), + Deflate(DeflateDecoder), + Plain(R) +} + pub fn load(mut load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, request_factory: &HttpRequestFactory) - -> Result where A: HttpRequest + 'static { + -> Result, LoadError> where A: HttpRequest + 'static { // FIXME: At the time of writing this FIXME, servo didn't have any central // location for configuration. If you're reading this and such a // repository DOES exist, please update this constant to use it. 
@@ -580,7 +598,7 @@ pub fn load(mut load_data: LoadData, let result = GzDecoder::new(response); match result { Ok(response_decoding) => { - return Ok(LoadResponse::new(Box::new(response_decoding), metadata)); + return Ok(LoadResponse::new(metadata, Decoders::Gzip(response_decoding))); } Err(err) => { return Err(LoadError::Decoding(metadata.final_url, err.to_string())); @@ -589,10 +607,10 @@ pub fn load(mut load_data: LoadData, } Some(Encoding::Deflate) => { let response_decoding = DeflateDecoder::new(response); - return Ok(LoadResponse::new(Box::new(response_decoding), metadata)); + return Ok(LoadResponse::new(metadata, Decoders::Deflate(response_decoding))); } None => { - return Ok(LoadResponse::new(Box::new(response), metadata)); + return Ok(LoadResponse::new(metadata, Decoders::Plain(response))); } _ => return Err(LoadError::UnsupportedContentEncodings(url.clone())) } diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index b820ddd8b37..3e61ec71ed0 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -496,7 +496,7 @@ fn test_load_follows_a_redirect() { match load::(load_data, resource_mgr, None, &Factory) { Err(_) => panic!("expected to follow a redirect"), Ok(mut lr) => { - let response = read_response(&mut *lr.reader); + let response = read_response(&mut lr); assert_eq!(response, "Yay!".to_string()); } } From 2eaac7e3f9fd712d6802f380b77b86c860bd43e7 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 16 Aug 2015 17:16:10 +1000 Subject: [PATCH 20/43] Adds tests for gzip/deflate response decoding --- components/net/http_loader.rs | 84 ++++++++++++++++++----------------- components/servo/Cargo.lock | 1 + tests/unit/net/Cargo.toml | 1 + tests/unit/net/http_loader.rs | 84 +++++++++++++++++++++++++++-------- tests/unit/net/lib.rs | 1 + 5 files changed, 112 insertions(+), 59 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index ee93ef2acdb..d4a6b10f7b0 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -110,9 +110,6 @@ fn load_for_consumer(load_data: LoadData, Err(LoadError::MaxRedirects(url)) => { send_error(url, "too many redirects".to_string(), start_chan) } - Err(LoadError::UnsupportedContentEncodings(url)) => { - send_error(url, "no valid content encodings supported".to_string(), start_chan) - } Err(LoadError::Cors(url, msg)) | Err(LoadError::InvalidRedirect(url, msg)) | Err(LoadError::Decoding(url, msg)) => { @@ -139,6 +136,23 @@ pub trait HttpResponse: Read { fn headers(&self) -> &Headers; fn status(&self) -> StatusCode; fn status_raw(&self) -> &RawStatus; + + fn content_encoding(&self) -> Option { + match self.headers().get::() { + Some(&ContentEncoding(ref encodings)) => { + if encodings.contains(&Encoding::Gzip) { + Some(Encoding::Gzip) + } else if encodings.contains(&Encoding::Deflate) { + Some(Encoding::Deflate) + } else { + // TODO: Is this the correct behaviour? 
+ None + } + } + + None => None + } + } } struct WrappedHttpResponse { @@ -268,8 +282,7 @@ pub enum LoadError { Ssl(Url, String), InvalidRedirect(Url, String), Decoding(Url, String), - MaxRedirects(Url), - UnsupportedContentEncodings(Url) + MaxRedirects(Url) } #[inline(always)] @@ -357,6 +370,7 @@ pub struct LoadResponse { pub metadata: Metadata } + impl Read for LoadResponse { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result { @@ -372,6 +386,29 @@ impl LoadResponse { fn new(m: Metadata, d: Decoders) -> LoadResponse { LoadResponse { metadata: m, decoder: d } } + + fn from_http_response(response: R, m: Metadata) -> Result, LoadError> { + match response.content_encoding() { + Some(Encoding::Gzip) => { + let result = GzDecoder::new(response); + match result { + Ok(response_decoding) => { + return Ok(LoadResponse::new(m, Decoders::Gzip(response_decoding))); + } + Err(err) => { + return Err(LoadError::Decoding(m.final_url, err.to_string())); + } + } + } + Some(Encoding::Deflate) => { + let response_decoding = DeflateDecoder::new(response); + return Ok(LoadResponse::new(m, Decoders::Deflate(response_decoding))); + } + _ => { + return Ok(LoadResponse::new(m, Decoders::Plain(response))); + } + } + } } enum Decoders { @@ -578,42 +615,7 @@ pub fn load(mut load_data: LoadData, net_event_response))).unwrap(); } - let selected_content_encoding = match response.headers().get::() { - Some(&ContentEncoding(ref encodings)) => { - if encodings.contains(&Encoding::Gzip) { - Some(Encoding::Gzip) - } else if encodings.contains(&Encoding::Deflate) { - Some(Encoding::Deflate) - } else { - return Err(LoadError::UnsupportedContentEncodings(url.clone())) - } - } - None => { - None - } - }; - - match selected_content_encoding { - Some(Encoding::Gzip) => { - let result = GzDecoder::new(response); - match result { - Ok(response_decoding) => { - return Ok(LoadResponse::new(metadata, Decoders::Gzip(response_decoding))); - } - Err(err) => { - return Err(LoadError::Decoding(metadata.final_url, err.to_string())); - } - } - } - Some(Encoding::Deflate) => { - let response_decoding = DeflateDecoder::new(response); - return Ok(LoadResponse::new(metadata, Decoders::Deflate(response_decoding))); - } - None => { - return Ok(LoadResponse::new(metadata, Decoders::Plain(response))); - } - _ => return Err(LoadError::UnsupportedContentEncodings(url.clone())) - } + return LoadResponse::from_http_response(response, metadata) } } diff --git a/components/servo/Cargo.lock b/components/servo/Cargo.lock index 3e880ad7a2e..f52014d7269 100644 --- a/components/servo/Cargo.lock +++ b/components/servo/Cargo.lock @@ -1053,6 +1053,7 @@ name = "net_tests" version = "0.0.1" dependencies = [ "cookie 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", "ipc-channel 0.1.0 (git+https://github.com/pcwalton/ipc-channel)", "net 0.0.1", diff --git a/tests/unit/net/Cargo.toml b/tests/unit/net/Cargo.toml index 62c72b975bc..fdd82f7fb6d 100644 --- a/tests/unit/net/Cargo.toml +++ b/tests/unit/net/Cargo.toml @@ -25,3 +25,4 @@ cookie = "0.1" hyper = "0.6" url = "0.2" time = "0.1" +flate2 = "0.2.0" diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 3e61ec71ed0..956a9b073e5 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -12,18 +12,19 @@ use hyper::method::Method; use hyper::http::RawStatus; use hyper::status::StatusCode; use 
hyper::header::{Headers, Location, ContentLength}; -use std::io::{self, Read}; -use std::cmp::{self}; +use std::io::{self, Write, Read, Cursor}; +use flate2::write::{GzEncoder, DeflateEncoder}; +use flate2::Compression; use std::borrow::Cow; fn respond_with(body: Vec) -> MockResponse { let mut headers = Headers::new(); - headers.set(ContentLength(body.len() as u64)); - - respond_with_headers(body, headers) + respond_with_headers(body, &mut headers) } -fn respond_with_headers(body: Vec, headers: Headers) -> MockResponse { +fn respond_with_headers(body: Vec, headers: &mut Headers) -> MockResponse { + headers.set(ContentLength(body.len() as u64)); + MockResponse::new( headers.clone(), StatusCode::Ok, @@ -40,7 +41,7 @@ fn read_response(reader: &mut Read) -> String { String::from_utf8(buf).unwrap() }, Ok(_) => "".to_string(), - _ => panic!("problem reading response") + Err(e) => panic!("problem reading response {}", e) } } @@ -48,23 +49,18 @@ struct MockResponse { h: Headers, sc: StatusCode, sr: RawStatus, - msg: Vec + msg: Cursor> } impl MockResponse { fn new(h: Headers, sc: StatusCode, sr: RawStatus, msg: Vec) -> MockResponse { - MockResponse { h: h, sc: sc, sr: sr, msg: msg } + MockResponse { h: h, sc: sc, sr: sr, msg: Cursor::new(msg) } } } impl Read for MockResponse { fn read(&mut self, buf: &mut [u8]) -> io::Result { - let buf_len = buf.len(); - for (a, b) in buf.iter_mut().zip(&self.msg[0 .. cmp::min(buf_len, self.msg.len())]) { - *a = *b - } - - Ok(cmp::min(buf.len(), self.msg.len())) + self.msg.read(buf) } } @@ -112,8 +108,8 @@ fn response_for_request_type(t: RequestType) -> Result RequestType::Text(b) => { Ok(respond_with(b)) }, - RequestType::WithHeaders(b, h) => { - Ok(respond_with_headers(b, h)) + RequestType::WithHeaders(b, mut h) => { + Ok(respond_with_headers(b, &mut h)) } } } @@ -212,6 +208,58 @@ impl HttpRequest for AssertMustHaveBodyRequest { } } +#[test] +fn test_load_should_decode_the_response_as_deflate_when_response_headers_have_content_encoding_deflate() { + struct Factory; + + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, _: Url, _: Method) -> Result { + let mut e = DeflateEncoder::new(Vec::new(), Compression::Default); + e.write(b"Yay!").unwrap(); + let encoded_content = e.finish().unwrap(); + + let mut headers = Headers::new(); + headers.set_raw("Content-Encoding", vec![b"deflate".to_vec()]); + Ok(MockRequest::new(RequestType::WithHeaders(encoded_content, headers))) + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let load_data = LoadData::new(url.clone(), None); + let mut response = load::(load_data, resource_mgr.clone(), None, &Factory).unwrap(); + + assert_eq!(read_response(&mut response), "Yay!"); +} + +#[test] +fn test_load_should_decode_the_response_as_gzip_when_response_headers_have_content_encoding_gzip() { + struct Factory; + + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, _: Url, _: Method) -> Result { + let mut e = GzEncoder::new(Vec::new(), Compression::Default); + e.write(b"Yay!").unwrap(); + let encoded_content = e.finish().unwrap(); + + let mut headers = Headers::new(); + headers.set_raw("Content-Encoding", vec![b"gzip".to_vec()]); + Ok(MockRequest::new(RequestType::WithHeaders(encoded_content, headers))) + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let load_data = LoadData::new(url.clone(), None); + let mut response = load::(load_data, 
resource_mgr.clone(), None, &Factory).unwrap(); + + assert_eq!(read_response(&mut response), "Yay!"); +} + #[test] fn test_load_doesnt_send_request_body_on_any_redirect() { struct Factory; @@ -494,7 +542,7 @@ fn test_load_follows_a_redirect() { let load_data = LoadData::new(url.clone(), None); match load::(load_data, resource_mgr, None, &Factory) { - Err(_) => panic!("expected to follow a redirect"), + Err(e) => panic!("expected to follow a redirect {:?}", e), Ok(mut lr) => { let response = read_response(&mut lr); assert_eq!(response, "Yay!".to_string()); diff --git a/tests/unit/net/lib.rs b/tests/unit/net/lib.rs index 203a554a5eb..8f7b0d1511f 100644 --- a/tests/unit/net/lib.rs +++ b/tests/unit/net/lib.rs @@ -11,6 +11,7 @@ extern crate url; extern crate util; extern crate time; extern crate hyper; +extern crate flate2; #[cfg(test)] mod cookie; #[cfg(test)] mod data_loader; From 04b7ce0afa7e2680b0d81d53bc75e75974e3312c Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 16 Aug 2015 17:34:34 +1000 Subject: [PATCH 21/43] Tests rewriting redirects of POST as GET --- components/net/http_loader.rs | 17 ++++++++++------- tests/unit/net/http_loader.rs | 26 ++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 7 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index d4a6b10f7b0..3abe0d89aec 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -417,7 +417,7 @@ enum Decoders { Plain(R) } -pub fn load(mut load_data: LoadData, +pub fn load(load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, request_factory: &HttpRequestFactory) @@ -433,6 +433,7 @@ pub fn load(mut load_data: LoadData, // specified in the hosts file. let mut url = replace_hosts(&load_data.url); let mut redirected_to = HashSet::new(); + let mut method = load_data.method.clone(); // If the URL is a view-source scheme then the scheme data contains the // real URL that should be used for which the source is to be viewed. @@ -488,11 +489,11 @@ pub fn load(mut load_data: LoadData, set_request_cookies(doc_url.clone(), &mut request_headers, &resource_mgr_chan); // --- Send the request - let mut req = try!(request_factory.create(url.clone(), load_data.method.clone())); + let mut req = try!(request_factory.create(url.clone(), method.clone())); *req.headers_mut() = request_headers; if log_enabled!(log::LogLevel::Info) { - info!("{}", load_data.method); + info!("{}", method); for header in req.headers_mut().iter() { info!(" - {}", header); } @@ -522,7 +523,7 @@ pub fn load(mut load_data: LoadData, let request_id = uuid::Uuid::new_v4().to_simple_string(); if let Some(ref chan) = devtools_chan { let net_event = NetworkEvent::HttpRequest(load_data.url.clone(), - load_data.method.clone(), + method.clone(), load_data.headers.clone(), load_data.data.clone()); chan.send(DevtoolsControlMsg::FromChrome( @@ -559,22 +560,24 @@ pub fn load(mut load_data: LoadData, } _ => {} } + let new_doc_url = match UrlParser::new().base_url(&doc_url).parse(&new_url) { Ok(u) => u, Err(e) => { return Err(LoadError::InvalidRedirect(doc_url, e.to_string())); } }; + info!("redirecting to {}", new_doc_url); url = replace_hosts(&new_doc_url); doc_url = new_doc_url; // According to https://tools.ietf.org/html/rfc7231#section-6.4.2, // historically UAs have rewritten POST->GET on 301 and 302 responses. 
- if load_data.method == Method::Post && + if method == Method::Post && (response.status() == StatusCode::MovedPermanently || response.status() == StatusCode::Found) { - load_data.method = Method::Get; + method = Method::Get; } if redirected_to.contains(&url) { @@ -593,8 +596,8 @@ pub fn load(mut load_data: LoadData, if viewing_source { adjusted_headers.set(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec![]))); } - let mut metadata: Metadata = Metadata::default(doc_url.clone()); + let mut metadata: Metadata = Metadata::default(doc_url.clone()); metadata.set_content_type(match adjusted_headers.get() { Some(&ContentType(ref mime)) => Some(mime), None => None diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 956a9b073e5..d928f6d1eaf 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -208,6 +208,32 @@ impl HttpRequest for AssertMustHaveBodyRequest { } } +#[test] +fn test_load_when_redirecting_from_a_post_should_rewrite_next_request_as_get() { + struct Factory; + + impl HttpRequestFactory for Factory { + type R=MockRequest; + + fn create(&self, url: Url, method: Method) -> Result { + if url.domain().unwrap() == "mozilla.com" { + assert_eq!(Method::Post, method); + Ok(MockRequest::new(RequestType::Redirect("http://mozilla.org".to_string()))) + } else { + assert_eq!(Method::Get, method); + Ok(MockRequest::new(RequestType::Text(<[_]>::to_vec("Yay!".as_bytes())))) + } + } + } + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task(None, None); + let mut load_data = LoadData::new(url.clone(), None); + load_data.method = Method::Post; + + let _ = load::(load_data, resource_mgr, None, &Factory); +} + #[test] fn test_load_should_decode_the_response_as_deflate_when_response_headers_have_content_encoding_deflate() { struct Factory; From 9322954f152fdec98c6419255afb207d4730ae4d Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 16 Aug 2015 17:50:37 +1000 Subject: [PATCH 22/43] Moves devtools messaging to functions --- components/net/http_loader.rs | 67 +++++++++++++++++++++++------------ 1 file changed, 45 insertions(+), 22 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 3abe0d89aec..6d83d34eaa8 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -331,6 +331,7 @@ fn set_cookies_from_response(url: Url, response: &HttpResponse, resource_mgr_cha } } +#[inline(always)] fn request_must_be_secured(url: &Url, resource_mgr_chan: &IpcSender) -> bool { let (tx, rx) = ipc::channel().unwrap(); resource_mgr_chan.send( @@ -417,6 +418,37 @@ enum Decoders { Plain(R) } +#[inline(always)] +fn send_request_to_devtools( + devtools_chan: Option>, request_id: String, + url: Url, method: Method, headers: Headers, body: Option>) { + + if let Some(ref chan) = devtools_chan { + let net_event = NetworkEvent::HttpRequest(url, method, headers, body); + chan.send(DevtoolsControlMsg::FromChrome( + ChromeToDevtoolsControlMsg::NetworkEvent( + request_id.clone(), + net_event + ) + ) + ).unwrap(); + } +} + +#[inline(always)] +fn send_response_to_devtools( + devtools_chan: Option>, request_id: String, + headers: Option, status: Option) { + if let Some(ref chan) = devtools_chan { + let net_event_response = + NetworkEvent::HttpResponse(headers.clone(), + status.clone(), + None); + chan.send(DevtoolsControlMsg::FromChrome( + ChromeToDevtoolsControlMsg::NetworkEvent(request_id, + net_event_response))).unwrap(); + } +} pub fn load(load_data: LoadData, 
resource_mgr_chan: IpcSender, devtools_chan: Option>, @@ -488,7 +520,6 @@ pub fn load(load_data: LoadData, set_default_accept_encoding(&mut request_headers); set_request_cookies(doc_url.clone(), &mut request_headers, &resource_mgr_chan); - // --- Send the request let mut req = try!(request_factory.create(url.clone(), method.clone())); *req.headers_mut() = request_headers; @@ -517,19 +548,15 @@ pub fn load(load_data: LoadData, } }; - // --- Tell devtools we've made a request - // Send an HttpRequest message to devtools with a unique request_id - // TODO: Do this only if load_data has some pipeline_id, and send the pipeline_id in the message let request_id = uuid::Uuid::new_v4().to_simple_string(); - if let Some(ref chan) = devtools_chan { - let net_event = NetworkEvent::HttpRequest(load_data.url.clone(), - method.clone(), - load_data.headers.clone(), - load_data.data.clone()); - chan.send(DevtoolsControlMsg::FromChrome( - ChromeToDevtoolsControlMsg::NetworkEvent(request_id.clone(), - net_event))).unwrap(); - } + + // TODO: Do this only if load_data has some pipeline_id, and send the pipeline_id in the + // message + send_request_to_devtools( + devtools_chan.clone(), request_id.clone(), url.clone(), + method.clone(), load_data.headers.clone(), + if is_redirected_request { None } else { load_data.data.clone() } + ); info!("got HTTP response {}, headers:", response.status()); if log_enabled!(log::LogLevel::Info) { @@ -608,15 +635,11 @@ pub fn load(load_data: LoadData, // --- Tell devtools that we got a response // Send an HttpResponse message to devtools with the corresponding request_id // TODO: Send this message only if load_data has a pipeline_id that is not None - if let Some(ref chan) = devtools_chan { - let net_event_response = - NetworkEvent::HttpResponse(metadata.headers.clone(), - metadata.status.clone(), - None); - chan.send(DevtoolsControlMsg::FromChrome( - ChromeToDevtoolsControlMsg::NetworkEvent(request_id, - net_event_response))).unwrap(); - } + // TODO: Send this message even when the load fails? + send_response_to_devtools( + devtools_chan.clone(), request_id.clone(), + metadata.headers.clone(), metadata.status.clone() + ); return LoadResponse::from_http_response(response, metadata) } From 979382650f7ba3f11ef5241b5e01b03cb836985f Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 16 Aug 2015 17:58:48 +1000 Subject: [PATCH 23/43] Removes nossl --- components/util/opts.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/components/util/opts.rs b/components/util/opts.rs index 10a2bd84a78..445b4abc280 100644 --- a/components/util/opts.rs +++ b/components/util/opts.rs @@ -56,8 +56,6 @@ pub struct Opts { pub nonincremental_layout: bool, - pub nossl: bool, - /// Where to load userscripts from, if any. 
An empty string will load from /// the resources/user-agent-js directory, and if the option isn't passed userscripts /// won't be loaded @@ -383,7 +381,6 @@ pub fn default_opts() -> Opts { mem_profiler_period: None, layout_threads: 1, nonincremental_layout: false, - nossl: false, userscripts: None, user_stylesheets: Vec::new(), output_file: None, @@ -532,7 +529,6 @@ pub fn from_cmdline_args(args: &[String]) { }; let nonincremental_layout = opt_match.opt_present("i"); - let nossl = opt_match.opt_present("no-ssl"); let mut bubble_inline_sizes_separately = debug_options.bubble_widths; if debug_options.trace_layout { @@ -588,7 +584,6 @@ pub fn from_cmdline_args(args: &[String]) { mem_profiler_period: mem_profiler_period, layout_threads: layout_threads, nonincremental_layout: nonincremental_layout, - nossl: nossl, userscripts: opt_match.opt_default("userscripts", ""), user_stylesheets: user_stylesheets, output_file: opt_match.opt_str("o"), From d53af0d9ed43a9434b9f86b2c4d2869a30c55ff2 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 23 Aug 2015 10:46:47 +1200 Subject: [PATCH 24/43] Fixes merge errors --- components/net/http_loader.rs | 17 ++--------------- tests/unit/net/http_loader.rs | 32 ++++++++++++++++---------------- 2 files changed, 18 insertions(+), 31 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 6d83d34eaa8..7ca359e9ecb 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -3,26 +3,18 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, NetworkEvent}; -use hsts::{HSTSList, secure_url}; +use hsts::secure_url; use mime_classifier::MIMEClassifier; use net_traits::ProgressMsg::{Payload, Done}; use net_traits::hosts::replace_hosts; use net_traits::{ControlMsg, CookieSource, LoadData, Metadata, LoadConsumer, IncludeSubdomains}; use resource_task::{start_sending_opt, start_sending_sniffed_opt}; -use hsts::secure_url; -use file_loader; use file_loader; -use ipc_channel::ipc::{self, IpcSender}; -use log; use std::collections::HashSet; use flate2::read::{DeflateDecoder, GzDecoder}; -use hyper::Error as HttpError; -use hyper::client::Request; -use hyper::header::StrictTransportSecurity; -use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host, Location, qitem, Quality, QualityItem}; -use hyper::client::{Request, Response}; use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host}; +use hyper::client::{Request, Response}; use hyper::header::{Location, qitem, StrictTransportSecurity}; use hyper::header::{Quality, QualityItem, Headers, ContentEncoding, Encoding}; use hyper::Error as HttpError; @@ -34,22 +26,17 @@ use hyper::status::{StatusCode, StatusClass}; use ipc_channel::ipc::{self, IpcSender}; use log; use openssl::ssl::{SslContext, SslMethod, SSL_VERIFY_PEER}; -use std::collections::HashSet; use std::error::Error; use std::io::{self, Read, Write}; use std::sync::Arc; use std::sync::mpsc::{Sender, channel}; -use util::task::spawn_named; -use util::resource_files::resources_dir_path; use url::{Url, UrlParser}; -use util::opts; use util::resource_files::resources_dir_path; use util::task::spawn_named; use std::borrow::ToOwned; use std::boxed::FnBox; use uuid; -use std::fs::File; pub fn factory(resource_mgr_chan: IpcSender, devtools_chan: Option>) diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index d928f6d1eaf..46c95330b0b 100644 --- a/tests/unit/net/http_loader.rs +++ 
b/tests/unit/net/http_loader.rs @@ -227,7 +227,7 @@ fn test_load_when_redirecting_from_a_post_should_rewrite_next_request_as_get() { } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let mut load_data = LoadData::new(url.clone(), None); load_data.method = Method::Post; @@ -253,7 +253,7 @@ fn test_load_should_decode_the_response_as_deflate_when_response_headers_have_co } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); let mut response = load::(load_data, resource_mgr.clone(), None, &Factory).unwrap(); @@ -279,7 +279,7 @@ fn test_load_should_decode_the_response_as_gzip_when_response_headers_have_conte } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); let mut response = load::(load_data, resource_mgr.clone(), None, &Factory).unwrap(); @@ -313,7 +313,7 @@ fn test_load_doesnt_send_request_body_on_any_redirect() { } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Body on POST!".as_bytes())); @@ -337,7 +337,7 @@ fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_ar } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); @@ -365,7 +365,7 @@ fn test_load_adds_host_to_sts_list_when_url_is_https_and_sts_headers_are_present } let url = Url::parse("https://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); @@ -393,7 +393,7 @@ fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_ } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); assert_cookie_for_domain(&resource_mgr, "http://mozilla.com", ""); let load_data = LoadData::new(url.clone(), None); @@ -406,7 +406,7 @@ fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_ #[test] fn test_load_sets_requests_cookies_header_for_url_by_getting_cookies_from_the_resource_manager() { let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); resource_mgr.send(ControlMsg::SetCookiesForUrl(Url::parse("http://mozilla.com").unwrap(), "mozillaIs=theBest".to_string(), CookieSource::HTTP)).unwrap(); @@ -430,7 +430,7 @@ fn test_load_sets_content_length_to_length_of_request_body() { let content = "This is a request body"; let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let mut load_data = LoadData::new(url.clone(), 
None); load_data.data = Some(<[_]>::to_vec(content.as_bytes())); @@ -455,7 +455,7 @@ fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { ); let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); @@ -471,7 +471,7 @@ fn test_load_sets_default_accept_encoding_to_gzip_and_deflate() { accept_encoding_headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); @@ -500,7 +500,7 @@ fn test_load_errors_when_there_a_redirect_loop() { } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); match load::(load_data, resource_mgr, None, &Factory) { @@ -528,7 +528,7 @@ fn test_load_errors_when_there_is_too_many_redirects() { } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); match load::(load_data, resource_mgr, None, &Factory) { @@ -564,7 +564,7 @@ fn test_load_follows_a_redirect() { } let url = Url::parse("http://mozilla.com").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); match load::(load_data, resource_mgr, None, &Factory) { @@ -589,7 +589,7 @@ impl HttpRequestFactory for DontConnectFactory { #[test] fn test_load_errors_when_scheme_is_not_http_or_https() { let url = Url::parse("ftp://not-supported").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); match load::(load_data, resource_mgr, None, &DontConnectFactory) { @@ -601,7 +601,7 @@ fn test_load_errors_when_scheme_is_not_http_or_https() { #[test] fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_https() { let url = Url::parse("view-source:ftp://not-supported").unwrap(); - let resource_mgr = new_resource_task(None, None); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); let load_data = LoadData::new(url.clone(), None); match load::(load_data, resource_mgr, None, &DontConnectFactory) { From 94284fc47f74033de16680f90371ef3992cb1984 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 23 Aug 2015 10:48:53 +1200 Subject: [PATCH 25/43] Fixes new tidy problems --- components/net/http_loader.rs | 27 +++++++++--------- tests/unit/net/http_loader.rs | 52 +++++++++++++++++------------------ 2 files changed, 39 insertions(+), 40 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 7ca359e9ecb..863ed8731ec 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -2,30 +2,32 @@ * License, v. 2.0. 
If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, NetworkEvent}; -use hsts::secure_url; -use mime_classifier::MIMEClassifier; -use net_traits::ProgressMsg::{Payload, Done}; -use net_traits::hosts::replace_hosts; -use net_traits::{ControlMsg, CookieSource, LoadData, Metadata, LoadConsumer, IncludeSubdomains}; -use resource_task::{start_sending_opt, start_sending_sniffed_opt}; +use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, NetworkEvent}; use file_loader; -use std::collections::HashSet; use flate2::read::{DeflateDecoder, GzDecoder}; -use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host}; +use hsts::secure_url; +use hyper::Error as HttpError; use hyper::client::{Request, Response}; +use hyper::header::{AcceptEncoding, Accept, ContentLength, ContentType, Host}; use hyper::header::{Location, qitem, StrictTransportSecurity}; use hyper::header::{Quality, QualityItem, Headers, ContentEncoding, Encoding}; -use hyper::Error as HttpError; -use hyper::method::Method; use hyper::http::RawStatus; +use hyper::method::Method; use hyper::mime::{Mime, TopLevel, SubLevel}; use hyper::net::{Fresh, HttpsConnector, Openssl}; use hyper::status::{StatusCode, StatusClass}; use ipc_channel::ipc::{self, IpcSender}; use log; +use mime_classifier::MIMEClassifier; +use net_traits::ProgressMsg::{Payload, Done}; +use net_traits::hosts::replace_hosts; +use net_traits::{ControlMsg, CookieSource, LoadData, Metadata, LoadConsumer, IncludeSubdomains}; use openssl::ssl::{SslContext, SslMethod, SSL_VERIFY_PEER}; +use resource_task::{start_sending_opt, start_sending_sniffed_opt}; +use std::borrow::ToOwned; +use std::boxed::FnBox; +use std::collections::HashSet; use std::error::Error; use std::io::{self, Read, Write}; use std::sync::Arc; @@ -33,9 +35,6 @@ use std::sync::mpsc::{Sender, channel}; use url::{Url, UrlParser}; use util::resource_files::resources_dir_path; use util::task::spawn_named; - -use std::borrow::ToOwned; -use std::boxed::FnBox; use uuid; pub fn factory(resource_mgr_chan: IpcSender, diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 46c95330b0b..c7139554871 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -2,20 +2,20 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ +use flate2::Compression; +use flate2::write::{GzEncoder, DeflateEncoder}; +use hyper::header::{Headers, Location, ContentLength}; +use hyper::http::RawStatus; +use hyper::method::Method; +use hyper::status::StatusCode; +use ipc_channel::ipc; use net::http_loader::{load, LoadError, HttpRequestFactory, HttpRequest, HttpResponse}; use net::resource_task::new_resource_task; -use net_traits::{ResourceTask, ControlMsg, CookieSource}; -use url::Url; -use ipc_channel::ipc; use net_traits::LoadData; -use hyper::method::Method; -use hyper::http::RawStatus; -use hyper::status::StatusCode; -use hyper::header::{Headers, Location, ContentLength}; -use std::io::{self, Write, Read, Cursor}; -use flate2::write::{GzEncoder, DeflateEncoder}; -use flate2::Compression; +use net_traits::{ResourceTask, ControlMsg, CookieSource}; use std::borrow::Cow; +use std::io::{self, Write, Read, Cursor}; +use url::Url; fn respond_with(body: Vec) -> MockResponse { let mut headers = Headers::new(); @@ -115,7 +115,7 @@ fn response_for_request_type(t: RequestType) -> Result } impl HttpRequest for MockRequest { - type R=MockResponse; + type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.headers } @@ -137,7 +137,7 @@ impl AssertMustHaveHeadersRequest { } impl HttpRequest for AssertMustHaveHeadersRequest { - type R=MockResponse; + type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.request_headers } @@ -160,7 +160,7 @@ struct AssertMustHaveHeadersRequestFactory { } impl HttpRequestFactory for AssertMustHaveHeadersRequestFactory { - type R=AssertMustHaveHeadersRequest; + type R = AssertMustHaveHeadersRequest; fn create(&self, _: Url, _: Method) -> Result { Ok( @@ -197,7 +197,7 @@ impl AssertMustHaveBodyRequest { } impl HttpRequest for AssertMustHaveBodyRequest { - type R=MockResponse; + type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.headers} @@ -213,7 +213,7 @@ fn test_load_when_redirecting_from_a_post_should_rewrite_next_request_as_get() { struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, url: Url, method: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { @@ -239,7 +239,7 @@ fn test_load_should_decode_the_response_as_deflate_when_response_headers_have_co struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result { let mut e = DeflateEncoder::new(Vec::new(), Compression::Default); @@ -265,7 +265,7 @@ fn test_load_should_decode_the_response_as_gzip_when_response_headers_have_conte struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result { let mut e = GzEncoder::new(Vec::new(), Compression::Default); @@ -291,7 +291,7 @@ fn test_load_doesnt_send_request_body_on_any_redirect() { struct Factory; impl HttpRequestFactory for Factory { - type R=AssertMustHaveBodyRequest; + type R = AssertMustHaveBodyRequest; fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { @@ -326,7 +326,7 @@ fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_ar struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result { let content = <[_]>::to_vec("Yay!".as_bytes()); @@ -354,7 +354,7 @@ fn test_load_adds_host_to_sts_list_when_url_is_https_and_sts_headers_are_present struct 
Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result { let content = <[_]>::to_vec("Yay!".as_bytes()); @@ -382,7 +382,7 @@ fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_ struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, _: Url, _: Method) -> Result { let content = <[_]>::to_vec("Yay!".as_bytes()); @@ -434,7 +434,7 @@ fn test_load_sets_content_length_to_length_of_request_body() { let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec(content.as_bytes())); - let mut content_len_headers= Headers::new(); + let mut content_len_headers = Headers::new(); content_len_headers.set_raw( "Content-Length".to_owned(), vec![<[_]>::to_vec(&*format!("{}", content.len()).as_bytes())] ); @@ -486,7 +486,7 @@ fn test_load_errors_when_there_a_redirect_loop() { struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { @@ -516,7 +516,7 @@ fn test_load_errors_when_there_is_too_many_redirects() { struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { @@ -544,7 +544,7 @@ fn test_load_follows_a_redirect() { struct Factory; impl HttpRequestFactory for Factory { - type R=MockRequest; + type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { @@ -579,7 +579,7 @@ fn test_load_follows_a_redirect() { struct DontConnectFactory; impl HttpRequestFactory for DontConnectFactory { - type R=MockRequest; + type R = MockRequest; fn create(&self, url: Url, _: Method) -> Result { Err(LoadError::Connection(url, "should not have connected".to_string())) From 6de61301f4b38d1fea1f3c6c7aa82c3bca27f11d Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Thu, 27 Aug 2015 10:13:38 +1200 Subject: [PATCH 26/43] Removes inline'ing --- components/net/http_loader.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 863ed8731ec..bf0a2a4072e 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -271,14 +271,12 @@ pub enum LoadError { MaxRedirects(Url) } -#[inline(always)] fn set_default_accept_encoding(headers: &mut Headers) { if !headers.has::() { headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); } } -#[inline(always)] fn set_default_accept(headers: &mut Headers) { if !headers.has::() { let accept = Accept(vec![ @@ -291,7 +289,6 @@ fn set_default_accept(headers: &mut Headers) { } } -#[inline(always)] fn set_request_cookies(url: Url, headers: &mut Headers, resource_mgr_chan: &IpcSender) { let (tx, rx) = ipc::channel().unwrap(); resource_mgr_chan.send(ControlMsg::GetCookiesForUrl(url.clone(), @@ -304,7 +301,6 @@ fn set_request_cookies(url: Url, headers: &mut Headers, resource_mgr_chan: &IpcS } } -#[inline(always)] fn set_cookies_from_response(url: Url, response: &HttpResponse, resource_mgr_chan: &IpcSender) { if let Some(cookies) = response.headers().get_raw("set-cookie") { for cookie in cookies.iter() { @@ -317,7 +313,6 @@ fn set_cookies_from_response(url: Url, response: &HttpResponse, resource_mgr_cha } } -#[inline(always)] fn request_must_be_secured(url: &Url, 
resource_mgr_chan: &IpcSender) -> bool { let (tx, rx) = ipc::channel().unwrap(); resource_mgr_chan.send( @@ -327,7 +322,6 @@ fn request_must_be_secured(url: &Url, resource_mgr_chan: &IpcSender) rx.recv().unwrap() } -#[inline(always)] fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mgr_chan: &IpcSender) { if url.scheme == "https" { if let Some(header) = response.headers().get::() { @@ -404,7 +398,6 @@ enum Decoders { Plain(R) } -#[inline(always)] fn send_request_to_devtools( devtools_chan: Option>, request_id: String, url: Url, method: Method, headers: Headers, body: Option>) { @@ -421,7 +414,6 @@ fn send_request_to_devtools( } } -#[inline(always)] fn send_response_to_devtools( devtools_chan: Option>, request_id: String, headers: Option, status: Option) { From 84ae53e011cdf7462179774f256c2efbe3d01c78 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Thu, 27 Aug 2015 18:31:49 +1200 Subject: [PATCH 27/43] Indents arguments to be more readable --- components/net/http_loader.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index bf0a2a4072e..464e7bc54b1 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -80,10 +80,10 @@ fn inner_url(url: &Url) -> Url { } fn load_for_consumer(load_data: LoadData, - start_chan: LoadConsumer, - classifier: Arc, - resource_mgr_chan: IpcSender, - devtools_chan: Option>) { + start_chan: LoadConsumer, + classifier: Arc, + resource_mgr_chan: IpcSender, + devtools_chan: Option>) { match load::(load_data, resource_mgr_chan, devtools_chan, &NetworkHttpRequestFactory) { Err(LoadError::UnsupportedScheme(url)) => { From 1811ffa17874461811d7b8f15038b087458011e3 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Thu, 27 Aug 2015 18:39:15 +1200 Subject: [PATCH 28/43] Uses `and_then` instead of more complicated matching --- components/net/http_loader.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 464e7bc54b1..6bf324949ca 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -124,20 +124,20 @@ pub trait HttpResponse: Read { fn status_raw(&self) -> &RawStatus; fn content_encoding(&self) -> Option { - match self.headers().get::() { - Some(&ContentEncoding(ref encodings)) => { - if encodings.contains(&Encoding::Gzip) { - Some(Encoding::Gzip) - } else if encodings.contains(&Encoding::Deflate) { - Some(Encoding::Deflate) - } else { - // TODO: Is this the correct behaviour? - None + self.headers().get::().and_then(|h| { + match h { + &ContentEncoding(ref encodings) => { + if encodings.contains(&Encoding::Gzip) { + Some(Encoding::Gzip) + } else if encodings.contains(&Encoding::Deflate) { + Some(Encoding::Deflate) + } else { + // TODO: Is this the correct behaviour? 
+ None + } } } - - None => None - } + }) } } From d8acb893dea85c8b64993f6737df339a50709f0a Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Thu, 27 Aug 2015 18:40:08 +1200 Subject: [PATCH 29/43] Inverts conditional and returns --- components/net/http_loader.rs | 42 +++++++++++++++++++---------------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 6bf324949ca..f1d1183e663 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -272,9 +272,11 @@ pub enum LoadError { } fn set_default_accept_encoding(headers: &mut Headers) { - if !headers.has::() { - headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); + if headers.has::() { + return } + + headers.set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]); } fn set_default_accept(headers: &mut Headers) { @@ -323,25 +325,27 @@ fn request_must_be_secured(url: &Url, resource_mgr_chan: &IpcSender) } fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mgr_chan: &IpcSender) { - if url.scheme == "https" { - if let Some(header) = response.headers().get::() { - if let Some(host) = url.domain() { - info!("adding host {} to the strict transport security list", host); - info!("- max-age {}", header.max_age); + if url.scheme != "https" { + return; + } - let include_subdomains = if header.include_subdomains { - info!("- includeSubdomains"); - IncludeSubdomains::Included - } else { - IncludeSubdomains::NotIncluded - }; + if let Some(header) = response.headers().get::() { + if let Some(host) = url.domain() { + info!("adding host {} to the strict transport security list", host); + info!("- max-age {}", header.max_age); - resource_mgr_chan.send( - ControlMsg::SetHSTSEntryForHost( - host.to_string(), include_subdomains, header.max_age - ) - ).unwrap(); - } + let include_subdomains = if header.include_subdomains { + info!("- includeSubdomains"); + IncludeSubdomains::Included + } else { + IncludeSubdomains::NotIncluded + }; + + resource_mgr_chan.send( + ControlMsg::SetHSTSEntryForHost( + host.to_string(), include_subdomains, header.max_age + ) + ).unwrap(); } } } From 918f8a77604552693a73403c828219e8f4fad268 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Thu, 27 Aug 2015 18:41:47 +1200 Subject: [PATCH 30/43] Removes unnecessary clone --- components/net/http_loader.rs | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index f1d1183e663..df82b538b30 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -293,9 +293,7 @@ fn set_default_accept(headers: &mut Headers) { fn set_request_cookies(url: Url, headers: &mut Headers, resource_mgr_chan: &IpcSender) { let (tx, rx) = ipc::channel().unwrap(); - resource_mgr_chan.send(ControlMsg::GetCookiesForUrl(url.clone(), - tx, - CookieSource::HTTP)).unwrap(); + resource_mgr_chan.send(ControlMsg::GetCookiesForUrl(url, tx, CookieSource::HTTP)).unwrap(); if let Some(cookie_list) = rx.recv().unwrap() { let mut v = Vec::new(); v.push(cookie_list.into_bytes()); @@ -409,12 +407,8 @@ fn send_request_to_devtools( if let Some(ref chan) = devtools_chan { let net_event = NetworkEvent::HttpRequest(url, method, headers, body); chan.send(DevtoolsControlMsg::FromChrome( - ChromeToDevtoolsControlMsg::NetworkEvent( - request_id.clone(), - net_event - ) - ) - ).unwrap(); + ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event) + )).unwrap(); } 
} @@ -422,10 +416,7 @@ fn send_response_to_devtools( devtools_chan: Option>, request_id: String, headers: Option, status: Option) { if let Some(ref chan) = devtools_chan { - let net_event_response = - NetworkEvent::HttpResponse(headers.clone(), - status.clone(), - None); + let net_event_response = NetworkEvent::HttpResponse(headers, status, None); chan.send(DevtoolsControlMsg::FromChrome( ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event_response))).unwrap(); @@ -619,7 +610,7 @@ pub fn load(load_data: LoadData, // TODO: Send this message only if load_data has a pipeline_id that is not None // TODO: Send this message even when the load fails? send_response_to_devtools( - devtools_chan.clone(), request_id.clone(), + devtools_chan, request_id, metadata.headers.clone(), metadata.status.clone() ); From 04c012dfbfd4e04c53991e0dc31eda9ea1a8fb0d Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Thu, 27 Aug 2015 18:47:11 +1200 Subject: [PATCH 31/43] Binds name instead of inlining value for readability --- components/net/http_loader.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index df82b538b30..59ed5773ceb 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -339,11 +339,13 @@ fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mg IncludeSubdomains::NotIncluded }; - resource_mgr_chan.send( - ControlMsg::SetHSTSEntryForHost( - host.to_string(), include_subdomains, header.max_age - ) - ).unwrap(); + let msg = ControlMsg::SetHSTSEntryForHost( + host.to_string(), + include_subdomains, + header.max_age + ); + + resource_mgr_chan.send(msg).unwrap(); } } } From 3b8bada5a10daa7410fe11fb0dbf653b5ac91356 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 08:58:09 +1000 Subject: [PATCH 32/43] Fixes code review nits --- components/net/http_loader.rs | 48 +++++++++++++++++++---------------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 59ed5773ceb..90cfb25862d 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -351,7 +351,7 @@ fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mg } pub struct LoadResponse { - decoder: Decoders, + decoder: Decoder, pub metadata: Metadata } @@ -360,15 +360,15 @@ impl Read for LoadResponse { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result { match self.decoder { - Decoders::Gzip(ref mut d) => d.read(buf), - Decoders::Deflate(ref mut d) => d.read(buf), - Decoders::Plain(ref mut d) => d.read(buf) + Decoder::Gzip(ref mut d) => d.read(buf), + Decoder::Deflate(ref mut d) => d.read(buf), + Decoder::Plain(ref mut d) => d.read(buf) } } } impl LoadResponse { - fn new(m: Metadata, d: Decoders) -> LoadResponse { + fn new(m: Metadata, d: Decoder) -> LoadResponse { LoadResponse { metadata: m, decoder: d } } @@ -378,7 +378,7 @@ impl LoadResponse { let result = GzDecoder::new(response); match result { Ok(response_decoding) => { - return Ok(LoadResponse::new(m, Decoders::Gzip(response_decoding))); + return Ok(LoadResponse::new(m, Decoder::Gzip(response_decoding))); } Err(err) => { return Err(LoadError::Decoding(m.final_url, err.to_string())); @@ -387,41 +387,45 @@ impl LoadResponse { } Some(Encoding::Deflate) => { let response_decoding = DeflateDecoder::new(response); - return Ok(LoadResponse::new(m, Decoders::Deflate(response_decoding))); + return 
Ok(LoadResponse::new(m, Decoder::Deflate(response_decoding))); } _ => { - return Ok(LoadResponse::new(m, Decoders::Plain(response))); + return Ok(LoadResponse::new(m, Decoder::Plain(response))); } } } } -enum Decoders { +enum Decoder { Gzip(GzDecoder), Deflate(DeflateDecoder), Plain(R) } -fn send_request_to_devtools( - devtools_chan: Option>, request_id: String, - url: Url, method: Method, headers: Headers, body: Option>) { +fn send_request_to_devtools(devtools_chan: Option>, + request_id: String, + url: Url, + method: Method, + headers: Headers, + body: Option>) { if let Some(ref chan) = devtools_chan { let net_event = NetworkEvent::HttpRequest(url, method, headers, body); - chan.send(DevtoolsControlMsg::FromChrome( - ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event) - )).unwrap(); + + let msg = ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event); + chan.send(DevtoolsControlMsg::FromChrome(msg)).unwrap(); } } -fn send_response_to_devtools( - devtools_chan: Option>, request_id: String, - headers: Option, status: Option) { +fn send_response_to_devtools(devtools_chan: Option>, + request_id: String, + headers: Option, + status: Option) { if let Some(ref chan) = devtools_chan { let net_event_response = NetworkEvent::HttpResponse(headers, status, None); - chan.send(DevtoolsControlMsg::FromChrome( - ChromeToDevtoolsControlMsg::NetworkEvent(request_id, - net_event_response))).unwrap(); + + let msg = ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event_response); + chan.send(DevtoolsControlMsg::FromChrome(msg)).unwrap(); } } pub fn load(load_data: LoadData, @@ -511,7 +515,7 @@ pub fn load(load_data: LoadData, // TODO - This is the wrong behaviour according to the RFC. However, I'm not // sure how much "correctness" vs. real-world is important in this case. 
// - // http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html + // https://tools.ietf.org/html/rfc7231#section-6.4 let is_redirected_request = iters != 1; let response = match load_data.data { Some(ref data) if !is_redirected_request => { From a1fd2353471c00385bdb0636b45e6d184a525118 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 09:02:33 +1000 Subject: [PATCH 33/43] Moves devtools request msg logic to where the request is sent --- components/net/http_loader.rs | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 90cfb25862d..2c8a0470c3d 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -517,26 +517,32 @@ pub fn load(load_data: LoadData, // // https://tools.ietf.org/html/rfc7231#section-6.4 let is_redirected_request = iters != 1; + let request_id = uuid::Uuid::new_v4().to_simple_string(); let response = match load_data.data { Some(ref data) if !is_redirected_request => { req.headers_mut().set(ContentLength(data.len() as u64)); + + // TODO: Do this only if load_data has some pipeline_id, and send the pipeline_id + // in the message + send_request_to_devtools( + devtools_chan.clone(), request_id.clone(), url.clone(), + method.clone(), load_data.headers.clone(), + load_data.data.clone() + ); + try!(req.send(&load_data.data)) } _ => { + send_request_to_devtools( + devtools_chan.clone(), request_id.clone(), url.clone(), + method.clone(), load_data.headers.clone(), + None + ); + try!(req.send(&None)) } }; - let request_id = uuid::Uuid::new_v4().to_simple_string(); - - // TODO: Do this only if load_data has some pipeline_id, and send the pipeline_id in the - // message - send_request_to_devtools( - devtools_chan.clone(), request_id.clone(), url.clone(), - method.clone(), load_data.headers.clone(), - if is_redirected_request { None } else { load_data.data.clone() } - ); - info!("got HTTP response {}, headers:", response.status()); if log_enabled!(log::LogLevel::Info) { for header in response.headers().iter() { From 667b563f2ed7b18830fbb5aa2def6d31e3304867 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 09:09:41 +1000 Subject: [PATCH 34/43] Renames LoadResponse --- components/net/http_loader.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 2c8a0470c3d..d34184fee2a 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -350,13 +350,13 @@ fn update_sts_list_from_response(url: &Url, response: &HttpResponse, resource_mg } } -pub struct LoadResponse { +pub struct StreamedResponse { decoder: Decoder, pub metadata: Metadata } -impl Read for LoadResponse { +impl Read for StreamedResponse { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result { match self.decoder { @@ -367,18 +367,18 @@ impl Read for LoadResponse { } } -impl LoadResponse { - fn new(m: Metadata, d: Decoder) -> LoadResponse { - LoadResponse { metadata: m, decoder: d } +impl StreamedResponse { + fn new(m: Metadata, d: Decoder) -> StreamedResponse { + StreamedResponse { metadata: m, decoder: d } } - fn from_http_response(response: R, m: Metadata) -> Result, LoadError> { + fn from_http_response(response: R, m: Metadata) -> Result, LoadError> { match response.content_encoding() { Some(Encoding::Gzip) => { let result = GzDecoder::new(response); match result { Ok(response_decoding) => { - return Ok(LoadResponse::new(m, 
Decoder::Gzip(response_decoding))); + return Ok(StreamedResponse::new(m, Decoder::Gzip(response_decoding))); } Err(err) => { return Err(LoadError::Decoding(m.final_url, err.to_string())); @@ -387,10 +387,10 @@ impl LoadResponse { } Some(Encoding::Deflate) => { let response_decoding = DeflateDecoder::new(response); - return Ok(LoadResponse::new(m, Decoder::Deflate(response_decoding))); + return Ok(StreamedResponse::new(m, Decoder::Deflate(response_decoding))); } _ => { - return Ok(LoadResponse::new(m, Decoder::Plain(response))); + return Ok(StreamedResponse::new(m, Decoder::Plain(response))); } } } @@ -432,7 +432,7 @@ pub fn load(load_data: LoadData, resource_mgr_chan: IpcSender, devtools_chan: Option>, request_factory: &HttpRequestFactory) - -> Result, LoadError> where A: HttpRequest + 'static { + -> Result, LoadError> where A: HttpRequest + 'static { // FIXME: At the time of writing this FIXME, servo didn't have any central // location for configuration. If you're reading this and such a // repository DOES exist, please update this constant to use it. @@ -626,7 +626,7 @@ pub fn load(load_data: LoadData, metadata.headers.clone(), metadata.status.clone() ); - return LoadResponse::from_http_response(response, metadata) + return StreamedResponse::from_http_response(response, metadata) } } From ac0e4b433752da09be768d0e299aa99c4e59fa7b Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 09:09:51 +1000 Subject: [PATCH 35/43] Renames RequestType --- tests/unit/net/http_loader.rs | 52 +++++++++++++++++------------------ 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index c7139554871..accf30cb594 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -83,7 +83,7 @@ fn redirect_to(host: String) -> MockResponse { } -enum RequestType { +enum ResponseType { Redirect(String), Text(Vec), WithHeaders(Vec, Headers) @@ -91,24 +91,24 @@ enum RequestType { struct MockRequest { headers: Headers, - t: RequestType + t: ResponseType } impl MockRequest { - fn new(t: RequestType) -> MockRequest { + fn new(t: ResponseType) -> MockRequest { MockRequest { headers: Headers::new(), t: t } } } -fn response_for_request_type(t: RequestType) -> Result { +fn response_for_request_type(t: ResponseType) -> Result { match t { - RequestType::Redirect(location) => { + ResponseType::Redirect(location) => { Ok(redirect_to(location)) }, - RequestType::Text(b) => { + ResponseType::Text(b) => { Ok(respond_with(b)) }, - RequestType::WithHeaders(b, mut h) => { + ResponseType::WithHeaders(b, mut h) => { Ok(respond_with_headers(b, &mut h)) } } @@ -127,11 +127,11 @@ impl HttpRequest for MockRequest { struct AssertMustHaveHeadersRequest { expected_headers: Headers, request_headers: Headers, - t: RequestType + t: ResponseType } impl AssertMustHaveHeadersRequest { - fn new(t: RequestType, expected_headers: Headers) -> Self { + fn new(t: ResponseType, expected_headers: Headers) -> Self { AssertMustHaveHeadersRequest { expected_headers: expected_headers, request_headers: Headers::new(), t: t } } } @@ -165,7 +165,7 @@ impl HttpRequestFactory for AssertMustHaveHeadersRequestFactory { fn create(&self, _: Url, _: Method) -> Result { Ok( AssertMustHaveHeadersRequest::new( - RequestType::Text(self.body.clone()), + ResponseType::Text(self.body.clone()), self.expected_headers.clone() ) ) @@ -187,11 +187,11 @@ fn assert_cookie_for_domain(resource_mgr: &ResourceTask, domain: &str, cookie: & struct AssertMustHaveBodyRequest { 
expected_body: Option>, headers: Headers, - t: RequestType + t: ResponseType } impl AssertMustHaveBodyRequest { - fn new(t: RequestType, expected_body: Option>) -> Self { + fn new(t: ResponseType, expected_body: Option>) -> Self { AssertMustHaveBodyRequest { expected_body: expected_body, headers: Headers::new(), t: t } } } @@ -218,10 +218,10 @@ fn test_load_when_redirecting_from_a_post_should_rewrite_next_request_as_get() { fn create(&self, url: Url, method: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { assert_eq!(Method::Post, method); - Ok(MockRequest::new(RequestType::Redirect("http://mozilla.org".to_string()))) + Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.org".to_string()))) } else { assert_eq!(Method::Get, method); - Ok(MockRequest::new(RequestType::Text(<[_]>::to_vec("Yay!".as_bytes())))) + Ok(MockRequest::new(ResponseType::Text(<[_]>::to_vec("Yay!".as_bytes())))) } } } @@ -248,7 +248,7 @@ fn test_load_should_decode_the_response_as_deflate_when_response_headers_have_co let mut headers = Headers::new(); headers.set_raw("Content-Encoding", vec![b"deflate".to_vec()]); - Ok(MockRequest::new(RequestType::WithHeaders(encoded_content, headers))) + Ok(MockRequest::new(ResponseType::WithHeaders(encoded_content, headers))) } } @@ -274,7 +274,7 @@ fn test_load_should_decode_the_response_as_gzip_when_response_headers_have_conte let mut headers = Headers::new(); headers.set_raw("Content-Encoding", vec![b"gzip".to_vec()]); - Ok(MockRequest::new(RequestType::WithHeaders(encoded_content, headers))) + Ok(MockRequest::new(ResponseType::WithHeaders(encoded_content, headers))) } } @@ -297,14 +297,14 @@ fn test_load_doesnt_send_request_body_on_any_redirect() { if url.domain().unwrap() == "mozilla.com" { Ok( AssertMustHaveBodyRequest::new( - RequestType::Redirect("http://mozilla.org".to_string()), + ResponseType::Redirect("http://mozilla.org".to_string()), Some(<[_]>::to_vec("Body on POST!".as_bytes())) ) ) } else { Ok( AssertMustHaveBodyRequest::new( - RequestType::Text(<[_]>::to_vec("Yay!".as_bytes())), + ResponseType::Text(<[_]>::to_vec("Yay!".as_bytes())), None ) ) @@ -332,7 +332,7 @@ fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_ar let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set_raw("Strict-Transport-Security", vec![b"max-age=31536000".to_vec()]); - Ok(MockRequest::new(RequestType::WithHeaders(content, headers))) + Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } @@ -360,7 +360,7 @@ fn test_load_adds_host_to_sts_list_when_url_is_https_and_sts_headers_are_present let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set_raw("Strict-Transport-Security", vec![b"max-age=31536000".to_vec()]); - Ok(MockRequest::new(RequestType::WithHeaders(content, headers))) + Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } @@ -388,7 +388,7 @@ fn test_load_sets_cookies_in_the_resource_manager_when_it_get_set_cookie_header_ let content = <[_]>::to_vec("Yay!".as_bytes()); let mut headers = Headers::new(); headers.set_raw("set-cookie", vec![b"mozillaIs=theBest".to_vec()]); - Ok(MockRequest::new(RequestType::WithHeaders(content, headers))) + Ok(MockRequest::new(ResponseType::WithHeaders(content, headers))) } } @@ -490,9 +490,9 @@ fn test_load_errors_when_there_a_redirect_loop() { fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { - 
Ok(MockRequest::new(RequestType::Redirect("http://mozilla.org".to_string()))) + Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.org".to_string()))) } else if url.domain().unwrap() == "mozilla.org" { - Ok(MockRequest::new(RequestType::Redirect("http://mozilla.com".to_string()))) + Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.com".to_string()))) } else { panic!("unexpected host {:?}", url) } @@ -520,7 +520,7 @@ fn test_load_errors_when_there_is_too_many_redirects() { fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { - Ok(MockRequest::new(RequestType::Redirect(format!("{}/1", url.serialize())))) + Ok(MockRequest::new(ResponseType::Redirect(format!("{}/1", url.serialize())))) } else { panic!("unexpected host {:?}", url) } @@ -548,11 +548,11 @@ fn test_load_follows_a_redirect() { fn create(&self, url: Url, _: Method) -> Result { if url.domain().unwrap() == "mozilla.com" { - Ok(MockRequest::new(RequestType::Redirect("http://mozilla.org".to_string()))) + Ok(MockRequest::new(ResponseType::Redirect("http://mozilla.org".to_string()))) } else if url.domain().unwrap() == "mozilla.org" { Ok( MockRequest::new( - RequestType::Text( + ResponseType::Text( <[_]>::to_vec("Yay!".as_bytes()) ) ) From 86df1d3d6aa7760e4cd8e4d3c9516707a544bb20 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 09:10:53 +1000 Subject: [PATCH 36/43] Renames AssertMustHaveHeadersRequest --- tests/unit/net/http_loader.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index accf30cb594..6004c128c02 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -124,19 +124,19 @@ impl HttpRequest for MockRequest { } } -struct AssertMustHaveHeadersRequest { +struct AssertRequestMustHaveHeaders { expected_headers: Headers, request_headers: Headers, t: ResponseType } -impl AssertMustHaveHeadersRequest { +impl AssertRequestMustHaveHeaders { fn new(t: ResponseType, expected_headers: Headers) -> Self { - AssertMustHaveHeadersRequest { expected_headers: expected_headers, request_headers: Headers::new(), t: t } + AssertRequestMustHaveHeaders { expected_headers: expected_headers, request_headers: Headers::new(), t: t } } } -impl HttpRequest for AssertMustHaveHeadersRequest { +impl HttpRequest for AssertRequestMustHaveHeaders { type R = MockResponse; fn headers_mut(&mut self) -> &mut Headers { &mut self.request_headers } @@ -160,11 +160,11 @@ struct AssertMustHaveHeadersRequestFactory { } impl HttpRequestFactory for AssertMustHaveHeadersRequestFactory { - type R = AssertMustHaveHeadersRequest; + type R = AssertRequestMustHaveHeaders; - fn create(&self, _: Url, _: Method) -> Result { + fn create(&self, _: Url, _: Method) -> Result { Ok( - AssertMustHaveHeadersRequest::new( + AssertRequestMustHaveHeaders::new( ResponseType::Text(self.body.clone()), self.expected_headers.clone() ) @@ -417,7 +417,7 @@ fn test_load_sets_requests_cookies_header_for_url_by_getting_cookies_from_the_re let mut cookie = Headers::new(); cookie.set_raw("Cookie".to_owned(), vec![<[_]>::to_vec("mozillaIs=theBest".as_bytes())]); - let _ = load::( + let _ = load::( load_data.clone(), resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: cookie, @@ -439,7 +439,7 @@ fn test_load_sets_content_length_to_length_of_request_body() { "Content-Length".to_owned(), vec![<[_]>::to_vec(&*format!("{}", content.len()).as_bytes())] ); - let _ = load::( + let _ = 
load::( load_data.clone(), resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: content_len_headers, @@ -459,7 +459,7 @@ fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); - let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { + let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: accept_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }); @@ -475,7 +475,7 @@ fn test_load_sets_default_accept_encoding_to_gzip_and_deflate() { let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); - let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { + let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: accept_encoding_headers, body: <[_]>::to_vec("Yay!".as_bytes()) }); From 287dc7371fd360aa40bda45ee0d863ada3fd5a9b Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 09:15:13 +1000 Subject: [PATCH 37/43] Extracts some dense code to a let binding --- tests/unit/net/http_loader.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 6004c128c02..64f847a4e75 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -346,7 +346,7 @@ fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_ar let (tx, rx) = ipc::channel().unwrap(); resource_mgr.send(ControlMsg::GetHostMustBeSecured("mozilla.com".to_string(), tx)).unwrap(); - assert!(!rx.recv().unwrap()); + assert_eq!(rx.recv().unwrap(), false); } #[test] @@ -435,8 +435,9 @@ fn test_load_sets_content_length_to_length_of_request_body() { load_data.data = Some(<[_]>::to_vec(content.as_bytes())); let mut content_len_headers = Headers::new(); + let content_len = format!("{}", content.len()); content_len_headers.set_raw( - "Content-Length".to_owned(), vec![<[_]>::to_vec(&*format!("{}", content.len()).as_bytes())] + "Content-Length".to_owned(), vec![<[_]>::to_vec(&*content_len.as_bytes())] ); let _ = load::( From 9b1f2231cb52554588900daefda3f742787ad112 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 09:17:44 +1000 Subject: [PATCH 38/43] Adds a test for non-default accept headers --- tests/unit/net/http_loader.rs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 64f847a4e75..bc08447bf51 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -448,6 +448,23 @@ fn test_load_sets_content_length_to_length_of_request_body() { }); } +#[test] +fn test_load_uses_explicit_accept_from_preserved_headers_in_load_data() { + let mut accept_headers = Headers::new(); + accept_headers.set_raw("Accept".to_owned(), vec![b"text/html".to_vec()]); + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); + load_data.preserved_headers.set_raw("Accept".to_owned(), vec![b"text/html".to_vec()]); + + let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { + expected_headers: accept_headers, + body: <[_]>::to_vec("Yay!".as_bytes()) + }); +} + 
#[test] fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { let mut accept_headers = Headers::new(); From 976dbc5155b0df996f4521d4646a25cb885b60ec Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Fri, 28 Aug 2015 09:20:28 +1000 Subject: [PATCH 39/43] Adds a test for non-default accept-encoding headers --- tests/unit/net/http_loader.rs | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index bc08447bf51..7a2f3981d9f 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs @@ -449,7 +449,7 @@ fn test_load_sets_content_length_to_length_of_request_body() { } #[test] -fn test_load_uses_explicit_accept_from_preserved_headers_in_load_data() { +fn test_load_uses_explicit_accept_from_headers_in_load_data() { let mut accept_headers = Headers::new(); accept_headers.set_raw("Accept".to_owned(), vec![b"text/html".to_vec()]); @@ -457,7 +457,7 @@ fn test_load_uses_explicit_accept_from_preserved_headers_in_load_data() { let resource_mgr = new_resource_task("Test-agent".to_string(), None); let mut load_data = LoadData::new(url.clone(), None); load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); - load_data.preserved_headers.set_raw("Accept".to_owned(), vec![b"text/html".to_vec()]); + load_data.headers.set_raw("Accept".to_owned(), vec![b"text/html".to_vec()]); let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { expected_headers: accept_headers, @@ -483,6 +483,23 @@ fn test_load_sets_default_accept_to_html_xhtml_xml_and_then_anything_else() { }); } +#[test] +fn test_load_uses_explicit_accept_encoding_from_load_data_headers() { + let mut accept_encoding_headers = Headers::new(); + accept_encoding_headers.set_raw("Accept-Encoding".to_owned(), vec![b"chunked".to_vec()]); + + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = Some(<[_]>::to_vec("Yay!".as_bytes())); + load_data.headers.set_raw("Accept-Encoding".to_owned(), vec![b"chunked".to_vec()]); + + let _ = load::(load_data, resource_mgr, None, &AssertMustHaveHeadersRequestFactory { + expected_headers: accept_encoding_headers, + body: <[_]>::to_vec("Yay!".as_bytes()) + }); +} + #[test] fn test_load_sets_default_accept_encoding_to_gzip_and_deflate() { let mut accept_encoding_headers = Headers::new(); From f257b5fcef9babf6d7f6c77e14df4a77c402582f Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 30 Aug 2015 12:09:53 +1000 Subject: [PATCH 40/43] Adds content-length to empty-bodied non-GET/HEAD requests --- components/net/http_loader.rs | 4 ++++ tests/unit/net/http_loader.rs | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index d34184fee2a..0f4e6e3357a 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -533,6 +533,10 @@ pub fn load(load_data: LoadData, try!(req.send(&load_data.data)) } _ => { + if load_data.method != Method::Get && load_data.method != Method::Head { + req.headers_mut().set(ContentLength(0)) + } + send_request_to_devtools( devtools_chan.clone(), request_id.clone(), url.clone(), method.clone(), load_data.headers.clone(), diff --git a/tests/unit/net/http_loader.rs b/tests/unit/net/http_loader.rs index 7a2f3981d9f..7d805f27d24 100644 --- a/tests/unit/net/http_loader.rs +++ b/tests/unit/net/http_loader.rs 
@@ -208,6 +208,26 @@ impl HttpRequest for AssertMustHaveBodyRequest { } } +#[test] +fn test_load_when_request_is_not_get_or_head_and_there_is_no_body_content_length_should_be_set_to_0() { + let url = Url::parse("http://mozilla.com").unwrap(); + let resource_mgr = new_resource_task("Test-agent".to_string(), None); + + let mut load_data = LoadData::new(url.clone(), None); + load_data.data = None; + load_data.method = Method::Post; + + let mut content_length = Headers::new(); + content_length.set_raw("Content-Length".to_owned(), vec![<[_]>::to_vec("0".as_bytes())]); + + let _ = load::( + load_data.clone(), resource_mgr, None, + &AssertMustHaveHeadersRequestFactory { + expected_headers: content_length, + body: <[_]>::to_vec(&[]) + }); +} + #[test] fn test_load_when_redirecting_from_a_post_should_rewrite_next_request_as_get() { struct Factory; From 4b6f07cbe65654eeac510f718eba3a9530899e91 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Sun, 30 Aug 2015 12:23:50 +1000 Subject: [PATCH 41/43] Resolves strange borrow/type checking issues from new thread naming --- components/net/http_loader.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 0f4e6e3357a..0d5b819b9a6 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -41,8 +41,10 @@ pub fn factory(resource_mgr_chan: IpcSender, devtools_chan: Option>) -> Box) + Send> { box move |load_data, senders, classifier| { - spawn_named(format!("http_loader for {}", load_data.url.serialize()), - move || load_for_consumer(load_data, senders, classifier, resource_mgr_chan, devtools_chan)) + let l: LoadData = (load_data as LoadData).clone(); + let name = format!("http_loader for {}", l.url.serialize()); + spawn_named(name, + move || load_for_consumer(l, senders, classifier, resource_mgr_chan, devtools_chan)) } } From 9d0f418ee05b86399ce11ee0cbe0423a8f40ee9e Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Mon, 31 Aug 2015 08:54:12 +1200 Subject: [PATCH 42/43] Resolves merge conflict without wonky cloning --- components/net/http_loader.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 0d5b819b9a6..8f0700853fd 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -40,11 +40,9 @@ use uuid; pub fn factory(resource_mgr_chan: IpcSender, devtools_chan: Option>) -> Box) + Send> { - box move |load_data, senders, classifier| { - let l: LoadData = (load_data as LoadData).clone(); - let name = format!("http_loader for {}", l.url.serialize()); - spawn_named(name, - move || load_for_consumer(l, senders, classifier, resource_mgr_chan, devtools_chan)) + box move |load_data: LoadData, senders, classifier| { + spawn_named(format!("http_loader for {}", load_data.url.serialize()), + move || load_for_consumer(load_data, senders, classifier, resource_mgr_chan, devtools_chan)) } } From 43e0c4ac897cb16e1f0faaa600f713d43445d3b0 Mon Sep 17 00:00:00 2001 From: Sam Gibson Date: Mon, 31 Aug 2015 13:47:35 +1200 Subject: [PATCH 43/43] Prevents unwrapping invalid location url domains --- components/net/http_loader.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/net/http_loader.rs b/components/net/http_loader.rs index 8f0700853fd..cf5078a1a4f 100644 --- a/components/net/http_loader.rs +++ b/components/net/http_loader.rs @@ -460,7 +460,7 @@ pub fn load(load_data: LoadData, loop { iters = iters + 1; - if &*url.scheme != "https" 
&& request_must_be_secured(&url, &resource_mgr_chan) { + if &*url.scheme == "http" && request_must_be_secured(&url, &resource_mgr_chan) { info!("{} is in the strict transport security list, requesting secure host", url); url = secure_url(&url); }
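
For reference, the pattern these patches converge on — routing all network calls through a request-factory trait so that `load` can be exercised against canned responses in unit tests — can be sketched in isolation. The trait and type names below are simplified stand-ins invented for illustration only; they are not Servo's actual `HttpRequestFactory`/`HttpRequest`/`HttpResponse` signatures, whose real shapes (built around `Url`, `Method`, and `LoadError`) appear in the diffs above.

// Minimal, self-contained sketch of the request-factory mocking pattern.
// All names here are illustrative stand-ins, not the real Servo API.

trait Response {
    fn body(self) -> Vec<u8>;
}

trait Request {
    type R: Response;
    fn send(self, body: &Option<Vec<u8>>) -> Result<Self::R, String>;
}

trait RequestFactory {
    type R: Request;
    fn create(&self, url: &str) -> Result<Self::R, String>;
}

// A loader written against the traits above never touches the network directly,
// so tests can drive it with mocks instead of a live server.
fn load<F: RequestFactory>(factory: &F, url: &str) -> Result<Vec<u8>, String> {
    let request = factory.create(url)?;
    let response = request.send(&None)?;
    Ok(response.body())
}

struct MockResponse(Vec<u8>);

impl Response for MockResponse {
    fn body(self) -> Vec<u8> { self.0 }
}

struct MockRequest(Vec<u8>);

impl Request for MockRequest {
    type R = MockResponse;
    fn send(self, _body: &Option<Vec<u8>>) -> Result<MockResponse, String> {
        Ok(MockResponse(self.0))
    }
}

struct MockFactory;

impl RequestFactory for MockFactory {
    type R = MockRequest;
    fn create(&self, _url: &str) -> Result<MockRequest, String> {
        Ok(MockRequest(b"Yay!".to_vec()))
    }
}

#[test]
fn load_returns_the_mocked_body() {
    assert_eq!(load(&MockFactory, "http://mozilla.com").unwrap(), b"Yay!".to_vec());
}

The unit tests in this series rely on the same shape: each test defines a one-off factory (MockRequest, AssertRequestMustHaveHeaders, AssertMustHaveBodyRequest) and asserts on the behaviour of load without opening a socket.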