Mirror of https://github.com/servo/servo.git (synced 2025-06-06 16:45:39 +00:00)
Fix HSTS

parent 267ce462d8
commit 68ebecb775

8 changed files with 254 additions and 16 deletions
@@ -459,6 +459,11 @@ mod gen {
             }
         },
         network: {
+            enforce_tls: {
+                enabled: bool,
+                localhost: bool,
+                onion: bool,
+            },
             http_cache: {
                 #[serde(rename = "network.http-cache.disabled")]
                 disabled: bool,

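Note: preferences declared in this generated block are read elsewhere in the commit through
servo_config's pref! macro. A minimal, hypothetical reading sketch (the helper is illustrative
only and not part of the commit):

    use servo_config::pref;

    // Hypothetical helper: the three new booleans gate the TLS-upgrade logic
    // added to HstsList::apply_hsts_rules later in this diff.
    fn enforce_tls_prefs() -> (bool, bool, bool) {
        (
            pref!(network.enforce_tls.enabled),
            pref!(network.enforce_tls.localhost),
            pref!(network.enforce_tls.onion),
        )
    }
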
@@ -265,7 +265,7 @@ pub fn main_fetch(
             .hsts_list
             .read()
             .unwrap()
-            .switch_known_hsts_host_domain_url_to_https(request.current_url_mut());
+            .apply_hsts_rules(request.current_url_mut());

         // Step 11.
         // Not applicable: see fetch_async.

@@ -3,9 +3,12 @@
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

 use embedder_traits::resources::{self, Resource};
+use headers::{Header, HeaderMapExt, HeaderName, HeaderValue};
+use http::HeaderMap;
 use net_traits::pub_domains::reg_suffix;
 use net_traits::IncludeSubdomains;
-use servo_url::ServoUrl;
+use servo_config::pref;
+use servo_url::{Host, ServoUrl};
 use std::collections::HashMap;
 use std::net::{Ipv4Addr, Ipv6Addr};

@@ -138,16 +141,156 @@ impl HstsList {
         }
     }

-    /// Step 10 of https://fetch.spec.whatwg.org/#concept-main-fetch.
-    pub fn switch_known_hsts_host_domain_url_to_https(&self, url: &mut ServoUrl) {
-        if url.scheme() != "http" {
+    /// Step 2.9 of https://fetch.spec.whatwg.org/#concept-main-fetch.
+    pub fn apply_hsts_rules(&self, url: &mut ServoUrl) {
+        if url.scheme() != "http" && url.scheme() != "ws" {
             return;
         }
-        if url
-            .domain()
-            .map_or(false, |domain| self.is_host_secure(domain))
-        {
-            url.as_mut_url().set_scheme("https").unwrap();
+
+        let upgrade_scheme = if pref!(network.enforce_tls.enabled) {
+            if (!pref!(network.enforce_tls.localhost) &&
+                match url.host() {
+                    Some(Host::Domain(domain)) => {
+                        domain.ends_with(".localhost") || domain == "localhost"
+                    },
+                    Some(Host::Ipv4(ipv4)) => ipv4.is_loopback(),
+                    Some(Host::Ipv6(ipv6)) => ipv6.is_loopback(),
+                    _ => false,
+                }) ||
+                (!pref!(network.enforce_tls.onion) &&
+                    url.domain()
+                        .map_or(false, |domain| domain.ends_with(".onion")))
+            {
+                url.domain()
+                    .map_or(false, |domain| self.is_host_secure(domain))
+            } else {
+                true
+            }
+        } else {
+            url.domain()
+                .map_or(false, |domain| self.is_host_secure(domain))
+        };
+
+        if upgrade_scheme {
+            let upgraded_scheme = match url.scheme() {
+                "ws" => "wss",
+                _ => "https",
+            };
+            url.as_mut_url().set_scheme(upgraded_scheme).unwrap();
         }
     }
+
+    pub fn update_hsts_list_from_response(&mut self, url: &ServoUrl, headers: &HeaderMap) {
+        if url.scheme() != "https" && url.scheme() != "wss" {
+            return;
+        }
+
+        if let Some(header) = headers.typed_get::<StrictTransportSecurity>() {
+            if let Some(host) = url.domain() {
+                let include_subdomains = if header.include_subdomains {
+                    IncludeSubdomains::Included
+                } else {
+                    IncludeSubdomains::NotIncluded
+                };
+
+                if let Some(entry) =
+                    HstsEntry::new(host.to_owned(), include_subdomains, Some(header.max_age))
+                {
+                    info!("adding host {} to the strict transport security list", host);
+                    info!("- max-age {}", header.max_age);
+                    if header.include_subdomains {
+                        info!("- includeSubdomains");
+                    }
+
+                    self.push(entry);
+                }
+            }
+        }
+    }
 }
+
+// TODO: Remove this with the next update of the `headers` crate
+// https://github.com/hyperium/headers/issues/61
+#[derive(Clone, Debug, PartialEq)]
+struct StrictTransportSecurity {
+    include_subdomains: bool,
+    max_age: u64,
+}
+
+enum Directive {
+    MaxAge(u64),
+    IncludeSubdomains,
+    Unknown,
+}
+
+// taken from https://github.com/hyperium/headers
+impl Header for StrictTransportSecurity {
+    fn name() -> &'static HeaderName {
+        &http::header::STRICT_TRANSPORT_SECURITY
+    }
+
+    fn decode<'i, I: Iterator<Item = &'i HeaderValue>>(
+        values: &mut I,
+    ) -> Result<Self, headers::Error> {
+        values
+            .just_one()
+            .and_then(|v| v.to_str().ok())
+            .map(|s| {
+                s.split(';')
+                    .map(str::trim)
+                    .map(|sub| {
+                        if sub.eq_ignore_ascii_case("includeSubDomains") {
+                            Some(Directive::IncludeSubdomains)
+                        } else {
+                            let mut sub = sub.splitn(2, '=');
+                            match (sub.next(), sub.next()) {
+                                (Some(left), Some(right))
+                                    if left.trim().eq_ignore_ascii_case("max-age") =>
+                                {
+                                    right
+                                        .trim()
+                                        .trim_matches('"')
+                                        .parse()
+                                        .ok()
+                                        .map(Directive::MaxAge)
+                                },
+                                _ => Some(Directive::Unknown),
+                            }
+                        }
+                    })
+                    .fold(Some((None, None)), |res, dir| match (res, dir) {
+                        (Some((None, sub)), Some(Directive::MaxAge(age))) => Some((Some(age), sub)),
+                        (Some((age, None)), Some(Directive::IncludeSubdomains)) => {
+                            Some((age, Some(())))
+                        },
+                        (Some((Some(_), _)), Some(Directive::MaxAge(_))) |
+                        (Some((_, Some(_))), Some(Directive::IncludeSubdomains)) |
+                        (_, None) => None,
+                        (res, _) => res,
+                    })
+                    .and_then(|res| match res {
+                        (Some(age), sub) => Some(StrictTransportSecurity {
+                            max_age: age,
+                            include_subdomains: sub.is_some(),
+                        }),
+                        _ => None,
+                    })
+                    .ok_or_else(headers::Error::invalid)
+            })
+            .unwrap_or_else(|| Err(headers::Error::invalid()))
+    }
+
+    fn encode<E: Extend<HeaderValue>>(&self, _values: &mut E) {}
+}
+
+trait IterExt: Iterator {
+    fn just_one(&mut self) -> Option<Self::Item> {
+        let one = self.next()?;
+        match self.next() {
+            Some(_) => None,
+            None => Some(one),
+        }
+    }
+}
+
+impl<T: Iterator> IterExt for T {}

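Note: the nested condition that computes upgrade_scheme above is dense. The following is a
flattened, hypothetical restatement of the same decision; the helper name and the pre-computed
hsts_is_secure flag are illustrative and not part of the commit:

    use servo_config::pref;
    use servo_url::{Host, ServoUrl};

    // Hypothetical restatement: with network.enforce_tls.enabled set, every
    // host is upgraded except loopback and .onion hosts whose dedicated prefs
    // are still off; those, like all hosts when enforcement is disabled, fall
    // back to the HSTS-list lookup (pre-computed here as `hsts_is_secure`).
    fn should_upgrade(hsts_is_secure: bool, url: &ServoUrl) -> bool {
        let is_loopback = match url.host() {
            Some(Host::Domain(domain)) => {
                domain == "localhost" || domain.ends_with(".localhost")
            },
            Some(Host::Ipv4(ipv4)) => ipv4.is_loopback(),
            Some(Host::Ipv6(ipv6)) => ipv6.is_loopback(),
            _ => false,
        };
        let is_onion = url.domain().map_or(false, |d| d.ends_with(".onion"));

        if !pref!(network.enforce_tls.enabled) {
            return hsts_is_secure;
        }
        let exempt = (is_loopback && !pref!(network.enforce_tls.localhost)) ||
            (is_onion && !pref!(network.enforce_tls.onion));
        if exempt {
            hsts_is_secure
        } else {
            true
        }
    }
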
@@ -1382,7 +1382,7 @@ fn http_network_fetch(
         .map(|_| uuid::Uuid::new_v4().to_simple().to_string());

     if log_enabled!(log::Level::Info) {
-        info!("request for {} ({:?})", url, request.method);
+        info!("{:?} request for {}", request.method, url);
         for header in request.headers.iter() {
             info!(" - {:?}", header);
         }

@@ -1564,9 +1564,10 @@ fn http_network_fetch(

     // Substep 2

-    // TODO Determine if response was retrieved over HTTPS
     // TODO Servo needs to decide what ciphers are to be treated as "deprecated"
-    response.https_state = HttpsState::None;
+    response.https_state = match url.scheme() {
+        "https" => HttpsState::Modern,
+        _ => HttpsState::None,
+    };

     // TODO Read request

@@ -1593,6 +1594,12 @@ fn http_network_fetch(
     if credentials_flag {
         set_cookies_from_headers(&url, &response.headers, &context.state.cookie_jar);
     }
+    context
+        .state
+        .hsts_list
+        .write()
+        .unwrap()
+        .update_hsts_list_from_response(&url, &response.headers);

     // TODO these steps
     // Step 16

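Note: the call above only records an HSTS entry when the response carries a
Strict-Transport-Security header that the vendored decoder accepts. A hypothetical stand-alone
check of that decoding, assuming the private StrictTransportSecurity struct were made visible
to a test (this commit does not do that):

    use headers::HeaderMapExt;
    use http::HeaderMap;

    // Hypothetical unit test: a header carrying both directives should decode
    // into max_age = 31536000 and include_subdomains = true.
    #[test]
    fn decodes_strict_transport_security() {
        let mut headers = HeaderMap::new();
        headers.insert(
            http::header::STRICT_TRANSPORT_SECURITY,
            "max-age=31536000; includeSubDomains".parse().unwrap(),
        );
        let sts: StrictTransportSecurity = headers.typed_get().unwrap();
        assert_eq!(sts.max_age, 31536000);
        assert!(sts.include_subdomains);
    }
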
@@ -12,6 +12,7 @@ use crate::{
 use crossbeam_channel::{unbounded, Sender};
 use devtools_traits::HttpRequest as DevtoolsHttpRequest;
 use devtools_traits::HttpResponse as DevtoolsHttpResponse;
+use headers::StrictTransportSecurity;
 use headers::{AccessControlAllowCredentials, AccessControlAllowHeaders, AccessControlAllowOrigin};
 use headers::{AccessControlAllowMethods, AccessControlMaxAge, HeaderMapExt};
 use headers::{CacheControl, ContentLength, ContentType, Expires, LastModified, Pragma, UserAgent};

@@ -27,7 +28,9 @@ use net::fetch::methods::{self, CancellationListener, FetchContext};
 use net::filemanager_thread::FileManager;
 use net::hsts::HstsEntry;
 use net::test::HttpState;
-use net_traits::request::{Destination, Origin, RedirectMode, Referrer, Request, RequestMode};
+use net_traits::request::{
+    Destination, Origin, RedirectMode, Referrer, Request, RequestBuilder, RequestMode,
+};
 use net_traits::response::{CacheState, Response, ResponseBody, ResponseType};
 use net_traits::{
     FetchTaskTarget, IncludeSubdomains, NetworkError, ReferrerPolicy, ResourceFetchTiming,

@@ -680,6 +683,66 @@ fn test_fetch_with_hsts() {
     );
 }

+#[test]
+fn test_load_adds_host_to_hsts_list_when_url_is_https() {
+    let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
+        response
+            .headers_mut()
+            .typed_insert(StrictTransportSecurity::excluding_subdomains(
+                Duration::from_secs(31536000),
+            ));
+        *response.body_mut() = b"Yay!".to_vec().into();
+    };
+    let cert_path = Path::new("../../resources/self_signed_certificate_for_testing.crt")
+        .canonicalize()
+        .unwrap();
+    let key_path = Path::new("../../resources/privatekey_for_testing.key")
+        .canonicalize()
+        .unwrap();
+    let (server, mut url) = make_ssl_server(handler, cert_path.clone(), key_path.clone());
+    url.as_mut_url().set_scheme("https").unwrap();
+
+    let certs = fs::read_to_string(cert_path).expect("Couldn't find certificate file");
+    let tls_config = create_tls_config(&certs, ALPN_H2_H1);
+
+    let mut context = FetchContext {
+        state: Arc::new(HttpState::new(tls_config)),
+        user_agent: DEFAULT_USER_AGENT.into(),
+        devtools_chan: None,
+        filemanager: FileManager::new(create_embedder_proxy()),
+        cancellation_listener: Arc::new(Mutex::new(CancellationListener::new(None))),
+        timing: ServoArc::new(Mutex::new(ResourceFetchTiming::new(
+            ResourceTimingType::Navigation,
+        ))),
+    };
+
+    let mut request = RequestBuilder::new(url.clone())
+        .method(Method::GET)
+        .body(None)
+        .destination(Destination::Document)
+        .origin(url.clone().origin())
+        .pipeline_id(Some(TEST_PIPELINE_ID))
+        .build();
+
+    let response = fetch_with_context(&mut request, &mut context);
+
+    let _ = server.close();
+
+    assert!(response
+        .internal_response
+        .unwrap()
+        .status
+        .unwrap()
+        .0
+        .is_success());
+    assert!(context
+        .state
+        .hsts_list
+        .read()
+        .unwrap()
+        .is_host_secure(url.host_str().unwrap()));
+}
+
 #[test]
 fn test_fetch_with_sri_network_error() {
     static MESSAGE: &'static [u8] = b"alert('Hello, Network Error');";

@@ -543,7 +543,7 @@ fn test_load_doesnt_send_request_body_on_any_redirect() {
 }

 #[test]
-fn test_load_doesnt_add_host_to_sts_list_when_url_is_http_even_if_sts_headers_are_present() {
+fn test_load_doesnt_add_host_to_hsts_list_when_url_is_http_even_if_hsts_headers_are_present() {
     let handler = move |_: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
         response
             .headers_mut()

@@ -100,6 +100,12 @@ impl<'a> Handler for Client<'a> {
             }
         }

+        self.http_state
+            .hsts_list
+            .write()
+            .unwrap()
+            .update_hsts_list_from_response(self.resource_url, &headers);
+
         let _ = self
             .event_sender
             .send(WebSocketNetworkEvent::ConnectionEstablished {

@@ -185,6 +191,7 @@ pub fn init(
     thread::Builder::new()
         .name(format!("WebSocket connection to {}", req_builder.url))
         .spawn(move || {
+            let mut req_builder = req_builder;
             let protocols = match req_builder.mode {
                 RequestMode::WebSocket { protocols } => protocols,
                 _ => panic!(

@@ -192,6 +199,16 @@ pub fn init(
                 ),
             };

+            // https://fetch.spec.whatwg.org/#websocket-opening-handshake
+            // By standard, we should work with an http(s):// URL (req_url),
+            // but as ws-rs expects to be called with a ws(s):// URL (net_url)
+            // we upgrade ws to wss, so we don't have to convert http(s) back to ws(s).
+            http_state
+                .hsts_list
+                .read()
+                .unwrap()
+                .apply_hsts_rules(&mut req_builder.url);
+
             let scheme = req_builder.url.scheme();
             let mut req_url = req_builder.url.clone();
             if scheme == "ws" {

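Note: the inline comments above explain why the HSTS rules run against the ws(s):// form of the
request URL before ws-rs connects. A small hypothetical sketch of the effect relied on here,
assuming the URL's host is already marked secure in the list (or network.enforce_tls.enabled is on):

    use net::hsts::HstsList;
    use servo_url::ServoUrl;

    // Hypothetical: under the assumption stated above, apply_hsts_rules
    // rewrites the scheme in place, so ws-rs is handed a wss:// URL and the
    // http(s) handshake URL never needs converting back to ws(s).
    fn example_ws_upgrade(hsts_list: &HstsList) {
        let mut url = ServoUrl::parse("ws://secure.example/chat").unwrap();
        hsts_list.apply_hsts_rules(&mut url);
        assert_eq!(url.scheme(), "wss"); // holds only under the assumption above
    }
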
@@ -88,6 +88,9 @@
   "layout.writing-mode.enabled": false,
   "media.glvideo.enabled": false,
   "media.testing.enabled": false,
+  "network.enforce_tls.enabled": false,
+  "network.enforce_tls.localhost": false,
+  "network.enforce_tls.onion": false,
   "network.http-cache.disabled": false,
   "network.mime.sniff": false,
   "session-history.max-length": 20,