Mirror of https://github.com/servo/servo.git (synced 2025-08-16 02:45:36 +01:00)
Urlmageddon: Use refcounted urls more often.
parent f14e7339b5
commit 913c874cb5
161 changed files with 1044 additions and 718 deletions
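The pattern applied across these files is to replace bare `url::Url` values with the reference-counted `ServoUrl` wrapper from `components/url`, presumably so that handing a URL to another thread, cache, or message clones an `Arc` rather than copying the parsed URL. A minimal sketch of the idea, assuming a newtype over `Arc<Url>`; the helper names mirror calls visible in the diff below (`from_url`, `parse`, `as_url`), but the real `ServoUrl` API may differ:

// Hedged sketch only: not the actual components/url implementation.
use std::sync::Arc;
use url::{ParseError, Url};

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ServoUrl(Arc<Url>);

impl ServoUrl {
    pub fn from_url(url: Url) -> ServoUrl {
        ServoUrl(Arc::new(url))
    }

    pub fn parse(input: &str) -> Result<ServoUrl, ParseError> {
        Url::parse(input).map(ServoUrl::from_url)
    }

    // The real accessor appears to be fallible (the diff calls .as_url().unwrap()).
    pub fn as_url(&self) -> Option<&Url> {
        Some(&self.0)
    }

    // Forwarded accessors keep call sites such as url.scheme() unchanged.
    pub fn scheme(&self) -> &str {
        self.0.scheme()
    }
}

Cloning such a wrapper is an atomic reference-count bump, which is why the hunks below can pass URLs by value freely and use them as hash-map keys.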
@@ -34,6 +34,7 @@ profile_traits = {path = "../profile_traits"}
 rustc-serialize = "0.3"
 serde = "0.8"
 serde_derive = "0.8"
+servo_url = {path = "../url"}
 threadpool = "1.0"
 time = "0.1.17"
 unicase = "1.4.0"
@@ -11,6 +11,7 @@ use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
 use net_traits::ProgressMsg::Done;
 use net_traits::response::HttpsState;
 use resource_thread::{CancellationListener, send_error, start_sending_sniffed_opt};
+use servo_url::ServoUrl;
 use std::io;
 use std::sync::Arc;
 use url::Url;
@@ -20,7 +21,7 @@ fn url_from_non_relative_scheme(load_data: &mut LoadData, filename: &str) -> io:
     let mut path = try!(resources_dir_path());
     path.push(filename);
     assert!(path.exists());
-    load_data.url = Url::from_file_path(&*path).unwrap();
+    load_data.url = ServoUrl::from_url(Url::from_file_path(&*path).unwrap());
     Ok(())
 }

@@ -16,9 +16,9 @@ use net_traits::filemanager_thread::{FileManagerThreadMsg, ReadFileProgress};
 use net_traits::response::HttpsState;
 use resource_thread::{send_error, start_sending_sniffed_opt};
 use resource_thread::CancellationListener;
+use servo_url::ServoUrl;
 use std::boxed::FnBox;
 use std::sync::Arc;
-use url::Url;
 use util::thread::spawn_named;

 // TODO: Check on GET
@@ -124,7 +124,7 @@ fn load_blob<UI: 'static + UIProvider>
 /// https://fetch.spec.whatwg.org/#concept-basic-fetch (partial)
 // TODO: make async.
 pub fn load_blob_sync<UI: 'static + UIProvider>
-            (url: Url,
+            (url: ServoUrl,
              filemanager: FileManager<UI>)
              -> Result<(Headers, Vec<u8>), NetworkError> {
     let (id, origin) = match parse_blob_url(&url) {
@@ -6,13 +6,13 @@ use file_loader;
 use mime_classifier::MimeClassifier;
 use net_traits::{LoadConsumer, LoadData, NetworkError};
 use resource_thread::{CancellationListener, send_error};
+use servo_url::ServoUrl;
 use std::fs::canonicalize;
 use std::sync::Arc;
-use url::Url;
 use url::percent_encoding::percent_decode;
 use util::resource_files::resources_dir_path;

-pub fn resolve_chrome_url(url: &Url) -> Result<Url, ()> {
+pub fn resolve_chrome_url(url: &ServoUrl) -> Result<ServoUrl, ()> {
     assert_eq!(url.scheme(), "chrome");
     if url.host_str() != Some("resources") {
         return Err(())
@@ -29,7 +29,7 @@ pub fn resolve_chrome_url(url: &Url) -> Result<Url, ()> {
     }
     match canonicalize(path) {
         Ok(ref path) if path.starts_with(&resources) && path.exists() => {
-            Ok(Url::from_file_path(path).unwrap())
+            Ok(ServoUrl::from_file_path(path).unwrap())
         }
         _ => Err(())
     }
@@ -8,10 +8,10 @@
 use cookie_rs;
 use net_traits::CookieSource;
 use net_traits::pub_domains::is_pub_domain;
+use servo_url::ServoUrl;
 use std::borrow::ToOwned;
 use std::net::{Ipv4Addr, Ipv6Addr};
 use time::{Tm, now, at, Duration};
-use url::Url;

 /// A stored cookie that wraps the definition in cookie-rs. This is used to implement
 /// various behaviours defined in the spec that rely on an associated request URL,
@@ -28,7 +28,7 @@ pub struct Cookie {

 impl Cookie {
     /// http://tools.ietf.org/html/rfc6265#section-5.3
-    pub fn new_wrapped(mut cookie: cookie_rs::Cookie, request: &Url, source: CookieSource)
+    pub fn new_wrapped(mut cookie: cookie_rs::Cookie, request: &ServoUrl, source: CookieSource)
                        -> Option<Cookie> {
         // Step 3
         let (persistent, expiry_time) = match (&cookie.max_age, &cookie.expires) {
@@ -145,7 +145,7 @@ impl Cookie {
     }

     // http://tools.ietf.org/html/rfc6265#section-5.4 step 1
-    pub fn appropriate_for_url(&self, url: &Url, source: CookieSource) -> bool {
+    pub fn appropriate_for_url(&self, url: &ServoUrl, source: CookieSource) -> bool {
         let domain = url.host_str();
         if self.host_only {
             if self.cookie.domain.as_ref().map(String::as_str) != domain {
@@ -8,8 +8,8 @@
 use cookie::Cookie;
 use cookie_rs;
 use net_traits::CookieSource;
+use servo_url::ServoUrl;
 use std::cmp::Ordering;
-use url::Url;

 #[derive(Clone, RustcDecodable, RustcEncodable)]
 pub struct CookieStorage {
@@ -84,7 +84,7 @@ impl CookieStorage {
     }

     // http://tools.ietf.org/html/rfc6265#section-5.4
-    pub fn cookies_for_url(&mut self, url: &Url, source: CookieSource) -> Option<String> {
+    pub fn cookies_for_url(&mut self, url: &ServoUrl, source: CookieSource) -> Option<String> {
         let filterer = |c: &&mut Cookie| -> bool {
             info!(" === SENT COOKIE : {} {} {:?} {:?}",
                   c.cookie.name, c.cookie.value, c.cookie.domain, c.cookie.path);
@@ -116,7 +116,7 @@ impl CookieStorage {
         }
     }

-    pub fn cookies_data_for_url<'a>(&'a mut self, url: &'a Url,
+    pub fn cookies_data_for_url<'a>(&'a mut self, url: &'a ServoUrl,
                                     source: CookieSource) -> Box<Iterator<Item=cookie_rs::Cookie> + 'a> {
         Box::new(self.cookies.iter_mut().filter(move |c| { c.appropriate_for_url(url, source) }).map(|c| {
             c.touch();
@@ -9,8 +9,9 @@ use net_traits::LoadConsumer;
 use net_traits::ProgressMsg::{Done, Payload};
 use resource_thread::{CancellationListener, send_error, start_sending_sniffed_opt};
 use rustc_serialize::base64::FromBase64;
+use servo_url::ServoUrl;
 use std::sync::Arc;
-use url::{Position, Url};
+use url::Position;
 use url::percent_encoding::percent_decode;

 pub fn factory(load_data: LoadData,
@@ -31,10 +32,10 @@ pub enum DecodeError {

 pub type DecodeData = (Mime, Vec<u8>);

-pub fn decode(url: &Url) -> Result<DecodeData, DecodeError> {
-    assert!(url.scheme() == "data");
+pub fn decode(url: &ServoUrl) -> Result<DecodeData, DecodeError> {
+    assert_eq!(url.scheme(), "data");
     // Split out content type and data.
-    let parts: Vec<&str> = url[Position::BeforePath..Position::AfterQuery].splitn(2, ',').collect();
+    let parts: Vec<&str> = url.as_url().unwrap()[Position::BeforePath..Position::AfterQuery].splitn(2, ',').collect();
     if parts.len() != 2 {
         return Err(DecodeError::InvalidDataUri);
     }
@@ -61,7 +62,7 @@ pub fn decode(url: &Url) -> Result<DecodeData, DecodeError> {
     if is_base64 {
         // FIXME(#2909): It’s unclear what to do with non-alphabet characters,
         // but Acid 3 apparently depends on spaces being ignored.
-        bytes = bytes.into_iter().filter(|&b| b != ' ' as u8).collect::<Vec<u8>>();
+        bytes = bytes.into_iter().filter(|&b| b != b' ').collect::<Vec<u8>>();
         match bytes.from_base64() {
             Err(..) => return Err(DecodeError::NonBase64DataUri),
             Ok(data) => bytes = data,
@@ -11,9 +11,9 @@

 use hyper::method::Method;
 use net_traits::request::{CredentialsMode, Origin, Request};
+use servo_url::ServoUrl;
 use std::ascii::AsciiExt;
 use time::{self, Timespec};
-use url::Url;

 /// Union type for CORS cache entries
 ///
@@ -44,7 +44,7 @@ impl HeaderOrMethod {
 #[derive(Clone, Debug)]
 pub struct CorsCacheEntry {
     pub origin: Origin,
-    pub url: Url,
+    pub url: ServoUrl,
     pub max_age: u32,
     pub credentials: bool,
     pub header_or_method: HeaderOrMethod,
@@ -52,7 +52,7 @@ pub struct CorsCacheEntry {
 }

 impl CorsCacheEntry {
-    fn new(origin: Origin, url: Url, max_age: u32, credentials: bool,
+    fn new(origin: Origin, url: ServoUrl, max_age: u32, credentials: bool,
            header_or_method: HeaderOrMethod) -> CorsCacheEntry {
         CorsCacheEntry {
             origin: origin,
@@ -29,6 +29,7 @@ use net_traits::request::{RedirectMode, Referrer, Request, RequestMode, Response
 use net_traits::request::{Type, Origin, Window};
 use net_traits::response::{HttpsState, Response, ResponseBody, ResponseType};
 use resource_thread::CancellationListener;
+use servo_url::ServoUrl;
 use std::borrow::Cow;
 use std::collections::HashSet;
 use std::error::Error;
@@ -40,7 +41,7 @@ use std::ops::Deref;
 use std::rc::Rc;
 use std::sync::mpsc::{channel, Sender, Receiver};
 use unicase::UniCase;
-use url::{Origin as UrlOrigin, Url};
+use url::{Origin as UrlOrigin};
 use util::thread::spawn_named;
 use uuid;

@@ -1305,7 +1306,7 @@ fn cors_check(request: Rc<Request>, response: &Response) -> Result<(), ()> {
     Err(())
 }

-fn has_credentials(url: &Url) -> bool {
+fn has_credentials(url: &ServoUrl) -> bool {
     !url.username().is_empty() || url.password().is_some()
 }

@@ -10,13 +10,13 @@ use net_traits::{LoadConsumer, LoadData, LoadOrigin, Metadata, NetworkError, Ref
 use net_traits::ProgressMsg::{Done, Payload};
 use resource_thread::{CancellationListener, ProgressSender};
 use resource_thread::{send_error, start_sending_sniffed_opt};
+use servo_url::ServoUrl;
 use std::borrow::ToOwned;
 use std::error::Error;
 use std::fs::File;
 use std::io::Read;
 use std::path::Path;
 use std::sync::Arc;
-use url::Url;
 use util::thread::spawn_named;

 static READ_SIZE: usize = 8192;
@@ -33,7 +33,7 @@ enum LoadResult {

 struct FileLoadOrigin;
 impl LoadOrigin for FileLoadOrigin {
-    fn referrer_url(&self) -> Option<Url> {
+    fn referrer_url(&self) -> Option<ServoUrl> {
         None
     }
     fn referrer_policy(&self) -> Option<ReferrerPolicy> {
@@ -97,7 +97,7 @@ pub fn factory(load_data: LoadData,
                 // this should be one of the three errors listed in
                 // http://doc.rust-lang.org/std/fs/struct.OpenOptions.html#method.open
                 // but, we'll go for a "file not found!"
-                let url = Url::parse("about:not-found").unwrap();
+                let url = ServoUrl::parse("about:not-found").unwrap();
                 let load_data_404 = LoadData::new(load_data.context, url, &FileLoadOrigin);
                 about_loader::factory(load_data_404, senders, classifier, cancel_listener);
                 return;
@@ -39,6 +39,7 @@ use openssl::ssl::error::{OpensslError, SslError};
 use profile_traits::time::{ProfilerCategory, ProfilerChan, TimerMetadata, profile};
 use profile_traits::time::{TimerMetadataFrameType, TimerMetadataReflowType};
 use resource_thread::{AuthCache, AuthCacheEntry, CancellationListener, send_error, start_sending_sniffed_opt};
+use servo_url::ServoUrl;
 use std::borrow::{Cow, ToOwned};
 use std::boxed::FnBox;
 use std::collections::HashSet;
@@ -52,7 +53,7 @@ use time;
 use time::Tm;
 #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
 use tinyfiledialogs;
-use url::{Position, Url, Origin};
+use url::{Position, Origin};
 use util::thread::spawn_named;
 use uuid;

@@ -246,7 +247,7 @@ impl Read for ReadableCustomResponse {
 pub trait HttpRequestFactory {
     type R: HttpRequest;

-    fn create(&self, url: Url, method: Method, headers: Headers) -> Result<Self::R, LoadError>;
+    fn create(&self, url: ServoUrl, method: Method, headers: Headers) -> Result<Self::R, LoadError>;
 }

 pub struct NetworkHttpRequestFactory {
@@ -256,9 +257,11 @@ pub struct NetworkHttpRequestFactory {
 impl HttpRequestFactory for NetworkHttpRequestFactory {
     type R = WrappedHttpRequest;

-    fn create(&self, url: Url, method: Method, headers: Headers)
+    fn create(&self, url: ServoUrl, method: Method, headers: Headers)
               -> Result<WrappedHttpRequest, LoadError> {
-        let connection = Request::with_connector(method, url.clone(), &*self.connector);
+        let connection = Request::with_connector(method,
+                                                 url.clone().into_url().unwrap(),
+                                                 &*self.connector);

         if let Err(HttpError::Ssl(ref error)) = connection {
             let error: &(Error + Send + 'static) = &**error;
@@ -308,7 +311,7 @@ impl HttpRequest for WrappedHttpRequest {
     type R = WrappedHttpResponse;

     fn send(self, body: &Option<Vec<u8>>) -> Result<WrappedHttpResponse, LoadError> {
-        let url = self.request.url.clone();
+        let url = ServoUrl::from_url(self.request.url.clone());
         let mut request_writer = match self.request.start() {
             Ok(streaming) => streaming,
             Err(e) => return Err(LoadError::new(url, LoadErrorType::Connection { reason: e.description().to_owned() })),
@@ -335,12 +338,12 @@ impl HttpRequest for WrappedHttpRequest {

 #[derive(Debug)]
 pub struct LoadError {
-    pub url: Url,
+    pub url: ServoUrl,
     pub error: LoadErrorType,
 }

 impl LoadError {
-    pub fn new(url: Url, error: LoadErrorType) -> LoadError {
+    pub fn new(url: ServoUrl, error: LoadErrorType) -> LoadError {
         LoadError {
             url: url,
             error: error,
@@ -429,7 +432,7 @@ pub fn set_default_accept_language(headers: &mut Headers) {
 }

 /// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-state-no-referrer-when-downgrade
-fn no_referrer_when_downgrade_header(referrer_url: Url, url: Url) -> Option<Url> {
+fn no_referrer_when_downgrade_header(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
     if referrer_url.scheme() == "https" && url.scheme() != "https" {
         return None;
     }
@@ -437,7 +440,7 @@ fn no_referrer_when_downgrade_header(referrer_url: Url, url: Url) -> Option<Url>
 }

 /// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin
-fn strict_origin(referrer_url: Url, url: Url) -> Option<Url> {
+fn strict_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
     if referrer_url.scheme() == "https" && url.scheme() != "https" {
         return None;
     }
@@ -445,7 +448,7 @@ fn strict_origin(referrer_url: Url, url: Url) -> Option<Url> {
 }

 /// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin-when-cross-origin
-fn strict_origin_when_cross_origin(referrer_url: Url, url: Url) -> Option<Url> {
+fn strict_origin_when_cross_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
     if referrer_url.scheme() == "https" && url.scheme() != "https" {
         return None;
     }
@@ -454,14 +457,17 @@ fn strict_origin_when_cross_origin(referrer_url: Url, url: Url) -> Option<Url> {
 }

 /// https://w3c.github.io/webappsec-referrer-policy/#strip-url
-fn strip_url(mut referrer_url: Url, origin_only: bool) -> Option<Url> {
+fn strip_url(mut referrer_url: ServoUrl, origin_only: bool) -> Option<ServoUrl> {
     if referrer_url.scheme() == "https" || referrer_url.scheme() == "http" {
-        referrer_url.set_username("").unwrap();
-        referrer_url.set_password(None).unwrap();
-        referrer_url.set_fragment(None);
-        if origin_only {
-            referrer_url.set_path("");
-            referrer_url.set_query(None);
+        {
+            let referrer = referrer_url.as_mut_url().unwrap();
+            referrer.set_username("").unwrap();
+            referrer.set_password(None).unwrap();
+            referrer.set_fragment(None);
+            if origin_only {
+                referrer.set_path("");
+                referrer.set_query(None);
+            }
         }
         return Some(referrer_url);
     }
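The extra braces around `as_mut_url()` in the hunk above end the mutable borrow of the wrapper's interior before the wrapper itself is moved into `Some(...)`; with the borrow checker of that era the borrow would otherwise last to the end of the function. A generic illustration of that scoping trick, using hypothetical types rather than Servo's:

// Hypothetical illustration of scoping a mutable borrow before moving the owner.
struct Wrapper(String);

impl Wrapper {
    fn as_mut_inner(&mut self) -> Option<&mut String> {
        Some(&mut self.0)
    }
}

fn shorten(mut w: Wrapper) -> Option<Wrapper> {
    {
        // The mutable borrow of the interior lives only inside this block.
        let inner = w.as_mut_inner().unwrap();
        inner.truncate(3);
    }
    // The borrow has ended, so `w` can be moved out.
    Some(w)
}

fn main() {
    assert_eq!(shorten(Wrapper("abcdef".into())).unwrap().0, "abc");
}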
@@ -471,8 +477,8 @@ fn strip_url(mut referrer_url: Url, origin_only: bool) -> Option<Url> {
 /// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
 pub fn determine_request_referrer(headers: &mut Headers,
                                   referrer_policy: Option<ReferrerPolicy>,
-                                  referrer_url: Option<Url>,
-                                  url: Url) -> Option<Url> {
+                                  referrer_url: Option<ServoUrl>,
+                                  url: ServoUrl) -> Option<ServoUrl> {
     //TODO - algorithm step 2 not addressed
     assert!(!headers.has::<Referer>());
     if let Some(ref_url) = referrer_url {
@@ -492,7 +498,7 @@ pub fn determine_request_referrer(headers: &mut Headers,
     return None;
 }

-pub fn set_request_cookies(url: &Url, headers: &mut Headers, cookie_jar: &Arc<RwLock<CookieStorage>>) {
+pub fn set_request_cookies(url: &ServoUrl, headers: &mut Headers, cookie_jar: &Arc<RwLock<CookieStorage>>) {
     let mut cookie_jar = cookie_jar.write().unwrap();
     if let Some(cookie_list) = cookie_jar.cookies_for_url(url, CookieSource::HTTP) {
         let mut v = Vec::new();
@@ -502,7 +508,7 @@ pub fn set_request_cookies(url: &Url, headers: &mut Headers, cookie_jar: &Arc<Rw
 }

 fn set_cookie_for_url(cookie_jar: &Arc<RwLock<CookieStorage>>,
-                      request: &Url,
+                      request: &ServoUrl,
                       cookie_val: String) {
     let mut cookie_jar = cookie_jar.write().unwrap();
     let source = CookieSource::HTTP;
@@ -517,7 +523,7 @@ fn set_cookie_for_url(cookie_jar: &Arc<RwLock<CookieStorage>>,
     }
 }

-pub fn set_cookies_from_headers(url: &Url, headers: &Headers, cookie_jar: &Arc<RwLock<CookieStorage>>) {
+pub fn set_cookies_from_headers(url: &ServoUrl, headers: &Headers, cookie_jar: &Arc<RwLock<CookieStorage>>) {
     if let Some(cookies) = headers.get_raw("set-cookie") {
         for cookie in cookies.iter() {
             if let Ok(cookie_value) = String::from_utf8(cookie.clone()) {
@@ -529,7 +535,7 @@ pub fn set_cookies_from_headers(url: &Url, headers: &Headers, cookie_jar: &Arc<R
     }
 }

-fn update_sts_list_from_response(url: &Url, response: &HttpResponse, hsts_list: &Arc<RwLock<HstsList>>) {
+fn update_sts_list_from_response(url: &ServoUrl, response: &HttpResponse, hsts_list: &Arc<RwLock<HstsList>>) {
     if url.scheme() != "https" {
         return;
     }
@@ -613,7 +619,7 @@ enum Decoder {
 }

 fn prepare_devtools_request(request_id: String,
-                            url: Url,
+                            url: ServoUrl,
                             method: Method,
                             headers: Headers,
                             body: Option<Vec<u8>>,
@@ -656,7 +662,7 @@ pub fn send_response_to_devtools(devtools_chan: &Sender<DevtoolsControlMsg>,
     let _ = devtools_chan.send(DevtoolsControlMsg::FromChrome(msg));
 }

-fn request_must_be_secured(url: &Url, hsts_list: &Arc<RwLock<HstsList>>) -> bool {
+fn request_must_be_secured(url: &ServoUrl, hsts_list: &Arc<RwLock<HstsList>>) -> bool {
     match url.domain() {
         Some(domain) => hsts_list.read().unwrap().is_host_secure(domain),
         None => false
@@ -664,10 +670,10 @@ fn request_must_be_secured(url: &Url, hsts_list: &Arc<RwLock<HstsList>>) -> bool
 }

 pub fn modify_request_headers(headers: &mut Headers,
-                              url: &Url,
+                              url: &ServoUrl,
                               user_agent: &str,
                               referrer_policy: Option<ReferrerPolicy>,
-                              referrer_url: &mut Option<Url>) {
+                              referrer_url: &mut Option<ServoUrl>) {
     // Ensure that the host header is set from the original url
     let host = Host {
         hostname: url.host_str().unwrap().to_owned(),
@@ -700,7 +706,7 @@ pub fn modify_request_headers(headers: &mut Headers,
 }

 fn set_auth_header(headers: &mut Headers,
-                   url: &Url,
+                   url: &ServoUrl,
                    auth_cache: &Arc<RwLock<AuthCache>>) {
     if !headers.has::<Authorization<Basic>>() {
         if let Some(auth) = auth_from_url(url) {
@@ -723,7 +729,7 @@ pub fn auth_from_cache(auth_cache: &Arc<RwLock<AuthCache>>, origin: &Origin) ->
     }
 }

-fn auth_from_url(doc_url: &Url) -> Option<Authorization<Basic>> {
+fn auth_from_url(doc_url: &ServoUrl) -> Option<Authorization<Basic>> {
     let username = doc_url.username();
     if username != "" {
         Some(Authorization(Basic {
@@ -736,7 +742,7 @@ fn auth_from_url(doc_url: &Url) -> Option<Authorization<Basic>> {
 }

 pub fn process_response_headers(response: &HttpResponse,
-                                url: &Url,
+                                url: &ServoUrl,
                                 cookie_jar: &Arc<RwLock<CookieStorage>>,
                                 hsts_list: &Arc<RwLock<HstsList>>,
                                 load_data: &LoadData) {
@@ -755,7 +761,7 @@ pub fn process_response_headers(response: &HttpResponse,
 }

 pub fn obtain_response<A>(request_factory: &HttpRequestFactory<R=A>,
-                          url: &Url,
+                          url: &ServoUrl,
                           method: &Method,
                           request_headers: &Headers,
                           cancel_listener: &CancellationListener,
@@ -925,7 +931,7 @@ pub fn load<A, B>(load_data: &LoadData,
     // the source rather than rendering the contents of the URL.
     let viewing_source = doc_url.scheme() == "view-source";
     if viewing_source {
-        doc_url = Url::parse(&load_data.url[Position::BeforeUsername..]).unwrap();
+        doc_url = ServoUrl::parse(&load_data.url.as_url().unwrap()[Position::BeforeUsername..]).unwrap();
     }

     // Loop to handle redirects.
@@ -934,7 +940,7 @@ pub fn load<A, B>(load_data: &LoadData,

         if doc_url.scheme() == "http" && request_must_be_secured(&doc_url, &http_state.hsts_list) {
             info!("{} is in the strict transport security list, requesting secure host", doc_url);
-            doc_url = secure_url(&doc_url);
+            doc_url = ServoUrl::from_url(secure_url(&doc_url.as_url().unwrap()));
         }

         if iters > 20 {
@@ -958,7 +964,7 @@ pub fn load<A, B>(load_data: &LoadData,
                 .unwrap_or(false);
             let load_type = if same_origin { LoadType::FirstParty } else { LoadType::ThirdParty };
             let actions = process_rules_for_request(rules, &CBRequest {
-                url: &doc_url,
+                url: doc_url.as_url().unwrap(),
                 resource_type: to_resource_type(&load_data.context),
                 load_type: load_type,
             });
@@ -11,6 +11,7 @@ use net_traits::image_cache_thread::{ImageCacheChan, ImageCacheCommand, ImageCac
 use net_traits::image_cache_thread::{ImageCacheResult, ImageOrMetadataAvailable, ImageResponse, UsePlaceholder};
 use net_traits::image_cache_thread::ImageResponder;
 use net_traits::request::{Destination, RequestInit, Type as RequestType};
+use servo_url::ServoUrl;
 use std::borrow::ToOwned;
 use std::collections::HashMap;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
@@ -20,7 +21,6 @@ use std::mem;
 use std::sync::Arc;
 use std::sync::mpsc::{Receiver, Sender, channel};
 use threadpool::ThreadPool;
-use url::Url;
 use util::resource_files::resources_dir_path;
 use util::thread::spawn_named;
 use webrender_traits;
@@ -49,7 +49,7 @@ struct PendingLoad {

     // The url being loaded. Do not forget that this may be several Mb
     // if we are loading a data: url.
-    url: Arc<Url>
+    url: ServoUrl,
 }

 enum LoadResult {
@@ -59,7 +59,7 @@ enum LoadResult {
 }

 impl PendingLoad {
-    fn new(url: Arc<Url>) -> PendingLoad {
+    fn new(url: ServoUrl) -> PendingLoad {
         PendingLoad {
             bytes: vec!(),
             metadata: None,
@@ -83,7 +83,7 @@ struct AllPendingLoads {

     // Get a load key from its url. Used ony when starting and
     // finishing a load or when adding a new listener.
-    url_to_load_key: HashMap<Arc<Url>, LoadKey>,
+    url_to_load_key: HashMap<ServoUrl, LoadKey>,

     // A counter used to generate instances of LoadKey
     keygen: LoadKeyGenerator,
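Because the refcounted wrapper can implement `Eq` and `Hash` by delegating to the underlying URL, it works directly as a `HashMap` key, which is what lets this cache drop its ad-hoc `Arc<Url>` keys here. A small usage sketch under that assumption, reusing the hypothetical `ServoUrl` newtype from the note at the top and a stand-in `LoadKey`:

use std::collections::HashMap;
use url::Url;

// LoadKey is a stand-in for the cache's real key type.
type LoadKey = u64;

fn demo() {
    // Assumes the ServoUrl(Arc<Url>) sketch from the note at the top of this page.
    let url = ServoUrl::from_url(Url::parse("https://servo.org/").unwrap());
    let mut url_to_load_key: HashMap<ServoUrl, LoadKey> = HashMap::new();
    // Cloning the key only bumps the Arc; no URL re-parse or string copy.
    url_to_load_key.insert(url.clone(), 1);
    assert_eq!(url_to_load_key.get(&url), Some(&1));
}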
@@ -118,7 +118,7 @@ impl AllPendingLoads {
     }

     // get a PendingLoad from its url. When possible, prefer `get_by_key_mut`.
-    fn get_by_url(&self, url: &Url) -> Option<&PendingLoad> {
+    fn get_by_url(&self, url: &ServoUrl) -> Option<&PendingLoad> {
         self.url_to_load_key.get(url).
             and_then(|load_key|
                 self.loads.get(load_key)
@@ -133,7 +133,7 @@ impl AllPendingLoads {
         })
     }

-    fn get_cached(&mut self, url: Arc<Url>) -> (CacheResult, LoadKey, &mut PendingLoad) {
+    fn get_cached(&mut self, url: ServoUrl) -> (CacheResult, LoadKey, &mut PendingLoad) {
         match self.url_to_load_key.entry(url.clone()) {
             Occupied(url_entry) => {
                 let load_key = url_entry.get();
@@ -255,7 +255,7 @@ struct ImageCache {
     pending_loads: AllPendingLoads,

     // Images that have finished loading (successful or not)
-    completed_loads: HashMap<Arc<Url>, CompletedLoad>,
+    completed_loads: HashMap<ServoUrl, CompletedLoad>,

     // The placeholder image used when an image fails to load
     placeholder_image: Option<Arc<Image>>,
@@ -498,7 +498,7 @@ impl ImageCache {
         };

         let completed_load = CompletedLoad::new(image_response.clone());
-        self.completed_loads.insert(pending_load.url, completed_load);
+        self.completed_loads.insert(pending_load.url.into(), completed_load);

         for listener in pending_load.listeners {
             listener.notify(image_response.clone());
@@ -511,23 +511,21 @@ impl ImageCache {
     // that image metadata is available, possibly before the image has finished
     // loading.
     fn request_image(&mut self,
-                     url: Url,
+                     url: ServoUrl,
                      result_chan: ImageCacheChan,
                      responder: Option<ImageResponder>,
                      send_metadata_msg: bool) {
         let image_listener = ImageListener::new(result_chan, responder, send_metadata_msg);
-        // Let's avoid copying url everywhere.
-        let ref_url = Arc::new(url);

         // Check if already completed
-        match self.completed_loads.get(&ref_url) {
+        match self.completed_loads.get(&url) {
             Some(completed_load) => {
                 // It's already completed, return a notify straight away
                 image_listener.notify(completed_load.image_response.clone());
             }
             None => {
                 // Check if the load is already pending
-                let (cache_result, load_key, mut pending_load) = self.pending_loads.get_cached(ref_url.clone());
+                let (cache_result, load_key, mut pending_load) = self.pending_loads.get_cached(url.clone());
                 pending_load.add_listener(image_listener);
                 match cache_result {
                     CacheResult::Miss => {
@@ -535,11 +533,13 @@ impl ImageCache {
                         // the resource thread.
                         // https://html.spec.whatwg.org/multipage/#update-the-image-data
                         // step 12.
+                        //
+                        // TODO(emilio): ServoUrl in more places please!
                         let request = RequestInit {
-                            url: (*ref_url).clone(),
+                            url: url.clone(),
                             type_: RequestType::Image,
                             destination: Destination::Image,
-                            origin: (*ref_url).clone(),
+                            origin: url.clone(),
                             .. RequestInit::default()
                         };

@@ -578,9 +578,9 @@ impl ImageCache {
     }

     fn get_image_if_available(&mut self,
-                              url: Url,
-                              placeholder: UsePlaceholder, )
-                              -> Result<Arc<Image>, ImageState> {
+                              url: ServoUrl,
+                              placeholder: UsePlaceholder, )
+                              -> Result<Arc<Image>, ImageState> {
         let img_or_metadata = self.get_image_or_meta_if_available(url, placeholder);
         match img_or_metadata {
             Ok(ImageOrMetadataAvailable::ImageAvailable(image)) => Ok(image),
@@ -590,7 +590,7 @@ impl ImageCache {
     }

     fn get_image_or_meta_if_available(&mut self,
-                                      url: Url,
+                                      url: ServoUrl,
                                       placeholder: UsePlaceholder)
                                       -> Result<ImageOrMetadataAvailable, ImageState> {
         match self.completed_loads.get(&url) {
@@ -624,9 +624,9 @@ impl ImageCache {
     }

     fn store_decode_image(&mut self,
-                          ref_url: Url,
+                          ref_url: ServoUrl,
                           loaded_bytes: Vec<u8>) {
-        let (cache_result, load_key, _) = self.pending_loads.get_cached(Arc::new(ref_url));
+        let (cache_result, load_key, _) = self.pending_loads.get_cached(ref_url.clone());
         assert!(cache_result == CacheResult::Miss);
         let action = ResponseAction::DataAvailable(loaded_bytes);
         let _ = self.progress_sender.send(ResourceLoadInfo {
@@ -36,6 +36,7 @@ extern crate profile_traits;
 extern crate rustc_serialize;
 #[macro_use]
 extern crate serde_derive;
+extern crate servo_url;
 extern crate threadpool;
 extern crate time;
 #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
@@ -35,6 +35,7 @@ use net_traits::storage_thread::StorageThreadMsg;
 use profile_traits::time::ProfilerChan;
 use rustc_serialize::{Decodable, Encodable};
 use rustc_serialize::json;
+use servo_url::ServoUrl;
 use std::borrow::{Cow, ToOwned};
 use std::boxed::FnBox;
 use std::cell::Cell;
@@ -48,7 +49,6 @@ use std::rc::Rc;
 use std::sync::{Arc, RwLock};
 use std::sync::mpsc::{Receiver, Sender, channel};
 use storage_thread::StorageThreadFactory;
-use url::Url;
 use util::prefs::PREFS;
 use util::thread::spawn_named;
 use websocket_loader;
@@ -76,7 +76,7 @@ impl ProgressSender {
     }
 }

-pub fn send_error(url: Url, err: NetworkError, start_chan: LoadConsumer) {
+pub fn send_error(url: ServoUrl, err: NetworkError, start_chan: LoadConsumer) {
     let mut metadata: Metadata = Metadata::default(url);
     metadata.status = None;

@@ -477,7 +477,7 @@ impl CoreResourceManager {
     }

     fn set_cookies_for_url(&mut self,
-                           request: Url,
+                           request: ServoUrl,
                            cookie_list: String,
                            source: CookieSource,
                            resource_group: &ResourceGroup) {
@@ -492,7 +492,7 @@ impl CoreResourceManager {
         }
     }

-    fn set_cookies_for_url_with_data(&mut self, request: Url, cookie: cookie_rs::Cookie, source: CookieSource,
+    fn set_cookies_for_url_with_data(&mut self, request: ServoUrl, cookie: cookie_rs::Cookie, source: CookieSource,
                                      resource_group: &ResourceGroup) {
         if let Some(cookie) = cookie::Cookie::new_wrapped(cookie, &request, source) {
             let mut cookie_jar = resource_group.cookie_jar.write().unwrap();
@@ -5,11 +5,11 @@
 use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
 use net_traits::storage_thread::{StorageThreadMsg, StorageType};
 use resource_thread;
+use servo_url::ServoUrl;
 use std::borrow::ToOwned;
 use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::path::PathBuf;
-use url::Url;
 use util::thread::spawn_named;

 const QUOTA_SIZE_LIMIT: usize = 5 * 1024 * 1024;
@@ -105,7 +105,7 @@ impl StorageManager {
         }
     }

-    fn length(&self, sender: IpcSender<usize>, url: Url, storage_type: StorageType) {
+    fn length(&self, sender: IpcSender<usize>, url: ServoUrl, storage_type: StorageType) {
         let origin = self.origin_as_string(url);
         let data = self.select_data(storage_type);
         sender.send(data.get(&origin).map_or(0, |&(_, ref entry)| entry.len())).unwrap();
@@ -113,7 +113,7 @@ impl StorageManager {

     fn key(&self,
            sender: IpcSender<Option<String>>,
-           url: Url,
+           url: ServoUrl,
            storage_type: StorageType,
            index: u32) {
         let origin = self.origin_as_string(url);
@@ -126,7 +126,7 @@ impl StorageManager {

     fn keys(&self,
             sender: IpcSender<Vec<String>>,
-            url: Url,
+            url: ServoUrl,
             storage_type: StorageType) {
         let origin = self.origin_as_string(url);
         let data = self.select_data(storage_type);
@@ -142,7 +142,7 @@ impl StorageManager {
     /// exceeding the quota limit
     fn set_item(&mut self,
                 sender: IpcSender<Result<(bool, Option<String>), ()>>,
-                url: Url,
+                url: ServoUrl,
                 storage_type: StorageType,
                 name: String,
                 value: String) {
@@ -191,7 +191,7 @@ impl StorageManager {

     fn request_item(&self,
                     sender: IpcSender<Option<String>>,
-                    url: Url,
+                    url: ServoUrl,
                     storage_type: StorageType,
                     name: String) {
         let origin = self.origin_as_string(url);
@@ -204,7 +204,7 @@ impl StorageManager {
     /// Sends Some(old_value) in case there was a previous value with the key name, otherwise sends None
     fn remove_item(&mut self,
                    sender: IpcSender<Option<String>>,
-                   url: Url,
+                   url: ServoUrl,
                    storage_type: StorageType,
                    name: String) {
         let origin = self.origin_as_string(url);
@@ -218,7 +218,7 @@ impl StorageManager {
         sender.send(old_value).unwrap();
     }

-    fn clear(&mut self, sender: IpcSender<bool>, url: Url, storage_type: StorageType) {
+    fn clear(&mut self, sender: IpcSender<bool>, url: ServoUrl, storage_type: StorageType) {
         let origin = self.origin_as_string(url);
         let data = self.select_data_mut(storage_type);
         sender.send(data.get_mut(&origin)
@@ -232,7 +232,7 @@ impl StorageManager {
             }})).unwrap();
     }

-    fn origin_as_string(&self, url: Url) -> String {
+    fn origin_as_string(&self, url: ServoUrl) -> String {
         url.origin().ascii_serialization()
     }
 }
@@ -9,13 +9,13 @@ use net_traits::{WebSocketCommunicate, WebSocketConnectData, WebSocketDomAction,
 use net_traits::MessageData;
 use net_traits::hosts::replace_hosts;
 use net_traits::unwrap_websocket_protocol;
+use servo_url::ServoUrl;
 use std::ascii::AsciiExt;
 use std::sync::{Arc, Mutex, RwLock};
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::thread;
 use util::thread::spawn_named;
 use websocket::{Client, Message};
-use websocket::client::request::Url;
 use websocket::header::{Headers, Origin, WebSocketProtocol};
 use websocket::message::Type;
 use websocket::receiver::Receiver;
@@ -27,7 +27,7 @@ use websocket::ws::sender::Sender as Sender_Object;
 use websocket::ws::util::url::parse_url;

 /// *Establish a WebSocket Connection* as defined in RFC 6455.
-fn establish_a_websocket_connection(resource_url: &Url, net_url: (Host, String, bool),
+fn establish_a_websocket_connection(resource_url: &ServoUrl, net_url: (Host, String, bool),
                                     origin: String, protocols: Vec<String>,
                                     cookie_jar: Arc<RwLock<CookieStorage>>)
                                     -> WebSocketResult<(Headers, Sender<WebSocketStream>, Receiver<WebSocketStream>)> {
@@ -71,7 +71,7 @@ pub fn init(connect: WebSocketCommunicate, connect_data: WebSocketConnectData, c

     // URL that we actually fetch from the network, after applying the replacements
     // specified in the hosts file.
-    let net_url_result = parse_url(&replace_hosts(&connect_data.resource_url));
+    let net_url_result = parse_url(replace_hosts(&connect_data.resource_url).as_url().unwrap());
     let net_url = match net_url_result {
         Ok(net_url) => net_url,
         Err(e) => {