Remove CoreResourceMsg::Load.

Also remove now-dead code that rustc warns about.

It turns out that we lost support for some of our custom URL schemes; I intend
to reimplement them, but I believe this will be significantly easier to do
once the legacy code is out of the way.
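
For context, the Fetch variant that remains on CoreResourceMsg is what callers use in place of the removed Load message. A minimal sketch of that call pattern, assuming a RequestInit built by the caller and a core_resource_thread sender (both names illustrative, not part of this diff):

// Hypothetical fragment: where code previously sent
//     CoreResourceMsg::Load(load_data, consumer, None)
// it now issues a fetch over an IPC channel instead.
let (sender, receiver) = ipc::channel().unwrap();
core_resource_thread
    .send(CoreResourceMsg::Fetch(request_init, sender))
    .unwrap();
// Responses for the request then arrive on `receiver` as FetchResponseMsg values.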
Ms2ger 2016-11-02 15:48:23 +01:00
parent ce24edc2b3
commit fb1279ec3a
12 changed files with 16 additions and 1052 deletions


@@ -16,7 +16,6 @@ content-blocker = "0.2.1"
cookie = {version = "0.2.5", features = ["serialize-rustc"]}
devtools_traits = {path = "../devtools_traits"}
flate2 = "0.2.0"
fnv = "1.0"
hyper = "0.9.9"
hyper_serde = "0.1.4"
immeta = "0.3.1"


@@ -1,68 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use file_loader;
use hyper::header::ContentType;
use hyper::mime::{Mime, SubLevel, TopLevel};
use hyper_serde::Serde;
use mime_classifier::MimeClassifier;
use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
use net_traits::ProgressMsg::Done;
use net_traits::response::HttpsState;
use resource_thread::{CancellationListener, send_error, start_sending_sniffed_opt};
use servo_url::ServoUrl;
use std::io;
use std::sync::Arc;
use url::Url;
use util::resource_files::resources_dir_path;
fn url_from_non_relative_scheme(load_data: &mut LoadData, filename: &str) -> io::Result<()> {
let mut path = try!(resources_dir_path());
path.push(filename);
assert!(path.exists());
load_data.url = ServoUrl::from_url(Url::from_file_path(&*path).unwrap());
Ok(())
}
pub fn factory(mut load_data: LoadData,
start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>,
cancel_listener: CancellationListener) {
let url = load_data.url.clone();
let res = match url.path() {
"blank" => {
let metadata = Metadata {
final_url: load_data.url,
content_type:
Some(Serde(ContentType(Mime(TopLevel::Text, SubLevel::Html, vec![])))),
charset: Some("utf-8".to_owned()),
headers: None,
status: Some((200, b"OK".to_vec())),
https_state: HttpsState::None,
referrer: None,
};
if let Ok(chan) = start_sending_sniffed_opt(start_chan,
metadata,
classifier,
&[],
load_data.context) {
let _ = chan.send(Done(Ok(())));
}
return
}
"crash" => panic!("Loading the about:crash URL."),
"failure" | "not-found" =>
url_from_non_relative_scheme(&mut load_data, &(url.path().to_owned() + ".html")),
"sslfail" => url_from_non_relative_scheme(&mut load_data, "badcert.html"),
_ => {
send_error(load_data.url, NetworkError::Internal("Unknown about: URL.".to_owned()), start_chan);
return
}
};
if res.is_ok() {
file_loader::factory(load_data, start_chan, classifier, cancel_listener)
} else {
send_error(load_data.url, NetworkError::Internal("Could not access resource folder".to_owned()), start_chan);
}
}


@@ -5,120 +5,16 @@
use filemanager_thread::FileManager;
use hyper::header::{Charset, ContentLength, ContentType, Headers};
use hyper::header::{ContentDisposition, DispositionParam, DispositionType};
use hyper_serde::Serde;
use ipc_channel::ipc;
use mime::{Attr, Mime};
use mime_classifier::MimeClassifier;
use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
use net_traits::NetworkError;
use net_traits::ProgressMsg::{Done, Payload};
use net_traits::blob_url_store::parse_blob_url;
use net_traits::filemanager_thread::ReadFileProgress;
use net_traits::response::HttpsState;
use resource_thread::{send_error, start_sending_sniffed_opt};
use resource_thread::CancellationListener;
use servo_url::ServoUrl;
use std::boxed::FnBox;
use std::sync::Arc;
use util::thread::spawn_named;
// TODO: Check on GET
// https://w3c.github.io/FileAPI/#requestResponseModel
pub fn factory(filemanager: FileManager)
-> Box<FnBox(LoadData, LoadConsumer, Arc<MimeClassifier>, CancellationListener) + Send> {
box move |load_data: LoadData, start_chan, classifier, cancel_listener| {
spawn_named(format!("blob loader for {}", load_data.url), move || {
load_blob(load_data, start_chan, classifier, filemanager, cancel_listener);
})
}
}
fn load_blob(load_data: LoadData, start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>,
filemanager: FileManager,
cancel_listener: CancellationListener) {
let (chan, recv) = ipc::channel().unwrap();
if let Ok((id, origin, _fragment)) = parse_blob_url(&load_data.url.clone()) {
let check_url_validity = true;
filemanager.read_file(chan, id, check_url_validity, origin, Some(cancel_listener));
// Receive first chunk
match recv.recv().unwrap() {
Ok(ReadFileProgress::Meta(blob_buf)) => {
let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime!(Text / Plain));
let charset = content_type.get_param(Attr::Charset);
let mut headers = Headers::new();
if let Some(name) = blob_buf.filename {
let charset = charset.and_then(|c| c.as_str().parse().ok());
headers.set(ContentDisposition {
disposition: DispositionType::Inline,
parameters: vec![
DispositionParam::Filename(charset.unwrap_or(Charset::Us_Ascii),
None, name.as_bytes().to_vec())
]
});
}
headers.set(ContentType(content_type.clone()));
headers.set(ContentLength(blob_buf.size as u64));
let metadata = Metadata {
final_url: load_data.url.clone(),
content_type: Some(Serde(ContentType(content_type.clone()))),
charset: charset.map(|c| c.as_str().to_string()),
headers: Some(Serde(headers)),
// https://w3c.github.io/FileAPI/#TwoHundredOK
status: Some((200, b"OK".to_vec())),
https_state: HttpsState::None,
referrer: None,
};
if let Ok(chan) =
start_sending_sniffed_opt(start_chan, metadata, classifier,
&blob_buf.bytes, load_data.context.clone()) {
let _ = chan.send(Payload(blob_buf.bytes));
loop {
match recv.recv().unwrap() {
Ok(ReadFileProgress::Partial(bytes)) => {
let _ = chan.send(Payload(bytes));
}
Ok(ReadFileProgress::EOF) => {
let _ = chan.send(Done(Ok(())));
return;
}
Ok(_) => {
let err = NetworkError::Internal("Invalid filemanager reply".to_string());
let _ = chan.send(Done(Err(err)));
return;
}
Err(e) => {
let err = NetworkError::Internal(format!("{:?}", e));
let _ = chan.send(Done(Err(err)));
return;
}
}
}
}
}
Ok(_) => {
let err = NetworkError::Internal("Invalid filemanager reply".to_string());
send_error(load_data.url, err, start_chan);
}
Err(e) => {
let err = NetworkError::Internal(format!("{:?}", e));
send_error(load_data.url, err, start_chan);
}
}
} else {
let e = format!("Invalid blob URL format {:?}", load_data.url);
let format_err = NetworkError::Internal(e);
send_error(load_data.url.clone(), format_err, start_chan);
}
}
/// https://fetch.spec.whatwg.org/#concept-basic-fetch (partial)
// TODO: make async.
pub fn load_blob_sync


@@ -2,13 +2,8 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use file_loader;
use mime_classifier::MimeClassifier;
use net_traits::{LoadConsumer, LoadData, NetworkError};
use resource_thread::{CancellationListener, send_error};
use servo_url::ServoUrl;
use std::fs::canonicalize;
use std::sync::Arc;
use url::percent_encoding::percent_decode;
use util::resource_files::resources_dir_path;
@@ -34,20 +29,3 @@ pub fn resolve_chrome_url(url: &ServoUrl) -> Result<ServoUrl, ()> {
_ => Err(())
}
}
pub fn factory(mut load_data: LoadData,
start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>,
cancel_listener: CancellationListener) {
let file_url = match resolve_chrome_url(&load_data.url) {
Ok(url) => url,
Err(_) => {
send_error(load_data.url,
NetworkError::Internal("Invalid chrome URL.".to_owned()),
start_chan);
return;
}
};
load_data.url = file_url;
file_loader::factory(load_data, start_chan, classifier, cancel_listener)
}


@@ -3,28 +3,11 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use hyper::mime::{Attr, Mime, SubLevel, TopLevel, Value};
use mime_classifier::MimeClassifier;
use net_traits::{LoadData, Metadata, NetworkError};
use net_traits::LoadConsumer;
use net_traits::ProgressMsg::{Done, Payload};
use resource_thread::{CancellationListener, send_error, start_sending_sniffed_opt};
use rustc_serialize::base64::FromBase64;
use servo_url::ServoUrl;
use std::sync::Arc;
use url::Position;
use url::percent_encoding::percent_decode;
pub fn factory(load_data: LoadData,
senders: LoadConsumer,
classifier: Arc<MimeClassifier>,
cancel_listener: CancellationListener) {
// NB: we don't spawn a new thread.
// Hypothesis: data URLs are too small for parallel base64 etc. to be worth it.
// Should be tested at some point.
// Left in separate function to allow easy moving to a thread, if desired.
load(load_data, senders, classifier, cancel_listener)
}
pub enum DecodeError {
InvalidDataUri,
NonBase64DataUri,
@@ -70,33 +53,3 @@ pub fn decode(url: &ServoUrl) -> Result<DecodeData, DecodeError> {
}
Ok((content_type, bytes))
}
pub fn load(load_data: LoadData,
start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>,
cancel_listener: CancellationListener) {
let url = load_data.url;
if cancel_listener.is_cancelled() {
return;
}
match decode(&url) {
Ok((content_type, bytes)) => {
let mut metadata = Metadata::default(url);
metadata.set_content_type(Some(content_type).as_ref());
if let Ok(chan) = start_sending_sniffed_opt(start_chan,
metadata,
classifier,
&bytes,
load_data.context) {
let _ = chan.send(Payload(bytes));
let _ = chan.send(Done(Ok(())));
}
},
Err(DecodeError::InvalidDataUri) =>
send_error(url, NetworkError::Internal("invalid data uri".to_owned()), start_chan),
Err(DecodeError::NonBase64DataUri) =>
send_error(url, NetworkError::Internal("non-base64 data uri".to_owned()), start_chan),
}
}


@@ -1,139 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use about_loader;
use mime_classifier::MimeClassifier;
use mime_guess::guess_mime_type;
use msg::constellation_msg::PipelineId;
use net_traits::{LoadConsumer, LoadData, LoadOrigin, Metadata, NetworkError, ReferrerPolicy};
use net_traits::ProgressMsg::{Done, Payload};
use resource_thread::{CancellationListener, ProgressSender};
use resource_thread::{send_error, start_sending_sniffed_opt};
use servo_url::ServoUrl;
use std::borrow::ToOwned;
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::sync::Arc;
use util::thread::spawn_named;
static READ_SIZE: usize = 8192;
enum ReadStatus {
Partial(Vec<u8>),
EOF,
}
enum LoadResult {
Cancelled,
Finished,
}
struct FileLoadOrigin;
impl LoadOrigin for FileLoadOrigin {
fn referrer_url(&self) -> Option<ServoUrl> {
None
}
fn referrer_policy(&self) -> Option<ReferrerPolicy> {
None
}
fn pipeline_id(&self) -> Option<PipelineId> {
None
}
}
fn read_block(reader: &mut File) -> Result<ReadStatus, String> {
let mut buf = vec![0; READ_SIZE];
match reader.read(&mut buf) {
Ok(0) => Ok(ReadStatus::EOF),
Ok(n) => {
buf.truncate(n);
Ok(ReadStatus::Partial(buf))
}
Err(e) => Err(e.description().to_owned()),
}
}
fn read_all(reader: &mut File, progress_chan: &ProgressSender, cancel_listener: &CancellationListener)
-> Result<LoadResult, String> {
while !cancel_listener.is_cancelled() {
match try!(read_block(reader)) {
ReadStatus::Partial(buf) => progress_chan.send(Payload(buf)).unwrap(),
ReadStatus::EOF => return Ok(LoadResult::Finished),
}
}
let _ = progress_chan.send(Done(Err(NetworkError::LoadCancelled)));
Ok(LoadResult::Cancelled)
}
fn get_progress_chan(load_data: LoadData, file_path: &Path,
senders: LoadConsumer, classifier: Arc<MimeClassifier>, buf: &[u8])
-> Result<ProgressSender, ()> {
let mut metadata = Metadata::default(load_data.url);
let mime_type = guess_mime_type(file_path);
metadata.set_content_type(Some(&mime_type));
return start_sending_sniffed_opt(senders, metadata, classifier, buf, load_data.context);
}
pub fn factory(load_data: LoadData,
senders: LoadConsumer,
classifier: Arc<MimeClassifier>,
cancel_listener: CancellationListener) {
assert!(load_data.url.scheme() == "file");
spawn_named("file_loader".to_owned(), move || {
let file_path = match load_data.url.to_file_path() {
Ok(file_path) => file_path,
Err(_) => {
send_error(load_data.url, NetworkError::Internal("Could not parse path".to_owned()), senders);
return;
},
};
let mut file = File::open(&file_path);
let reader = match file {
Ok(ref mut reader) => reader,
Err(_) => {
// this should be one of the three errors listed in
// http://doc.rust-lang.org/std/fs/struct.OpenOptions.html#method.open
// but, we'll go for a "file not found!"
let url = ServoUrl::parse("about:not-found").unwrap();
let load_data_404 = LoadData::new(load_data.context, url, &FileLoadOrigin);
about_loader::factory(load_data_404, senders, classifier, cancel_listener);
return;
}
};
if cancel_listener.is_cancelled() {
if let Ok(progress_chan) = get_progress_chan(load_data, &file_path,
senders, classifier, &[]) {
let _ = progress_chan.send(Done(Err(NetworkError::LoadCancelled)));
}
return;
}
match read_block(reader) {
Ok(ReadStatus::Partial(buf)) => {
let progress_chan = get_progress_chan(load_data, &file_path,
senders, classifier, &buf).ok().unwrap();
progress_chan.send(Payload(buf)).unwrap();
let read_result = read_all(reader, &progress_chan, &cancel_listener);
if let Ok(load_result) = read_result {
match load_result {
LoadResult::Cancelled => return,
LoadResult::Finished => progress_chan.send(Done(Ok(()))).unwrap(),
}
}
}
Ok(ReadStatus::EOF) => {
if let Ok(chan) = get_progress_chan(load_data, &file_path,
senders, classifier, &[]) {
let _ = chan.send(Done(Ok(())));
}
}
Err(e) => {
send_error(load_data.url, NetworkError::Internal(e), senders);
}
}
});
}


@@ -4,90 +4,39 @@
use brotli::Decompressor;
use connector::Connector;
use content_blocker_parser::{LoadType, Reaction, Request as CBRequest, ResourceType};
use content_blocker_parser::{RuleList, process_rules_for_request};
use content_blocker_parser::RuleList;
use cookie;
use cookie_storage::CookieStorage;
use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, HttpRequest as DevtoolsHttpRequest};
use devtools_traits::{HttpResponse as DevtoolsHttpResponse, NetworkEvent};
use flate2::read::{DeflateDecoder, GzDecoder};
use fnv::FnvHashSet;
use hsts::{HstsEntry, HstsList, secure_url};
use hsts::HstsList;
use hyper::Error as HttpError;
use hyper::LanguageTag;
use hyper::client::{Pool, Request, Response};
use hyper::header::{Accept, AcceptEncoding, ContentEncoding, ContentLength, ContentType, Host, Referer};
use hyper::header::{AcceptLanguage, Authorization, Basic};
use hyper::header::{Encoding, Header, Headers, Quality, QualityItem};
use hyper::header::{Location, SetCookie, StrictTransportSecurity, UserAgent, qitem};
use hyper::header::{AcceptEncoding, AcceptLanguage, Basic, ContentEncoding, ContentLength};
use hyper::header::{Encoding, Header, Headers, Quality, QualityItem, Referer};
use hyper::header::{SetCookie, qitem};
use hyper::http::RawStatus;
use hyper::method::Method;
use hyper::mime::{Mime, SubLevel, TopLevel};
use hyper::net::Fresh;
use hyper::status::{StatusClass, StatusCode};
use hyper::status::StatusCode;
use hyper_serde::Serde;
use ipc_channel::ipc::{self, IpcSender};
use log;
use mime_classifier::MimeClassifier;
use msg::constellation_msg::PipelineId;
use net_traits::{CookieSource, IncludeSubdomains, LoadConsumer, LoadContext, LoadData};
use net_traits::{CustomResponse, CustomResponseMediator, Metadata, NetworkError, ReferrerPolicy};
use net_traits::{CookieSource, Metadata, ReferrerPolicy};
use net_traits::ProgressMsg::{Done, Payload};
use net_traits::hosts::replace_hosts;
use net_traits::response::HttpsState;
use openssl;
use openssl::ssl::error::{OpensslError, SslError};
use profile_traits::time::{ProfilerCategory, ProfilerChan, TimerMetadata, profile};
use profile_traits::time::{TimerMetadataFrameType, TimerMetadataReflowType};
use resource_thread::{AuthCache, AuthCacheEntry, CancellationListener, send_error, start_sending_sniffed_opt};
use resource_thread::{AuthCache, CancellationListener};
use servo_url::ServoUrl;
use std::borrow::{Cow, ToOwned};
use std::boxed::FnBox;
use std::error::Error;
use std::fmt;
use std::io::{self, Cursor, Read, Write};
use std::io::{self, Read, Write};
use std::ops::Deref;
use std::sync::{Arc, RwLock};
use std::sync::mpsc::Sender;
use time;
use time::Tm;
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
use tinyfiledialogs;
use url::{Position, Origin};
use url::Origin;
use util::thread::spawn_named;
use uuid;
pub fn factory(user_agent: Cow<'static, str>,
http_state: HttpState,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
profiler_chan: ProfilerChan,
swmanager_chan: Option<IpcSender<CustomResponseMediator>>,
connector: Arc<Pool<Connector>>)
-> Box<FnBox(LoadData,
LoadConsumer,
Arc<MimeClassifier>,
CancellationListener) + Send> {
box move |load_data: LoadData, senders, classifier, cancel_listener| {
spawn_named(format!("http_loader for {}", load_data.url), move || {
let metadata = TimerMetadata {
url: load_data.url.as_str().into(),
iframe: TimerMetadataFrameType::RootWindow,
incremental: TimerMetadataReflowType::FirstReflow,
};
profile(ProfilerCategory::NetHTTPRequestResponse, Some(metadata), profiler_chan, || {
load_for_consumer(load_data,
senders,
classifier,
connector,
http_state,
devtools_chan,
swmanager_chan,
cancel_listener,
user_agent)
})
})
}
}
pub enum ReadResult {
Payload(Vec<u8>),
@@ -129,40 +78,6 @@ fn precise_time_ms() -> u64 {
time::precise_time_ns() / (1000 * 1000)
}
fn load_for_consumer(load_data: LoadData,
start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>,
connector: Arc<Pool<Connector>>,
http_state: HttpState,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
swmanager_chan: Option<IpcSender<CustomResponseMediator>>,
cancel_listener: CancellationListener,
user_agent: Cow<'static, str>) {
let factory = NetworkHttpRequestFactory {
connector: connector,
};
let ui_provider = TFDProvider;
match load(&load_data, &ui_provider, &http_state,
devtools_chan, &factory,
user_agent, &cancel_listener, swmanager_chan) {
Err(error) => {
match error.error {
LoadErrorType::ConnectionAborted { .. } => unreachable!(),
LoadErrorType::Ssl { reason } => send_error(error.url.clone(),
NetworkError::SslValidation(error.url, reason),
start_chan),
LoadErrorType::Cancelled => send_error(error.url, NetworkError::LoadCancelled, start_chan),
_ => send_error(error.url, NetworkError::Internal(error.error.description().to_owned()), start_chan)
}
}
Ok(mut load_response) => {
let metadata = load_response.metadata.clone();
send_data(load_data.context, &mut load_response, start_chan, metadata, classifier, &cancel_listener)
}
}
}
pub struct WrappedHttpResponse {
pub response: Response
}
@@ -216,34 +131,6 @@ impl HttpResponse for WrappedHttpResponse {
}
}
pub struct ReadableCustomResponse {
headers: Headers,
raw_status: RawStatus,
body: Cursor<Vec<u8>>
}
pub fn to_readable_response(custom_response: CustomResponse) -> ReadableCustomResponse {
ReadableCustomResponse {
headers: custom_response.headers,
raw_status: custom_response.raw_status,
body: Cursor::new(custom_response.body)
}
}
impl HttpResponse for ReadableCustomResponse {
fn headers(&self) -> &Headers { &self.headers }
fn status(&self) -> StatusCode {
StatusCode::Ok
}
fn status_raw(&self) -> &RawStatus { &self.raw_status }
}
impl Read for ReadableCustomResponse {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.body.read(buf)
}
}
pub trait HttpRequestFactory {
type R: HttpRequest;
@@ -356,15 +243,8 @@ pub enum LoadErrorType {
Cancelled,
Connection { reason: String },
ConnectionAborted { reason: String },
ContentBlocked,
// Preflight fetch inconsistent with main fetch
CorsPreflightFetchInconsistent,
Decoding { reason: String },
InvalidRedirect { reason: String },
MaxRedirects(u32), // u32 indicates number of redirects that occurred
RedirectLoop,
Ssl { reason: String },
UnsupportedScheme { scheme: String },
}
impl fmt::Display for LoadErrorType {
@@ -379,14 +259,8 @@ impl Error for LoadErrorType {
LoadErrorType::Cancelled => "load cancelled",
LoadErrorType::Connection { ref reason } => reason,
LoadErrorType::ConnectionAborted { ref reason } => reason,
LoadErrorType::ContentBlocked => "content blocked",
LoadErrorType::CorsPreflightFetchInconsistent => "preflight fetch inconsistent with main fetch",
LoadErrorType::Decoding { ref reason } => reason,
LoadErrorType::InvalidRedirect { ref reason } => reason,
LoadErrorType::MaxRedirects(_) => "too many redirects",
LoadErrorType::RedirectLoop => "redirect loop",
LoadErrorType::Ssl { ref reason } => reason,
LoadErrorType::UnsupportedScheme { .. } => "unsupported url scheme",
}
}
}
@@ -403,18 +277,6 @@ pub fn set_default_accept_encoding(headers: &mut Headers) {
]));
}
fn set_default_accept(headers: &mut Headers) {
if !headers.has::<Accept>() {
let accept = Accept(vec![
qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])),
qitem(Mime(TopLevel::Application, SubLevel::Ext("xhtml+xml".to_owned()), vec![])),
QualityItem::new(Mime(TopLevel::Application, SubLevel::Xml, vec![]), Quality(900u16)),
QualityItem::new(Mime(TopLevel::Star, SubLevel::Star, vec![]), Quality(800u16)),
]);
headers.set(accept);
}
}
pub fn set_default_accept_language(headers: &mut Headers) {
if headers.has::<AcceptLanguage>() {
return;
@@ -535,33 +397,6 @@ pub fn set_cookies_from_headers(url: &ServoUrl, headers: &Headers, cookie_jar: &
}
}
fn update_sts_list_from_response(url: &ServoUrl, response: &HttpResponse, hsts_list: &Arc<RwLock<HstsList>>) {
if url.scheme() != "https" {
return;
}
if let Some(header) = response.headers().get::<StrictTransportSecurity>() {
if let Some(host) = url.domain() {
let mut hsts_list = hsts_list.write().unwrap();
let include_subdomains = if header.include_subdomains {
IncludeSubdomains::Included
} else {
IncludeSubdomains::NotIncluded
};
if let Some(entry) = HstsEntry::new(host.to_owned(), include_subdomains, Some(header.max_age)) {
info!("adding host {} to the strict transport security list", host);
info!("- max-age {}", header.max_age);
if header.include_subdomains {
info!("- includeSubdomains");
}
hsts_list.push(entry);
}
}
}
}
pub struct StreamedResponse {
decoder: Decoder,
pub metadata: Metadata
@@ -662,63 +497,6 @@ pub fn send_response_to_devtools(devtools_chan: &Sender<DevtoolsControlMsg>,
let _ = devtools_chan.send(DevtoolsControlMsg::FromChrome(msg));
}
fn request_must_be_secured(url: &ServoUrl, hsts_list: &Arc<RwLock<HstsList>>) -> bool {
match url.domain() {
Some(domain) => hsts_list.read().unwrap().is_host_secure(domain),
None => false
}
}
pub fn modify_request_headers(headers: &mut Headers,
url: &ServoUrl,
user_agent: &str,
referrer_policy: Option<ReferrerPolicy>,
referrer_url: &mut Option<ServoUrl>) {
// Ensure that the host header is set from the original url
let host = Host {
hostname: url.host_str().unwrap().to_owned(),
port: url.port_or_known_default()
};
headers.set(host);
// If the user-agent has not already been set, then use the
// browser's default user-agent or the user-agent override
// from the command line. If the user-agent is set, don't
// modify it, as setting of the user-agent by the user is
// allowed.
// https://fetch.spec.whatwg.org/#concept-http-network-or-cache-fetch step 8
if !headers.has::<UserAgent>() {
headers.set(UserAgent(user_agent.to_owned()));
}
set_default_accept(headers);
set_default_accept_language(headers);
set_default_accept_encoding(headers);
*referrer_url = determine_request_referrer(headers,
referrer_policy.clone(),
referrer_url.clone(),
url.clone());
if let Some(referrer_val) = referrer_url.clone() {
headers.set(Referer(referrer_val.into_string()));
}
}
fn set_auth_header(headers: &mut Headers,
url: &ServoUrl,
auth_cache: &Arc<RwLock<AuthCache>>) {
if !headers.has::<Authorization<Basic>>() {
if let Some(auth) = auth_from_url(url) {
headers.set(auth);
} else {
if let Some(basic) = auth_from_cache(auth_cache, &url.origin()) {
headers.set(Authorization(basic));
}
}
}
}
pub fn auth_from_cache(auth_cache: &Arc<RwLock<AuthCache>>, origin: &Origin) -> Option<Basic> {
if let Some(ref auth_entry) = auth_cache.read().unwrap().entries.get(&origin.ascii_serialization()) {
let user_name = auth_entry.user_name.clone();
@@ -729,37 +507,6 @@ pub fn auth_from_cache(auth_cache: &Arc<RwLock<AuthCache>>, origin: &Origin) ->
}
}
fn auth_from_url(doc_url: &ServoUrl) -> Option<Authorization<Basic>> {
let username = doc_url.username();
if username != "" {
Some(Authorization(Basic {
username: username.to_owned(),
password: Some(doc_url.password().unwrap_or("").to_owned())
}))
} else {
None
}
}
pub fn process_response_headers(response: &HttpResponse,
url: &ServoUrl,
cookie_jar: &Arc<RwLock<CookieStorage>>,
hsts_list: &Arc<RwLock<HstsList>>,
load_data: &LoadData) {
info!("got HTTP response {}, headers:", response.status());
if log_enabled!(log::LogLevel::Info) {
for header in response.headers().iter() {
info!(" - {}", header);
}
}
// https://fetch.spec.whatwg.org/#concept-http-network-fetch step 9
if load_data.credentials_flag {
set_cookies_from_headers(url, response.headers(), cookie_jar);
}
update_sts_list_from_response(url, response, hsts_list);
}
pub fn obtain_response<A>(request_factory: &HttpRequestFactory<R=A>,
url: &ServoUrl,
method: &Method,
@@ -871,312 +618,6 @@ pub trait UIProvider {
fn input_username_and_password(&self, prompt: &str) -> (Option<String>, Option<String>);
}
impl UIProvider for TFDProvider {
#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
fn input_username_and_password(&self, prompt: &str) -> (Option<String>, Option<String>) {
(tinyfiledialogs::input_box(prompt, "Username:", ""),
tinyfiledialogs::input_box(prompt, "Password:", ""))
}
#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
fn input_username_and_password(&self, _prompt: &str) -> (Option<String>, Option<String>) {
(None, None)
}
}
struct TFDProvider;
pub fn load<A, B>(load_data: &LoadData,
ui_provider: &B,
http_state: &HttpState,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
request_factory: &HttpRequestFactory<R=A>,
user_agent: Cow<'static, str>,
cancel_listener: &CancellationListener,
swmanager_chan: Option<IpcSender<CustomResponseMediator>>)
-> Result<StreamedResponse, LoadError> where A: HttpRequest + 'static, B: UIProvider {
let mut iters = 0;
// URL of the document being loaded, as seen by all the higher-level code.
let mut doc_url = load_data.url.clone();
let mut redirected_to = FnvHashSet::with_hasher(Default::default());
let mut method = load_data.method.clone();
// URL of referrer - to be updated with redirects
let mut referrer_url = load_data.referrer_url.clone();
let mut new_auth_header: Option<Authorization<Basic>> = None;
if cancel_listener.is_cancelled() {
return Err(LoadError::new(doc_url, LoadErrorType::Cancelled));
}
let (msg_sender, msg_receiver) = ipc::channel().unwrap();
let response_mediator = CustomResponseMediator {
response_chan: msg_sender,
load_url: doc_url.clone()
};
if let Some(sender) = swmanager_chan {
let _ = sender.send(response_mediator);
if let Ok(Some(custom_response)) = msg_receiver.recv() {
let metadata = Metadata::default(doc_url.clone());
let readable_response = to_readable_response(custom_response);
return StreamedResponse::from_http_response(box readable_response, metadata);
}
} else {
debug!("Did not receive a custom response");
}
// If the URL is a view-source scheme then the scheme data contains the
// real URL that should be used for which the source is to be viewed.
// Change our existing URL to that and keep note that we are viewing
// the source rather than rendering the contents of the URL.
let viewing_source = doc_url.scheme() == "view-source";
if viewing_source {
doc_url = ServoUrl::parse(&load_data.url[Position::BeforeUsername..]).unwrap();
}
// Loop to handle redirects.
loop {
iters = iters + 1;
if doc_url.scheme() == "http" && request_must_be_secured(&doc_url, &http_state.hsts_list) {
info!("{} is in the strict transport security list, requesting secure host", doc_url);
doc_url = ServoUrl::from_url(secure_url(&doc_url.as_url().unwrap()));
}
if iters > 20 {
return Err(LoadError::new(doc_url, LoadErrorType::MaxRedirects(iters - 1)));
}
if !matches!(doc_url.scheme(), "http" | "https") {
let scheme = doc_url.scheme().to_owned();
return Err(LoadError::new(doc_url, LoadErrorType::UnsupportedScheme { scheme: scheme }));
}
if cancel_listener.is_cancelled() {
return Err(LoadError::new(doc_url, LoadErrorType::Cancelled));
}
let mut block_cookies = false;
if let Some(ref rules) = *http_state.blocked_content {
let same_origin =
load_data.referrer_url.as_ref()
.map(|url| url.origin() == doc_url.origin())
.unwrap_or(false);
let load_type = if same_origin { LoadType::FirstParty } else { LoadType::ThirdParty };
let actions = process_rules_for_request(rules, &CBRequest {
url: doc_url.as_url().unwrap(),
resource_type: to_resource_type(&load_data.context),
load_type: load_type,
});
for action in actions {
match action {
Reaction::Block => {
return Err(LoadError::new(doc_url, LoadErrorType::ContentBlocked));
},
Reaction::BlockCookies => block_cookies = true,
Reaction::HideMatchingElements(_) => (),
}
}
}
info!("requesting {}", doc_url);
// Avoid automatically preserving request headers when redirects occur.
// See https://bugzilla.mozilla.org/show_bug.cgi?id=401564 and
// https://bugzilla.mozilla.org/show_bug.cgi?id=216828 .
// Only preserve ones which have been explicitly marked as such.
let mut request_headers = if iters == 1 {
let mut combined_headers = load_data.headers.clone();
combined_headers.extend(load_data.preserved_headers.iter());
combined_headers
} else {
load_data.preserved_headers.clone()
};
let request_id = devtools_chan.as_ref().map(|_| {
uuid::Uuid::new_v4().simple().to_string()
});
modify_request_headers(&mut request_headers, &doc_url,
&user_agent, load_data.referrer_policy,
&mut referrer_url);
// https://fetch.spec.whatwg.org/#concept-http-network-or-cache-fetch step 11
if load_data.credentials_flag {
if !block_cookies {
set_request_cookies(&doc_url, &mut request_headers, &http_state.cookie_jar);
}
// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch step 12
set_auth_header(&mut request_headers, &doc_url, &http_state.auth_cache);
}
//if there is a new auth header then set the request headers with it
if let Some(ref auth_header) = new_auth_header {
request_headers.set(auth_header.clone());
}
let (response, msg) =
try!(obtain_response(request_factory, &doc_url, &method, &request_headers,
&cancel_listener, &load_data.data, &load_data.method,
&load_data.pipeline_id, iters,
request_id.as_ref().map(Deref::deref), false));
process_response_headers(&response, &doc_url, &http_state.cookie_jar, &http_state.hsts_list, &load_data);
//if response status is unauthorized then prompt user for username and password
if response.status() == StatusCode::Unauthorized &&
response.headers().get_raw("WWW-Authenticate").is_some() {
let (username_option, password_option) =
ui_provider.input_username_and_password(doc_url.as_str());
match username_option {
Some(name) => {
new_auth_header = Some(Authorization(Basic { username: name, password: password_option }));
continue;
},
None => {},
}
}
new_auth_header = None;
if let Some(auth_header) = request_headers.get::<Authorization<Basic>>() {
if response.status().class() == StatusClass::Success ||
response.status().class() == StatusClass::Redirection {
let auth_entry = AuthCacheEntry {
user_name: auth_header.username.to_owned(),
password: auth_header.password.to_owned().unwrap(),
};
let serialized_origin = doc_url.origin().ascii_serialization();
http_state.auth_cache.write().unwrap().entries.insert(serialized_origin, auth_entry);
}
}
// --- Loop if there's a redirect
if response.status().class() == StatusClass::Redirection {
if let Some(&Location(ref new_url)) = response.headers().get::<Location>() {
// CORS (https://fetch.spec.whatwg.org/#http-fetch, status section, point 9, 10)
if let Some(ref c) = load_data.cors {
if c.preflight {
return Err(LoadError::new(doc_url, LoadErrorType::CorsPreflightFetchInconsistent));
} else {
// XXXManishearth There are some CORS-related steps here,
// but they don't seem necessary until credentials are implemented
}
}
let new_doc_url = match doc_url.join(&new_url) {
Ok(u) => u,
Err(e) => return Err(
LoadError::new(doc_url, LoadErrorType::InvalidRedirect { reason: e.to_string() })),
};
// According to https://tools.ietf.org/html/rfc7231#section-6.4.2,
// historically UAs have rewritten POST->GET on 301 and 302 responses.
if method == Method::Post &&
(response.status() == StatusCode::MovedPermanently ||
response.status() == StatusCode::Found) {
method = Method::Get;
}
if redirected_to.contains(&new_doc_url) {
return Err(LoadError::new(doc_url, LoadErrorType::RedirectLoop));
}
info!("redirecting to {}", new_doc_url);
doc_url = new_doc_url;
redirected_to.insert(doc_url.clone());
}
}
// Only notify the devtools about the final request that received a response.
if let Some(m) = msg {
send_request_to_devtools(m, devtools_chan.as_ref().unwrap());
}
let mut adjusted_headers = response.headers().clone();
if viewing_source {
adjusted_headers.set(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec![])));
}
let mut metadata: Metadata = Metadata::default(doc_url.clone());
metadata.set_content_type(match adjusted_headers.get() {
Some(&ContentType(ref mime)) => Some(mime),
None => None
});
metadata.headers = Some(Serde(adjusted_headers));
metadata.status = Some((response.status_raw().0,
response.status_raw().1.as_bytes().to_vec()));
metadata.https_state = if doc_url.scheme() == "https" {
HttpsState::Modern
} else {
HttpsState::None
};
metadata.referrer = referrer_url.clone();
// --- Tell devtools that we got a response
// Send an HttpResponse message to devtools with the corresponding request_id
// TODO: Send this message even when the load fails?
if let Some(pipeline_id) = load_data.pipeline_id {
if let Some(ref chan) = devtools_chan {
send_response_to_devtools(
&chan, request_id.unwrap(),
metadata.headers.clone().map(Serde::into_inner),
metadata.status.clone(),
pipeline_id);
}
}
if response.status().class() == StatusClass::Redirection {
continue;
} else {
return StreamedResponse::from_http_response(box response, metadata);
}
}
}
fn send_data<R: Read>(context: LoadContext,
reader: &mut R,
start_chan: LoadConsumer,
metadata: Metadata,
classifier: Arc<MimeClassifier>,
cancel_listener: &CancellationListener) {
let (progress_chan, mut chunk) = {
let buf = match read_block(reader) {
Ok(ReadResult::Payload(buf)) => buf,
_ => vec!(),
};
let p = match start_sending_sniffed_opt(start_chan, metadata, classifier, &buf, context) {
Ok(p) => p,
_ => return
};
(p, buf)
};
loop {
if cancel_listener.is_cancelled() {
let _ = progress_chan.send(Done(Err(NetworkError::LoadCancelled)));
return;
}
if progress_chan.send(Payload(chunk)).is_err() {
// The send errors when the receiver is out of scope,
// which will happen if the fetch has timed out (or has been aborted)
// so we don't need to continue with the loading of the file here.
return;
}
chunk = match read_block(reader) {
Ok(ReadResult::Payload(buf)) => buf,
Ok(ReadResult::EOF) | Err(_) => break,
};
}
let _ = progress_chan.send(Done(Ok(())));
}
// FIXME: This incredibly hacky. Make it more robust, and at least test it.
fn is_cert_verify_error(error: &OpensslError) -> bool {
match error {
@@ -1205,17 +646,3 @@ fn format_ssl_error(error: &OpensslError) -> String {
}
}
}
fn to_resource_type(context: &LoadContext) -> ResourceType {
match *context {
LoadContext::Browsing => ResourceType::Document,
LoadContext::Image => ResourceType::Image,
LoadContext::AudioVideo => ResourceType::Media,
LoadContext::Plugin => ResourceType::Raw,
LoadContext::Style => ResourceType::StyleSheet,
LoadContext::Script => ResourceType::Script,
LoadContext::Font => ResourceType::Font,
LoadContext::TextTrack => ResourceType::Media,
LoadContext::CacheManifest => ResourceType::Raw,
}
}


@@ -3,7 +3,6 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(box_syntax)]
#![feature(fnbox)]
#![feature(mpsc_select)]
#![feature(plugin)]
#![feature(proc_macro)]
@@ -18,7 +17,6 @@ extern crate content_blocker as content_blocker_parser;
extern crate cookie as cookie_rs;
extern crate devtools_traits;
extern crate flate2;
extern crate fnv;
extern crate hyper;
extern crate hyper_serde;
extern crate immeta;
@@ -49,7 +47,6 @@ extern crate uuid;
extern crate webrender_traits;
extern crate websocket;
mod about_loader;
mod blob_loader;
mod chrome_loader;
mod connector;
@@ -57,7 +54,6 @@ mod content_blocker;
pub mod cookie;
pub mod cookie_storage;
mod data_loader;
mod file_loader;
pub mod filemanager_thread;
pub mod hsts;
mod http_loader;


@@ -3,21 +3,16 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A thread that takes a URL and streams back the binary data.
use about_loader;
use blob_loader;
use chrome_loader;
use connector::{Connector, create_http_connector};
use content_blocker::BLOCKED_CONTENT_RULES;
use cookie;
use cookie_rs;
use cookie_storage::CookieStorage;
use data_loader;
use devtools_traits::DevtoolsControlMsg;
use fetch::methods::{FetchContext, fetch};
use file_loader;
use filemanager_thread::{FileManager, TFDProvider};
use hsts::HstsList;
use http_loader::{self, HttpState};
use http_loader::HttpState;
use hyper::client::pool::Pool;
use hyper::header::{ContentType, Header, SetCookie};
use hyper::mime::{Mime, SubLevel, TopLevel};
@@ -26,7 +21,7 @@ use ipc_channel::ipc::{self, IpcReceiver, IpcReceiverSet, IpcSender};
use mime_classifier::{ApacheBugFlag, MimeClassifier, NoSniffFlag};
use net_traits::{CookieSource, CoreResourceThread, Metadata, ProgressMsg};
use net_traits::{CoreResourceMsg, FetchResponseMsg, FetchTaskTarget, LoadConsumer};
use net_traits::{CustomResponseMediator, LoadData, LoadResponse, NetworkError, ResourceId};
use net_traits::{CustomResponseMediator, LoadResponse, NetworkError, ResourceId};
use net_traits::{ResourceThreads, WebSocketCommunicate, WebSocketConnectData};
use net_traits::LoadContext;
use net_traits::ProgressMsg::Done;
@@ -37,7 +32,6 @@ use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::json;
use servo_url::ServoUrl;
use std::borrow::{Cow, ToOwned};
use std::boxed::FnBox;
use std::cell::Cell;
use std::collections::HashMap;
use std::error::Error;
@@ -47,7 +41,7 @@ use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::{Arc, RwLock};
use std::sync::mpsc::{Receiver, Sender, channel};
use std::sync::mpsc::{Receiver, Sender};
use storage_thread::StorageThreadFactory;
use util::prefs::PREFS;
use util::thread::spawn_named;
@@ -252,10 +246,8 @@ impl ResourceChannelManager {
fn process_msg(&mut self,
msg: CoreResourceMsg,
group: &ResourceGroup,
control_sender: &CoreResourceThread) -> bool {
_control_sender: &CoreResourceThread) -> bool {
match msg {
CoreResourceMsg::Load(load_data, consumer, id_sender) =>
self.resource_manager.load(load_data, consumer, id_sender, control_sender.clone(), group),
CoreResourceMsg::Fetch(init, sender) =>
self.resource_manager.fetch(init, sender, group),
CoreResourceMsg::WebsocketConnect(connect, connect_data) =>
@@ -451,28 +443,22 @@ pub struct AuthCache {
pub struct CoreResourceManager {
user_agent: Cow<'static, str>,
mime_classifier: Arc<MimeClassifier>,
devtools_chan: Option<Sender<DevtoolsControlMsg>>,
swmanager_chan: Option<IpcSender<CustomResponseMediator>>,
profiler_chan: ProfilerChan,
filemanager: FileManager,
cancel_load_map: HashMap<ResourceId, Sender<()>>,
next_resource_id: ResourceId,
}
impl CoreResourceManager {
pub fn new(user_agent: Cow<'static, str>,
devtools_channel: Option<Sender<DevtoolsControlMsg>>,
profiler_chan: ProfilerChan) -> CoreResourceManager {
_profiler_chan: ProfilerChan) -> CoreResourceManager {
CoreResourceManager {
user_agent: user_agent,
mime_classifier: Arc::new(MimeClassifier::new()),
devtools_chan: devtools_channel,
swmanager_chan: None,
profiler_chan: profiler_chan,
filemanager: FileManager::new(),
cancel_load_map: HashMap::new(),
next_resource_id: ResourceId(0),
}
}
@@ -500,66 +486,6 @@ impl CoreResourceManager {
}
}
fn load(&mut self,
load_data: LoadData,
consumer: LoadConsumer,
id_sender: Option<IpcSender<ResourceId>>,
resource_thread: CoreResourceThread,
resource_grp: &ResourceGroup) {
fn from_factory(factory: fn(LoadData, LoadConsumer, Arc<MimeClassifier>, CancellationListener))
-> Box<FnBox(LoadData,
LoadConsumer,
Arc<MimeClassifier>,
CancellationListener) + Send> {
box move |load_data, senders, classifier, cancel_listener| {
factory(load_data, senders, classifier, cancel_listener)
}
}
let cancel_resource = id_sender.map(|sender| {
let current_res_id = self.next_resource_id;
let _ = sender.send(current_res_id);
let (cancel_sender, cancel_receiver) = channel();
self.cancel_load_map.insert(current_res_id, cancel_sender);
self.next_resource_id.0 += 1;
CancellableResource::new(cancel_receiver, current_res_id, resource_thread)
});
let cancel_listener = CancellationListener::new(cancel_resource);
let loader = match load_data.url.scheme() {
"chrome" => from_factory(chrome_loader::factory),
"file" => from_factory(file_loader::factory),
"http" | "https" | "view-source" => {
let http_state = HttpState {
blocked_content: BLOCKED_CONTENT_RULES.clone(),
hsts_list: resource_grp.hsts_list.clone(),
cookie_jar: resource_grp.cookie_jar.clone(),
auth_cache: resource_grp.auth_cache.clone()
};
http_loader::factory(self.user_agent.clone(),
http_state,
self.devtools_chan.clone(),
self.profiler_chan.clone(),
self.swmanager_chan.clone(),
resource_grp.connector.clone())
},
"data" => from_factory(data_loader::factory),
"about" => from_factory(about_loader::factory),
"blob" => blob_loader::factory(self.filemanager.clone()),
_ => {
debug!("resource_thread: no loader for scheme {}", load_data.url.scheme());
send_error(load_data.url, NetworkError::Internal("no loader for scheme".to_owned()), consumer);
return
}
};
debug!("loading url: {}", load_data.url);
loader.call_box((load_data,
consumer,
self.mime_classifier.clone(),
cancel_listener));
}
fn fetch(&self,
init: RequestInit,
sender: IpcSender<FetchResponseMsg>,


@@ -414,8 +414,6 @@ pub struct WebSocketConnectData {
#[derive(Deserialize, Serialize)]
pub enum CoreResourceMsg {
/// Request the data associated with a particular URL
Load(LoadData, LoadConsumer, Option<IpcSender<ResourceId>>),
Fetch(RequestInit, IpcSender<FetchResponseMsg>),
/// Try to make a websocket connection to a URL.
WebsocketConnect(WebSocketCommunicate, WebSocketConnectData),

ports/cef/Cargo.lock (generated)

@@ -1471,7 +1471,6 @@ dependencies = [
"cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"devtools_traits 0.0.1",
"flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper_serde 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"immeta 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",


@@ -1558,7 +1558,6 @@ dependencies = [
"cookie 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"devtools_traits 0.0.1",
"flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper_serde 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"immeta 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",