Integration and improvements of File API backends

1. More complete origin check in FileManagerThreadMsg
2. Add reference counting logic to file manager store and script API (a rough sketch of the idea appears just before the diff)
3. Integrate support for slicing (see the slicing sketch after the diff)
Zhen Zhang 2016-06-18 18:14:40 +08:00
parent 212aa4437e
commit 14d68968ed
14 changed files with 543 additions and 244 deletions
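
The diff shown below covers only the blob loader. As a rough illustration of the reference-counting idea in point 2, here is a minimal Rust sketch; the names (FileStore, StoreEntry, add_ref, release) and the String keys are hypothetical stand-ins, not the types this commit adds to the file manager store (which, as the diff suggests, keys blob entries by Uuid).

use std::collections::HashMap;

// Hypothetical entry in the file manager store: the stored bytes plus a
// count of outstanding references (blob URLs, Blob objects in script).
struct StoreEntry {
    bytes: Vec<u8>,
    refs: usize,
}

#[derive(Default)]
struct FileStore {
    // Plain String keys keep the sketch dependency-free.
    entries: HashMap<String, StoreEntry>,
}

impl FileStore {
    // Register a new blob with a single reference.
    fn add(&mut self, id: &str, bytes: Vec<u8>) {
        self.entries.insert(id.to_owned(), StoreEntry { bytes, refs: 1 });
    }

    // Script created another reference (e.g. a new blob URL for the same blob).
    fn add_ref(&mut self, id: &str) {
        if let Some(entry) = self.entries.get_mut(id) {
            entry.refs += 1;
        }
    }

    // A reference was dropped; evict the entry once the count hits zero so
    // unreferenced blob bytes do not linger in the store.
    fn release(&mut self, id: &str) {
        let gone = match self.entries.get_mut(id) {
            Some(entry) => {
                entry.refs -= 1;
                entry.refs == 0
            }
            None => false,
        };
        if gone {
            self.entries.remove(id);
        }
    }
}

fn main() {
    let mut store = FileStore::default();
    store.add("blob-1", b"payload".to_vec());
    assert_eq!(store.entries["blob-1"].bytes.len(), 7);
    store.add_ref("blob-1");  // second URL minted for the same blob
    store.release("blob-1");  // first URL revoked; entry survives
    store.release("blob-1");  // last reference gone; entry is removed
    assert!(store.entries.is_empty());
}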


@@ -2,60 +2,32 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-use filemanager_thread::BlobURLStore;
 use hyper::header::{DispositionType, ContentDisposition, DispositionParam};
 use hyper::header::{Headers, ContentType, ContentLength, Charset};
 use hyper::http::RawStatus;
 use mime::{Mime, Attr};
 use mime_classifier::MimeClassifier;
 use net_traits::ProgressMsg::Done;
-use net_traits::blob_url_store::{parse_blob_url, BlobURLStoreEntry, BlobURLStoreError};
+use net_traits::blob_url_store::BlobURLStoreEntry;
+use net_traits::filemanager_thread::RelativePos;
 use net_traits::response::HttpsState;
-use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
-use resource_thread::{send_error, start_sending_sniffed_opt};
+use net_traits::{LoadConsumer, LoadData, Metadata};
+use resource_thread::start_sending_sniffed_opt;
+use std::ops::Index;
 use std::str;
-use std::sync::{Arc, RwLock};
+use std::sync::Arc;
 // TODO: Check on GET
 // https://w3c.github.io/FileAPI/#requestResponseModel
-pub fn load(load_data: LoadData, consumer: LoadConsumer,
-            blob_url_store: Arc<RwLock<BlobURLStore>>,
-            classifier: Arc<MimeClassifier>) { // XXX: Move it into net process later
-    match parse_blob_url(&load_data.url) {
-        None => {
-            let format_err = NetworkError::Internal(format!("Invalid blob URL format {:?}", load_data.url));
-            send_error(load_data.url.clone(), format_err, consumer);
-        }
-        Some((uuid, _fragment)) => {
-            match blob_url_store.read().unwrap().request(uuid, &load_data.url.origin()) {
-                Ok(entry) => load_blob(&load_data, consumer, classifier, entry),
-                Err(e) => {
-                    let err = match e {
-                        BlobURLStoreError::InvalidKey =>
-                            format!("Invalid blob URL key {:?}", uuid.simple().to_string()),
-                        BlobURLStoreError::InvalidOrigin =>
-                            format!("Invalid blob URL origin {:?}", load_data.url.origin()),
-                    };
-                    send_error(load_data.url.clone(), NetworkError::Internal(err), consumer);
-                }
-            }
-        }
-    }
-}
-fn load_blob(load_data: &LoadData,
-             start_chan: LoadConsumer,
-             classifier: Arc<MimeClassifier>,
-             entry: &BlobURLStoreEntry) {
+pub fn load_blob(load_data: &LoadData, start_chan: LoadConsumer,
+                 classifier: Arc<MimeClassifier>, opt_filename: Option<String>,
+                 rel_pos: &RelativePos, entry: &BlobURLStoreEntry) {
     let content_type: Mime = entry.type_string.parse().unwrap_or(mime!(Text / Plain));
     let charset = content_type.get_param(Attr::Charset);
     let mut headers = Headers::new();
-    if let Some(ref name) = entry.filename {
+    if let Some(name) = opt_filename {
        let charset = charset.and_then(|c| c.as_str().parse().ok());
        headers.set(ContentDisposition {
            disposition: DispositionType::Inline,
@@ -66,8 +38,10 @@ fn load_blob(load_data: &LoadData,
        });
    }
+    let range = rel_pos.to_abs_range(entry.size as usize);
    headers.set(ContentType(content_type.clone()));
-    headers.set(ContentLength(entry.size));
+    headers.set(ContentLength(range.len() as u64));
    let metadata = Metadata {
        final_url: load_data.url.clone(),
@@ -81,7 +55,7 @@ fn load_blob(load_data: &LoadData,
    if let Ok(chan) =
        start_sending_sniffed_opt(start_chan, metadata, classifier,
-                                  &entry.bytes, load_data.context.clone()) {
+                                  &entry.bytes.index(range), load_data.context.clone()) {
        let _ = chan.send(Done(Ok(())));
    }
 }
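
For point 3, the new load_blob above serves only rel_pos.to_abs_range(entry.size) of the blob's bytes and advertises that slice's length in ContentLength. The sketch below shows the intended semantics of that resolution step, assuming RelativePos carries an optional, possibly negative start/end pair in the style of Blob.slice(); the actual fields and implementation of net_traits::filemanager_thread::RelativePos are not part of this diff.

use std::cmp::{max, min};
use std::ops::Range;

// Assumed shape of a relative slice: start/end may be negative (counted
// from the end of the blob) or absent, following Blob.slice() semantics.
struct RelativePos {
    start: i64,
    end: Option<i64>,
}

impl RelativePos {
    // Resolve the relative positions against a blob of `size` bytes,
    // clamping to [0, size] as the File API requires.
    fn to_abs_range(&self, size: usize) -> Range<usize> {
        let size = size as i64;
        let resolve = |offset: i64| -> i64 {
            let abs = if offset < 0 { size + offset } else { offset };
            min(max(abs, 0), size)
        };
        let start = resolve(self.start) as usize;
        let end = resolve(self.end.unwrap_or(size)) as usize;
        start..max(start, end)
    }
}

fn main() {
    let bytes = b"hello blob world".to_vec();
    // Equivalent of blob.slice(-10): keep the last 10 bytes.
    let rel_pos = RelativePos { start: -10, end: None };
    let range = rel_pos.to_abs_range(bytes.len());
    // load_blob sets ContentLength(range.len() as u64) and sends
    // &entry.bytes.index(range), i.e. only the sliced view of the bytes.
    assert_eq!(&bytes[range], &b"blob world"[..]);
}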