Mirror of https://github.com/servo/servo.git, synced 2025-08-04 05:00:08 +01:00
Auto merge of #11875 - izgzhen:file-manager-backend, r=Manishearth
Integration and improvements of File API backends

Basically three major changes:
1. More complete origin check in `FileManagerThreadMsg`
2. Add reference counting logic to the file manager store and script API
3. Integrate support for slicing

r? @Manishearth

- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors
- [ ] These changes fix #__ (github issue number if applicable).
- [ ] There are tests for these changes OR
- [ ] These changes do not require tests because _____
commit 36974f0746
14 changed files with 543 additions and 244 deletions
@@ -2,60 +2,32 @@
-use filemanager_thread::BlobURLStore;
-use net_traits::blob_url_store::{parse_blob_url, BlobURLStoreEntry, BlobURLStoreError};
+use net_traits::blob_url_store::BlobURLStoreEntry;
+use net_traits::filemanager_thread::RelativePos;
-use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
+use net_traits::{LoadConsumer, LoadData, Metadata};
-use resource_thread::{send_error, start_sending_sniffed_opt};
+use resource_thread::start_sending_sniffed_opt;
-use std::str;
+use std::ops::Index;
-use std::sync::{Arc, RwLock};
+use std::sync::Arc;

 // TODO: Check on GET
 // https://w3c.github.io/FileAPI/#requestResponseModel
-pub fn load(load_data: LoadData, consumer: LoadConsumer,
-            blob_url_store: Arc<RwLock<BlobURLStore>>,
-            classifier: Arc<MimeClassifier>) { // XXX: Move it into net process later
-    match parse_blob_url(&load_data.url) {
-        None => {
-            let format_err = NetworkError::Internal(format!("Invalid blob URL format {:?}", load_data.url));
-            send_error(load_data.url.clone(), format_err, consumer);
-        }
-        Some((uuid, _fragment)) => {
-            match blob_url_store.read().unwrap().request(uuid, &load_data.url.origin()) {
-                Ok(entry) => load_blob(&load_data, consumer, classifier, entry),
-                Err(e) => {
-                    let err = match e {
-                        BlobURLStoreError::InvalidKey =>
-                            format!("Invalid blob URL key {:?}", uuid.simple().to_string()),
-                        BlobURLStoreError::InvalidOrigin =>
-                            format!("Invalid blob URL origin {:?}", load_data.url.origin()),
-                    };
-                    send_error(load_data.url.clone(), NetworkError::Internal(err), consumer);
-                }
-            }
-        }
-    }
-}
-
-fn load_blob(load_data: &LoadData,
-             start_chan: LoadConsumer,
-             classifier: Arc<MimeClassifier>,
-             entry: &BlobURLStoreEntry) {
+pub fn load_blob(load_data: &LoadData, start_chan: LoadConsumer,
+                 classifier: Arc<MimeClassifier>, opt_filename: Option<String>,
+                 rel_pos: &RelativePos, entry: &BlobURLStoreEntry) {
     let content_type: Mime = entry.type_string.parse().unwrap_or(mime!(Text / Plain));
     let charset = content_type.get_param(Attr::Charset);
     let mut headers = Headers::new();

-    if let Some(ref name) = entry.filename {
+    if let Some(name) = opt_filename {
         let charset = charset.and_then(|c| c.as_str().parse().ok());
         headers.set(ContentDisposition {
             disposition: DispositionType::Inline,

@@ -66,8 +38,10 @@ fn load_blob(load_data: &LoadData,
         });
     }

+    let range = rel_pos.to_abs_range(entry.size as usize);
+
     headers.set(ContentType(content_type.clone()));
-    headers.set(ContentLength(entry.size));
+    headers.set(ContentLength(range.len() as u64));

@@ -81,7 +55,7 @@ fn load_blob(load_data: &LoadData,
     if let Ok(chan) =
         start_sending_sniffed_opt(start_chan, metadata, classifier,
-                                  &entry.bytes, load_data.context.clone()) {
+                                  &entry.bytes.index(range), load_data.context.clone()) {
         let _ = chan.send(Done(Ok(())));
     }
 }
@@ -2,21 +2,24 @@
-use blob_loader;
+use blob_loader::load_blob;
-use net_traits::blob_url_store::{BlobURLStoreEntry, BlobURLStoreError, BlobURLStoreMsg};
+use net_traits::blob_url_store::{BlobURLStoreEntry, BlobURLStoreError, parse_blob_url};
-use net_traits::filemanager_thread::{FileManagerThreadMsg, FileManagerResult, FilterPattern};
+use net_traits::filemanager_thread::{FileManagerThreadMsg, FileManagerResult, FilterPattern, FileOrigin};
-use net_traits::filemanager_thread::{SelectedFile, FileManagerThreadError, SelectedFileId};
+use net_traits::filemanager_thread::{SelectedFile, RelativePos, FileManagerThreadError, SelectedFileId};
+use net_traits::{LoadConsumer, LoadData, NetworkError};
+use resource_thread::send_error;
+use std::cell::Cell;
-use std::sync::{Arc, RwLock};
+use std::sync::Arc;
-use url::{Url, Origin};
+use url::Url;

@@ -87,11 +90,26 @@ impl<UI: 'static + UIProvider> FileManagerThreadFactory<UI> for IpcSender<FileMa
+struct FileStoreEntry {
+    /// Origin of the entry's "creator"
+    origin: FileOrigin,
+    /// Backend implementation
+    file_impl: FileImpl,
+    /// Reference counting
+    refs: Cell<usize>,
+}
+
+/// File backend implementation
+enum FileImpl {
+    PathOnly(PathBuf),
+    Memory(BlobURLStoreEntry),
+    Sliced(Uuid, RelativePos),
+}
+
 struct FileManager<UI: 'static + UIProvider> {
     receiver: IpcReceiver<FileManagerThreadMsg>,
-    idmap: HashMap<Uuid, PathBuf>,
+    store: HashMap<Uuid, FileStoreEntry>,
     classifier: Arc<MimeClassifier>,
-    blob_url_store: Arc<RwLock<BlobURLStore>>,
     ui: &'static UI,
 }

@@ -99,10 +117,9 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
     FileManager {
         receiver: recv,
-        idmap: HashMap::new(),
+        store: HashMap::new(),
         classifier: Arc::new(MimeClassifier::new()),
-        blob_url_store: Arc::new(RwLock::new(BlobURLStore::new())),
-        ui: ui
+        ui: ui,
     }

@@ -110,33 +127,97 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
-            FileManagerThreadMsg::SelectFile(filter, sender) => self.select_file(filter, sender),
+            FileManagerThreadMsg::SelectFile(filter, sender, origin) => self.select_file(filter, sender, origin),
-            FileManagerThreadMsg::SelectFiles(filter, sender) => self.select_files(filter, sender),
+            FileManagerThreadMsg::SelectFiles(filter, sender, origin) => self.select_files(filter, sender, origin),
-            FileManagerThreadMsg::ReadFile(sender, id) => {
-                match self.try_read_file(id) {
+            FileManagerThreadMsg::ReadFile(sender, id, origin) => {
+                match self.try_read_file(id, origin) {
                     Ok(buffer) => { let _ = sender.send(Ok(buffer)); }
                     Err(_) => { let _ = sender.send(Err(FileManagerThreadError::ReadFileError)); }
                 }
             }
-            FileManagerThreadMsg::DeleteFileID(id) => self.delete_fileid(id),
-            FileManagerThreadMsg::BlobURLStoreMsg(msg) => self.blob_url_store.write().unwrap().process(msg),
+            FileManagerThreadMsg::TransferMemory(entry, rel_pos, sender, origin) =>
+                self.transfer_memory(entry, rel_pos, sender, origin),
+            FileManagerThreadMsg::AddSlicedEntry(id, rel_pos, sender, origin) =>
+                self.add_sliced_entry(id, rel_pos, sender, origin),
             FileManagerThreadMsg::LoadBlob(load_data, consumer) => {
-                blob_loader::load(load_data, consumer,
-                                  self.blob_url_store.clone(),
-                                  self.classifier.clone());
+                match parse_blob_url(&load_data.url) {
+                    None => {
+                        let e = format!("Invalid blob URL format {:?}", load_data.url);
+                        let format_err = NetworkError::Internal(e);
+                        send_error(load_data.url.clone(), format_err, consumer);
+                    }
+                    Some((id, _fragment)) => {
+                        self.process_request(&load_data, consumer, &RelativePos::full_range(), &id);
+                    }
+                }
             },
+            FileManagerThreadMsg::DecRef(id, origin) => {
+                if let Ok(id) = Uuid::parse_str(&id.0) {
+                    self.dec_ref(id, origin);
+                }
+            }
+            FileManagerThreadMsg::IncRef(id, origin) => {
+                if let Ok(id) = Uuid::parse_str(&id.0) {
+                    self.inc_ref(id, origin);
+                }
+            }
             FileManagerThreadMsg::Exit => break,

+    fn inc_ref(&mut self, id: Uuid, origin_in: FileOrigin) {
+        match self.store.get(&id) {
+            Some(entry) => {
+                if entry.origin == origin_in {
+                    entry.refs.set(entry.refs.get() + 1);
+                }
+            }
+            None => return, // Invalid UUID
+        }
+    }
+
+    fn add_sliced_entry(&mut self, id: SelectedFileId, rel_pos: RelativePos,
+                        sender: IpcSender<Result<SelectedFileId, BlobURLStoreError>>,
+                        origin_in: FileOrigin) {
+        if let Ok(id) = Uuid::parse_str(&id.0) {
+            match self.store.get(&id) {
+                Some(entry) => {
+                    if entry.origin == origin_in {
+                        // inc_ref on parent entry
+                        entry.refs.set(entry.refs.get() + 1);
+                    } else {
+                        let _ = sender.send(Err(BlobURLStoreError::InvalidOrigin));
+                        return;
+                    }
+                },
+                None => {
+                    let _ = sender.send(Err(BlobURLStoreError::InvalidFileID));
+                    return;
+                }
+            };
+
+            let new_id = Uuid::new_v4();
+            self.store.insert(new_id, FileStoreEntry {
+                origin: origin_in.clone(),
+                file_impl: FileImpl::Sliced(id, rel_pos),
+                refs: Cell::new(1),
+            });
+
+            let _ = sender.send(Ok(SelectedFileId(new_id.simple().to_string())));
+        } else {
+            let _ = sender.send(Err(BlobURLStoreError::InvalidFileID));
+        }
+    }

     fn select_file(&mut self, patterns: Vec<FilterPattern>,
-                   sender: IpcSender<FileManagerResult<SelectedFile>>) {
+                   sender: IpcSender<FileManagerResult<SelectedFile>>,
+                   origin: FileOrigin) {
         match self.ui.open_file_dialog("", patterns) {
             Some(s) => {
                 let selected_path = Path::new(&s);
-                match self.create_entry(selected_path) {
+                match self.create_entry(selected_path, &origin) {

@@ -149,7 +230,8 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
     fn select_files(&mut self, patterns: Vec<FilterPattern>,
-                    sender: IpcSender<FileManagerResult<Vec<SelectedFile>>>) {
+                    sender: IpcSender<FileManagerResult<Vec<SelectedFile>>>,
+                    origin: FileOrigin) {

@@ -161,7 +243,7 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
         for path in selected_paths {
-            match self.create_entry(path) {
+            match self.create_entry(path, &origin) {

@@ -176,11 +258,17 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
-    fn create_entry(&mut self, file_path: &Path) -> Option<SelectedFile> {
+    fn create_entry(&mut self, file_path: &Path, origin: &str) -> Option<SelectedFile> {
         match File::open(file_path) {
             Ok(handler) => {
                 let id = Uuid::new_v4();
-                self.idmap.insert(id, file_path.to_path_buf());
+                let file_impl = FileImpl::PathOnly(file_path.to_path_buf());
+
+                self.store.insert(id, FileStoreEntry {
+                    origin: origin.to_string(),
+                    file_impl: file_impl,
+                    refs: Cell::new(1),
+                });

@@ -215,79 +303,138 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
-    fn try_read_file(&mut self, id: SelectedFileId) -> Result<Vec<u8>, ()> {
+    fn try_read_file(&self, id: SelectedFileId, origin_in: String) -> Result<Vec<u8>, ()> {
         let id = try!(Uuid::parse_str(&id.0).map_err(|_| ()));
-        match self.idmap.get(&id) {
-            Some(filepath) => {
-                let mut buffer = vec![];
-                let mut handler = try!(File::open(&filepath).map_err(|_| ()));
-                try!(handler.read_to_end(&mut buffer).map_err(|_| ()));
-                Ok(buffer)
-            },
-            None => Err(())
-        }
-    }
-
-    fn delete_fileid(&mut self, id: SelectedFileId) {
-        if let Ok(id) = Uuid::parse_str(&id.0) {
-            self.idmap.remove(&id);
-        }
-    }
+        match self.store.get(&id) {
+            Some(entry) => {
+                match entry.file_impl {
+                    FileImpl::PathOnly(ref filepath) => {
+                        if *entry.origin == origin_in {
+                            let mut buffer = vec![];
+                            let mut handler = try!(File::open(filepath).map_err(|_| ()));
+                            try!(handler.read_to_end(&mut buffer).map_err(|_| ()));
+                            Ok(buffer)
+                        } else {
+                            Err(())
+                        }
+                    },
+                    FileImpl::Memory(ref buffered) => {
+                        Ok(buffered.bytes.clone())
+                    },
+                    FileImpl::Sliced(ref id, ref _rel_pos) => {
+                        self.try_read_file(SelectedFileId(id.simple().to_string()), origin_in)
+                    }
+                }
+            },
+            None => Err(()),
+        }
+    }

-pub struct BlobURLStore {
-    entries: HashMap<Uuid, (Origin, BlobURLStoreEntry)>,
-}
-
-impl BlobURLStore {
-    pub fn new() -> BlobURLStore {
-        BlobURLStore {
-            entries: HashMap::new(),
-        }
-    }
-
-    fn process(&mut self, msg: BlobURLStoreMsg) {
-        match msg {
-            BlobURLStoreMsg::AddEntry(entry, origin_str, sender) => {
-                match Url::parse(&origin_str) {
-                    Ok(base_url) => {
-                        let id = Uuid::new_v4();
-                        self.add_entry(id, base_url.origin(), entry);
-                        let _ = sender.send(Ok(id.simple().to_string()));
-                    }
-                    Err(_) => {
-                        let _ = sender.send(Err(BlobURLStoreError::InvalidOrigin));
-                    }
-                }
-            }
-            BlobURLStoreMsg::DeleteEntry(id) => {
-                if let Ok(id) = Uuid::parse_str(&id) {
-                    self.delete_entry(id);
-                }
-            },
-        }
-    }
-
-    pub fn request(&self, id: Uuid, origin: &Origin) -> Result<&BlobURLStoreEntry, BlobURLStoreError> {
-        match self.entries.get(&id) {
-            Some(ref pair) => {
-                if pair.0 == *origin {
-                    Ok(&pair.1)
-                } else {
-                    Err(BlobURLStoreError::InvalidOrigin)
-                }
-            }
-            None => Err(BlobURLStoreError::InvalidKey)
-        }
-    }
-
-    pub fn add_entry(&mut self, id: Uuid, origin: Origin, blob: BlobURLStoreEntry) {
-        self.entries.insert(id, (origin, blob));
-    }
-
-    pub fn delete_entry(&mut self, id: Uuid) {
-        self.entries.remove(&id);
-    }
-}
+    fn dec_ref(&mut self, id: Uuid, origin_in: FileOrigin) {
+        let (is_last_ref, opt_parent_id) = match self.store.get(&id) {
+            Some(entry) => {
+                if *entry.origin == origin_in {
+                    let r = entry.refs.get();
+                    if r > 1 {
+                        entry.refs.set(r - 1);
+                        (false, None)
+                    } else {
+                        if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
+                            // if it has a reference to parent id, dec_ref on parent later
+                            (true, Some(parent_id.clone()))
+                        } else {
+                            (true, None)
+                        }
+                    }
+                } else { // Invalid origin
+                    return;
+                }
+            }
+            None => return, // Invalid UUID
+        };
+
+        if is_last_ref {
+            self.store.remove(&id);
+            if let Some(parent_id) = opt_parent_id {
+                self.dec_ref(parent_id, origin_in);
+            }
+        }
+    }
+
+    fn process_request(&self, load_data: &LoadData, consumer: LoadConsumer,
+                       rel_pos: &RelativePos, id: &Uuid) {
+        let origin_in = load_data.url.origin().unicode_serialization();
+        match self.store.get(id) {
+            Some(entry) => {
+                match entry.file_impl {
+                    FileImpl::Memory(ref buffered) => {
+                        if *entry.origin == origin_in {
+                            load_blob(&load_data, consumer, self.classifier.clone(),
+                                      None, rel_pos, buffered);
+                        } else {
+                            let e = format!("Invalid blob URL origin {:?}", origin_in);
+                            send_error(load_data.url.clone(), NetworkError::Internal(e), consumer);
+                        }
+                    },
+                    FileImpl::PathOnly(ref filepath) => {
+                        let opt_filename = filepath.file_name()
+                                                   .and_then(|osstr| osstr.to_str())
+                                                   .map(|s| s.to_string());
+
+                        if *entry.origin == origin_in {
+                            let mut bytes = vec![];
+                            let mut handler = File::open(filepath).unwrap();
+                            let mime = guess_mime_type_opt(filepath);
+                            let size = handler.read_to_end(&mut bytes).unwrap();
+
+                            let entry = BlobURLStoreEntry {
+                                type_string: match mime {
+                                    Some(x) => format!("{}", x),
+                                    None => "".to_string(),
+                                },
+                                size: size as u64,
+                                bytes: bytes,
+                            };
+
+                            load_blob(&load_data, consumer, self.classifier.clone(),
+                                      opt_filename, rel_pos, &entry);
+                        } else {
+                            let e = format!("Invalid blob URL origin {:?}", origin_in);
+                            send_error(load_data.url.clone(), NetworkError::Internal(e), consumer);
+                        }
+                    },
+                    FileImpl::Sliced(ref id, ref rel_pos) => {
+                        self.process_request(load_data, consumer, rel_pos, id);
+                    }
+                }
+            }
+            _ => {
+                let e = format!("Invalid blob URL key {:?}", id.simple().to_string());
+                send_error(load_data.url.clone(), NetworkError::Internal(e), consumer);
+            }
+        }
+    }
+
+    fn transfer_memory(&mut self, entry: BlobURLStoreEntry, rel_pos: RelativePos,
+                       sender: IpcSender<Result<SelectedFileId, BlobURLStoreError>>, origin: FileOrigin) {
+        match Url::parse(&origin) { // parse to check sanity
+            Ok(_) => {
+                let id = Uuid::new_v4();
+                self.store.insert(id, FileStoreEntry {
+                    origin: origin.clone(),
+                    file_impl: FileImpl::Memory(entry),
+                    refs: Cell::new(1),
+                });
+                let sliced_id = SelectedFileId(id.simple().to_string());
+
+                self.add_sliced_entry(sliced_id, rel_pos, sender, origin);
+            }
+            Err(_) => {
+                let _ = sender.send(Err(BlobURLStoreError::InvalidOrigin));
+            }
+        }
+    }
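The reference-counting scheme above (item 2 of the PR description) is easier to see in isolation. Below is a minimal, self-contained sketch using only the standard library; the names (`Store`, `Entry`, `Impl`) and the `u32` ids are illustrative stand-ins for the `FileManager` store, `FileStoreEntry`, `FileImpl` and UUIDs in the diff, not Servo's actual API.

```rust
// Toy model of the file manager's reference-counted store: every entry starts
// at one reference, slicing bumps the parent's count, and a DecRef that drops
// the count to zero removes the entry and cascades to its parent.
use std::cell::Cell;
use std::collections::HashMap;

enum Impl {
    Standalone,  // stands in for PathOnly / Memory
    Sliced(u32), // points at a parent entry id
}

struct Entry {
    origin: String,
    file_impl: Impl,
    refs: Cell<usize>,
}

struct Store(HashMap<u32, Entry>);

impl Store {
    fn inc_ref(&self, id: u32, origin: &str) {
        if let Some(e) = self.0.get(&id) {
            if e.origin == origin {
                e.refs.set(e.refs.get() + 1);
            }
        }
    }

    fn dec_ref(&mut self, id: u32, origin: &str) {
        let parent = match self.0.get(&id) {
            Some(e) if e.origin == origin => {
                let r = e.refs.get();
                if r > 1 {
                    e.refs.set(r - 1);
                    return;
                }
                match e.file_impl {
                    Impl::Sliced(p) => Some(p),
                    Impl::Standalone => None,
                }
            }
            _ => return, // unknown id or origin mismatch
        };
        self.0.remove(&id); // last reference gone
        if let Some(p) = parent {
            self.dec_ref(p, origin); // release the slice's hold on its parent
        }
    }
}

fn main() {
    let mut store = Store(HashMap::new());
    store.0.insert(1, Entry { origin: "https://a.test".into(), file_impl: Impl::Standalone, refs: Cell::new(1) });
    // Slicing entry 1 creates entry 2 and bumps entry 1's count.
    store.inc_ref(1, "https://a.test");
    store.0.insert(2, Entry { origin: "https://a.test".into(), file_impl: Impl::Sliced(1), refs: Cell::new(1) });

    store.dec_ref(1, "https://a.test"); // parent is still held alive by the slice
    assert!(store.0.contains_key(&1));
    store.dec_ref(2, "https://a.test"); // dropping the slice also releases the parent
    assert!(store.0.is_empty());
}
```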
@@ -18,6 +18,7 @@ hyper = { version = "0.9.9", features = [ "serde-serialization" ] }
 image = "0.10"
 lazy_static = "0.2"
 log = "0.3.5"
+num-traits = "0.1.32"
 serde = "0.7.11"
 serde_macros = "0.7.11"
 url = {version = "1.0.0", features = ["heap_size"]}
@@ -2,37 +2,24 @@
-use ipc_channel::ipc::IpcSender;
 use std::str::FromStr;
 use url::Url;
 use uuid::Uuid;

 /// Errors returns to BlobURLStoreMsg::Request
-#[derive(Clone, Serialize, Deserialize)]
+#[derive(Clone, Debug, Serialize, Deserialize)]
 pub enum BlobURLStoreError {
-    /// Invalid UUID key
-    InvalidKey,
+    /// Invalid File UUID
+    InvalidFileID,
     /// Invalid URL origin
     InvalidOrigin,
 }

-#[derive(Serialize, Deserialize)]
-pub enum BlobURLStoreMsg {
-    /// Add an entry and send back the associated uuid
-    /// XXX: Second field is an unicode-serialized Origin, it is a temporary workaround
-    /// and should not be trusted. See issue https://github.com/servo/servo/issues/11722
-    AddEntry(BlobURLStoreEntry, String, IpcSender<Result<String, BlobURLStoreError>>),
-    /// Delete an entry by uuid
-    DeleteEntry(String),
-}

 /// Blob URL store entry, a packaged form of Blob DOM object
 #[derive(Clone, Serialize, Deserialize)]
 pub struct BlobURLStoreEntry {
     /// MIME type string
     pub type_string: String,
-    /// Some filename if the backend of Blob is a file
-    pub filename: Option<String>,
     /// Size of content in bytes
     pub size: u64,
     /// Content of blob
@@ -2,11 +2,102 @@
-use blob_url_store::BlobURLStoreMsg;
+use blob_url_store::{BlobURLStoreEntry, BlobURLStoreError};
 use ipc_channel::ipc::IpcSender;
+use num_traits::ToPrimitive;
+use std::cmp::{max, min};
+use std::ops::Range;
 use std::path::PathBuf;
 use super::{LoadConsumer, LoadData};

+// HACK: We should send Origin directly instead of this in future, blocked on #11722
+/// File manager store entry's origin
+pub type FileOrigin = String;
+
+/// Relative slice positions of a sequence,
+/// whose semantic should be consistent with (start, end) parameters in
+/// https://w3c.github.io/FileAPI/#dfn-slice
+#[derive(Clone, Deserialize, Serialize)]
+pub struct RelativePos {
+    /// Relative to first byte if non-negative,
+    /// relative to one past last byte if negative,
+    pub start: i64,
+    /// Relative offset from first byte if Some(non-negative),
+    /// relative to one past last byte if Some(negative),
+    /// None if one past last byte
+    pub end: Option<i64>,
+}
+
+impl RelativePos {
+    /// Full range from start to end
+    pub fn full_range() -> RelativePos {
+        RelativePos {
+            start: 0,
+            end: Some(0),
+        }
+    }
+
+    /// Instantiate optional slice position parameters
+    pub fn from_opts(start: Option<i64>, end: Option<i64>) -> RelativePos {
+        RelativePos {
+            start: start.unwrap_or(0),
+            end: end,
+        }
+    }
+
+    /// Slice the inner sliced range by repositioning
+    pub fn slice_inner(&self, rel_pos: &RelativePos) -> RelativePos {
+        RelativePos {
+            start: self.start + rel_pos.start,
+            end: match (self.end, rel_pos.end) {
+                (Some(old_end), Some(rel_end)) => Some(old_end + rel_end),
+                (old, None) => old,
+                (None, rel) => rel,
+            }
+        }
+    }
+
+    /// Compute absolute range by giving the total size
+    /// https://w3c.github.io/FileAPI/#slice-method-algo
+    pub fn to_abs_range(&self, size: usize) -> Range<usize> {
+        let size = size as i64;
+
+        let start = {
+            if self.start < 0 {
+                max(size + self.start, 0)
+            } else {
+                min(self.start, size)
+            }
+        };
+
+        let end = match self.end {
+            Some(rel_end) => {
+                if rel_end < 0 {
+                    max(size + rel_end, 0)
+                } else {
+                    min(rel_end, size)
+                }
+            }
+            None => size,
+        };
+
+        let span: i64 = max(end - start, 0);
+
+        Range {
+            start: start.to_usize().unwrap(),
+            end: (start + span).to_usize().unwrap(),
+        }
+    }
+
+    /// Inverse operation of to_abs_range
+    pub fn from_abs_range(range: Range<usize>, size: usize) -> RelativePos {
+        RelativePos {
+            start: range.start as i64,
+            end: Some(size as i64 - range.end as i64),
+        }
+    }
+}

 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct SelectedFileId(pub String);

@@ -27,23 +118,29 @@ pub struct FilterPattern(pub String);
 #[derive(Deserialize, Serialize)]
 pub enum FileManagerThreadMsg {
     /// Select a single file, return triple (FileID, FileName, lastModified)
-    SelectFile(Vec<FilterPattern>, IpcSender<FileManagerResult<SelectedFile>>),
+    SelectFile(Vec<FilterPattern>, IpcSender<FileManagerResult<SelectedFile>>, FileOrigin),

     /// Select multiple files, return a vector of triples
-    SelectFiles(Vec<FilterPattern>, IpcSender<FileManagerResult<Vec<SelectedFile>>>),
+    SelectFiles(Vec<FilterPattern>, IpcSender<FileManagerResult<Vec<SelectedFile>>>, FileOrigin),

     /// Read file, return the bytes
-    ReadFile(IpcSender<FileManagerResult<Vec<u8>>>, SelectedFileId),
+    ReadFile(IpcSender<FileManagerResult<Vec<u8>>>, SelectedFileId, FileOrigin),

-    /// Delete the FileID entry
-    DeleteFileID(SelectedFileId),
-
-    // Blob URL message
-    BlobURLStoreMsg(BlobURLStoreMsg),

     /// Load resource by Blob URL
     LoadBlob(LoadData, LoadConsumer),

+    /// Add an entry and send back the associated uuid
+    TransferMemory(BlobURLStoreEntry, RelativePos, IpcSender<Result<SelectedFileId, BlobURLStoreError>>, FileOrigin),
+
+    /// Add a sliced entry pointing to the parent id with a relative slicing positing
+    AddSlicedEntry(SelectedFileId, RelativePos, IpcSender<Result<SelectedFileId, BlobURLStoreError>>, FileOrigin),
+
+    /// Decrease reference count
+    DecRef(SelectedFileId, FileOrigin),
+
+    /// Increase reference count
+    IncRef(SelectedFileId, FileOrigin),
+
     /// Shut down this thread
     Exit,
 }
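The slice semantics that `RelativePos::to_abs_range` encodes above (negative offsets count from the end of the data, `end: None` means "to the end", and everything is clamped to the data size) can be exercised with a small standalone example. The `to_abs_range` below is a free-function restatement of the method added in this diff, for illustration only.

```rust
// Standalone illustration (std only) of the File API slice algorithm,
// https://w3c.github.io/FileAPI/#slice-method-algo, as implemented by
// RelativePos::to_abs_range.
use std::cmp::{max, min};
use std::ops::Range;

fn to_abs_range(start: i64, end: Option<i64>, size: usize) -> Range<usize> {
    let size = size as i64;
    // Negative start counts back from the end; positive start is clamped to size.
    let start = if start < 0 { max(size + start, 0) } else { min(start, size) };
    let end = match end {
        Some(e) if e < 0 => max(size + e, 0),
        Some(e) => min(e, size),
        None => size,
    };
    // An inverted range collapses to an empty slice rather than panicking.
    let span = max(end - start, 0);
    (start as usize)..((start + span) as usize)
}

fn main() {
    // blob.slice(2, 5) on a 10-byte blob -> bytes 2..5
    assert_eq!(to_abs_range(2, Some(5), 10), 2..5);
    // blob.slice(-4) -> the last four bytes
    assert_eq!(to_abs_range(-4, None, 10), 6..10);
    // out-of-range positions are clamped
    assert_eq!(to_abs_range(7, Some(100), 10), 7..10);
    // start past end yields an empty range
    assert_eq!(to_abs_range(8, Some(3), 10), 8..8);
}
```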
@@ -23,6 +23,7 @@ extern crate lazy_static;
 #[macro_use]
 extern crate log;
 extern crate msg;
+extern crate num_traits;
 extern crate serde;
 extern crate url;
 extern crate util;
@@ -57,7 +57,7 @@ use js::jsval::JSVal;
 use js::rust::Runtime;
 use libc;
 use msg::constellation_msg::{FrameType, PipelineId, SubpageId, WindowSizeData, WindowSizeType, ReferrerPolicy};
-use net_traits::filemanager_thread::SelectedFileId;
+use net_traits::filemanager_thread::{SelectedFileId, RelativePos};
 use net_traits::image::base::{Image, ImageMetadata};
 use net_traits::image_cache_thread::{ImageCacheChan, ImageCacheThread};
 use net_traits::response::HttpsState;

@@ -331,6 +331,7 @@ no_jsmanaged_fields!(ReferrerPolicy);
 no_jsmanaged_fields!(ResourceThreads);
 no_jsmanaged_fields!(SystemTime);
 no_jsmanaged_fields!(SelectedFileId);
+no_jsmanaged_fields!(RelativePos);
 no_jsmanaged_fields!(OpaqueStyleAndLayoutData);
 no_jsmanaged_fields!(CSSErrorReporter);
 no_jsmanaged_fields!(WebGLBufferId);
@@ -8,17 +8,18 @@ use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
 use dom::bindings::codegen::UnionTypes::BlobOrString;
 use dom::bindings::error::{Error, Fallible};
 use dom::bindings::global::GlobalRef;
-use dom::bindings::js::Root;
+use dom::bindings::js::{JS, Root};
 use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object};
 use dom::bindings::str::DOMString;
 use encoding::all::UTF_8;
 use encoding::types::{EncoderTrap, Encoding};
 use ipc_channel::ipc;
-use net_traits::filemanager_thread::{FileManagerThreadMsg, SelectedFileId};
-use num_traits::ToPrimitive;
+use net_traits::IpcSend;
+use net_traits::blob_url_store::BlobURLStoreEntry;
+use net_traits::filemanager_thread::{FileManagerThreadMsg, SelectedFileId, RelativePos};
 use std::ascii::AsciiExt;
 use std::cell::Cell;
-use std::cmp::{max, min};
+use std::ops::Range;
 use std::sync::Arc;

@@ -31,36 +32,12 @@ pub struct DataSlice {
 impl DataSlice {
     /// Construct DataSlice from reference counted bytes
     pub fn new(bytes: Arc<Vec<u8>>, start: Option<i64>, end: Option<i64>) -> DataSlice {
-        let size = bytes.len() as i64;
-        let relativeStart: i64 = match start {
-            None => 0,
-            Some(start) => {
-                if start < 0 {
-                    max(size + start, 0)
-                } else {
-                    min(start, size)
-                }
-            }
-        };
-        let relativeEnd: i64 = match end {
-            None => size,
-            Some(end) => {
-                if end < 0 {
-                    max(size + end, 0)
-                } else {
-                    min(end, size)
-                }
-            }
-        };
-
-        let span: i64 = max(relativeEnd - relativeStart, 0);
-        let start = relativeStart.to_usize().unwrap();
-        let end = (relativeStart + span).to_usize().unwrap();
+        let range = RelativePos::from_opts(start, end).to_abs_range(bytes.len());

         DataSlice {
             bytes: bytes,
-            bytes_start: start,
-            bytes_end: end
+            bytes_start: range.start,
+            bytes_end: range.end,
         }
     }

@@ -87,15 +64,30 @@ impl DataSlice {
     pub fn size(&self) -> u64 {
         (self.bytes_end as u64) - (self.bytes_start as u64)
     }

+    /// Further adjust the slice range based on passed-in relative positions
+    pub fn slice(&self, pos: &RelativePos) -> DataSlice {
+        let old_size = self.size();
+        let range = pos.to_abs_range(old_size as usize);
+        DataSlice {
+            bytes: self.bytes.clone(),
+            bytes_start: self.bytes_start + range.start,
+            bytes_end: self.bytes_start + range.end,
+        }
+    }
 }

+#[must_root]
-#[derive(Clone, JSTraceable)]
+#[derive(JSTraceable)]
 pub enum BlobImpl {
-    /// File-based, cached backend
+    /// File-based blob, including id and possibly cached content
     File(SelectedFileId, DOMRefCell<Option<DataSlice>>),
-    /// Memory-based backend
+    /// Memory-based blob
     Memory(DataSlice),
+    /// Sliced blob, including parent blob and
+    /// relative positions representing current slicing range,
+    /// it is leaf of a two-layer fat tree
+    Sliced(JS<Blob>, RelativePos),
 }

@@ -120,26 +112,58 @@ impl BlobImpl {
 pub struct Blob {
     reflector_: Reflector,
     #[ignore_heap_size_of = "No clear owner"]
-    blob_impl: BlobImpl,
+    blob_impl: DOMRefCell<BlobImpl>,
     typeString: String,
     isClosed_: Cell<bool>,
 }

 impl Blob {
+    #[allow(unrooted_must_root)]
     pub fn new(global: GlobalRef, blob_impl: BlobImpl, typeString: String) -> Root<Blob> {
         let boxed_blob = box Blob::new_inherited(blob_impl, typeString);
         reflect_dom_object(boxed_blob, global, BlobBinding::Wrap)
     }

+    #[allow(unrooted_must_root)]
     pub fn new_inherited(blob_impl: BlobImpl, typeString: String) -> Blob {
         Blob {
             reflector_: Reflector::new(),
-            blob_impl: blob_impl,
+            blob_impl: DOMRefCell::new(blob_impl),
             typeString: typeString,
             isClosed_: Cell::new(false),
         }
     }

+    #[allow(unrooted_must_root)]
+    fn new_sliced(parent: &Blob, rel_pos: RelativePos,
+                  relativeContentType: DOMString) -> Root<Blob> {
+        let global = parent.global();
+        let blob_impl = match *parent.blob_impl.borrow() {
+            BlobImpl::File(ref id, _) => {
+                inc_ref_id(global.r(), id.clone());
+
+                // Create new parent node
+                BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
+            }
+            BlobImpl::Memory(_) => {
+                // Create new parent node
+                BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
+            }
+            BlobImpl::Sliced(ref grandparent, ref old_rel_pos) => {
+                // Adjust the slicing position, using same parent
+                let new_rel_pos = old_rel_pos.slice_inner(&rel_pos);
+
+                if let BlobImpl::File(ref id, _) = *grandparent.blob_impl.borrow() {
+                    inc_ref_id(global.r(), id.clone());
+                }
+
+                BlobImpl::Sliced(grandparent.clone(), new_rel_pos)
+            }
+        };
+
+        Blob::new(global.r(), blob_impl, relativeContentType.into())
+    }
+
     // https://w3c.github.io/FileAPI/#constructorBlob
     pub fn Constructor(global: GlobalRef,
                        blobParts: Option<Vec<BlobOrString>>,

@@ -160,19 +184,29 @@ impl Blob {
     /// Get a slice to inner data, this might incur synchronous read and caching
     pub fn get_slice(&self) -> Result<DataSlice, ()> {
-        match self.blob_impl {
-            BlobImpl::File(ref id, ref slice) => {
-                match *slice.borrow() {
+        match *self.blob_impl.borrow() {
+            BlobImpl::File(ref id, ref cached) => {
+                let buffer = match *cached.borrow() {
                     Some(ref s) => Ok(s.clone()),
                     None => {
                         let global = self.global();
                         let s = read_file(global.r(), id.clone())?;
-                        *slice.borrow_mut() = Some(s.clone()); // Cached
                         Ok(s)
                     }
-                }
-            }
-            BlobImpl::Memory(ref s) => Ok(s.clone())
+                };
+
+                // Cache
+                if let Ok(buf) = buffer.clone() {
+                    *cached.borrow_mut() = Some(buf);
+                }
+
+                buffer
+            }
+            BlobImpl::Memory(ref s) => Ok(s.clone()),
+            BlobImpl::Sliced(ref parent, ref rel_pos) => {
+                let dataslice = parent.get_slice_or_empty();
+                Ok(dataslice.slice(rel_pos))
+            }
         }
     }

@@ -180,12 +214,83 @@ impl Blob {
     pub fn get_slice_or_empty(&self) -> DataSlice {
         self.get_slice().unwrap_or(DataSlice::empty())
     }

+    pub fn get_id(&self) -> SelectedFileId {
+        match *self.blob_impl.borrow() {
+            BlobImpl::File(ref id, _) => id.clone(),
+            BlobImpl::Memory(ref slice) => self.promote_to_file(slice),
+            BlobImpl::Sliced(ref parent, ref rel_pos) => {
+                match *parent.blob_impl.borrow() {
+                    BlobImpl::Sliced(_, _) => {
+                        debug!("Sliced can't have a sliced parent");
+                        // Return dummy id
+                        SelectedFileId("".to_string())
+                    }
+                    BlobImpl::File(ref parent_id, _) =>
+                        self.create_sliced_id(parent_id, rel_pos),
+                    BlobImpl::Memory(ref parent_slice) => {
+                        let parent_id = parent.promote_to_file(parent_slice);
+                        *self.blob_impl.borrow_mut() = BlobImpl::Sliced(parent.clone(), rel_pos.clone());
+                        self.create_sliced_id(&parent_id, rel_pos)
+                    }
+                }
+            }
+        }
+    }
+
+    /// Promite memory-based Blob to file-based,
+    /// The bytes in data slice will be transferred to file manager thread
+    fn promote_to_file(&self, self_slice: &DataSlice) -> SelectedFileId {
+        let global = self.global();
+        let origin = global.r().get_url().origin().unicode_serialization();
+        let filemanager = global.r().resource_threads().sender();
+        let bytes = self_slice.get_bytes();
+        let rel_pos = RelativePos::from_abs_range(Range {
+            start: self_slice.bytes_start,
+            end: self_slice.bytes_end,
+        }, self_slice.bytes.len());
+
+        let entry = BlobURLStoreEntry {
+            type_string: self.typeString.clone(),
+            size: self.Size(),
+            bytes: bytes.to_vec(),
+        };
+
+        let (tx, rx) = ipc::channel().unwrap();
+        let _ = filemanager.send(FileManagerThreadMsg::TransferMemory(entry, rel_pos, tx, origin.clone()));
+
+        match rx.recv().unwrap() {
+            Ok(new_id) => SelectedFileId(new_id.0),
+            // Dummy id
+            Err(_) => SelectedFileId("".to_string()),
+        }
+    }
+
+    fn create_sliced_id(&self, parent_id: &SelectedFileId,
+                        rel_pos: &RelativePos) -> SelectedFileId {
+        let global = self.global();
+
+        let origin = global.r().get_url().origin().unicode_serialization();
+
+        let filemanager = global.r().resource_threads().sender();
+        let (tx, rx) = ipc::channel().unwrap();
+        let msg = FileManagerThreadMsg::AddSlicedEntry(parent_id.clone(),
+                                                       rel_pos.clone(),
+                                                       tx, origin.clone());
+        let _ = filemanager.send(msg);
+        let new_id = rx.recv().unwrap().unwrap();
+
+        // Return the indirect id reference
+        SelectedFileId(new_id.0)
+    }
 }

 fn read_file(global: GlobalRef, id: SelectedFileId) -> Result<DataSlice, ()> {
     let file_manager = global.filemanager_thread();
     let (chan, recv) = ipc::channel().map_err(|_|())?;
-    let _ = file_manager.send(FileManagerThreadMsg::ReadFile(chan, id));
+    let origin = global.get_url().origin().unicode_serialization();
+    let msg = FileManagerThreadMsg::ReadFile(chan, id, origin);
+    let _ = file_manager.send(msg);

     let result = match recv.recv() {
         Ok(ret) => ret,

@@ -248,10 +353,8 @@ impl BlobMethods for Blob {
         };

-        let global = self.global();
-        let bytes = self.get_slice_or_empty().bytes.clone();
-        let slice = DataSlice::new(bytes, start, end);
-        Blob::new(global.r(), BlobImpl::new_from_slice(slice), relativeContentType.into())
+        let rel_pos = RelativePos::from_opts(start, end);
+        Blob::new_sliced(self, rel_pos, relativeContentType)
     }

     // https://w3c.github.io/FileAPI/#dfn-isClosed

@@ -292,3 +394,11 @@ fn is_ascii_printable(string: &str) -> bool {
     // https://w3c.github.io/FileAPI/#constructorBlob
     string.chars().all(|c| c >= '\x20' && c <= '\x7E')
 }

+/// Bump the reference counter in file manager thread
+fn inc_ref_id(global: GlobalRef, id: SelectedFileId) {
+    let file_manager = global.filemanager_thread();
+    let origin = global.get_url().origin().unicode_serialization();
+    let msg = FileManagerThreadMsg::IncRef(id, origin);
+    let _ = file_manager.send(msg);
+}
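`BlobImpl::Sliced` above is documented as the leaf of a two-layer fat tree: slicing an already-sliced blob composes the relative positions (via `slice_inner`) and re-points at the original parent instead of building a chain. A minimal standalone sketch of that invariant, with illustrative types (`Pos`, a simplified `Blob`, index-based parents) rather than Servo's:

```rust
// Sketch of the two-layer slicing invariant: a slice of a slice composes the
// offsets and keeps pointing at the root, so resolution is at most one hop.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos { start: i64, end: Option<i64> }

impl Pos {
    // Same composition rule as RelativePos::slice_inner in the diff.
    fn slice_inner(self, inner: Pos) -> Pos {
        Pos {
            start: self.start + inner.start,
            end: match (self.end, inner.end) {
                (Some(a), Some(b)) => Some(a + b),
                (a, None) => a,
                (None, b) => b,
            },
        }
    }
}

enum Blob {
    Memory(Vec<u8>),
    Sliced(usize /* parent index */, Pos),
}

fn main() {
    let mut blobs = vec![Blob::Memory((0u8..10).collect())];

    // First slice: bytes 2.. of the parent.
    blobs.push(Blob::Sliced(0, Pos { start: 2, end: None }));

    // Slicing the slice again: compose the positions, keep the same parent.
    let composed = match &blobs[1] {
        &Blob::Sliced(parent, pos) =>
            Blob::Sliced(parent, pos.slice_inner(Pos { start: 3, end: None })),
        _ => unreachable!(),
    };
    match composed {
        Blob::Sliced(parent, pos) => {
            assert_eq!(parent, 0);                        // still one hop from the root
            assert_eq!(pos, Pos { start: 5, end: None }); // 2 + 3
        }
        _ => unreachable!(),
    }

    // The parent's bytes are only consulted once, at resolution time.
    if let Blob::Memory(ref bytes) = blobs[0] {
        assert_eq!(bytes.len(), 10);
    }
}
```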
@@ -23,6 +23,7 @@ pub struct File {
 }

 impl File {
+    #[allow(unrooted_must_root)]
     fn new_inherited(blob_impl: BlobImpl, name: DOMString,
                      modified: Option<i64>, typeString: &str) -> File {
         File {

@@ -39,6 +40,7 @@ impl File {
     }

+    #[allow(unrooted_must_root)]
     pub fn new(global: GlobalRef, blob_impl: BlobImpl,
                name: DOMString, modified: Option<i64>, typeString: &str) -> Root<File> {
         reflect_dom_object(box File::new_inherited(blob_impl, name, modified, typeString),
@@ -1152,6 +1152,7 @@ impl Activatable for HTMLInputElement {
             InputType::InputFile => {
                 // https://html.spec.whatwg.org/multipage/#file-upload-state-(type=file)
                 let window = window_from_node(self);
+                let origin = window.get_url().origin().unicode_serialization();
                 let filemanager = window.resource_threads().sender();

                 let mut files: Vec<Root<File>> = vec![];

@@ -1162,7 +1163,7 @@ impl Activatable for HTMLInputElement {
                 if self.Multiple() {
                     let (chan, recv) = ipc::channel().expect("Error initializing channel");
-                    let msg = FileManagerThreadMsg::SelectFiles(filter, chan);
+                    let msg = FileManagerThreadMsg::SelectFiles(filter, chan, origin);
                     let _ = filemanager.send(msg).unwrap();

                     match recv.recv().expect("IpcSender side error") {

@@ -1182,7 +1183,7 @@ impl Activatable for HTMLInputElement {
                 } else {
                     let (chan, recv) = ipc::channel().expect("Error initializing channel");
-                    let msg = FileManagerThreadMsg::SelectFile(filter, chan);
+                    let msg = FileManagerThreadMsg::SelectFile(filter, chan, origin);
                     let _ = filemanager.send(msg).unwrap();

                     match recv.recv().expect("IpcSender side error") {
@@ -13,10 +13,9 @@ use dom::bindings::str::{DOMString, USVString};
 use dom::blob::Blob;
 use dom::urlhelper::UrlHelper;
 use dom::urlsearchparams::URLSearchParams;
-use ipc_channel::ipc;
 use net_traits::IpcSend;
-use net_traits::blob_url_store::{BlobURLStoreEntry, BlobURLStoreMsg, parse_blob_url};
+use net_traits::blob_url_store::parse_blob_url;
-use net_traits::filemanager_thread::FileManagerThreadMsg;
+use net_traits::filemanager_thread::{SelectedFileId, FileManagerThreadMsg};
 use std::borrow::ToOwned;
 use std::default::Default;
 use url::quirks::domain_to_unicode;

@@ -125,34 +124,9 @@ impl URL {
             return DOMString::from(URL::unicode_serialization_blob_url(&origin, &id));
         }

-        let filemanager = global.resource_threads().sender();
-
-        let slice = blob.get_slice_or_empty();
-        let bytes = slice.get_bytes();
-
-        let entry = BlobURLStoreEntry {
-            type_string: blob.Type().to_string(),
-            filename: None, // XXX: the filename is currently only in File object now
-            size: blob.Size(),
-            bytes: bytes.to_vec(),
-        };
-
-        let (tx, rx) = ipc::channel().unwrap();
-
-        let msg = BlobURLStoreMsg::AddEntry(entry, origin.clone(), tx);
-
-        let _ = filemanager.send(FileManagerThreadMsg::BlobURLStoreMsg(msg));
-
-        match rx.recv().unwrap() {
-            Ok(id) => {
-                DOMString::from(URL::unicode_serialization_blob_url(&origin, &id))
-            }
-            Err(_) => {
-                // Generate a dummy id
-                let id = Uuid::new_v4().simple().to_string();
-                DOMString::from(URL::unicode_serialization_blob_url(&origin, &id))
-            }
-        }
+        let id = blob.get_id();
+
+        DOMString::from(URL::unicode_serialization_blob_url(&origin, &id.0))
     }

     // https://w3c.github.io/FileAPI/#dfn-revokeObjectURL

@@ -166,13 +140,15 @@ impl URL {
         NOTE: The first step is unnecessary, since closed blobs do not exist in the store
         */
+        let origin = global.get_url().origin().unicode_serialization();

         match Url::parse(&url) {
             Ok(url) => match parse_blob_url(&url) {
                 Some((id, _)) => {
                     let filemanager = global.resource_threads().sender();
-                    let msg = BlobURLStoreMsg::DeleteEntry(id.simple().to_string());
-                    let _ = filemanager.send(FileManagerThreadMsg::BlobURLStoreMsg(msg));
+                    let id = SelectedFileId(id.simple().to_string());
+                    let msg = FileManagerThreadMsg::DecRef(id, origin);
+                    let _ = filemanager.send(msg);
                 }
                 None => {}
             },
components/servo/Cargo.lock (generated)
@@ -1478,6 +1478,7 @@ dependencies = [
 "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "msg 0.0.1",
+ "num-traits 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_macros 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "url 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
ports/cef/Cargo.lock (generated)
@@ -1359,6 +1359,7 @@ dependencies = [
 "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "msg 0.0.1",
+ "num-traits 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_macros 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "url 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
@ -35,12 +35,12 @@ fn test_filemanager() {
|
||||||
.expect("Read tests/unit/net/test.txt error");
|
.expect("Read tests/unit/net/test.txt error");
|
||||||
|
|
||||||
let patterns = vec![FilterPattern(".txt".to_string())];
|
let patterns = vec![FilterPattern(".txt".to_string())];
|
||||||
|
let origin = "test.com".to_string();
|
||||||
|
|
||||||
{
|
{
|
||||||
// Try to select a dummy file "tests/unit/net/test.txt"
|
// Try to select a dummy file "tests/unit/net/test.txt"
|
||||||
let (tx, rx) = ipc::channel().unwrap();
|
let (tx, rx) = ipc::channel().unwrap();
|
||||||
chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx)).unwrap();
|
chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx, origin.clone())).unwrap();
|
||||||
let selected = rx.recv().expect("File manager channel is broken")
|
let selected = rx.recv().expect("File manager channel is broken")
|
||||||
.expect("The file manager failed to find test.txt");
|
.expect("The file manager failed to find test.txt");
|
||||||
|
|
||||||
|
@ -51,7 +51,7 @@ fn test_filemanager() {
|
||||||
// Test by reading, expecting same content
|
// Test by reading, expecting same content
|
||||||
{
|
{
|
||||||
let (tx2, rx2) = ipc::channel().unwrap();
|
let (tx2, rx2) = ipc::channel().unwrap();
|
||||||
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone())).unwrap();
|
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone(), origin.clone())).unwrap();
|
||||||
|
|
||||||
let msg = rx2.recv().expect("File manager channel is broken");
|
let msg = rx2.recv().expect("File manager channel is broken");
|
||||||
|
|
||||||
|
@ -60,12 +60,12 @@ fn test_filemanager() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Delete the id
|
// Delete the id
|
||||||
chan.send(FileManagerThreadMsg::DeleteFileID(selected.id.clone())).unwrap();
|
chan.send(FileManagerThreadMsg::DecRef(selected.id.clone(), origin.clone())).unwrap();
|
||||||
|
|
||||||
// Test by reading again, expecting read error because we invalidated the id
|
// Test by reading again, expecting read error because we invalidated the id
|
||||||
{
|
{
|
||||||
let (tx2, rx2) = ipc::channel().unwrap();
|
let (tx2, rx2) = ipc::channel().unwrap();
|
||||||
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone())).unwrap();
|
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone(), origin.clone())).unwrap();
|
||||||
|
|
||||||
let msg = rx2.recv().expect("File manager channel is broken");
|
let msg = rx2.recv().expect("File manager channel is broken");
|
||||||
|
|
||||||
|
@ -82,7 +82,7 @@ fn test_filemanager() {
|
||||||
|
|
||||||
{
|
{
|
||||||
let (tx, rx) = ipc::channel().unwrap();
|
let (tx, rx) = ipc::channel().unwrap();
|
||||||
let _ = chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx));
|
let _ = chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx, origin.clone()));
|
||||||
|
|
||||||
assert!(rx.try_recv().is_err(), "The thread should not respond normally after exited");
|
assert!(rx.try_recv().is_err(), "The thread should not respond normally after exited");
|
||||||
}
|
}
|
||||||
|
|
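The test above drives everything under a single origin ("test.com"); the origin check it exercises (item 1 of the PR description) is simply that an entry created by one origin cannot be read back by another. A toy, standard-library-only model of that check, with made-up types, is:

```rust
// Minimal model of the origin check in try_read_file/process_request: every
// stored entry remembers the origin that created it, and a read with any
// other origin is rejected.
use std::collections::HashMap;

struct Entry { origin: String, bytes: Vec<u8> }

fn read(store: &HashMap<u32, Entry>, id: u32, requesting_origin: &str) -> Result<Vec<u8>, ()> {
    match store.get(&id) {
        Some(e) if e.origin == requesting_origin => Ok(e.bytes.clone()),
        _ => Err(()), // unknown id or origin mismatch
    }
}

fn main() {
    let mut store = HashMap::new();
    store.insert(1, Entry { origin: "test.com".to_string(), bytes: b"hello".to_vec() });

    assert_eq!(read(&store, 1, "test.com"), Ok(b"hello".to_vec()));
    assert_eq!(read(&store, 1, "evil.example"), Err(()));
}
```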