Rustfmt net crate

This commit is contained in:
Pyfisch 2018-11-03 15:28:48 +01:00
parent ba1ed11ced
commit 2481ad25f8
30 changed files with 4957 additions and 2870 deletions

View file

@@ -34,7 +34,7 @@ struct FileStoreEntry {
/// UUIDs only become valid blob URIs when explicitly requested
/// by the user with createObjectURL. Validity can be revoked as well.
/// (The UUID is the one that maps to this entry in `FileManagerStore`)
is_valid_url: AtomicBool
is_valid_url: AtomicBool,
}
#[derive(Clone)]
@@ -71,29 +71,38 @@ impl FileManager {
}
}
pub fn read_file(&self,
sender: IpcSender<FileManagerResult<ReadFileProgress>>,
id: Uuid,
check_url_validity: bool,
origin: FileOrigin) {
pub fn read_file(
&self,
sender: IpcSender<FileManagerResult<ReadFileProgress>>,
id: Uuid,
check_url_validity: bool,
origin: FileOrigin,
) {
let store = self.store.clone();
thread::Builder::new().name("read file".to_owned()).spawn(move || {
if let Err(e) = store.try_read_file(&sender, id, check_url_validity,
origin) {
let _ = sender.send(Err(FileManagerThreadError::BlobURLStoreError(e)));
}
}).expect("Thread spawning failed");
thread::Builder::new()
.name("read file".to_owned())
.spawn(move || {
if let Err(e) = store.try_read_file(&sender, id, check_url_validity, origin) {
let _ = sender.send(Err(FileManagerThreadError::BlobURLStoreError(e)));
}
})
.expect("Thread spawning failed");
}
pub fn promote_memory(&self,
blob_buf: BlobBuf,
set_valid: bool,
sender: IpcSender<Result<Uuid, BlobURLStoreError>>,
origin: FileOrigin) {
pub fn promote_memory(
&self,
blob_buf: BlobBuf,
set_valid: bool,
sender: IpcSender<Result<Uuid, BlobURLStoreError>>,
origin: FileOrigin,
) {
let store = self.store.clone();
thread::Builder::new().name("transfer memory".to_owned()).spawn(move || {
store.promote_memory(blob_buf, set_valid, sender, origin);
}).expect("Thread spawning failed");
thread::Builder::new()
.name("transfer memory".to_owned())
.spawn(move || {
store.promote_memory(blob_buf, set_valid, sender, origin);
})
.expect("Thread spawning failed");
}
/// Message handler
@@ -102,35 +111,41 @@ impl FileManager {
FileManagerThreadMsg::SelectFile(filter, sender, origin, opt_test_path) => {
let store = self.store.clone();
let embedder = self.embedder_proxy.clone();
thread::Builder::new().name("select file".to_owned()).spawn(move || {
store.select_file(filter, sender, origin, opt_test_path, embedder);
}).expect("Thread spawning failed");
}
thread::Builder::new()
.name("select file".to_owned())
.spawn(move || {
store.select_file(filter, sender, origin, opt_test_path, embedder);
})
.expect("Thread spawning failed");
},
FileManagerThreadMsg::SelectFiles(filter, sender, origin, opt_test_paths) => {
let store = self.store.clone();
let embedder = self.embedder_proxy.clone();
thread::Builder::new().name("select files".to_owned()).spawn(move || {
store.select_files(filter, sender, origin, opt_test_paths, embedder);
}).expect("Thread spawning failed");
}
thread::Builder::new()
.name("select files".to_owned())
.spawn(move || {
store.select_files(filter, sender, origin, opt_test_paths, embedder);
})
.expect("Thread spawning failed");
},
FileManagerThreadMsg::ReadFile(sender, id, check_url_validity, origin) => {
self.read_file(sender, id, check_url_validity, origin);
}
},
FileManagerThreadMsg::PromoteMemory(blob_buf, set_valid, sender, origin) => {
self.promote_memory(blob_buf, set_valid, sender, origin);
}
FileManagerThreadMsg::AddSlicedURLEntry(id, rel_pos, sender, origin) =>{
},
FileManagerThreadMsg::AddSlicedURLEntry(id, rel_pos, sender, origin) => {
self.store.add_sliced_url_entry(id, rel_pos, sender, origin);
}
},
FileManagerThreadMsg::DecRef(id, origin, sender) => {
let _ = sender.send(self.store.dec_ref(&id, &origin));
}
},
FileManagerThreadMsg::RevokeBlobURL(id, origin, sender) => {
let _ = sender.send(self.store.set_blob_url_validity(false, &id, &origin));
}
},
FileManagerThreadMsg::ActivateBlobURL(id, sender, origin) => {
let _ = sender.send(self.store.set_blob_url_validity(true, &id, &origin));
}
},
}
}
}
@@ -150,8 +165,12 @@ impl FileManagerStore {
}
/// Copy out the file backend implementation content
fn get_impl(&self, id: &Uuid, origin_in: &FileOrigin,
check_url_validity: bool) -> Result<FileImpl, BlobURLStoreError> {
fn get_impl(
&self,
id: &Uuid,
origin_in: &FileOrigin,
check_url_validity: bool,
) -> Result<FileImpl, BlobURLStoreError> {
match self.entries.read().unwrap().get(id) {
Some(ref entry) => {
if *origin_in != *entry.origin {
@@ -164,7 +183,7 @@ impl FileManagerStore {
Ok(entry.file_impl.clone())
}
}
}
},
None => Err(BlobURLStoreError::InvalidFileID),
}
}
@@ -177,7 +196,7 @@ impl FileManagerStore {
self.entries.write().unwrap().remove(id);
}
fn inc_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError>{
fn inc_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
match self.entries.read().unwrap().get(id) {
Some(entry) => {
if entry.origin == *origin_in {
@@ -186,41 +205,53 @@ impl FileManagerStore {
} else {
Err(BlobURLStoreError::InvalidOrigin)
}
}
},
None => Err(BlobURLStoreError::InvalidFileID),
}
}
fn add_sliced_url_entry(&self, parent_id: Uuid, rel_pos: RelativePos,
sender: IpcSender<Result<Uuid, BlobURLStoreError>>,
origin_in: FileOrigin) {
fn add_sliced_url_entry(
&self,
parent_id: Uuid,
rel_pos: RelativePos,
sender: IpcSender<Result<Uuid, BlobURLStoreError>>,
origin_in: FileOrigin,
) {
match self.inc_ref(&parent_id, &origin_in) {
Ok(_) => {
let new_id = Uuid::new_v4();
self.insert(new_id, FileStoreEntry {
origin: origin_in,
file_impl: FileImpl::Sliced(parent_id, rel_pos),
refs: AtomicUsize::new(1),
// Valid here since AddSlicedURLEntry implies URL creation
// from a BlobImpl::Sliced
is_valid_url: AtomicBool::new(true),
});
self.insert(
new_id,
FileStoreEntry {
origin: origin_in,
file_impl: FileImpl::Sliced(parent_id, rel_pos),
refs: AtomicUsize::new(1),
// Valid here since AddSlicedURLEntry implies URL creation
// from a BlobImpl::Sliced
is_valid_url: AtomicBool::new(true),
},
);
// We assume that the returned id will be held by BlobImpl::File
let _ = sender.send(Ok(new_id));
}
},
Err(e) => {
let _ = sender.send(Err(e));
}
},
}
}
fn query_files_from_embedder(&self,
patterns: Vec<FilterPattern>,
multiple_files: bool,
embedder_proxy: EmbedderProxy) -> Option<Vec<String>> {
fn query_files_from_embedder(
&self,
patterns: Vec<FilterPattern>,
multiple_files: bool,
embedder_proxy: EmbedderProxy,
) -> Option<Vec<String>> {
let (ipc_sender, ipc_receiver) = ipc::channel().expect("Failed to create IPC channel!");
let msg = (None, EmbedderMsg::SelectFiles(patterns, multiple_files, ipc_sender));
let msg = (
None,
EmbedderMsg::SelectFiles(patterns, multiple_files, ipc_sender),
);
embedder_proxy.send(msg);
match ipc_receiver.recv() {
@@ -228,23 +259,26 @@ impl FileManagerStore {
Err(e) => {
warn!("Failed to receive files from embedder ({}).", e);
None
}
},
}
}
fn select_file(&self,
patterns: Vec<FilterPattern>,
sender: IpcSender<FileManagerResult<SelectedFile>>,
origin: FileOrigin,
opt_test_path: Option<String>,
embedder_proxy: EmbedderProxy) {
fn select_file(
&self,
patterns: Vec<FilterPattern>,
sender: IpcSender<FileManagerResult<SelectedFile>>,
origin: FileOrigin,
opt_test_path: Option<String>,
embedder_proxy: EmbedderProxy,
) {
// Check if the select_files preference is enabled
// to ensure process-level security against compromised script;
// Then try applying opt_test_path directly for testing convenience
let opt_s = if select_files_pref_enabled() {
opt_test_path
} else {
self.query_files_from_embedder(patterns, false, embedder_proxy).and_then(|mut x| x.pop())
self.query_files_from_embedder(patterns, false, embedder_proxy)
.and_then(|mut x| x.pop())
};
match opt_s {
@@ -252,20 +286,22 @@ impl FileManagerStore {
let selected_path = Path::new(&s);
let result = self.create_entry(selected_path, &origin);
let _ = sender.send(result);
}
},
None => {
let _ = sender.send(Err(FileManagerThreadError::UserCancelled));
return;
}
},
}
}
fn select_files(&self,
patterns: Vec<FilterPattern>,
sender: IpcSender<FileManagerResult<Vec<SelectedFile>>>,
origin: FileOrigin,
opt_test_paths: Option<Vec<String>>,
embedder_proxy: EmbedderProxy) {
fn select_files(
&self,
patterns: Vec<FilterPattern>,
sender: IpcSender<FileManagerResult<Vec<SelectedFile>>>,
origin: FileOrigin,
opt_test_paths: Option<Vec<String>>,
embedder_proxy: EmbedderProxy,
) {
// Check if the select_files preference is enabled
// to ensure process-level security against compromised script;
// Then try applying opt_test_paths directly for testing convenience
@@ -291,30 +327,42 @@ impl FileManagerStore {
Err(e) => {
let _ = sender.send(Err(e));
return;
}
},
};
}
let _ = sender.send(Ok(replies));
}
},
None => {
let _ = sender.send(Err(FileManagerThreadError::UserCancelled));
return;
}
},
}
}
fn create_entry(&self, file_path: &Path, origin: &str) -> Result<SelectedFile, FileManagerThreadError> {
fn create_entry(
&self,
file_path: &Path,
origin: &str,
) -> Result<SelectedFile, FileManagerThreadError> {
use net_traits::filemanager_thread::FileManagerThreadError::FileSystemError;
let file = File::open(file_path).map_err(|e| FileSystemError(e.to_string()))?;
let metadata = file.metadata().map_err(|e| FileSystemError(e.to_string()))?;
let modified = metadata.modified().map_err(|e| FileSystemError(e.to_string()))?;
let elapsed = modified.elapsed().map_err(|e| FileSystemError(e.to_string()))?;
let metadata = file
.metadata()
.map_err(|e| FileSystemError(e.to_string()))?;
let modified = metadata
.modified()
.map_err(|e| FileSystemError(e.to_string()))?;
let elapsed = modified
.elapsed()
.map_err(|e| FileSystemError(e.to_string()))?;
// Unix Epoch: https://doc.servo.org/std/time/constant.UNIX_EPOCH.html
let modified_epoch = elapsed.as_secs() * 1000 + elapsed.subsec_nanos() as u64 / 1000000;
let file_size = metadata.len();
let file_name = file_path.file_name().ok_or(FileSystemError("Invalid filepath".to_string()))?;
let file_name = file_path
.file_name()
.ok_or(FileSystemError("Invalid filepath".to_string()))?;
let file_impl = FileImpl::MetaDataOnly(FileMetaData {
path: file_path.to_path_buf(),
@@ -324,18 +372,21 @@ impl FileManagerStore {
let id = Uuid::new_v4();
self.insert(id, FileStoreEntry {
origin: origin.to_string(),
file_impl: file_impl,
refs: AtomicUsize::new(1),
// Invalid here since create_entry is called by file selection
is_valid_url: AtomicBool::new(false),
});
self.insert(
id,
FileStoreEntry {
origin: origin.to_string(),
file_impl: file_impl,
refs: AtomicUsize::new(1),
// Invalid here since create_entry is called by file selection
is_valid_url: AtomicBool::new(false),
},
);
let filename_path = Path::new(file_name);
let type_string = match guess_mime_type_opt(filename_path) {
Some(x) => format!("{}", x),
None => "".to_string(),
None => "".to_string(),
};
Ok(SelectedFile {
@@ -347,9 +398,14 @@ impl FileManagerStore {
})
}
fn get_blob_buf(&self, sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
id: &Uuid, origin_in: &FileOrigin, rel_pos: RelativePos,
check_url_validity: bool) -> Result<(), BlobURLStoreError> {
fn get_blob_buf(
&self,
sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
id: &Uuid,
origin_in: &FileOrigin,
rel_pos: RelativePos,
check_url_validity: bool,
) -> Result<(), BlobURLStoreError> {
let file_impl = self.get_impl(id, origin_in, check_url_validity)?;
match file_impl {
FileImpl::Memory(buf) => {
@@ -365,7 +421,7 @@ impl FileManagerStore {
let _ = sender.send(Ok(ReadFileProgress::EOF));
Ok(())
}
},
FileImpl::MetaDataOnly(metadata) => {
/* XXX: Snapshot state check (optional) https://w3c.github.io/FileAPI/#snapshot-state.
Concretely, here we create another file, and this file might not
@@ -373,45 +429,62 @@ impl FileManagerStore {
create_entry is called.
*/
let opt_filename = metadata.path.file_name()
.and_then(|osstr| osstr.to_str())
.map(|s| s.to_string());
let opt_filename = metadata
.path
.file_name()
.and_then(|osstr| osstr.to_str())
.map(|s| s.to_string());
let mime = guess_mime_type_opt(metadata.path.clone());
let range = rel_pos.to_abs_range(metadata.size as usize);
let mut file = File::open(&metadata.path)
.map_err(|e| BlobURLStoreError::External(e.to_string()))?;
let seeked_start = file.seek(SeekFrom::Start(range.start as u64))
.map_err(|e| BlobURLStoreError::External(e.to_string()))?;
.map_err(|e| BlobURLStoreError::External(e.to_string()))?;
let seeked_start = file
.seek(SeekFrom::Start(range.start as u64))
.map_err(|e| BlobURLStoreError::External(e.to_string()))?;
if seeked_start == (range.start as u64) {
let type_string = match mime {
Some(x) => format!("{}", x),
None => "".to_string(),
None => "".to_string(),
};
chunked_read(sender, &mut file, range.len(), opt_filename,
type_string);
chunked_read(sender, &mut file, range.len(), opt_filename, type_string);
Ok(())
} else {
Err(BlobURLStoreError::InvalidEntry)
}
}
},
FileImpl::Sliced(parent_id, inner_rel_pos) => {
// Next time we don't need to check validity since
// we have already done that for requesting URL if necessary
self.get_blob_buf(sender, &parent_id, origin_in,
rel_pos.slice_inner(&inner_rel_pos), false)
}
self.get_blob_buf(
sender,
&parent_id,
origin_in,
rel_pos.slice_inner(&inner_rel_pos),
false,
)
},
}
}
// Convenient wrapper over get_blob_buf
fn try_read_file(&self, sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
id: Uuid, check_url_validity: bool, origin_in: FileOrigin)
-> Result<(), BlobURLStoreError> {
self.get_blob_buf(sender, &id, &origin_in, RelativePos::full_range(), check_url_validity)
fn try_read_file(
&self,
sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
id: Uuid,
check_url_validity: bool,
origin_in: FileOrigin,
) -> Result<(), BlobURLStoreError> {
self.get_blob_buf(
sender,
&id,
&origin_in,
RelativePos::full_range(),
check_url_validity,
)
}
fn dec_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
@@ -436,7 +509,7 @@ impl FileManagerStore {
} else {
return Err(BlobURLStoreError::InvalidOrigin);
}
}
},
None => return Err(BlobURLStoreError::InvalidFileID),
};
@@ -454,28 +527,41 @@ impl FileManagerStore {
Ok(())
}
fn promote_memory(&self, blob_buf: BlobBuf, set_valid: bool,
sender: IpcSender<Result<Uuid, BlobURLStoreError>>, origin: FileOrigin) {
match Url::parse(&origin) { // parse to check sanity
fn promote_memory(
&self,
blob_buf: BlobBuf,
set_valid: bool,
sender: IpcSender<Result<Uuid, BlobURLStoreError>>,
origin: FileOrigin,
) {
match Url::parse(&origin) {
// parse to check sanity
Ok(_) => {
let id = Uuid::new_v4();
self.insert(id, FileStoreEntry {
origin: origin.clone(),
file_impl: FileImpl::Memory(blob_buf),
refs: AtomicUsize::new(1),
is_valid_url: AtomicBool::new(set_valid),
});
self.insert(
id,
FileStoreEntry {
origin: origin.clone(),
file_impl: FileImpl::Memory(blob_buf),
refs: AtomicUsize::new(1),
is_valid_url: AtomicBool::new(set_valid),
},
);
let _ = sender.send(Ok(id));
}
},
Err(_) => {
let _ = sender.send(Err(BlobURLStoreError::InvalidOrigin));
}
},
}
}
fn set_blob_url_validity(&self, validity: bool, id: &Uuid,
origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
fn set_blob_url_validity(
&self,
validity: bool,
id: &Uuid,
origin_in: &FileOrigin,
) -> Result<(), BlobURLStoreError> {
let (do_remove, opt_parent_id, res) = match self.entries.read().unwrap().get(id) {
Some(entry) => {
if *entry.origin == *origin_in {
@@ -485,7 +571,7 @@ impl FileManagerStore {
// Check if it is the last possible reference
// since refs only accounts for blob id holders
// and store entry id holders
let zero_refs = entry.refs.load(Ordering::Acquire) == 0;
let zero_refs = entry.refs.load(Ordering::Acquire) == 0;
if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
(zero_refs, Some(parent_id.clone()), Ok(()))
@@ -498,8 +584,8 @@ impl FileManagerStore {
} else {
(false, None, Err(BlobURLStoreError::InvalidOrigin))
}
}
None => (false, None, Err(BlobURLStoreError::InvalidFileID))
},
None => (false, None, Err(BlobURLStoreError::InvalidFileID)),
};
if do_remove {
@@ -515,15 +601,21 @@ impl FileManagerStore {
}
fn select_files_pref_enabled() -> bool {
PREFS.get("dom.testing.htmlinputelement.select_files.enabled")
.as_boolean().unwrap_or(false)
PREFS
.get("dom.testing.htmlinputelement.select_files.enabled")
.as_boolean()
.unwrap_or(false)
}
const CHUNK_SIZE: usize = 8192;
fn chunked_read(sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
file: &mut File, size: usize, opt_filename: Option<String>,
type_string: String) {
fn chunked_read(
sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
file: &mut File,
size: usize,
opt_filename: Option<String>,
type_string: String,
) {
// First chunk
let mut buf = vec![0; CHUNK_SIZE];
match file.read(&mut buf) {
@@ -536,11 +628,11 @@ fn chunked_read(sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
bytes: buf,
};
let _ = sender.send(Ok(ReadFileProgress::Meta(blob_buf)));
}
},
Err(e) => {
let _ = sender.send(Err(FileManagerThreadError::FileSystemError(e.to_string())));
return;
}
},
}
// Send the remaining chunks
@@ -550,15 +642,15 @@ fn chunked_read(sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
Ok(0) => {
let _ = sender.send(Ok(ReadFileProgress::EOF));
return;
}
},
Ok(n) => {
buf.truncate(n);
let _ = sender.send(Ok(ReadFileProgress::Partial(buf)));
}
},
Err(e) => {
let _ = sender.send(Err(FileManagerThreadError::FileSystemError(e.to_string())));
return;
}
},
}
}
}