Use stream in file read operation (#35969)

* use read_all_bytes in file read operation

Signed-off-by: gterzian <2792687+gterzian@users.noreply.github.com>

* add docs

Signed-off-by: gterzian <2792687+gterzian@users.noreply.github.com>

---------

Signed-off-by: gterzian <2792687+gterzian@users.noreply.github.com>
Gregory Terzian, 2025-03-16 18:37:07 +08:00, committed by GitHub
commit 3ecd1c0699 (parent d8fc1d8bb8)
2 changed files with 98 additions and 36 deletions
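The control flow of the change is easier to follow outside of Servo's internals. The sketch below is a minimal, self-contained Rust model, not Servo's actual API: `MockReader`, `FileReadingTask`, `Callback`, and the queue are stand-ins invented here for illustration. It only shows the shape of the refactor visible in the diff that follows: instead of pulling the blob bytes out synchronously with `get_bytes`, the reader acquired from the blob's stream hands all bytes to a success callback (or an error to a failure callback), and that callback queues the read / data / EOF (or error) tasks.

// Minimal, standalone model of the new control flow (not Servo's real types).
use std::cell::RefCell;
use std::rc::Rc;

type Callback<T> = Rc<dyn Fn(T)>;

// Stand-in for the tasks queued on the file-reading task source.
#[derive(Debug)]
enum FileReadingTask {
    ProcessRead,
    ProcessReadData,
    ProcessReadEOF(Vec<u8>),
    ProcessReadError(&'static str),
}

// Stand-in for a stream reader: it either yields all bytes at once or fails.
struct MockReader {
    contents: Result<Vec<u8>, &'static str>,
}

impl MockReader {
    // Shaped after the success/failure callback pair passed to `read_all_bytes` in the diff.
    fn read_all_bytes(&self, success: Callback<Vec<u8>>, failure: Callback<&'static str>) {
        match self.contents.clone() {
            Ok(bytes) => success(bytes),
            Err(e) => failure(e),
        }
    }
}

fn main() {
    // Stand-in for the file-reading task source.
    let queue: Rc<RefCell<Vec<FileReadingTask>>> = Rc::new(RefCell::new(Vec::new()));

    let reader = MockReader {
        contents: Ok(b"hello".to_vec()),
    };

    let q_ok = queue.clone();
    let q_err = queue.clone();
    reader.read_all_bytes(
        Rc::new(move |blob_contents: Vec<u8>| {
            // All bytes arrived: queue the "read started" task ...
            q_ok.borrow_mut().push(FileReadingTask::ProcessRead);
            // ... a data task if anything was read ...
            if !blob_contents.is_empty() {
                q_ok.borrow_mut().push(FileReadingTask::ProcessReadData);
            }
            // ... and the EOF task carrying the collected bytes.
            q_ok.borrow_mut().push(FileReadingTask::ProcessReadEOF(blob_contents));
        }),
        Rc::new(move |error: &'static str| {
            // The read failed: queue an error task instead.
            q_err.borrow_mut().push(FileReadingTask::ProcessReadError(error));
        }),
    );

    println!("{:?}", queue.borrow());
}

Running it prints the queued tasks in order (ProcessRead, ProcessReadData, ProcessReadEOF with the bytes); swapping `contents` for an `Err` value exercises the failure callback instead, mirroring the ProcessReadError path in the real code below.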

components/script/dom/filereader.rs

@@ -4,6 +4,7 @@
 use std::cell::Cell;
 use std::ptr;
+use std::rc::Rc;

 use base64::Engine;
 use dom_struct::dom_struct;
@@ -34,7 +35,7 @@ use crate::dom::event::{Event, EventBubbles, EventCancelable};
 use crate::dom::eventtarget::EventTarget;
 use crate::dom::globalscope::GlobalScope;
 use crate::dom::progressevent::ProgressEvent;
-use crate::realms::enter_realm;
+use crate::realms::{InRealm, enter_realm};
 use crate::script_runtime::{CanGc, JSContext};
 use crate::task::TaskOnce;
@@ -390,18 +391,24 @@ impl FileReaderMethods<crate::DomTypeHolder> for FileReader {
     event_handler!(loadend, GetOnloadend, SetOnloadend);

     // https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
-    fn ReadAsArrayBuffer(&self, blob: &Blob) -> ErrorResult {
-        self.read(FileReaderFunction::ArrayBuffer, blob, None)
+    fn ReadAsArrayBuffer(&self, blob: &Blob, realm: InRealm, can_gc: CanGc) -> ErrorResult {
+        self.read(FileReaderFunction::ArrayBuffer, blob, None, realm, can_gc)
     }

     // https://w3c.github.io/FileAPI/#dfn-readAsDataURL
-    fn ReadAsDataURL(&self, blob: &Blob) -> ErrorResult {
-        self.read(FileReaderFunction::DataUrl, blob, None)
+    fn ReadAsDataURL(&self, blob: &Blob, realm: InRealm, can_gc: CanGc) -> ErrorResult {
+        self.read(FileReaderFunction::DataUrl, blob, None, realm, can_gc)
     }

     // https://w3c.github.io/FileAPI/#dfn-readAsText
-    fn ReadAsText(&self, blob: &Blob, label: Option<DOMString>) -> ErrorResult {
-        self.read(FileReaderFunction::Text, blob, label)
+    fn ReadAsText(
+        &self,
+        blob: &Blob,
+        label: Option<DOMString>,
+        realm: InRealm,
+        can_gc: CanGc,
+    ) -> ErrorResult {
+        self.read(FileReaderFunction::Text, blob, label, realm, can_gc)
     }

     // https://w3c.github.io/FileAPI/#dfn-abort
@@ -474,18 +481,31 @@ impl FileReader {
         function: FileReaderFunction,
         blob: &Blob,
         label: Option<DOMString>,
+        realm: InRealm,
+        can_gc: CanGc,
     ) -> ErrorResult {
-        // Step 1
+        let cx = GlobalScope::get_cx();
+
+        // If frs state is "loading", throw an InvalidStateError DOMException.
         if self.ready_state.get() == FileReaderReadyState::Loading {
             return Err(Error::InvalidState);
         }
-        // Step 2
+
+        // Set frs state to "loading".
         self.change_ready_state(FileReaderReadyState::Loading);
-        // Step 3
+
+        // Set frs result to null.
         *self.result.borrow_mut() = None;

+        // Set frs error to null.
+        // See the note below in the error steps.
+
+        // Let stream be the result of calling get stream on blob.
+        let stream = blob.get_stream(can_gc);
+
+        // Let reader be the result of getting a reader from stream.
+        let reader = stream.and_then(|s| s.acquire_default_reader(can_gc))?;
+
         let type_ = blob.Type();
         let load_data = ReadMetaData::new(String::from(type_), label.map(String::from), function);
@@ -494,35 +514,76 @@ impl FileReader {
         self.generation_id.set(GenerationId(prev_id + 1));
         let gen_id = self.generation_id.get();

-        // Step 10, in parallel, wait on stream promises to resolve and queue tasks.
-
-        // TODO: follow the spec which requires implementing blob `get_stream`,
-        // see https://github.com/servo/servo/issues/25209
-        // Currently bytes are first read "sync", and then the appropriate tasks are queued.
-
-        // Read the blob bytes "sync".
-        let blob_contents = blob.get_bytes().unwrap_or_else(|_| vec![]);
-
-        let filereader = Trusted::new(self);
-        let global = self.global();
-        let task_manager = global.task_manager();
-        let task_source = task_manager.file_reading_task_source();
-
-        // Queue tasks as appropriate.
-        task_source.queue(FileReadingTask::ProcessRead(filereader.clone(), gen_id));
-
-        if !blob_contents.is_empty() {
-            task_source.queue(FileReadingTask::ProcessReadData(filereader.clone(), gen_id));
-        }
-
-        task_source.queue(FileReadingTask::ProcessReadEOF(
-            filereader,
-            gen_id,
-            load_data,
-            blob_contents,
-        ));
+        let filereader_success = DomRoot::from_ref(self);
+        let filereader_error = DomRoot::from_ref(self);
+
+        // In parallel, while true:
+        // Wait for chunkPromise to be fulfilled or rejected.
+        // Note: the spec appears wrong or outdated,
+        // so for now we use the simple `read_all_bytes` call,
+        // which means we cannot fire the progress event at each chunk.
+        // This can be revisited following the discussion at
+        // <https://github.com/w3c/FileAPI/issues/208>
+
+        // Read all bytes from stream with reader.
+        reader.read_all_bytes(
+            cx,
+            &self.global(),
+            Rc::new(move |blob_contents| {
+                let global = filereader_success.global();
+                let task_manager = global.task_manager();
+                let task_source = task_manager.file_reading_task_source();
+
+                // If chunkPromise is fulfilled,
+                // and isFirstChunk is true,
+                // queue a task.
+                // Note: this should be done for the first chunk,
+                // see issue above.
+                task_source.queue(FileReadingTask::ProcessRead(
+                    Trusted::new(&filereader_success.clone()),
+                    gen_id,
+                ));
+
+                // If chunkPromise is fulfilled
+                // with an object whose done property is false
+                // and whose value property is a Uint8Array object.
+                // Note: this should be done for each chunk,
+                // see issue above.
+                if !blob_contents.is_empty() {
+                    task_source.queue(FileReadingTask::ProcessReadData(
+                        Trusted::new(&filereader_success.clone()),
+                        gen_id,
+                    ));
+                }
+
+                // Otherwise,
+                // if chunkPromise is fulfilled with an object whose done property is true,
+                // queue a task.
+                // Note: we are in the success steps of `read_all_bytes`,
+                // so the last chunk has been received.
+                task_source.queue(FileReadingTask::ProcessReadEOF(
+                    Trusted::new(&filereader_success.clone()),
+                    gen_id,
+                    load_data.clone(),
+                    blob_contents.to_vec(),
+                ));
+            }),
+            Rc::new(move |_cx, _error| {
+                let global = filereader_error.global();
+                let task_manager = global.task_manager();
+                let task_source = task_manager.file_reading_task_source();
+
+                // Otherwise, if chunkPromise is rejected with an error error,
+                // queue a task.
+                // Note: not using the error from `read_all_bytes`,
+                // see issue above.
+                task_source.queue(FileReadingTask::ProcessReadError(
+                    Trusted::new(&filereader_error),
+                    gen_id,
+                    DOMErrorName::OperationError,
+                ));
+            }),
+            realm,
+            can_gc,
+        );

         Ok(())
     }

components/script/dom/bindings/codegen/Bindings.conf

@@ -210,7 +210,8 @@ DOMInterfaces = {
 },

 'FileReader': {
-    'canGc': ['Abort'],
+    'canGc': ['Abort', 'ReadAsArrayBuffer', 'ReadAsDataURL', 'ReadAsText'],
+    'inRealms': ['ReadAsArrayBuffer', 'ReadAsDataURL', 'ReadAsText'],
 },

 'FileReaderSync': {