mirror of https://github.com/servo/servo.git
synced 2025-07-21 06:13:40 +01:00

Auto merge of #22134 - ferjm:load_better_blob, r=jdm

Support range requests for blob URLs

- [X] `./mach build -d` does not report any errors
- [X] `./mach test-tidy` does not report any errors
- [X] These changes fix #21467 and fix #22053
- [X] There are tests for these changes.

This commit is contained in: commit d282618baa
9 changed files with 520 additions and 236 deletions
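In broad strokes, the patch parses the request's Range header, resolves it against the length of the data backing the blob, and answers 206 Partial Content with the requested slice, or 416 Range Not Satisfiable when the start lies past the end. A minimal, self-contained model of that resolution (a hypothetical helper, not servo's RangeRequestBounds/RelativePos types; the vectors come from the new WPT test at the bottom of this diff, against a 26-byte blob):

// Hypothetical model of the patch's start-of-range resolution.
fn resolve_start(len: u64, start: Option<u64>, suffix: Option<u64>) -> Result<u64, ()> {
    match (start, suffix) {
        // bytes=N- / bytes=N-M: satisfiable iff N does not pass the end.
        (Some(n), _) if n <= len => Ok(n),
        (Some(_), _) => Err(()), // e.g. bytes=100- on 26 bytes -> 416
        // bytes=-N: the last N bytes, clamped to the blob size.
        (None, Some(n)) => Ok(len - u64::min(len, n)),
        (None, None) => Ok(0),
    }
}

fn main() {
    assert_eq!(resolve_start(26, Some(0), None), Ok(0)); // bytes=0-    -> 206, whole body
    assert_eq!(resolve_start(26, None, Some(10)), Ok(16)); // bytes=-10  -> 206, "qrstuvwxyz"
    assert_eq!(resolve_start(26, None, Some(100)), Ok(0)); // bytes=-100 -> clamped, whole body
    assert_eq!(resolve_start(26, Some(100), None), Err(())); // bytes=100- -> 416
}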
blob_loader.rs (deleted file):
@@ -1,108 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
-
-use crate::filemanager_thread::FileManager;
-use headers_core::HeaderMapExt;
-use headers_ext::{ContentLength, ContentType};
-use http::header::{self, HeaderValue};
-use http::HeaderMap;
-use ipc_channel::ipc;
-use mime::{self, Mime};
-use net_traits::blob_url_store::parse_blob_url;
-use net_traits::filemanager_thread::ReadFileProgress;
-use net_traits::{http_percent_encode, NetworkError};
-use servo_url::ServoUrl;
-
-// TODO: Check on GET
-// https://w3c.github.io/FileAPI/#requestResponseModel
-
-/// https://fetch.spec.whatwg.org/#concept-basic-fetch (partial)
-// TODO: make async.
-pub fn load_blob_sync(
-    url: ServoUrl,
-    filemanager: FileManager,
-) -> Result<(HeaderMap, Vec<u8>), NetworkError> {
-    let (id, origin) = match parse_blob_url(&url) {
-        Ok((id, origin)) => (id, origin),
-        Err(()) => {
-            let e = format!("Invalid blob URL format {:?}", url);
-            return Err(NetworkError::Internal(e));
-        },
-    };
-
-    let (sender, receiver) = ipc::channel().unwrap();
-    let check_url_validity = true;
-    filemanager.read_file(sender, id, check_url_validity, origin);
-
-    let blob_buf = match receiver.recv().unwrap() {
-        Ok(ReadFileProgress::Meta(blob_buf)) => blob_buf,
-        Ok(_) => {
-            return Err(NetworkError::Internal(
-                "Invalid filemanager reply".to_string(),
-            ));
-        },
-        Err(e) => {
-            return Err(NetworkError::Internal(format!("{:?}", e)));
-        },
-    };
-
-    let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime::TEXT_PLAIN);
-    let charset = content_type.get_param(mime::CHARSET);
-
-    let mut headers = HeaderMap::new();
-
-    if let Some(name) = blob_buf.filename {
-        let charset = charset
-            .map(|c| c.as_ref().into())
-            .unwrap_or("us-ascii".to_owned());
-        // TODO(eijebong): Replace this once the typed header is there
-        headers.insert(
-            header::CONTENT_DISPOSITION,
-            HeaderValue::from_bytes(
-                format!(
-                    "inline; {}",
-                    if charset.to_lowercase() == "utf-8" {
-                        format!(
-                            "filename=\"{}\"",
-                            String::from_utf8(name.as_bytes().into()).unwrap()
-                        )
-                    } else {
-                        format!(
-                            "filename*=\"{}\"''{}",
-                            charset,
-                            http_percent_encode(name.as_bytes())
-                        )
-                    }
-                )
-                .as_bytes(),
-            )
-            .unwrap(),
-        );
-    }
-
-    // Basic fetch, Step 4.
-    headers.typed_insert(ContentLength(blob_buf.size as u64));
-    // Basic fetch, Step 5.
-    headers.typed_insert(ContentType::from(content_type.clone()));
-
-    let mut bytes = blob_buf.bytes;
-    loop {
-        match receiver.recv().unwrap() {
-            Ok(ReadFileProgress::Partial(ref mut new_bytes)) => {
-                bytes.append(new_bytes);
-            },
-            Ok(ReadFileProgress::EOF) => {
-                return Ok((headers, bytes));
-            },
-            Ok(_) => {
-                return Err(NetworkError::Internal(
-                    "Invalid filemanager reply".to_string(),
-                ));
-            },
-            Err(e) => {
-                return Err(NetworkError::Internal(format!("{:?}", e)));
-            },
-        }
-    }
-}
fetch/methods.rs:
@@ -2,10 +2,9 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

-use crate::blob_loader::load_blob_sync;
 use crate::data_loader::decode;
 use crate::fetch::cors_cache::CorsCache;
-use crate::filemanager_thread::FileManager;
+use crate::filemanager_thread::{fetch_file_in_chunks, FileManager, FILE_CHUNK_SIZE};
 use crate::http_loader::{determine_request_referrer, http_fetch, HttpState};
 use crate::http_loader::{set_default_accept, set_default_accept_language};
 use crate::subresource_integrity::is_response_integrity_valid;
@@ -19,6 +18,8 @@ use hyper::StatusCode;
 use ipc_channel::ipc::IpcReceiver;
 use mime::{self, Mime};
 use mime_guess::guess_mime_type;
+use net_traits::blob_url_store::{parse_blob_url, BlobURLStoreError};
+use net_traits::filemanager_thread::RelativePos;
 use net_traits::request::{CredentialsMode, Destination, Referrer, Request, RequestMode};
 use net_traits::request::{Origin, ResponseTainting, Window};
 use net_traits::response::{Response, ResponseBody, ResponseType};
@@ -26,21 +27,18 @@ use net_traits::{FetchTaskTarget, NetworkError, ReferrerPolicy, ResourceFetchTim
 use servo_url::ServoUrl;
 use std::borrow::Cow;
 use std::fs::File;
-use std::io::{BufRead, BufReader, Seek, SeekFrom};
+use std::io::{BufReader, Seek, SeekFrom};
 use std::mem;
 use std::ops::Bound;
 use std::str;
 use std::sync::atomic::Ordering;
 use std::sync::{Arc, Mutex};
-use std::thread;

 lazy_static! {
     static ref X_CONTENT_TYPE_OPTIONS: HeaderName =
         HeaderName::from_static("x-content-type-options");
 }

-const FILE_CHUNK_SIZE: usize = 32768; //32 KB
-
 pub type Target<'a> = &'a mut (dyn FetchTaskTarget + Send);

 pub enum Data {
@@ -492,6 +490,74 @@ fn wait_for_response(response: &mut Response, target: Target, done_chan: &mut Do
     }
 }

+/// Range header start and end values.
+pub enum RangeRequestBounds {
+    /// The range bounds are known and set to final values.
+    Final(RelativePos),
+    /// We need extra information to set the range bounds,
+    /// i.e. the buffer or file size.
+    Pending(u64),
+}
+
+impl RangeRequestBounds {
+    pub fn get_final(&self, len: Option<u64>) -> Result<RelativePos, ()> {
+        match self {
+            RangeRequestBounds::Final(pos) => {
+                if let Some(len) = len {
+                    if pos.start <= len as i64 {
+                        return Ok(pos.clone());
+                    }
+                }
+                Err(())
+            },
+            RangeRequestBounds::Pending(offset) => Ok(RelativePos::from_opts(
+                if let Some(len) = len {
+                    Some((len - u64::min(len, *offset)) as i64)
+                } else {
+                    Some(0)
+                },
+                None,
+            )),
+        }
+    }
+}
+
+/// Get the range bounds if the `Range` header is present.
+fn get_range_request_bounds(range: Option<Range>) -> RangeRequestBounds {
+    if let Some(ref range) = range {
+        let (start, end) = match range
+            .iter()
+            .collect::<Vec<(Bound<u64>, Bound<u64>)>>()
+            .first()
+        {
+            Some(&(Bound::Included(start), Bound::Unbounded)) => (start, None),
+            Some(&(Bound::Included(start), Bound::Included(end))) => {
+                // `end` should be greater than or equal to `start`.
+                (start, Some(i64::max(start as i64, end as i64)))
+            },
+            Some(&(Bound::Unbounded, Bound::Included(offset))) => {
+                return RangeRequestBounds::Pending(offset);
+            },
+            _ => (0, None),
+        };
+        RangeRequestBounds::Final(RelativePos::from_opts(Some(start as i64), end))
+    } else {
+        RangeRequestBounds::Final(RelativePos::from_opts(Some(0), None))
+    }
+}
+
+fn partial_content(response: &mut Response) {
+    let reason = "Partial Content".to_owned();
+    response.status = Some((StatusCode::PARTIAL_CONTENT, reason.clone()));
+    response.raw_status = Some((StatusCode::PARTIAL_CONTENT.as_u16(), reason.into()));
+}
+
+fn range_not_satisfiable_error(response: &mut Response) {
+    let reason = "Range Not Satisfiable".to_owned();
+    response.status = Some((StatusCode::RANGE_NOT_SATISFIABLE, reason.clone()));
+    response.raw_status = Some((StatusCode::RANGE_NOT_SATISFIABLE.as_u16(), reason.into()));
+}
+
 /// [Scheme fetch](https://fetch.spec.whatwg.org#scheme-fetch)
 fn scheme_fetch(
     request: &mut Request,
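Restating the classification that get_range_request_bounds performs above as a standalone sketch (hedged: plain enums stand in for servo's RangeRequestBounds/RelativePos; the (Bound, Bound) pairs are what the Range header's iterator yields in the code above):

use std::ops::Bound;

enum Bounds {
    Final(i64, Option<i64>), // (start, end): both bounds known now
    Pending(u64),            // suffix length; the resource size is needed first
}

fn classify(first: Option<(Bound<u64>, Bound<u64>)>) -> Bounds {
    match first {
        // bytes=N-
        Some((Bound::Included(start), Bound::Unbounded)) => Bounds::Final(start as i64, None),
        // bytes=N-M (end is clamped to at least start)
        Some((Bound::Included(start), Bound::Included(end))) => {
            Bounds::Final(start as i64, Some(i64::max(start as i64, end as i64)))
        },
        // bytes=-N: resolved later against the blob/file size
        Some((Bound::Unbounded, Bound::Included(suffix))) => Bounds::Pending(suffix),
        // No (or unsupported) Range header: the whole resource
        _ => Bounds::Final(0, None),
    }
}

fn main() {
    // Range: bytes=-10 -> Pending(10): resolved once the blob size is known.
    match classify(Some((Bound::Unbounded, Bound::Included(10)))) {
        Bounds::Pending(n) => assert_eq!(n, 10),
        _ => unreachable!(),
    }
}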
@@ -537,106 +603,56 @@ fn scheme_fetch(
             }
             if let Ok(file_path) = url.to_file_path() {
                 if let Ok(file) = File::open(file_path.clone()) {
-                    let mime = guess_mime_type(file_path);
+                    // Get range bounds (if any) and try to seek to the requested offset.
+                    // If seeking fails, bail out with a NetworkError.
+                    let file_size = match file.metadata() {
+                        Ok(metadata) => Some(metadata.len()),
+                        Err(_) => None,
+                    };

                     let mut response =
                         Response::new(url, ResourceFetchTiming::new(request.timing_type()));

+                    let range_header = request.headers.typed_get::<Range>();
+                    let is_range_request = range_header.is_some();
+                    let range = match get_range_request_bounds(range_header).get_final(file_size) {
+                        Ok(range) => range,
+                        Err(_) => {
+                            range_not_satisfiable_error(&mut response);
+                            return response;
+                        },
+                    };
+                    let mut reader = BufReader::with_capacity(FILE_CHUNK_SIZE, file);
+                    if reader.seek(SeekFrom::Start(range.start as u64)).is_err() {
+                        return Response::network_error(NetworkError::Internal(
+                            "Unexpected method for file".into(),
+                        ));
+                    }
+
+                    // Set response status to 206 if Range header is present.
+                    // At this point we should have already validated the header.
+                    if is_range_request {
+                        partial_content(&mut response);
+                    }
+
+                    // Set Content-Type header.
+                    let mime = guess_mime_type(file_path);
                     response.headers.typed_insert(ContentType::from(mime));

+                    // Setup channel to receive cross-thread messages about the file fetch
+                    // operation.
                     let (done_sender, done_receiver) = unbounded();
                     *done_chan = Some((done_sender.clone(), done_receiver));
                     *response.body.lock().unwrap() = ResponseBody::Receiving(vec![]);

-                    let res_body = response.body.clone();
-                    let cancellation_listener = context.cancellation_listener.clone();
-                    let (start, end) = if let Some(ref range) = request.headers.typed_get::<Range>()
-                    {
-                        match range
-                            .iter()
-                            .collect::<Vec<(Bound<u64>, Bound<u64>)>>()
-                            .first()
-                        {
-                            Some(&(Bound::Included(start), Bound::Unbounded)) => (start, None),
-                            Some(&(Bound::Included(start), Bound::Included(end))) => {
-                                // `end` should be less or equal to `start`.
-                                (start, Some(u64::max(start, end)))
-                            },
-                            Some(&(Bound::Unbounded, Bound::Included(offset))) => {
-                                if let Ok(metadata) = file.metadata() {
-                                    // `offset` cannot be bigger than the file size.
-                                    (metadata.len() - u64::min(metadata.len(), offset), None)
-                                } else {
-                                    (0, None)
-                                }
-                            },
-                            _ => (0, None),
-                        }
-                    } else {
-                        (0, None)
-                    };
-
-                    thread::Builder::new()
-                        .name("fetch file worker thread".to_string())
-                        .spawn(move || {
-                            let mut reader = BufReader::with_capacity(FILE_CHUNK_SIZE, file);
-                            if reader.seek(SeekFrom::Start(start)).is_err() {
-                                warn!("Fetch - could not seek to {:?}", start);
-                            }
-
-                            loop {
-                                if cancellation_listener.lock().unwrap().cancelled() {
-                                    *res_body.lock().unwrap() = ResponseBody::Done(vec![]);
-                                    let _ = done_sender.send(Data::Cancelled);
-                                    return;
-                                }
-                                let length = {
-                                    let buffer = reader.fill_buf().unwrap().to_vec();
-                                    let mut buffer_len = buffer.len();
-                                    if let ResponseBody::Receiving(ref mut body) =
-                                        *res_body.lock().unwrap()
-                                    {
-                                        let offset = usize::min(
-                                            {
-                                                if let Some(end) = end {
-                                                    let remaining_bytes =
-                                                        end as usize - start as usize - body.len();
-                                                    if remaining_bytes <= FILE_CHUNK_SIZE {
-                                                        // This is the last chunk so we set buffer
-                                                        // len to 0 to break the reading loop.
-                                                        buffer_len = 0;
-                                                        remaining_bytes
-                                                    } else {
-                                                        FILE_CHUNK_SIZE
-                                                    }
-                                                } else {
-                                                    FILE_CHUNK_SIZE
-                                                }
-                                            },
-                                            buffer.len(),
-                                        );
-                                        body.extend_from_slice(&buffer[0..offset]);
-                                        let _ = done_sender.send(Data::Payload(buffer));
-                                    }
-                                    buffer_len
-                                };
-                                if length == 0 {
-                                    let mut body = res_body.lock().unwrap();
-                                    let completed_body = match *body {
-                                        ResponseBody::Receiving(ref mut body) => {
-                                            mem::replace(body, vec![])
-                                        },
-                                        _ => vec![],
-                                    };
-                                    *body = ResponseBody::Done(completed_body);
-                                    let _ = done_sender.send(Data::Done);
-                                    break;
-                                }
-                                reader.consume(length);
-                            }
-                        })
-                        .expect("Failed to create fetch file worker thread");
+                    fetch_file_in_chunks(
+                        done_sender,
+                        reader,
+                        response.body.clone(),
+                        context.cancellation_listener.clone(),
+                        range,
+                    );
                     response
                 } else {
                     Response::network_error(NetworkError::Internal("Opening file failed".into()))
@@ -649,7 +665,7 @@ fn scheme_fetch(
             },

             "blob" => {
-                println!("Loading blob {}", url.as_str());
+                debug!("Loading blob {}", url.as_str());
                 // Step 2.
                 if request.method != Method::GET {
                     return Response::network_error(NetworkError::Internal(
@@ -657,19 +673,51 @@ fn scheme_fetch(
                     ));
                 }

-                match load_blob_sync(url.clone(), context.filemanager.clone()) {
-                    Ok((headers, bytes)) => {
-                        let mut response =
-                            Response::new(url, ResourceFetchTiming::new(request.timing_type()));
-                        response.headers = headers;
-                        *response.body.lock().unwrap() = ResponseBody::Done(bytes);
-                        response
-                    },
-                    Err(e) => {
-                        debug!("Failed to load {}: {:?}", url, e);
-                        Response::network_error(e)
-                    },
-                }
+                let range_header = request.headers.typed_get::<Range>();
+                let is_range_request = range_header.is_some();
+                // We will get a final version of this range once we have
+                // the length of the data backing the blob.
+                let range = get_range_request_bounds(range_header);
+
+                let (id, origin) = match parse_blob_url(&url) {
+                    Ok((id, origin)) => (id, origin),
+                    Err(()) => {
+                        return Response::network_error(NetworkError::Internal(
+                            "Invalid blob url".into(),
+                        ));
+                    },
+                };
+
+                let mut response = Response::new(url, ResourceFetchTiming::new(request.timing_type()));
+                if is_range_request {
+                    partial_content(&mut response);
+                }
+
+                let (done_sender, done_receiver) = unbounded();
+                *done_chan = Some((done_sender.clone(), done_receiver));
+                *response.body.lock().unwrap() = ResponseBody::Receiving(vec![]);
+                let check_url_validity = true;
+                if let Err(err) = context.filemanager.fetch_file(
+                    &done_sender,
+                    context.cancellation_listener.clone(),
+                    id,
+                    check_url_validity,
+                    origin,
+                    &mut response,
+                    range,
+                ) {
+                    let _ = done_sender.send(Data::Done);
+                    let err = match err {
+                        BlobURLStoreError::InvalidRange => {
+                            range_not_satisfiable_error(&mut response);
+                            return response;
+                        },
+                        _ => format!("{:?}", err),
+                    };
+                    return Response::network_error(NetworkError::Internal(err));
+                };
+
+                response
             },

             "ftp" => {
filemanager_thread.rs:
@@ -2,26 +2,37 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

+use crate::fetch::methods::{CancellationListener, Data, RangeRequestBounds};
+use crossbeam_channel::Sender;
 use embedder_traits::{EmbedderMsg, EmbedderProxy, FilterPattern};
+use headers_ext::{ContentLength, ContentType, HeaderMap, HeaderMapExt};
+use http::header::{self, HeaderValue};
 use ipc_channel::ipc::{self, IpcSender};
+use mime::{self, Mime};
 use mime_guess::guess_mime_type_opt;
 use net_traits::blob_url_store::{BlobBuf, BlobURLStoreError};
 use net_traits::filemanager_thread::{FileManagerResult, FileManagerThreadMsg, FileOrigin};
 use net_traits::filemanager_thread::{
     FileManagerThreadError, ReadFileProgress, RelativePos, SelectedFile,
 };
+use net_traits::http_percent_encode;
+use net_traits::response::{Response, ResponseBody};
+use servo_arc::Arc as ServoArc;
 use servo_config::prefs::PREFS;
 use std::collections::HashMap;
 use std::fs::File;
-use std::io::{Read, Seek, SeekFrom};
+use std::io::{BufRead, BufReader, Read, Seek, SeekFrom};
+use std::mem;
 use std::ops::Index;
 use std::path::{Path, PathBuf};
 use std::sync::atomic::{self, AtomicBool, AtomicUsize, Ordering};
-use std::sync::{Arc, RwLock};
+use std::sync::{Arc, Mutex, RwLock};
 use std::thread;
 use url::Url;
 use uuid::Uuid;

+pub const FILE_CHUNK_SIZE: usize = 32768; //32 KB
+
 /// FileManagerStore's entry
 struct FileStoreEntry {
     /// Origin of the entry's "creator"
@@ -91,6 +102,30 @@ impl FileManager {
             .expect("Thread spawning failed");
     }

+    // Read a file for the Fetch implementation.
+    // It gets the required headers synchronously and reads the actual content
+    // in a separate thread.
+    pub fn fetch_file(
+        &self,
+        done_sender: &Sender<Data>,
+        cancellation_listener: Arc<Mutex<CancellationListener>>,
+        id: Uuid,
+        check_url_validity: bool,
+        origin: FileOrigin,
+        response: &mut Response,
+        range: RangeRequestBounds,
+    ) -> Result<(), BlobURLStoreError> {
+        self.store.fetch_blob_buf(
+            done_sender,
+            cancellation_listener,
+            &id,
+            &origin,
+            range,
+            check_url_validity,
+            response,
+        )
+    }
+
     pub fn promote_memory(
         &self,
         blob_buf: BlobBuf,
@@ -452,7 +487,7 @@ impl FileManagerStore {
             None => "".to_string(),
         };

-        chunked_read(sender, &mut file, range.len(), opt_filename, type_string);
+        read_file_in_chunks(sender, &mut file, range.len(), opt_filename, type_string);
         Ok(())
     } else {
         Err(BlobURLStoreError::InvalidEntry)
@@ -489,6 +524,109 @@ impl FileManagerStore {
         )
     }

+    fn fetch_blob_buf(
+        &self,
+        done_sender: &Sender<Data>,
+        cancellation_listener: Arc<Mutex<CancellationListener>>,
+        id: &Uuid,
+        origin_in: &FileOrigin,
+        range: RangeRequestBounds,
+        check_url_validity: bool,
+        response: &mut Response,
+    ) -> Result<(), BlobURLStoreError> {
+        let file_impl = self.get_impl(id, origin_in, check_url_validity)?;
+        match file_impl {
+            FileImpl::Memory(buf) => {
+                let range = match range.get_final(Some(buf.size)) {
+                    Ok(range) => range,
+                    Err(_) => {
+                        return Err(BlobURLStoreError::InvalidRange);
+                    },
+                };
+
+                let range = range.to_abs_range(buf.size as usize);
+                let len = range.len() as u64;
+
+                set_headers(
+                    &mut response.headers,
+                    len,
+                    buf.type_string.parse().unwrap_or(mime::TEXT_PLAIN),
+                    /* filename */ None,
+                );
+
+                let mut bytes = vec![];
+                bytes.extend_from_slice(buf.bytes.index(range));
+
+                let _ = done_sender.send(Data::Payload(bytes));
+                let _ = done_sender.send(Data::Done);
+
+                Ok(())
+            },
+            FileImpl::MetaDataOnly(metadata) => {
+                /* XXX: Snapshot state check (optional) https://w3c.github.io/FileAPI/#snapshot-state.
+                   Concretely, here we create another file, and this file might not
+                   have the same underlying file state (meta-info plus content) as at
+                   the time create_entry was called.
+                */
+
+                let file = File::open(&metadata.path)
+                    .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
+
+                let range = match range.get_final(Some(metadata.size)) {
+                    Ok(range) => range,
+                    Err(_) => {
+                        return Err(BlobURLStoreError::InvalidRange);
+                    },
+                };
+
+                let mut reader = BufReader::with_capacity(FILE_CHUNK_SIZE, file);
+                if reader.seek(SeekFrom::Start(range.start as u64)).is_err() {
+                    return Err(BlobURLStoreError::External(
+                        "Unexpected method for blob".into(),
+                    ));
+                }
+
+                let filename = metadata
+                    .path
+                    .file_name()
+                    .and_then(|osstr| osstr.to_str())
+                    .map(|s| s.to_string());
+
+                set_headers(
+                    &mut response.headers,
+                    metadata.size,
+                    guess_mime_type_opt(metadata.path).unwrap_or(mime::TEXT_PLAIN),
+                    filename,
+                );
+
+                fetch_file_in_chunks(
+                    done_sender.clone(),
+                    reader,
+                    response.body.clone(),
+                    cancellation_listener,
+                    range,
+                );
+
+                Ok(())
+            },
+            FileImpl::Sliced(parent_id, inner_rel_pos) => {
+                // Next time we don't need to check validity since
+                // we have already done that for requesting URL if necessary.
+                return self.fetch_blob_buf(
+                    done_sender,
+                    cancellation_listener,
+                    &parent_id,
+                    origin_in,
+                    RangeRequestBounds::Final(
+                        RelativePos::full_range().slice_inner(&inner_rel_pos),
+                    ),
+                    false,
+                    response,
+                );
+            },
+        }
+    }
+
     fn dec_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
         let (do_remove, opt_parent_id) = match self.entries.read().unwrap().get(id) {
             Some(entry) => {
@@ -609,9 +747,7 @@ fn select_files_pref_enabled() -> bool {
         .unwrap_or(false)
 }

-const CHUNK_SIZE: usize = 8192;
-
-fn chunked_read(
+fn read_file_in_chunks(
     sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
     file: &mut File,
     size: usize,
@@ -619,7 +755,7 @@ fn chunked_read(
     type_string: String,
 ) {
     // First chunk
-    let mut buf = vec![0; CHUNK_SIZE];
+    let mut buf = vec![0; FILE_CHUNK_SIZE];
     match file.read(&mut buf) {
         Ok(n) => {
             buf.truncate(n);
@@ -639,7 +775,7 @@ fn chunked_read(

     // Send the remaining chunks
     loop {
-        let mut buf = vec![0; CHUNK_SIZE];
+        let mut buf = vec![0; FILE_CHUNK_SIZE];
         match file.read(&mut buf) {
             Ok(0) => {
                 let _ = sender.send(Ok(ReadFileProgress::EOF));
@@ -656,3 +792,104 @@ fn chunked_read(
             }
         }
     }
 }
+
+pub fn fetch_file_in_chunks(
+    done_sender: Sender<Data>,
+    mut reader: BufReader<File>,
+    res_body: ServoArc<Mutex<ResponseBody>>,
+    cancellation_listener: Arc<Mutex<CancellationListener>>,
+    range: RelativePos,
+) {
+    thread::Builder::new()
+        .name("fetch file worker thread".to_string())
+        .spawn(move || {
+            loop {
+                if cancellation_listener.lock().unwrap().cancelled() {
+                    *res_body.lock().unwrap() = ResponseBody::Done(vec![]);
+                    let _ = done_sender.send(Data::Cancelled);
+                    return;
+                }
+                let length = {
+                    let buffer = reader.fill_buf().unwrap().to_vec();
+                    let mut buffer_len = buffer.len();
+                    if let ResponseBody::Receiving(ref mut body) = *res_body.lock().unwrap() {
+                        let offset = usize::min(
+                            {
+                                if let Some(end) = range.end {
+                                    // HTTP Range requests are specified with closed ranges,
+                                    // while Rust uses half-open ranges. We add +1 here so
+                                    // we don't skip the last requested byte.
+                                    let remaining_bytes =
+                                        end as usize - range.start as usize - body.len() + 1;
+                                    if remaining_bytes <= FILE_CHUNK_SIZE {
+                                        // This is the last chunk so we set buffer
+                                        // len to 0 to break the reading loop.
+                                        buffer_len = 0;
+                                        remaining_bytes
+                                    } else {
+                                        FILE_CHUNK_SIZE
+                                    }
+                                } else {
+                                    FILE_CHUNK_SIZE
+                                }
+                            },
+                            buffer.len(),
+                        );
+                        let chunk = &buffer[0..offset];
+                        body.extend_from_slice(chunk);
+                        let _ = done_sender.send(Data::Payload(chunk.to_vec()));
+                    }
+                    buffer_len
+                };
+                if length == 0 {
+                    let mut body = res_body.lock().unwrap();
+                    let completed_body = match *body {
+                        ResponseBody::Receiving(ref mut body) => mem::replace(body, vec![]),
+                        _ => vec![],
+                    };
+                    *body = ResponseBody::Done(completed_body);
+                    let _ = done_sender.send(Data::Done);
+                    break;
+                }
+                reader.consume(length);
+            }
+        })
+        .expect("Failed to create fetch file worker thread");
+}
+
+fn set_headers(headers: &mut HeaderMap, content_length: u64, mime: Mime, filename: Option<String>) {
+    headers.typed_insert(ContentLength(content_length));
+    headers.typed_insert(ContentType::from(mime.clone()));
+    let name = match filename {
+        Some(name) => name,
+        None => return,
+    };
+    let charset = mime.get_param(mime::CHARSET);
+    let charset = charset
+        .map(|c| c.as_ref().into())
+        .unwrap_or("us-ascii".to_owned());
+    // TODO(eijebong): Replace this once the typed header is there
+    // https://github.com/hyperium/headers/issues/8
+    headers.insert(
+        header::CONTENT_DISPOSITION,
+        HeaderValue::from_bytes(
+            format!(
+                "inline; {}",
+                if charset.to_lowercase() == "utf-8" {
+                    format!(
+                        "filename=\"{}\"",
+                        String::from_utf8(name.as_bytes().into()).unwrap()
+                    )
+                } else {
+                    format!(
+                        "filename*=\"{}\"''{}",
+                        charset,
+                        http_percent_encode(name.as_bytes())
+                    )
+                }
+            )
+            .as_bytes(),
+        )
+        .unwrap(),
+    );
+}
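A worked example of the remaining-bytes arithmetic in fetch_file_in_chunks above (values assumed for illustration): for Range: bytes=0-9 the parsed bounds give range.start = 0 and range.end = Some(9), and on the first pass body.len() == 0, so

    remaining_bytes = 9 - 0 - 0 + 1 = 10

i.e. the ten bytes at positions 0 through 9 inclusive; without the +1, the closed HTTP range would lose its last byte.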
lib.rs (net crate):
@@ -17,7 +17,6 @@ extern crate profile_traits;
 #[macro_use]
 extern crate serde;

-mod blob_loader;
 pub mod connector;
 pub mod cookie;
 pub mod cookie_storage;
fetch unit tests:
@@ -29,14 +29,15 @@ use mime::{self, Mime};
 use msg::constellation_msg::TEST_PIPELINE_ID;
 use net::connector::create_ssl_connector_builder;
 use net::fetch::cors_cache::CorsCache;
-use net::fetch::methods::{CancellationListener, FetchContext};
+use net::fetch::methods::{self, CancellationListener, FetchContext};
 use net::filemanager_thread::FileManager;
 use net::hsts::HstsEntry;
 use net::test::HttpState;
 use net_traits::request::{Destination, Origin, RedirectMode, Referrer, Request, RequestMode};
 use net_traits::response::{CacheState, Response, ResponseBody, ResponseType};
 use net_traits::{
-    IncludeSubdomains, NetworkError, ReferrerPolicy, ResourceFetchTiming, ResourceTimingType,
+    FetchTaskTarget, IncludeSubdomains, NetworkError, ReferrerPolicy, ResourceFetchTiming,
+    ResourceTimingType,
 };
 use servo_url::{ImmutableOrigin, ServoUrl};
 use std::fs::File;
@@ -127,7 +128,27 @@ fn test_fetch_blob() {
     use ipc_channel::ipc;
     use net_traits::blob_url_store::BlobBuf;

-    let mut context = new_fetch_context(None, None);
+    struct FetchResponseCollector {
+        sender: Sender<Response>,
+        buffer: Vec<u8>,
+        expected: Vec<u8>,
+    }
+
+    impl FetchTaskTarget for FetchResponseCollector {
+        fn process_request_body(&mut self, _: &Request) {}
+        fn process_request_eof(&mut self, _: &Request) {}
+        fn process_response(&mut self, _: &Response) {}
+        fn process_response_chunk(&mut self, chunk: Vec<u8>) {
+            self.buffer.extend_from_slice(chunk.as_slice());
+        }
+        /// Fired when the response is fully fetched
+        fn process_response_eof(&mut self, response: &Response) {
+            assert_eq!(self.buffer, self.expected);
+            let _ = self.sender.send(response.clone());
+        }
+    }
+
+    let context = new_fetch_context(None, None);

     let bytes = b"content";
     let blob_buf = BlobBuf {
@@ -147,7 +168,18 @@ fn test_fetch_blob() {
     let url = ServoUrl::parse(&format!("blob:{}{}", origin.as_str(), id.to_simple())).unwrap();

     let mut request = Request::new(url, Some(Origin::Origin(origin.origin())), None);
-    let fetch_response = fetch_with_context(&mut request, &mut context);
+
+    let (sender, receiver) = unbounded();
+
+    let mut target = FetchResponseCollector {
+        sender,
+        buffer: vec![],
+        expected: bytes.to_vec(),
+    };
+
+    methods::fetch(&mut request, &mut target, &context);
+
+    let fetch_response = receiver.recv().unwrap();

     assert!(!fetch_response.is_network_error());
@@ -165,7 +197,7 @@ fn test_fetch_blob() {

     assert_eq!(
         *fetch_response.body.lock().unwrap(),
-        ResponseBody::Done(bytes.to_vec())
+        ResponseBody::Receiving(vec![])
     );
 }
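Note on the assertion change at the end of test_fetch_blob: the blob body is now delivered as streamed chunks, so the test validates the payload in the collector's process_response_chunk/process_response_eof callbacks instead of inspecting a completed body, and the body field it observes at the end matches ResponseBody::Receiving(vec![]) rather than ResponseBody::Done(bytes).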
net_traits/blob_url_store.rs:
@@ -9,7 +9,7 @@ use url::Url;
 use uuid::Uuid;

 /// Errors returned to Blob URL Store request
-#[derive(Clone, Debug, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
 pub enum BlobURLStoreError {
     /// Invalid File UUID
     InvalidFileID,
@@ -17,6 +17,8 @@ pub enum BlobURLStoreError {
     InvalidOrigin,
     /// Invalid entry content
     InvalidEntry,
+    /// Invalid range
+    InvalidRange,
     /// External error, from like file system, I/O etc.
     External(String),
 }
WPT MANIFEST.json:
@@ -14270,6 +14270,12 @@
       {}
      ]
     ],
+    "mozilla/range_request_blob_url.html": [
+     [
+      "/_mozilla/mozilla/range_request_blob_url.html",
+      {}
+     ]
+    ],
     "mozilla/range_request_file_url.html": [
     [
      "/_mozilla/mozilla/range_request_file_url.html",
@@ -27239,8 +27245,12 @@
    "8de03455bcb0d18258f76af20f58c14868fe1c21",
    "testharness"
   ],
+  "mozilla/range_request_blob_url.html": [
+   "075397620e989dafc814c0ed2bca46bd476bccf6",
+   "testharness"
+  ],
   "mozilla/range_request_file_url.html": [
-   "65fe13fe93d97cebc2846ff7d7deab3eb84c1787",
+   "4fd4ddc8b1a9959e90b243795267c220d6a05f5e",
    "testharness"
   ],
   "mozilla/referrer-policy/OWNERS": [
tests/wpt/mozilla/tests/mozilla/range_request_blob_url.html (new file, 60 lines):
@@ -0,0 +1,60 @@
+<html>
+<head>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+<script>
+[{
+  range: "bytes=0-",
+  status: 206,
+  expected: "abcdefghijklmnopqrstuvwxyz"
+}, {
+  range: "bytes=0-9",
+  status: 206,
+  expected: "abcdefghi"
+}, {
+  range: "bytes=1-9",
+  status: 206,
+  expected: "bcdefghi"
+}, {
+  range: "bytes=-10",
+  status: 206,
+  expected: "qrstuvwxyz"
+}, {
+  range: "bytes=0-100",
+  status: 206,
+  expected: "abcdefghijklmnopqrstuvwxyz"
+}, {
+  range: "bytes=100-",
+  status: 416,
+  expected: ""
+}, {
+  range: "bytes=-100",
+  status: 206,
+  expected: "abcdefghijklmnopqrstuvwxyz"
+}].forEach(test => {
+  promise_test(function() {
+    const abc = "abcdefghijklmnopqrstuvwxyz";
+    const blob = new Blob([abc], { "type": "text/plain" });
+    return fetch(URL.createObjectURL(blob), {
+      headers: {
+        "Range": test.range
+      }
+    })
+    .then(response => {
+      assert_equals(response.status, test.status);
+      if (response.status != 206) {
+        return "";
+      }
+      return response.text();
+    })
+    .then(response => {
+      assert_equals(response, test.expected);
+    });
+  });
+});
+</script>
+</head>
+</body>
+</html>
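One detail worth noticing in the expectations above: bytes=0-9 is expected to yield "abcdefghi", nine bytes, so the in-memory blob path appears to treat the end bound as exclusive, whereas RFC 7233 byte ranges are inclusive (bytes=0-9 names ten bytes); the +1 correction discussed earlier applies only to the chunked file-reading path in fetch_file_in_chunks.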
tests/wpt/mozilla/tests/mozilla/range_request_file_url.html:
@@ -3,6 +3,10 @@
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 <script>
+
+// FIXME(ferjm) https://github.com/servo/servo/issues/22269
+// This is not using file:// urls.
+
 [{
     file: "resources/range_small.txt",
     range: "bytes=0-",