Upgrade Hyper
This commit is contained in: parent 5df705a41f, commit a48a111cee
42 changed files with 872 additions and 891 deletions
Cargo.lock (generated): 859 lines changed. File diff suppressed because it is too large.
@@ -22,7 +22,8 @@ embedder_traits = { path = "../embedder_traits" }
 euclid = "0.20"
 gfx = { path = "../gfx" }
 gfx_traits = { path = "../gfx_traits" }
-http = "0.1"
+http = "0.2"
+headers = "0.3"
 ipc-channel = "0.14"
 keyboard-types = "0.5"
 layout_traits = { path = "../layout_traits" }
@ -14,9 +14,8 @@ path = "lib.rs"
|
||||||
crossbeam-channel = "0.4"
|
crossbeam-channel = "0.4"
|
||||||
devtools_traits = { path = "../devtools_traits" }
|
devtools_traits = { path = "../devtools_traits" }
|
||||||
embedder_traits = { path = "../embedder_traits" }
|
embedder_traits = { path = "../embedder_traits" }
|
||||||
headers = "0.2"
|
headers = "0.3"
|
||||||
http = "0.1"
|
http = "0.2"
|
||||||
hyper = "0.12"
|
|
||||||
ipc-channel = "0.14"
|
ipc-channel = "0.14"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
msg = { path = "../msg" }
|
msg = { path = "../msg" }
|
||||||
|
|
|
@@ -12,8 +12,7 @@ use crate::StreamId;
 use devtools_traits::HttpRequest as DevtoolsHttpRequest;
 use devtools_traits::HttpResponse as DevtoolsHttpResponse;
 use headers::{ContentType, Cookie, HeaderMapExt};
-use http::{header, HeaderMap};
-use hyper::{Method, StatusCode};
+use http::{header, HeaderMap, Method, StatusCode};
 use serde_json::{Map, Value};
 use std::net::TcpStream;
 use time::Tm;
@@ -343,6 +342,7 @@ impl NetworkEventActor {
 
     pub fn add_request(&mut self, request: DevtoolsHttpRequest) {
         self.request.url = request.url.as_str().to_owned();
+
         self.request.method = request.method.clone();
         self.request.headers = request.headers.clone();
         self.request.body = request.body;
@ -12,7 +12,8 @@ path = "lib.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bitflags = "1.0"
|
bitflags = "1.0"
|
||||||
http = "0.1"
|
headers = "0.3"
|
||||||
|
http = "0.2"
|
||||||
ipc-channel = "0.14"
|
ipc-channel = "0.14"
|
||||||
malloc_size_of = { path = "../malloc_size_of" }
|
malloc_size_of = { path = "../malloc_size_of" }
|
||||||
malloc_size_of_derive = "0.1"
|
malloc_size_of_derive = "0.1"
|
||||||
|
|
|
@@ -18,8 +18,8 @@ extern crate malloc_size_of_derive;
 #[macro_use]
 extern crate serde;
 
-use http::method::Method;
 use http::HeaderMap;
+use http::Method;
 use ipc_channel::ipc::IpcSender;
 use msg::constellation_msg::{BrowsingContextId, PipelineId};
 use servo_url::ServoUrl;
@@ -13,7 +13,7 @@ servo = [
     "accountable-refcell",
     "content-security-policy",
     "crossbeam-channel",
-    "hyper",
+    "http",
     "hyper_serde",
     "keyboard-types",
     "serde",
@ -34,8 +34,8 @@ crossbeam-channel = { version = "0.4", optional = true }
|
||||||
cssparser = "0.29"
|
cssparser = "0.29"
|
||||||
euclid = "0.20"
|
euclid = "0.20"
|
||||||
hashglobe = { path = "../hashglobe" }
|
hashglobe = { path = "../hashglobe" }
|
||||||
hyper = { version = "0.12", optional = true }
|
http = { version = "0.2", optional = true }
|
||||||
hyper_serde = { version = "0.11", optional = true }
|
hyper_serde = { version = "0.12", optional = true }
|
||||||
keyboard-types = { version = "0.5", optional = true }
|
keyboard-types = { version = "0.5", optional = true }
|
||||||
selectors = { path = "../selectors" }
|
selectors = { path = "../selectors" }
|
||||||
serde = { version = "1.0.27", optional = true }
|
serde = { version = "1.0.27", optional = true }
|
||||||
|
@ -46,7 +46,7 @@ smallvec = "1.0"
|
||||||
string_cache = { version = "0.8", optional = true }
|
string_cache = { version = "0.8", optional = true }
|
||||||
thin-slice = "0.1.0"
|
thin-slice = "0.1.0"
|
||||||
time = { version = "0.1.41", optional = true }
|
time = { version = "0.1.41", optional = true }
|
||||||
tokio = "0.2"
|
tokio = "1"
|
||||||
url = { version = "2.0", optional = true }
|
url = { version = "2.0", optional = true }
|
||||||
uuid = { version = "0.8", features = ["v4"], optional = true }
|
uuid = { version = "0.8", features = ["v4"], optional = true }
|
||||||
void = "1.0.2"
|
void = "1.0.2"
|
||||||
|
|
|
@@ -57,7 +57,7 @@ extern crate cssparser;
 extern crate euclid;
 extern crate hashglobe;
 #[cfg(feature = "servo")]
-extern crate hyper;
+extern crate http;
 #[cfg(feature = "servo")]
 extern crate hyper_serde;
 #[cfg(feature = "servo")]
@ -957,7 +957,7 @@ impl<T> MallocSizeOf for tokio::sync::mpsc::UnboundedSender<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "servo")]
|
#[cfg(feature = "servo")]
|
||||||
impl MallocSizeOf for hyper::StatusCode {
|
impl MallocSizeOf for http::StatusCode {
|
||||||
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
|
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
|
||||||
0
|
0
|
||||||
}
|
}
|
||||||
|
|
|
@ -19,7 +19,7 @@ async-recursion = "0.3.2"
|
||||||
async-tungstenite = { version = "0.9", features = ["tokio-openssl"] }
|
async-tungstenite = { version = "0.9", features = ["tokio-openssl"] }
|
||||||
base64 = "0.10.1"
|
base64 = "0.10.1"
|
||||||
brotli = "3"
|
brotli = "3"
|
||||||
bytes = "0.4"
|
bytes = "1"
|
||||||
content-security-policy = { version = "0.4.0", features = ["serde"] }
|
content-security-policy = { version = "0.4.0", features = ["serde"] }
|
||||||
cookie_rs = { package = "cookie", version = "0.11" }
|
cookie_rs = { package = "cookie", version = "0.11" }
|
||||||
crossbeam-channel = "0.4"
|
crossbeam-channel = "0.4"
|
||||||
|
@ -27,14 +27,13 @@ data-url = "0.1.0"
|
||||||
devtools_traits = { path = "../devtools_traits" }
|
devtools_traits = { path = "../devtools_traits" }
|
||||||
embedder_traits = { path = "../embedder_traits" }
|
embedder_traits = { path = "../embedder_traits" }
|
||||||
flate2 = "1"
|
flate2 = "1"
|
||||||
futures = "0.1"
|
futures = { version = "0.3", package = "futures" }
|
||||||
futures03 = { version = "0.3", package = "futures" }
|
futures-util = { version = "0.3" }
|
||||||
futures-util = { version = "0.3", features = ["compat"] }
|
headers = "0.3"
|
||||||
headers = "0.2"
|
http = "0.2"
|
||||||
http = "0.1"
|
hyper = { version = "0.14", features = ["client", "http1", "http2", "tcp", "stream"] }
|
||||||
hyper = "0.12"
|
hyper-openssl = "0.9.1"
|
||||||
hyper-openssl = "0.7"
|
hyper_serde = "0.12"
|
||||||
hyper_serde = "0.11"
|
|
||||||
immeta = "0.4"
|
immeta = "0.4"
|
||||||
ipc-channel = "0.14"
|
ipc-channel = "0.14"
|
||||||
lazy_static = "1"
|
lazy_static = "1"
|
||||||
|
@ -59,19 +58,21 @@ servo_arc = { path = "../servo_arc" }
|
||||||
servo_config = { path = "../config" }
|
servo_config = { path = "../config" }
|
||||||
servo_url = { path = "../url" }
|
servo_url = { path = "../url" }
|
||||||
time = "0.1.41"
|
time = "0.1.41"
|
||||||
tokio = "0.1"
|
tokio = { version = "1", package = "tokio", features = ["sync", "macros", "rt-multi-thread"] }
|
||||||
tokio2 = { version = "0.2", package = "tokio", features = ["sync", "macros", "rt-threaded", "tcp"] }
|
tokio2 = { version = "0.2", package = "tokio", features = ["sync", "macros", "rt-threaded", "tcp"] }
|
||||||
tokio-compat = "0.1"
|
tokio-stream = "0.1"
|
||||||
tungstenite = "0.11"
|
tungstenite = "0.11"
|
||||||
url = "2.0"
|
url = "2.0"
|
||||||
uuid = { version = "0.8", features = ["v4"] }
|
uuid = { version = "0.8", features = ["v4"] }
|
||||||
webrender_api = { git = "https://github.com/servo/webrender" }
|
webrender_api = { git = "https://github.com/servo/webrender" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
futures = "0.1"
|
futures = {version = "0.3", features = ["compat"]}
|
||||||
std_test_override = { path = "../std_test_override" }
|
std_test_override = { path = "../std_test_override" }
|
||||||
tokio-openssl = "0.3"
|
tokio-openssl = "0.6"
|
||||||
tokio-test = "0.2"
|
tokio-test = "0.4"
|
||||||
|
tokio-stream = { version = "0.1", features = ["net"] }
|
||||||
|
hyper = { version = "0.14", features = ["full"] }
|
||||||
|
|
||||||
[[test]]
|
[[test]]
|
||||||
name = "main"
|
name = "main"
|
||||||
|
|
|
@@ -3,10 +3,12 @@
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
 use crate::hosts::replace_host;
-use hyper::client::connect::{Connect, Destination};
+use crate::http_loader::HANDLE;
+use futures::{task::Context, task::Poll, Future};
+use http::uri::{Authority, Uri as Destination};
 use hyper::client::HttpConnector as HyperHttpConnector;
-use hyper::rt::Future;
-use hyper::{Body, Client};
+use hyper::rt::Executor;
+use hyper::{service::Service, Body, Client};
 use hyper_openssl::HttpsConnector;
 use openssl::ex_data::Index;
 use openssl::ssl::{
@@ -15,7 +17,6 @@ use openssl::ssl::{
 use openssl::x509::{self, X509StoreContext};
 use std::collections::hash_map::{Entry, HashMap};
 use std::sync::{Arc, Mutex};
-use tokio::prelude::future::Executor;
 
 pub const BUF_SIZE: usize = 32768;
 pub const ALPN_H2_H1: &'static [u8] = b"\x02h2\x08http/1.1";
@@ -67,30 +68,53 @@ impl ConnectionCerts {
     }
 }
 
+#[derive(Clone)]
 pub struct HttpConnector {
     inner: HyperHttpConnector,
 }
 
 impl HttpConnector {
     fn new() -> HttpConnector {
-        let mut inner = HyperHttpConnector::new(4);
+        let mut inner = HyperHttpConnector::new();
         inner.enforce_http(false);
         inner.set_happy_eyeballs_timeout(None);
         HttpConnector { inner }
     }
 }
 
-impl Connect for HttpConnector {
-    type Transport = <HyperHttpConnector as Connect>::Transport;
-    type Error = <HyperHttpConnector as Connect>::Error;
-    type Future = <HyperHttpConnector as Connect>::Future;
+impl Service<Destination> for HttpConnector {
+    type Response = <HyperHttpConnector as Service<Destination>>::Response;
+    type Error = <HyperHttpConnector as Service<Destination>>::Error;
+    type Future = <HyperHttpConnector as Service<Destination>>::Future;
 
-    fn connect(&self, dest: Destination) -> Self::Future {
+    fn call(&mut self, dest: Destination) -> Self::Future {
         // Perform host replacement when making the actual TCP connection.
         let mut new_dest = dest.clone();
-        let addr = replace_host(dest.host());
-        new_dest.set_host(&*addr).unwrap();
-        self.inner.connect(new_dest)
+        let mut parts = dest.into_parts();
+
+        if let Some(auth) = parts.authority {
+            let host = auth.host();
+            let host = replace_host(host);
+
+            let authority = if let Some(port) = auth.port() {
+                format!("{}:{}", host, port.as_str())
+            } else {
+                format!("{}", &*host)
+            };
+
+            if let Ok(authority) = Authority::from_maybe_shared(authority) {
+                parts.authority = Some(authority);
+                if let Ok(dest) = Destination::from_parts(parts) {
+                    new_dest = dest
+                }
+            }
+        }
+
+        self.inner.call(new_dest)
     }
+
+    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
+        Ok(()).into()
+    }
 }
 
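Note on the hunk above: in hyper 0.14 a connector is no longer a `Connect` implementor but a tower-style `Service` over the destination URI, which is why the hunk swaps `connect` for `call` and adds `poll_ready`. The following is a minimal sketch of that shape (not Servo's code; the wrapper name and the logging are illustrative, and it assumes hyper 0.14 with the "client" and "tcp" features):

    use hyper::client::HttpConnector;
    use hyper::service::Service;
    use hyper::Uri;
    use std::task::{Context, Poll};

    #[derive(Clone)]
    struct LoggingConnector {
        inner: HttpConnector,
    }

    impl Service<Uri> for LoggingConnector {
        type Response = <HttpConnector as Service<Uri>>::Response;
        type Error = <HttpConnector as Service<Uri>>::Error;
        type Future = <HttpConnector as Service<Uri>>::Future;

        fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
            // Delegate readiness to the wrapped connector.
            self.inner.poll_ready(cx)
        }

        fn call(&mut self, dst: Uri) -> Self::Future {
            // Inspect or rewrite the destination here, then delegate.
            println!("connecting to {}", dst);
            self.inner.call(dst)
        }
    }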
@@ -209,18 +233,28 @@ pub fn create_tls_config(
     cfg
 }
 
-pub fn create_http_client<E>(tls_config: TlsConfig, executor: E) -> Client<Connector, Body>
+struct TokioExecutor {}
+
+impl<F> Executor<F> for TokioExecutor
 where
-    E: Executor<Box<dyn Future<Error = (), Item = ()> + Send + 'static>> + Sync + Send + 'static,
+    F: Future<Output = ()> + 'static + std::marker::Send,
 {
+    fn execute(&self, fut: F) {
+        HANDLE.lock().unwrap().as_ref().unwrap().spawn(fut);
+    }
+}
+
+pub fn create_http_client(tls_config: TlsConfig) -> Client<Connector, Body> {
     let mut connector = HttpsConnector::with_connector(HttpConnector::new(), tls_config).unwrap();
     connector.set_callback(|configuration, destination| {
-        configuration.set_ex_data(*HOST_INDEX, Host(destination.host().to_owned()));
+        if let Some(host) = destination.host() {
+            configuration.set_ex_data(*HOST_INDEX, Host(host.to_owned()));
+        }
         Ok(())
     });
 
     Client::builder()
         .http1_title_case_headers(true)
-        .executor(executor)
+        .executor(TokioExecutor {})
         .build(connector)
 }
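The `TokioExecutor` introduced above relies on hyper 0.14's `hyper::rt::Executor` trait, which replaces the futures-0.1 executor bound in the old signature. A minimal sketch of the same idea, assuming hyper 0.14 plus tokio 1 (the spawner type and field are illustrative):

    use hyper::rt::Executor;
    use std::future::Future;

    #[derive(Clone)]
    struct TokioSpawner(tokio::runtime::Handle);

    impl<F> Executor<F> for TokioSpawner
    where
        F: Future<Output = ()> + Send + 'static,
    {
        fn execute(&self, fut: F) {
            // hyper hands its background connection tasks here; spawn them
            // onto an existing tokio runtime via its handle.
            self.0.spawn(fut);
        }
    }

Usage would then look like `Client::builder().executor(TokioSpawner(handle)).build(connector)`, matching the `.executor(TokioExecutor {})` call in the hunk.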
@@ -5,7 +5,7 @@
 //! Implementation of cookie creation and matching as specified by
 //! http://tools.ietf.org/html/rfc6265
 
-use hyper_serde::{self, Serde};
+use hyper_serde::Serde;
 use net_traits::pub_domains::is_pub_domain;
 use net_traits::CookieSource;
 use servo_url::ServoUrl;
@@ -159,14 +159,14 @@ impl CookieStorage {
     // http://tools.ietf.org/html/rfc6265#section-5.4
    pub fn cookies_for_url(&mut self, url: &ServoUrl, source: CookieSource) -> Option<String> {
        let filterer = |c: &&mut Cookie| -> bool {
-            info!(
+            debug!(
                " === SENT COOKIE : {} {} {:?} {:?}",
                c.cookie.name(),
                c.cookie.value(),
                c.cookie.domain(),
                c.cookie.path()
            );
-            info!(
+            debug!(
                " === SENT COOKIE RESULT {}",
                c.appropriate_for_url(url, source)
            );
@@ -29,20 +29,24 @@ The following types directly support the gzip compression case:
 
 use crate::connector::BUF_SIZE;
 use brotli::Decompressor;
-use bytes::{Buf, BufMut, BytesMut};
+use bytes::{Buf, BufMut, Bytes, BytesMut};
 use flate2::read::DeflateDecoder;
-use futures::{Async, Future, Poll, Stream};
+use futures::{task::Context, task::Poll, Future, Stream};
 use hyper::header::{HeaderValue, CONTENT_ENCODING, TRANSFER_ENCODING};
-use hyper::{self, Body, Chunk, Response};
+use hyper::{self, Body, Response};
 use libflate::non_blocking::gzip;
 use std::cmp;
 use std::fmt;
 use std::io::{self, Read};
 use std::mem;
+use std::pin::Pin;
+use std::sync::{Arc, Mutex};
+use std::task::Waker;
 
+#[derive(Debug)]
 pub enum Error {
     Io(io::Error),
-    Hyper(hyper::error::Error),
+    Hyper(hyper::Error),
 }
 
 impl From<io::Error> for Error {
@ -51,8 +55,8 @@ impl From<io::Error> for Error {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<hyper::error::Error> for Error {
|
impl From<hyper::Error> for Error {
|
||||||
fn from(err: hyper::error::Error) -> Error {
|
fn from(err: hyper::Error) -> Error {
|
||||||
Error::Hyper(err)
|
Error::Hyper(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -93,10 +97,11 @@ struct Pending {
 }
 
 /// A gzip decoder that reads from a `libflate::gzip::Decoder` into a `BytesMut` and emits the results
-/// as a `Chunk`.
+/// as a `Bytes`.
 struct Gzip {
     inner: Box<gzip::Decoder<Peeked<ReadableChunks<Body>>>>,
     buf: BytesMut,
+    reader: Arc<Mutex<ReadableChunks<Body>>>,
 }
 
 impl fmt::Debug for Decoder {
@@ -162,37 +167,36 @@ impl Decoder {
 }
 
 impl Stream for Decoder {
-    type Item = Chunk;
-    type Error = Error;
+    type Item = Result<Bytes, Error>;
 
-    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         // Do a read or poll for a pending decoder value.
         let new_value = match self.inner {
-            Inner::Pending(ref mut future) => match future.poll() {
-                Ok(Async::Ready(inner)) => inner,
-                Ok(Async::NotReady) => return Ok(Async::NotReady),
-                Err(e) => return Err(e.into()),
+            Inner::Pending(ref mut future) => match Pin::new(future).poll(cx) {
+                Poll::Ready(inner) => inner,
+                Poll::Pending => return Poll::Pending,
             },
-            Inner::PlainText(ref mut body) => return body.poll().map_err(|e| e.into()),
-            Inner::Gzip(ref mut decoder) => return decoder.poll(),
-            Inner::Brotli(ref mut decoder) => return decoder.poll(),
-            Inner::Deflate(ref mut decoder) => return decoder.poll(),
+            Inner::PlainText(ref mut body) => {
+                return Pin::new(body).poll_next(cx).map_err(|e| e.into())
+            },
+            Inner::Gzip(ref mut decoder) => return Pin::new(decoder).poll_next(cx),
+            Inner::Brotli(ref mut decoder) => return Pin::new(decoder).poll_next(cx),
+            Inner::Deflate(ref mut decoder) => return Pin::new(decoder).poll_next(cx),
         };
 
+        //
         self.inner = new_value;
-        self.poll()
+        self.poll_next(cx)
     }
 }
 
 impl Future for Pending {
-    type Item = Inner;
-    type Error = hyper::error::Error;
+    type Output = Inner;
 
-    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
-        let body_state = match self.body.poll_stream() {
-            Ok(Async::Ready(state)) => state,
-            Ok(Async::NotReady) => return Ok(Async::NotReady),
-            Err(e) => return Err(e),
+    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+        let body_state = match self.body.poll_stream(cx) {
+            Poll::Ready(state) => state,
+            Poll::Pending => return Poll::Pending,
         };
 
         let body = mem::replace(&mut self.body, ReadableChunks::new(Body::empty()));
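This hunk is the core futures-0.1 to futures-0.3 migration: the separate `Error` associated type disappears (errors travel inside `Item`), and `poll`/`Async` become `poll_next`/`Poll` with an explicit `Pin` and `Context`. A minimal illustrative sketch of the new trait shape (not Servo's types):

    use bytes::Bytes;
    use futures::Stream;
    use std::pin::Pin;
    use std::task::{Context, Poll};

    struct Empty;

    impl Stream for Empty {
        type Item = Result<Bytes, std::io::Error>;

        fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
            // `Poll::Ready(None)` ends the stream; `Poll::Pending` would park
            // the task until the registered waker is woken.
            Poll::Ready(None)
        }
    }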
@@ -200,110 +204,133 @@ impl Future for Pending {
         // if the stream was empty, or truly had an UnexpectedEof.
         // Therefore, we need to check for EOF first.
         match body_state {
-            StreamState::Eof => Ok(Async::Ready(Inner::PlainText(Body::empty()))),
-            StreamState::HasMore => Ok(Async::Ready(match self.type_ {
+            StreamState::Eof => Poll::Ready(Inner::PlainText(Body::empty())),
+            StreamState::HasMore => Poll::Ready(match self.type_ {
                 DecoderType::Gzip => Inner::Gzip(Gzip::new(body)),
                 DecoderType::Brotli => Inner::Brotli(Brotli::new(body)),
                 DecoderType::Deflate => Inner::Deflate(Deflate::new(body)),
-            })),
+            }),
         }
     }
 }
 
 impl Gzip {
     fn new(stream: ReadableChunks<Body>) -> Self {
+        let stream = Arc::new(Mutex::new(stream));
+        let reader = stream.clone();
         Gzip {
             buf: BytesMut::with_capacity(INIT_BUFFER_SIZE),
             inner: Box::new(gzip::Decoder::new(Peeked::new(stream))),
+            reader: reader,
         }
     }
 }
 
 #[allow(unsafe_code)]
-fn poll_with_read(reader: &mut dyn Read, buf: &mut BytesMut) -> Poll<Option<Chunk>, Error> {
-    if buf.remaining_mut() == 0 {
-        buf.reserve(INIT_BUFFER_SIZE);
-    }
+fn poll_with_read(reader: &mut dyn Read, buf: &mut BytesMut) -> Poll<Option<Result<Bytes, Error>>> {
+    // Ensure a full size buffer is available.
+    // `reserve` is optimized to reclaim space over allocating.
+    buf.reserve(INIT_BUFFER_SIZE);
 
     // The buffer contains uninitialised memory so getting a readable slice is unsafe.
     // We trust the reader not to read from the memory given.
     //
     // To be safe, this memory could be zeroed before passing to the reader.
     // Otherwise we might need to deal with the case where the reader panics.
 
     let read = {
-        let mut buf = unsafe { buf.bytes_mut() };
-        reader.read(&mut buf)
+        let buf = unsafe {
+            let ptr = buf.chunk_mut().as_mut_ptr();
+            std::slice::from_raw_parts_mut(ptr, buf.capacity())
+        };
+        reader.read(&mut *buf)
     };
 
     match read {
-        Ok(read) if read == 0 => Ok(Async::Ready(None)),
+        Ok(read) if read == 0 => Poll::Ready(None),
         Ok(read) => {
             unsafe { buf.advance_mut(read) };
-            let chunk = Chunk::from(buf.split_to(read).freeze());
-            Ok(Async::Ready(Some(chunk)))
+            let chunk = buf.split_to(read).freeze();
+            Poll::Ready(Some(Ok(chunk)))
         },
-        Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Ok(Async::NotReady),
-        Err(e) => Err(e.into()),
+        Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Poll::Pending,
+        Err(e) => Poll::Ready(Some(Err(e.into()))),
     }
 }
 
 impl Stream for Gzip {
-    type Item = Chunk;
-    type Error = Error;
+    type Item = Result<Bytes, Error>;
 
-    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
-        poll_with_read(&mut self.inner, &mut self.buf)
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        let mut buf = self.buf.clone();
+        if let Ok(mut reader) = self.reader.lock() {
+            reader.waker = Some(cx.waker().clone());
+        }
+        poll_with_read(&mut self.inner, &mut buf)
     }
 }
 
 /// A brotli decoder that reads from a `brotli::Decompressor` into a `BytesMut` and emits the results
-/// as a `Chunk`.
+/// as a `Bytes`.
 struct Brotli {
     inner: Box<Decompressor<Peeked<ReadableChunks<Body>>>>,
     buf: BytesMut,
+    reader: Arc<Mutex<ReadableChunks<Body>>>,
 }
 
 impl Brotli {
     fn new(stream: ReadableChunks<Body>) -> Self {
+        let stream = Arc::new(Mutex::new(stream));
+        let reader = stream.clone();
         Self {
             buf: BytesMut::with_capacity(INIT_BUFFER_SIZE),
             inner: Box::new(Decompressor::new(Peeked::new(stream), BUF_SIZE)),
+            reader: reader,
         }
     }
 }
 
 impl Stream for Brotli {
-    type Item = Chunk;
-    type Error = Error;
+    type Item = Result<Bytes, Error>;
 
-    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
-        poll_with_read(&mut self.inner, &mut self.buf)
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        let mut buf = self.buf.clone();
+        if let Ok(mut reader) = self.reader.lock() {
+            reader.waker = Some(cx.waker().clone());
+        }
+        poll_with_read(&mut self.inner, &mut buf)
     }
 }
 
 /// A deflate decoder that reads from a `deflate::Decoder` into a `BytesMut` and emits the results
-/// as a `Chunk`.
+/// as a `Bytes`.
 struct Deflate {
     inner: Box<DeflateDecoder<Peeked<ReadableChunks<Body>>>>,
     buf: BytesMut,
+    reader: Arc<Mutex<ReadableChunks<Body>>>,
 }
 
 impl Deflate {
     fn new(stream: ReadableChunks<Body>) -> Self {
+        let stream = Arc::new(Mutex::new(stream));
+        let reader = stream.clone();
        Self {
             buf: BytesMut::with_capacity(INIT_BUFFER_SIZE),
             inner: Box::new(DeflateDecoder::new(Peeked::new(stream))),
+            reader: reader,
         }
     }
 }
 
 impl Stream for Deflate {
-    type Item = Chunk;
-    type Error = Error;
+    type Item = Result<Bytes, Error>;
 
-    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
-        poll_with_read(&mut self.inner, &mut self.buf)
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        let mut buf = self.buf.clone();
+        if let Ok(mut reader) = self.reader.lock() {
+            reader.waker = Some(cx.waker().clone());
+        }
+        poll_with_read(&mut self.inner, &mut buf)
     }
 }
 
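The decoders above now store the current task's `Waker` in the shared `ReadableChunks` before every synchronous read, so that the body stream can wake the task when more compressed bytes arrive (the old futures-0.1 implicit task did this automatically). A self-contained sketch of that producer/consumer wake-up pattern, with illustrative names rather than Servo's types:

    use std::sync::{Arc, Mutex};
    use std::task::{Context, Poll, Waker};

    #[derive(Default)]
    struct Shared {
        buf: Vec<u8>,
        waker: Option<Waker>,
    }

    // Consumer side: register the waker before checking for data.
    fn poll_read(shared: &Arc<Mutex<Shared>>, cx: &mut Context<'_>) -> Poll<Vec<u8>> {
        let mut s = shared.lock().unwrap();
        if s.buf.is_empty() {
            s.waker = Some(cx.waker().clone());
            return Poll::Pending;
        }
        Poll::Ready(std::mem::take(&mut s.buf))
    }

    // Producer side: push data, then wake the parked task.
    fn push(shared: &Arc<Mutex<Shared>>, bytes: &[u8]) {
        let mut s = shared.lock().unwrap();
        s.buf.extend_from_slice(bytes);
        if let Some(w) = s.waker.take() {
            w.wake();
        }
    }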
@@ -311,17 +338,21 @@ impl Stream for Deflate {
 pub struct ReadableChunks<S> {
     state: ReadState,
     stream: S,
+    waker: Option<Waker>,
 }
 
 enum ReadState {
     /// A chunk is ready to be read from.
-    Ready(Chunk),
+    Ready(Bytes),
     /// The next chunk isn't ready yet.
     NotReady,
     /// The stream has finished.
     Eof,
+    /// Stream is in err
+    Error(hyper::Error),
 }
 
+#[derive(Debug)]
 enum StreamState {
     /// More bytes can be read from the stream.
     HasMore,
@ -334,7 +365,7 @@ struct Peeked<R> {
|
||||||
state: PeekedState,
|
state: PeekedState,
|
||||||
peeked_buf: [u8; 10],
|
peeked_buf: [u8; 10],
|
||||||
pos: usize,
|
pos: usize,
|
||||||
inner: R,
|
inner: Arc<Mutex<R>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
enum PeekedState {
|
enum PeekedState {
|
||||||
|
@@ -346,7 +377,7 @@ enum PeekedState {
 
 impl<R> Peeked<R> {
     #[inline]
-    fn new(inner: R) -> Self {
+    fn new(inner: Arc<Mutex<R>>) -> Self {
         Peeked {
             state: PeekedState::NotReady,
             peeked_buf: [0; 10],
@ -383,11 +414,13 @@ impl<R: Read> Read for Peeked<R> {
|
||||||
if self.pos == peeked_buf_len {
|
if self.pos == peeked_buf_len {
|
||||||
self.not_ready();
|
self.not_ready();
|
||||||
}
|
}
|
||||||
|
|
||||||
return Ok(len);
|
return Ok(len);
|
||||||
},
|
},
|
||||||
PeekedState::NotReady => {
|
PeekedState::NotReady => {
|
||||||
let read = self.inner.read(&mut self.peeked_buf[self.pos..]);
|
let mut buf = &mut self.peeked_buf[self.pos..];
|
||||||
|
let stream = self.inner.clone();
|
||||||
|
let mut reader = stream.lock().unwrap();
|
||||||
|
let read = reader.read(&mut buf);
|
||||||
|
|
||||||
match read {
|
match read {
|
||||||
Ok(0) => self.ready(),
|
Ok(0) => self.ready(),
|
||||||
|
@ -411,6 +444,7 @@ impl<S> ReadableChunks<S> {
|
||||||
ReadableChunks {
|
ReadableChunks {
|
||||||
state: ReadState::NotReady,
|
state: ReadState::NotReady,
|
||||||
stream: stream,
|
stream: stream,
|
||||||
|
waker: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -423,9 +457,12 @@ impl<S> fmt::Debug for ReadableChunks<S> {
|
||||||
|
|
||||||
impl<S> Read for ReadableChunks<S>
|
impl<S> Read for ReadableChunks<S>
|
||||||
where
|
where
|
||||||
S: Stream<Item = Chunk, Error = hyper::error::Error>,
|
S: Stream<Item = Result<Bytes, hyper::Error>> + std::marker::Unpin,
|
||||||
{
|
{
|
||||||
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
|
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
|
||||||
|
let waker = self.waker.as_ref().unwrap().clone();
|
||||||
|
let mut cx = Context::from_waker(&waker);
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
let ret;
|
let ret;
|
||||||
match self.state {
|
match self.state {
|
||||||
|
@@ -440,15 +477,15 @@ where
                        return Ok(len);
                    }
                },
-                ReadState::NotReady => match self.poll_stream() {
-                    Ok(Async::Ready(StreamState::HasMore)) => continue,
-                    Ok(Async::Ready(StreamState::Eof)) => return Ok(0),
-                    Ok(Async::NotReady) => return Err(io::ErrorKind::WouldBlock.into()),
-                    Err(e) => {
-                        return Err(io::Error::new(io::ErrorKind::Other, e));
-                    },
+                ReadState::NotReady => match self.poll_stream(&mut cx) {
+                    Poll::Ready(StreamState::HasMore) => continue,
+                    Poll::Ready(StreamState::Eof) => return Ok(0),
+                    Poll::Pending => return Err(io::ErrorKind::WouldBlock.into()),
                },
                ReadState::Eof => return Ok(0),
+                ReadState::Error(ref err) => {
+                    return Err(io::Error::new(io::ErrorKind::Other, err.to_string()))
+                },
            }
            self.state = ReadState::NotReady;
            return Ok(ret);
@@ -458,26 +495,29 @@ where
 
 impl<S> ReadableChunks<S>
 where
-    S: Stream<Item = Chunk, Error = hyper::error::Error>,
+    S: Stream<Item = Result<Bytes, hyper::Error>> + std::marker::Unpin,
 {
     /// Poll the readiness of the inner reader.
     ///
     /// This function will update the internal state and return a simplified
     /// version of the `ReadState`.
-    fn poll_stream(&mut self) -> Poll<StreamState, hyper::error::Error> {
-        match self.stream.poll() {
-            Ok(Async::Ready(Some(chunk))) => {
+    fn poll_stream(&mut self, cx: &mut Context<'_>) -> Poll<StreamState> {
+        match Pin::new(&mut self.stream).poll_next(cx) {
+            Poll::Ready(Some(Ok(chunk))) => {
                 self.state = ReadState::Ready(chunk);
 
-                Ok(Async::Ready(StreamState::HasMore))
+                Poll::Ready(StreamState::HasMore)
             },
-            Ok(Async::Ready(None)) => {
-                self.state = ReadState::Eof;
+            Poll::Ready(Some(Err(err))) => {
+                self.state = ReadState::Error(err);
 
-                Ok(Async::Ready(StreamState::Eof))
+                Poll::Ready(StreamState::Eof)
             },
-            Ok(Async::NotReady) => Ok(Async::NotReady),
-            Err(e) => Err(e),
+            Poll::Ready(None) => {
+                self.state = ReadState::Eof;
+                Poll::Ready(StreamState::Eof)
+            },
+            Poll::Pending => Poll::Pending,
         }
     }
 }
@@ -10,7 +10,7 @@
 //! with CORSRequest being expanded into FetchRequest (etc)
 
 use http::header::HeaderName;
-use hyper::Method;
+use http::Method;
 use net_traits::request::{CredentialsMode, Origin, Request};
 use servo_url::ServoUrl;
 use time::{self, Timespec};
@@ -14,8 +14,8 @@ use crossbeam_channel::Sender;
 use devtools_traits::DevtoolsControlMsg;
 use headers::{AccessControlExposeHeaders, ContentType, HeaderMapExt, Range};
 use http::header::{self, HeaderMap, HeaderName};
-use hyper::Method;
-use hyper::StatusCode;
+use http::Method;
+use http::StatusCode;
 use ipc_channel::ipc::{self, IpcReceiver};
 use mime::{self, Mime};
 use net_traits::blob_url_store::{parse_blob_url, BlobURLStoreError};
@@ -40,7 +40,7 @@ use std::ops::Bound;
 use std::str;
 use std::sync::atomic::Ordering;
 use std::sync::{Arc, Mutex};
-use tokio2::sync::mpsc::{
+use tokio::sync::mpsc::{
     unbounded_channel, UnboundedReceiver as TokioReceiver, UnboundedSender as TokioSender,
 };
 
@@ -522,7 +522,9 @@ async fn wait_for_response(
            Some(Data::Payload(vec)) => {
                target.process_response_chunk(vec);
            },
-            Some(Data::Done) => break,
+            Some(Data::Done) => {
+                break;
+            },
            Some(Data::Cancelled) => {
                response.aborted.store(true, Ordering::Release);
                break;
@@ -27,7 +27,7 @@ use std::ops::Index;
 use std::path::{Path, PathBuf};
 use std::sync::atomic::{self, AtomicBool, AtomicUsize, Ordering};
 use std::sync::{Arc, Mutex, RwLock, Weak};
-use tokio2::sync::mpsc::UnboundedSender as TokioSender;
+use tokio::sync::mpsc::UnboundedSender as TokioSender;
 use url::Url;
 use uuid::Uuid;
 
@@ -12,8 +12,7 @@ use headers::{
     CacheControl, ContentRange, Expires, HeaderMapExt, LastModified, Pragma, Range, Vary,
 };
 use http::header::HeaderValue;
-use http::{header, HeaderMap};
-use hyper::{Method, StatusCode};
+use http::{header, HeaderMap, Method, StatusCode};
 use malloc_size_of::Measurable;
 use malloc_size_of::{
     MallocSizeOf, MallocSizeOfOps, MallocUnconditionalShallowSizeOf, MallocUnconditionalSizeOf,
@@ -29,7 +28,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::Mutex;
 use std::time::SystemTime;
 use time::{Duration, Timespec, Tm};
-use tokio2::sync::mpsc::{unbounded_channel as unbounded, UnboundedSender as TokioSender};
+use tokio::sync::mpsc::{unbounded_channel as unbounded, UnboundedSender as TokioSender};
 
 /// The key used to differentiate requests in the cache.
 #[derive(Clone, Eq, Hash, MallocSizeOf, PartialEq)]
@@ -12,12 +12,13 @@ use crate::hsts::HstsList;
 use crate::http_cache::{CacheKey, HttpCache};
 use crate::resource_thread::AuthCache;
 use async_recursion::async_recursion;
-use crossbeam_channel::{unbounded, Receiver, Sender};
+use core::convert::Infallible;
+use crossbeam_channel::Sender;
 use devtools_traits::{
     ChromeToDevtoolsControlMsg, DevtoolsControlMsg, HttpRequest as DevtoolsHttpRequest,
 };
 use devtools_traits::{HttpResponse as DevtoolsHttpResponse, NetworkEvent};
-use futures_util::compat::*;
+use futures::{future, StreamExt, TryFutureExt, TryStreamExt};
 use headers::authorization::Basic;
 use headers::{AccessControlAllowCredentials, AccessControlAllowHeaders, HeaderMapExt};
 use headers::{
@@ -28,12 +29,11 @@ use headers::{AccessControlAllowOrigin, AccessControlMaxAge};
 use headers::{CacheControl, ContentEncoding, ContentLength};
 use headers::{IfModifiedSince, LastModified, Origin as HyperOrigin, Pragma, Referer, UserAgent};
 use http::header::{
-    self, HeaderName, HeaderValue, ACCEPT, CONTENT_ENCODING, CONTENT_LANGUAGE, CONTENT_LOCATION,
-    CONTENT_TYPE,
+    self, HeaderValue, ACCEPT, CONTENT_ENCODING, CONTENT_LANGUAGE, CONTENT_LOCATION, CONTENT_TYPE,
 };
-use http::{HeaderMap, Request as HyperRequest};
-use hyper::header::TRANSFER_ENCODING;
-use hyper::{Body, Client, Method, Response as HyperResponse, StatusCode};
+use http::{HeaderMap, Method, Request as HyperRequest, StatusCode};
+use hyper::header::{HeaderName, TRANSFER_ENCODING};
+use hyper::{Body, Client, Response as HyperResponse};
 use hyper_serde::Serde;
 use ipc_channel::ipc::{self, IpcSender};
 use ipc_channel::router::ROUTER;
@@ -65,14 +65,15 @@ use std::ops::Deref;
 use std::sync::{Arc as StdArc, Condvar, Mutex, RwLock};
 use std::time::{Duration, SystemTime};
 use time::{self, Tm};
-use tokio::prelude::{future, Future, Sink, Stream};
-use tokio::sync::mpsc::{channel, Receiver as TokioReceiver, Sender as TokioSender};
-use tokio2::sync::mpsc::{unbounded_channel, UnboundedSender as Tokio02Sender};
-use tokio_compat::runtime::{Builder, Runtime};
+use tokio::runtime::Runtime;
+use tokio::sync::mpsc::{
+    channel, unbounded_channel, Receiver as TokioReceiver, Sender as TokioSender,
+    UnboundedReceiver, UnboundedSender,
+};
+use tokio_stream::wrappers::ReceiverStream;
 
 lazy_static! {
-    pub static ref HANDLE: Mutex<Option<Runtime>> =
-        Mutex::new(Some(Builder::new().build().unwrap()));
+    pub static ref HANDLE: Mutex<Option<Runtime>> = Mutex::new(Some(Runtime::new().unwrap()));
 }
 
 /// The various states an entry of the HttpCache can be in.
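The `HANDLE` above swaps the tokio-compat runtime for a plain tokio 1 runtime held behind a mutex so it can be shut down by `take()`-ing it later. A small sketch of that pattern in isolation, assuming tokio 1 and lazy_static (names are illustrative):

    use std::sync::Mutex;
    use tokio::runtime::Runtime;

    lazy_static::lazy_static! {
        static ref RUNTIME: Mutex<Option<Runtime>> = Mutex::new(Some(Runtime::new().unwrap()));
    }

    fn spawn_on_runtime<F>(fut: F)
    where
        F: std::future::Future<Output = ()> + Send + 'static,
    {
        // All workers spawn through the same global handle; taking the
        // runtime out of the Option elsewhere is what shuts it down.
        RUNTIME.lock().unwrap().as_ref().unwrap().spawn(fut);
    }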
@@ -110,10 +111,7 @@ impl HttpState {
            history_states: RwLock::new(HashMap::new()),
            http_cache: RwLock::new(HttpCache::new()),
            http_cache_state: Mutex::new(HashMap::new()),
-            client: create_http_client(
-                tls_config,
-                HANDLE.lock().unwrap().as_ref().unwrap().executor(),
-            ),
+            client: create_http_client(tls_config),
            extra_certs: ExtraCerts::new(),
            connection_certs: ConnectionCerts::new(),
        }
@@ -440,7 +438,7 @@ enum BodyStream {
     Chunked(TokioReceiver<Vec<u8>>),
     /// A body whose bytes are buffered
     /// and sent in one chunk over the network.
-    Buffered(Receiver<BodyChunk>),
+    Buffered(UnboundedReceiver<BodyChunk>),
 }
 
 /// The sink side of the body passed to hyper,
@@ -451,7 +449,7 @@ enum BodySink {
     /// A Crossbeam sender used to send chunks to the fetch worker,
     /// where they will be buffered
     /// in order to ensure they are not streamed them over the network.
-    Buffered(Sender<BodyChunk>),
+    Buffered(UnboundedSender<BodyChunk>),
 }
 
 impl BodySink {
@@ -459,12 +457,9 @@ impl BodySink {
        match self {
            BodySink::Chunked(ref sender) => {
                let sender = sender.clone();
-                HANDLE
-                    .lock()
-                    .unwrap()
-                    .as_mut()
-                    .unwrap()
-                    .spawn(sender.send(bytes).map(|_| ()).map_err(|_| ()));
+                HANDLE.lock().unwrap().as_mut().unwrap().spawn(async move {
+                    let _ = sender.send(bytes).await;
+                });
            },
            BodySink::Buffered(ref sender) => {
                let _ = sender.send(BodyChunk::Chunk(bytes));
@@ -474,20 +469,7 @@ impl BodySink {
 
    pub fn close(&self) {
        match self {
-            BodySink::Chunked(ref sender) => {
-                let mut sender = sender.clone();
-                HANDLE
-                    .lock()
-                    .unwrap()
-                    .as_mut()
-                    .unwrap()
-                    .spawn(future::lazy(move || {
-                        if sender.close().is_err() {
-                            warn!("Failed to close network request sink.");
-                        }
-                        Ok(())
-                    }));
-            },
+            BodySink::Chunked(_) => { /* no need to close sender */ },
            BodySink::Buffered(ref sender) => {
                let _ = sender.send(BodyChunk::Done);
            },
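Both `BodySink` hunks above lean on the semantics of tokio 1 mpsc channels: an unbounded `send` is synchronous and infallible while a receiver is alive, and dropping the sender is what signals end-of-stream, so there is no `Sink::close` to drive anymore. A small illustrative sketch (not Servo's code):

    use tokio::sync::mpsc;

    #[tokio::main]
    async fn main() {
        let (tx, mut rx) = mpsc::unbounded_channel::<Vec<u8>>();

        // `send` on an unbounded sender never blocks and needs no spawned task.
        tx.send(b"chunk".to_vec()).unwrap();
        drop(tx); // dropping the sender is the "close": the receiver then sees None

        while let Some(chunk) = rx.recv().await {
            println!("{} bytes", chunk.len());
        }
    }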
@@ -506,7 +488,7 @@ async fn obtain_response(
     request_id: Option<&str>,
     is_xhr: bool,
     context: &FetchContext,
-    fetch_terminated: Tokio02Sender<bool>,
+    fetch_terminated: UnboundedSender<bool>,
 ) -> Result<(HyperResponse<Decoder>, Option<ChromeToDevtoolsControlMsg>), NetworkError> {
     {
         let mut headers = request_headers.clone();
@@ -537,7 +519,7 @@
        // However since this doesn't appear documented, and we're using an ancient version,
        // for now we buffer manually to ensure we don't stream requests
        // to servers that might not know how to handle them.
-        let (sender, receiver) = unbounded();
+        let (sender, receiver) = unbounded_channel();
        (BodySink::Buffered(sender), BodyStream::Buffered(receiver))
    };
 
@@ -557,6 +539,7 @@
    ROUTER.add_route(
        body_port.to_opaque(),
        Box::new(move |message| {
+            info!("Received message");
            let bytes: Vec<u8> = match message.to().unwrap() {
                BodyChunkResponse::Chunk(bytes) => bytes,
                BodyChunkResponse::Done => {
@@ -593,23 +576,25 @@ async fn obtain_response(
    );
 
    let body = match stream {
-        BodyStream::Chunked(receiver) => Body::wrap_stream(receiver),
-        BodyStream::Buffered(receiver) => {
+        BodyStream::Chunked(receiver) => {
+            let stream = ReceiverStream::new(receiver);
+            Body::wrap_stream(stream.map(Ok::<_, Infallible>))
+        },
+        BodyStream::Buffered(mut receiver) => {
            // Accumulate bytes received over IPC into a vector.
            let mut body = vec![];
            loop {
-                match receiver.recv() {
-                    Ok(BodyChunk::Chunk(mut bytes)) => {
+                match receiver.recv().await {
+                    Some(BodyChunk::Chunk(mut bytes)) => {
                        body.append(&mut bytes);
                    },
-                    Ok(BodyChunk::Done) => break,
-                    Err(_) => warn!("Failed to read all chunks from request body."),
+                    Some(BodyChunk::Done) => break,
+                    None => warn!("Failed to read all chunks from request body."),
                }
            }
            body.into()
        },
    };
 
    HyperRequest::builder()
        .method(method)
        .uri(encoded_url)
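The chunked branch above has to adapt a tokio mpsc receiver into the `TryStream` that `Body::wrap_stream` expects, hence `ReceiverStream` plus the `Ok::<_, Infallible>` mapping. A minimal sketch of that adapter in isolation, assuming hyper 0.14 with the "stream" feature, tokio 1 and tokio-stream (function name is illustrative):

    use core::convert::Infallible;
    use futures::StreamExt;
    use hyper::Body;
    use tokio::sync::mpsc;
    use tokio_stream::wrappers::ReceiverStream;

    fn body_from_channel(rx: mpsc::Receiver<Vec<u8>>) -> Body {
        // hyper wants a stream of Result<impl Into<Bytes>, impl Into<Box<dyn Error>>>,
        // so wrap the receiver and mark every chunk as infallible.
        let stream = ReceiverStream::new(rx).map(Ok::<_, Infallible>);
        Body::wrap_stream(stream)
    }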
@@ -709,12 +694,11 @@
                debug!("Not notifying devtools (no request_id)");
                None
            };
-            future::ready would replace the plain return here
-            Ok((Decoder::detect(res), msg))
+            future::ready(Ok((Decoder::detect(res), msg)))
        })
        .map_err(move |e| {
            NetworkError::from_hyper_error(&e, connection_certs_clone.remove(host_clone))
        })
-        .compat() // convert from Future01 to Future03
        .await
    }
 }
@@ -1850,7 +1834,7 @@ async fn http_network_fetch(
            send_response_to_devtools(
                &sender,
                request_id.unwrap(),
-                meta_headers.map(Serde::into_inner),
+                meta_headers.map(|hdrs| Serde::into_inner(hdrs)),
                meta_status,
                pipeline_id,
            );
@@ -1866,19 +1850,22 @@
 
    HANDLE.lock().unwrap().as_ref().unwrap().spawn(
        res.into_body()
-            .map_err(|_| ())
-            .fold(res_body, move |res_body, chunk| {
+            .map_err(|e| {
+                warn!("Error streaming response body: {:?}", e);
+                ()
+            })
+            .try_fold(res_body, move |res_body, chunk| {
                if cancellation_listener.lock().unwrap().cancelled() {
                    *res_body.lock().unwrap() = ResponseBody::Done(vec![]);
                    let _ = done_sender.send(Data::Cancelled);
-                    return tokio::prelude::future::failed(());
+                    return future::ready(Err(()));
                }
                if let ResponseBody::Receiving(ref mut body) = *res_body.lock().unwrap() {
-                    let bytes = chunk.into_bytes();
+                    let bytes = chunk;
                    body.extend_from_slice(&*bytes);
                    let _ = done_sender.send(Data::Payload(bytes.to_vec()));
                }
-                tokio::prelude::future::ok(res_body)
+                future::ready(Ok(res_body))
            })
            .and_then(move |res_body| {
                debug!("successfully finished response for {:?}", url1);
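With hyper 0.14 the response body is a stream of `Result<Bytes, hyper::Error>`, which is why the hunk above switches from `fold` on chunks to `TryStreamExt::try_fold`. A standalone sketch of draining a body the same way, assuming hyper 0.14 with the "client", "http1", "tcp" and "stream" features enabled (as in the Cargo.toml hunk earlier); the URL is illustrative:

    use futures::TryStreamExt;
    use hyper::Client;

    async fn fetch_len() -> Result<usize, hyper::Error> {
        let client = Client::new();
        let res = client.get("http://example.com".parse().unwrap()).await?;
        // Each stream item is a Result<Bytes, hyper::Error>; accumulate lengths.
        res.into_body()
            .try_fold(0usize, |acc, chunk| async move { Ok(acc + chunk.len()) })
            .await
    }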
@@ -1893,10 +1880,10 @@
                    .unwrap()
                    .set_attribute(ResourceAttribute::ResponseEnd);
                let _ = done_sender2.send(Data::Done);
-                tokio::prelude::future::ok(())
+                future::ready(Ok(()))
            })
            .map_err(move |_| {
-                warn!("finished response for {:?} with error", url2);
+                debug!("finished response for {:?}", url2);
                let mut body = res_body2.lock().unwrap();
                let completed_body = match *body {
                    ResponseBody::Receiving(ref mut body) => mem::replace(body, vec![]),
@@ -16,7 +16,7 @@ use crate::hsts::HstsList;
 use crate::http_cache::HttpCache;
 use crate::http_loader::{http_redirect_fetch, HttpState, HANDLE};
 use crate::storage_thread::StorageThreadFactory;
-use crate::websocket_loader::{self, HANDLE as WS_HANDLE};
+use crate::websocket_loader;
 use crossbeam_channel::Sender;
 use devtools_traits::DevtoolsControlMsg;
 use embedder_traits::resources::{self, Resource};
@@ -155,15 +155,12 @@ fn create_http_states(
        history_states: RwLock::new(HashMap::new()),
        http_cache: RwLock::new(http_cache),
        http_cache_state: Mutex::new(HashMap::new()),
-        client: create_http_client(
-            create_tls_config(
-                &certs,
-                ALPN_H2_H1,
-                extra_certs.clone(),
-                connection_certs.clone(),
-            ),
-            HANDLE.lock().unwrap().as_ref().unwrap().executor(),
-        ),
+        client: create_http_client(create_tls_config(
+            &certs,
+            ALPN_H2_H1,
+            extra_certs.clone(),
+            connection_certs.clone(),
+        )),
        extra_certs,
        connection_certs,
    };
@@ -178,15 +175,12 @@
        history_states: RwLock::new(HashMap::new()),
        http_cache: RwLock::new(HttpCache::new()),
        http_cache_state: Mutex::new(HashMap::new()),
-        client: create_http_client(
-            create_tls_config(
-                &certs,
-                ALPN_H2_H1,
-                extra_certs.clone(),
-                connection_certs.clone(),
-            ),
-            HANDLE.lock().unwrap().as_ref().unwrap().executor(),
-        ),
+        client: create_http_client(create_tls_config(
+            &certs,
+            ALPN_H2_H1,
+            extra_certs.clone(),
+            connection_certs.clone(),
+        )),
        extra_certs,
        connection_certs,
    };
@@ -616,12 +610,6 @@ impl CoreResourceManager {
         // or a short timeout has been reached.
         self.thread_pool.exit();
 
-        // Shut-down the async runtime used by fetch workers.
-        drop(HANDLE.lock().unwrap().take());
-
-        // Shut-down the async runtime used by websocket workers.
-        drop(WS_HANDLE.lock().unwrap().take());
-
         debug!("Exited CoreResourceManager");
     }
 
@@ -680,58 +668,49 @@ impl CoreResourceManager {
             _ => (FileTokenCheck::NotRequired, None),
         };
 
-        HANDLE
-            .lock()
-            .unwrap()
-            .as_ref()
-            .unwrap()
-            .spawn_std(async move {
-                // XXXManishearth: Check origin against pipeline id (also ensure that the mode is allowed)
-                // todo load context / mimesniff in fetch
-                // todo referrer policy?
-                // todo service worker stuff
-                let context = FetchContext {
-                    state: http_state,
-                    user_agent: ua,
-                    devtools_chan: dc.map(|dc| Arc::new(Mutex::new(dc))),
-                    filemanager: Arc::new(Mutex::new(filemanager)),
-                    file_token,
-                    cancellation_listener: Arc::new(Mutex::new(CancellationListener::new(
-                        cancel_chan,
-                    ))),
-                    timing: ServoArc::new(Mutex::new(ResourceFetchTiming::new(
-                        request.timing_type(),
-                    ))),
-                };
+        HANDLE.lock().unwrap().as_ref().unwrap().spawn(async move {
+            // XXXManishearth: Check origin against pipeline id (also ensure that the mode is allowed)
+            // todo load context / mimesniff in fetch
+            // todo referrer policy?
+            // todo service worker stuff
+            let context = FetchContext {
+                state: http_state,
+                user_agent: ua,
+                devtools_chan: dc.map(|dc| Arc::new(Mutex::new(dc))),
+                filemanager: Arc::new(Mutex::new(filemanager)),
+                file_token,
+                cancellation_listener: Arc::new(Mutex::new(CancellationListener::new(cancel_chan))),
+                timing: ServoArc::new(Mutex::new(ResourceFetchTiming::new(request.timing_type()))),
+            };
 
             match res_init_ {
                 Some(res_init) => {
                     let response = Response::from_init(res_init, timing_type);
                     http_redirect_fetch(
                         &mut request,
                         &mut CorsCache::new(),
                         response,
                         true,
                         &mut sender,
                         &mut None,
                         &context,
                     )
                     .await;
                 },
                 None => {
                     fetch(&mut request, &mut sender, &context).await;
                 },
             };
 
             // Remove token after fetch.
             if let Some(id) = blob_url_file_id.as_ref() {
                 context
                     .filemanager
                     .lock()
                     .unwrap()
                     .invalidate_token(&context.file_token, id);
             }
         });
     }
 
     fn websocket_connect(

@@ -20,7 +20,7 @@ use headers::{AccessControlAllowMethods, AccessControlMaxAge, HeaderMapExt};
 use headers::{CacheControl, ContentLength, ContentType, Expires, LastModified, Pragma, UserAgent};
 use http::header::{self, HeaderMap, HeaderName, HeaderValue};
 use http::{Method, StatusCode};
-use hyper::body::Body;
+use hyper::Body;
 use hyper::{Request as HyperRequest, Response as HyperResponse};
 use mime::{self, Mime};
 use msg::constellation_msg::TEST_PIPELINE_ID;

@@ -10,7 +10,7 @@ use net_traits::request::{Origin, Referrer, Request};
 use net_traits::response::{HttpsState, Response, ResponseBody};
 use net_traits::{ResourceFetchTiming, ResourceTimingType};
 use servo_url::ServoUrl;
-use tokio2::sync::mpsc::unbounded_channel as unbounded;
+use tokio::sync::mpsc::unbounded_channel as unbounded;
 
 #[test]
 fn test_refreshing_resource_sets_done_chan_the_appropriate_value() {

@@ -15,7 +15,6 @@ use devtools_traits::HttpResponse as DevtoolsHttpResponse;
 use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, NetworkEvent};
 use flate2::write::{DeflateEncoder, GzEncoder};
 use flate2::Compression;
-use futures::{self, Future, Stream};
 use headers::authorization::Basic;
 use headers::{
     Authorization, ContentLength, Date, HeaderMapExt, Host, StrictTransportSecurity, UserAgent,

@@ -23,7 +22,7 @@ use headers::{
 use http::header::{self, HeaderMap, HeaderValue};
 use http::uri::Authority;
 use http::{Method, StatusCode};
-use hyper::body::Body;
+use hyper::Body;
 use hyper::{Request as HyperRequest, Response as HyperResponse};
 use ipc_channel::ipc;
 use ipc_channel::router::ROUTER;

@@ -51,13 +50,6 @@ fn mock_origin() -> ImmutableOrigin {
     ServoUrl::parse("http://servo.org").unwrap().origin()
 }
 
-fn read_response(req: HyperRequest<Body>) -> impl Future<Item = String, Error = ()> {
-    req.into_body()
-        .concat2()
-        .and_then(|body| futures::future::ok(str::from_utf8(&body).unwrap().to_owned()))
-        .map_err(|_| ())
-}
-
 fn assert_cookie_for_domain(
     cookie_jar: &RwLock<CookieStorage>,
     domain: &str,

@@ -521,28 +513,18 @@ fn test_load_should_decode_the_response_as_gzip_when_response_headers_have_conte
 
 #[test]
 fn test_load_doesnt_send_request_body_on_any_redirect() {
+    use hyper::body::HttpBody;
 
     let post_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
         assert_eq!(request.method(), Method::GET);
-        read_response(request)
-            .and_then(|data| {
-                assert_eq!(data, "");
-                futures::future::ok(())
-            })
-            .poll()
-            .unwrap();
+        assert_eq!(request.size_hint().exact(), Some(0));
         *response.body_mut() = b"Yay!".to_vec().into();
     };
     let (post_server, post_url) = make_server(post_handler);
 
     let post_redirect_url = post_url.clone();
     let pre_handler = move |request: HyperRequest<Body>, response: &mut HyperResponse<Body>| {
-        read_response(request)
-            .and_then(|data| {
-                assert_eq!(data, "Body on POST");
-                futures::future::ok(())
-            })
-            .poll()
-            .unwrap();
+        assert_eq!(request.size_hint().exact(), Some(13));
         response.headers_mut().insert(
             header::LOCATION,
             HeaderValue::from_str(&post_redirect_url.to_string()).unwrap(),

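Illustrative note (not part of the commit): the assertions above rely on hyper 0.14's HttpBody trait, whose size_hint().exact() exposes a body's length without consuming it when that length is known up front. A hedged, standalone sketch, assuming hyper 0.14 as a dependency:

use hyper::body::{Body, HttpBody};

fn main() {
    // A body built from a fixed buffer knows its exact length...
    let fixed = Body::from("hello");
    assert_eq!(fixed.size_hint().exact(), Some(5));

    // ...while a streaming, channel-backed body does not.
    let (_tx, streaming) = Body::channel();
    assert_eq!(streaming.size_hint().exact(), None);
}
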
@@ -21,14 +21,17 @@ mod mime_classifier;
 mod resource_thread;
 mod subresource_integrity;
 
+use core::convert::Infallible;
+use core::pin::Pin;
 use crossbeam_channel::{unbounded, Sender};
 use devtools_traits::DevtoolsControlMsg;
 use embedder_traits::resources::{self, Resource};
 use embedder_traits::{EmbedderProxy, EventLoopWaker};
-use futures::{Future, Stream};
+use futures::future::ready;
+use futures::StreamExt;
 use hyper::server::conn::Http;
 use hyper::server::Server as HyperServer;
-use hyper::service::service_fn_ok;
+use hyper::service::{make_service_fn, service_fn};
 use hyper::{Body, Request as HyperRequest, Response as HyperResponse};
 use net::connector::{create_tls_config, ConnectionCerts, ExtraCerts, ALPN_H2_H1};
 use net::fetch::cors_cache::CorsCache;

@@ -40,20 +43,27 @@ use net_traits::filemanager_thread::FileTokenCheck;
 use net_traits::request::Request;
 use net_traits::response::Response;
 use net_traits::{FetchTaskTarget, ResourceFetchTiming, ResourceTimingType};
-use openssl::ssl::{SslAcceptor, SslFiletype, SslMethod};
+use openssl::ssl::{Ssl, SslAcceptor, SslFiletype, SslMethod};
 use servo_arc::Arc as ServoArc;
 use servo_url::ServoUrl;
 use std::net::TcpListener as StdTcpListener;
 use std::path::PathBuf;
 use std::sync::{Arc, Mutex, Weak};
 use tokio::net::TcpListener;
-use tokio::reactor::Handle;
-use tokio::runtime::Runtime;
-use tokio_openssl::SslAcceptorExt;
+use tokio::net::TcpStream;
+use tokio::runtime::{Builder, Runtime};
+use tokio_openssl::SslStream;
+use tokio_stream::wrappers::TcpListenerStream;
 use tokio_test::block_on;
 
 lazy_static! {
-    pub static ref HANDLE: Mutex<Runtime> = Mutex::new(Runtime::new().unwrap());
+    pub static ref HANDLE: Mutex<Runtime> = Mutex::new(
+        Builder::new_multi_thread()
+            .enable_io()
+            .worker_threads(10)
+            .build()
+            .unwrap()
+    );
 }
 
 const DEFAULT_USER_AGENT: &'static str = "Such Browser. Very Layout. Wow.";

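Illustrative note (not part of the commit): the HANDLE above builds a tokio 1.x multi-threaded runtime by hand; worker_threads(10) is simply the value this test harness picks. A minimal standalone sketch, assuming tokio with the rt-multi-thread and net features:

use tokio::runtime::Builder;

fn main() {
    // Build a multi-threaded runtime with the I/O driver enabled,
    // then drive a future to completion on it.
    let runtime = Builder::new_multi_thread()
        .enable_io()
        .worker_threads(4)
        .build()
        .expect("failed to build tokio runtime");

    runtime.block_on(async {
        println!("running on the runtime");
    });
}
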
@@ -134,33 +144,34 @@ fn fetch(request: &mut Request, dc: Option<Sender<DevtoolsControlMsg>>) -> Respo
 fn fetch_with_context(request: &mut Request, mut context: &mut FetchContext) -> Response {
     let (sender, receiver) = unbounded();
     let mut target = FetchResponseCollector { sender: sender };
-    block_on(methods::fetch(request, &mut target, &mut context));
-    receiver.recv().unwrap()
+    block_on(async move {
+        methods::fetch(request, &mut target, &mut context).await;
+        receiver.recv().unwrap()
+    })
 }
 
 fn fetch_with_cors_cache(request: &mut Request, cache: &mut CorsCache) -> Response {
     let (sender, receiver) = unbounded();
     let mut target = FetchResponseCollector { sender: sender };
-    block_on(methods::fetch_with_cors_cache(
-        request,
-        cache,
-        &mut target,
-        &mut new_fetch_context(None, None, None),
-    ));
-    receiver.recv().unwrap()
+    block_on(async move {
+        methods::fetch_with_cors_cache(
+            request,
+            cache,
+            &mut target,
+            &mut new_fetch_context(None, None, None),
+        )
+        .await;
+        receiver.recv().unwrap()
+    })
 }
 
 pub(crate) struct Server {
-    pub close_channel: futures::sync::oneshot::Sender<()>,
+    pub close_channel: tokio::sync::oneshot::Sender<()>,
 }
 
 impl Server {
     fn close(self) {
-        self.close_channel.send(()).unwrap();
+        self.close_channel.send(()).expect("err closing server:");
     }
 }

@@ -172,19 +183,26 @@ where
     let listener = StdTcpListener::bind("0.0.0.0:0").unwrap();
     let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
     let url = ServoUrl::parse(&url_string).unwrap();
-    let (tx, rx) = futures::sync::oneshot::channel::<()>();
-    let server = HyperServer::from_tcp(listener)
-        .unwrap()
-        .serve(move || {
-            let handler = handler.clone();
-            service_fn_ok(move |req: HyperRequest<Body>| {
-                let mut response = HyperResponse::new(Vec::<u8>::new().into());
-                handler(req, &mut response);
-                response
-            })
-        })
-        .with_graceful_shutdown(rx)
-        .map_err(|_| ());
+    let (tx, rx) = tokio::sync::oneshot::channel::<()>();
+    let server = async move {
+        HyperServer::from_tcp(listener)
+            .unwrap()
+            .serve(make_service_fn(move |_| {
+                let handler = handler.clone();
+                ready(Ok::<_, Infallible>(service_fn(
+                    move |req: HyperRequest<Body>| {
+                        let mut response = HyperResponse::new(Vec::<u8>::new().into());
+                        handler(req, &mut response);
+                        ready(Ok::<_, Infallible>(response))
+                    },
+                )))
+            }))
+            .with_graceful_shutdown(async move {
+                rx.await.ok();
+            })
+            .await
+            .expect("Could not start server");
+    };
 
     HANDLE.lock().unwrap().spawn(server);
     let server = Server { close_channel: tx };

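Illustrative note (not part of the commit): the rewritten make_server above follows hyper 0.14's two-layer service model, in which make_service_fn yields one service_fn per connection and graceful shutdown is driven by an arbitrary future such as a oneshot receiver. A hedged standalone sketch (the handler and address are invented), assuming hyper 0.14 with its server features and tokio with the full feature set:

use std::convert::Infallible;

use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Request, Response, Server};

async fn handle(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
    Ok(Response::new(Body::from("Hello!")))
}

#[tokio::main]
async fn main() {
    let (tx, rx) = tokio::sync::oneshot::channel::<()>();

    // make_service_fn runs once per connection; service_fn runs once per request.
    let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) });

    let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 0));
    let server = Server::bind(&addr)
        .serve(make_svc)
        .with_graceful_shutdown(async {
            rx.await.ok();
        });

    // Signalling the oneshot sender stops the server; here we shut down immediately.
    let _ = tx.send(());
    server.await.expect("server error");
}
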
@@ -197,43 +215,64 @@ where
 {
     let handler = Arc::new(handler);
     let listener = StdTcpListener::bind("[::0]:0").unwrap();
-    let listener = TcpListener::from_std(listener, &Handle::default()).unwrap();
+    let listener = HANDLE
+        .lock()
+        .unwrap()
+        .block_on(async move { TcpListener::from_std(listener).unwrap() });
 
     let url_string = format!("http://localhost:{}", listener.local_addr().unwrap().port());
+    let mut listener = TcpListenerStream::new(listener);
+
     let url = ServoUrl::parse(&url_string).unwrap();
+    let (tx, mut rx) = tokio::sync::oneshot::channel::<()>();
 
-    let server = listener.incoming().map_err(|_| ()).for_each(move |sock| {
-        let mut tls_server_config = SslAcceptor::mozilla_intermediate_v5(SslMethod::tls()).unwrap();
-        tls_server_config
-            .set_certificate_file(&cert_path, SslFiletype::PEM)
-            .unwrap();
-        tls_server_config
-            .set_private_key_file(&key_path, SslFiletype::PEM)
-            .unwrap();
-
-        let handler = handler.clone();
-        tls_server_config
-            .build()
-            .accept_async(sock)
-            .map_err(|_| ())
-            .and_then(move |ssl| {
-                Http::new()
-                    .serve_connection(
-                        ssl,
-                        service_fn_ok(move |req: HyperRequest<Body>| {
-                            let mut response = HyperResponse::new(Vec::<u8>::new().into());
-                            handler(req, &mut response);
-                            response
-                        }),
-                    )
-                    .map_err(|_| ())
-            })
-    });
-
-    let (tx, rx) = futures::sync::oneshot::channel::<()>();
-    let server = server
-        .select(rx.map_err(|_| ()))
-        .map(|_| ())
-        .map_err(|_| ());
+    let server = async move {
+        loop {
+            let stream = tokio::select! {
+                stream = listener.next() => stream,
+                _ = &mut rx => break
+            };
+
+            let stream = match stream {
+                Some(stream) => stream.expect("Could not accept stream: "),
+                _ => break,
+            };
+
+            let stream = stream.into_std().unwrap();
+            stream
+                .set_read_timeout(Some(std::time::Duration::new(5, 0)))
+                .unwrap();
+            let stream = TcpStream::from_std(stream).unwrap();
+
+            let mut tls_server_config =
+                SslAcceptor::mozilla_intermediate_v5(SslMethod::tls()).unwrap();
+            tls_server_config
+                .set_certificate_file(&cert_path, SslFiletype::PEM)
+                .unwrap();
+            tls_server_config
+                .set_private_key_file(&key_path, SslFiletype::PEM)
+                .unwrap();
+
+            let tls_server_config = tls_server_config.build();
+            let ssl = Ssl::new(tls_server_config.context()).unwrap();
+            let mut stream = SslStream::new(ssl, stream).unwrap();
+
+            let _ = Pin::new(&mut stream).accept().await;
+
+            let handler = handler.clone();
+
+            let _ = Http::new()
+                .serve_connection(
+                    stream,
+                    service_fn(move |req: HyperRequest<Body>| {
+                        let mut response = HyperResponse::new(Body::empty());
+                        handler(req, &mut response);
+                        ready(Ok::<_, Infallible>(response))
+                    }),
+                )
+                .await;
+        }
+    };
 
     HANDLE.lock().unwrap().spawn(server);
 
@@ -43,7 +43,7 @@ fn test_sniff_mp4_matcher_long() {
     let matcher = Mp4Matcher;
 
     let mut data: [u8; 260] = [0; 260];
-    &data[..11].clone_from_slice(&[
+    let _ = &data[..11].clone_from_slice(&[
         0x00, 0x00, 0x01, 0x04, 0x66, 0x74, 0x79, 0x70, 0x6D, 0x70, 0x34,
     ]);
 
@@ -19,9 +19,9 @@ use crate::http_loader::HttpState;
 use async_tungstenite::tokio::{client_async_tls_with_connector_and_config, ConnectStream};
 use async_tungstenite::WebSocketStream;
 use embedder_traits::resources::{self, Resource};
-use futures03::future::TryFutureExt;
-use futures03::sink::SinkExt;
-use futures03::stream::StreamExt;
+use futures::future::TryFutureExt;
+use futures::sink::SinkExt;
+use futures::stream::StreamExt;
 use http::header::{HeaderMap, HeaderName, HeaderValue};
 use ipc_channel::ipc::{IpcReceiver, IpcSender};
 use ipc_channel::router::ROUTER;

@@ -16,10 +16,10 @@ doctest = false
 content-security-policy = { version = "0.4.0", features = ["serde"] }
 cookie = "0.11"
 embedder_traits = { path = "../embedder_traits" }
-headers = "0.2"
-http = "0.1"
-hyper = "0.12"
-hyper_serde = "0.11"
+headers = "0.3"
+http = "0.2"
+hyper = "0.14"
+hyper_serde = "0.12"
 ipc-channel = "0.14"
 lazy_static = "1"
 log = "0.4"

@@ -21,9 +21,9 @@ use crate::response::{HttpsState, Response, ResponseInit};
 use crate::storage_thread::StorageThreadMsg;
 use cookie::Cookie;
 use headers::{ContentType, HeaderMapExt, ReferrerPolicy as ReferrerPolicyHeader};
+use http::StatusCode;
 use http::{Error as HttpError, HeaderMap};
 use hyper::Error as HyperError;
-use hyper::StatusCode;
 use hyper_serde::Serde;
 use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
 use ipc_channel::router::ROUTER;

@@ -759,7 +759,7 @@ pub enum NetworkError {
 impl NetworkError {
     pub fn from_hyper_error(error: &HyperError, cert_bytes: Option<Vec<u8>>) -> Self {
         let s = error.to_string();
-        if s.contains("the handshake failed") {
+        if s.to_lowercase().contains("ssl") {
             NetworkError::SslValidation(s, cert_bytes.unwrap_or_default())
         } else {
             NetworkError::Internal(s)

@@ -8,7 +8,7 @@ use crate::ResourceTimingType;
 use content_security_policy::{self as csp, CspList};
 use http::header::{HeaderName, AUTHORIZATION};
 use http::HeaderMap;
-use hyper::Method;
+use http::Method;
 use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
 use mime::Mime;
 use msg::constellation_msg::PipelineId;

@@ -54,11 +54,10 @@ enum-iterator = "0.3"
 euclid = "0.20"
 fnv = "1.0"
 fxhash = "0.2"
-headers = "0.2"
+headers = "0.3"
 html5ever = "0.25"
-http = "0.1"
-hyper = "0.12"
-hyper_serde = "0.11"
+http = "0.2"
+hyper_serde = "0.12"
 image = "0.23"
 indexmap = { version = "1.0.2", features = ["std"] }
 ipc-channel = "0.14"

@@ -113,7 +112,7 @@ unicode-segmentation = "1.1.0"
 url = "2.0"
 utf-8 = "0.7"
 uuid = { version = "0.8", features = ["v4", "serde"] }
-webdriver = "0.40"
+webdriver = "0.44"
 webgpu = { path = "../webgpu" }
 webrender_api = { git = "https://github.com/servo/webrender" }
 webxr-api = { git = "https://github.com/servo/webxr", features = ["ipc"] }

@@ -73,8 +73,8 @@ use euclid::Length as EuclidLength;
 use html5ever::buffer_queue::BufferQueue;
 use html5ever::{LocalName, Namespace, Prefix, QualName};
 use http::header::HeaderMap;
-use hyper::Method;
-use hyper::StatusCode;
+use http::Method;
+use http::StatusCode;
 use indexmap::IndexMap;
 use ipc_channel::ipc::{IpcReceiver, IpcSender};
 use js::glue::{CallObjectTracer, CallScriptTracer, CallStringTracer, CallValueTracer};

@@ -61,7 +61,7 @@ use dom_struct::dom_struct;
 use encoding_rs::{Encoding, UTF_8};
 use headers::{ContentType, HeaderMapExt};
 use html5ever::{LocalName, Prefix};
-use hyper::Method;
+use http::Method;
 use mime::{self, Mime};
 use net_traits::http_percent_encode;
 use net_traits::request::Referrer;

@@ -24,7 +24,7 @@ use crate::script_runtime::JSContext as SafeJSContext;
 use crate::script_runtime::StreamConsumer;
 use dom_struct::dom_struct;
 use http::header::HeaderMap as HyperHeaders;
-use hyper::StatusCode;
+use http::StatusCode;
 use hyper_serde::Serde;
 use js::jsapi::JSObject;
 use servo_url::ServoUrl;

@@ -45,7 +45,7 @@ use headers::{ContentLength, ContentType, HeaderMapExt};
 use html5ever::serialize;
 use html5ever::serialize::SerializeOpts;
 use http::header::{self, HeaderMap, HeaderName, HeaderValue};
-use hyper::Method;
+use http::Method;
 use hyper_serde::Serde;
 use ipc_channel::ipc;
 use ipc_channel::router::ROUTER;

@@ -20,9 +20,9 @@ devtools_traits = { path = "../devtools_traits" }
 embedder_traits = { path = "../embedder_traits" }
 euclid = "0.20"
 gfx_traits = { path = "../gfx_traits" }
-http = "0.1"
-hyper = "0.12"
-hyper_serde = "0.11"
+headers = "0.3"
+http = "0.2"
+hyper_serde = "0.12"
 ipc-channel = "0.14"
 keyboard-types = "0.5"
 libc = "0.2"

@@ -41,7 +41,7 @@ smallvec = "0.6"
 style_traits = { path = "../style_traits", features = ["servo"] }
 time = "0.1.41"
 uuid = { version = "0.8", features = ["v4"] }
-webdriver = "0.40"
+webdriver = "0.44"
 webgpu = { path = "../webgpu" }
 webrender_api = { git = "https://github.com/servo/webrender" }
 webxr-api = { git = "https://github.com/servo/webxr", features = ["ipc"] }

@@ -34,7 +34,7 @@ use embedder_traits::EventLoopWaker;
 use euclid::{default::Point2D, Length, Rect, Scale, Size2D, UnknownUnit, Vector2D};
 use gfx_traits::Epoch;
 use http::HeaderMap;
-use hyper::Method;
+use http::Method;
 use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
 use ipc_channel::Error as IpcError;
 use keyboard_types::webdriver::Event as WebDriverInputEvent;

@@ -16,7 +16,8 @@ compositing = { path = "../compositing" }
 cookie = "0.11"
 crossbeam-channel = "0.4"
 euclid = "0.20"
-hyper = "0.12"
+headers = "0.3"
+http = "0.2"
 image = "0.23"
 ipc-channel = "0.14"
 keyboard-types = "0.5"

@@ -31,4 +32,4 @@ servo_config = { path = "../config" }
 servo_url = { path = "../url" }
 style_traits = { path = "../style_traits" }
 uuid = { version = "0.8", features = ["v4"] }
-webdriver = "0.40"
+webdriver = "0.44"

@@ -4,7 +4,7 @@
 
 use serde_json::{Map, Value};
 use webdriver::capabilities::{BrowserCapabilities, Capabilities};
-use webdriver::error::WebDriverResult;
+use webdriver::error::{WebDriverError, WebDriverResult};
 
 pub struct ServoCapabilities {
     pub browser_name: String,

@@ -71,9 +71,16 @@ impl BrowserCapabilities for ServoCapabilities {
         Ok(self.accept_custom)
     }
 
-    fn validate_custom(&self, _: &str, _: &Value) -> WebDriverResult<()> {
+    fn validate_custom(&mut self, _: &str, _: &Value) -> WebDriverResult<()> {
         Ok(())
     }
+
+    fn web_socket_url(
+        &mut self,
+        _: &serde_json::Map<std::string::String, Value>,
+    ) -> Result<bool, WebDriverError> {
+        todo!()
+    }
 }
 
 fn get_platform_name() -> Option<String> {

@@ -22,7 +22,7 @@ use capabilities::ServoCapabilities;
 use compositing::ConstellationMsg;
 use crossbeam_channel::{after, unbounded, Receiver, Sender};
 use euclid::{Rect, Size2D};
-use hyper::Method;
+use http::method::Method;
 use image::{DynamicImage, ImageFormat, RgbImage};
 use ipc_channel::ipc::{self, IpcSender};
 use ipc_channel::router::ROUTER;

@@ -70,7 +70,7 @@ use webdriver::httpapi::WebDriverExtensionRoute;
 use webdriver::response::{CookieResponse, CookiesResponse};
 use webdriver::response::{ElementRectResponse, NewSessionResponse, ValueResponse};
 use webdriver::response::{TimeoutsResponse, WebDriverResponse, WindowRectResponse};
-use webdriver::server::{self, Session, WebDriverHandler};
+use webdriver::server::{self, Session, SessionTeardownKind, WebDriverHandler};
 
 fn extension_routes() -> Vec<(Method, &'static str, ServoExtensionRoute)> {
     return vec![

@@ -103,6 +103,7 @@ fn cookie_msg_to_cookie(cookie: cookie::Cookie) -> Cookie {
             .map(|time| Date(time.to_timespec().sec as u64)),
         secure: cookie.secure().unwrap_or(false),
         http_only: cookie.http_only().unwrap_or(false),
+        same_site: cookie.same_site().map(|s| s.to_string()),
     }
 }
 
|
@ -112,7 +113,12 @@ pub fn start_server(port: u16, constellation_chan: Sender<ConstellationMsg>) {
|
||||||
.name("WebDriverHttpServer".to_owned())
|
.name("WebDriverHttpServer".to_owned())
|
||||||
.spawn(move || {
|
.spawn(move || {
|
||||||
let address = SocketAddrV4::new("0.0.0.0".parse().unwrap(), port);
|
let address = SocketAddrV4::new("0.0.0.0".parse().unwrap(), port);
|
||||||
match server::start(SocketAddr::V4(address), handler, extension_routes()) {
|
match server::start(
|
||||||
|
"localhost".to_owned(),
|
||||||
|
SocketAddr::V4(address),
|
||||||
|
handler,
|
||||||
|
extension_routes(),
|
||||||
|
) {
|
||||||
Ok(listening) => info!("WebDriver server listening on {}", listening.socket),
|
Ok(listening) => info!("WebDriver server listening on {}", listening.socket),
|
||||||
Err(_) => panic!("Unable to start WebDriver HTTPD server"),
|
Err(_) => panic!("Unable to start WebDriver HTTPD server"),
|
||||||
}
|
}
|
||||||
|
@@ -1780,7 +1786,7 @@ impl WebDriverHandler<ServoExtensionRoute> for Handler {
         }
     }
 
-    fn delete_session(&mut self, _session: &Option<Session>) {
+    fn teardown_session(&mut self, _session: SessionTeardownKind) {
        self.session = None;
     }
 }

@@ -34,16 +34,15 @@ packages = [
   "cfg-if",
   "cloudabi",
   "cocoa",
+  "cookie",
   "crossbeam-channel",
-  "crossbeam-deque",
-  "crossbeam-epoch",
   "crossbeam-utils",
   "env_logger",
   "fixedbitset",
   "gleam",
+  "h2",
   "libloading",
   "lock_api",
-  "memoffset",
   "metal",
   "miniz_oxide",
   "num-rational",

@@ -52,16 +51,22 @@ packages = [
   "petgraph",
   "ron",
   "wayland-sys",
-  "rustc_version",
-  "semver",
-  "semver-parser",
 
   # https://github.com/servo/servo/issues/26933
   "futures",
+  "mio",
   "tokio-openssl",
   "tokio",
-  "http",
+  "tokio-macros",
+  "tokio-util",
+  "http-body",
+  "httpdate",
+  "hyper",
   "bytes",
+  "pin-project",
+  "pin-project-lite",
+  "pin-project-internal",
+  "socket2",
 
   # https://github.com/servo/servo/pull/23288#issuecomment-494687746
   "gl_generator",

@@ -41,16 +41,13 @@
   [Content-Length%3A%20]
     expected: FAIL
 
-  [Input: "Content-Length: 42,42". Expected: 42.]
-    expected: FAIL
-
   [Input: "Content-Length: 42\\r\\nContent-Length: 42,42". Expected: 42.]
     expected: FAIL
 
-  [Input: "Content-Length: 30,30". Expected: 30.]
+  [Input: "Content-Length: 030, 30". Expected: network error.]
     expected: FAIL
 
-  [Input: "Content-Length: 30\\r\\nContent-Length: 30,30". Expected: 30.]
+  [Input: "Content-Length: 030,30". Expected: network error.]
    expected: FAIL
 
   [Input: "Content-Length: aaaah\\r\\nContent-Length: aaaah". Expected: 42.]

@@ -1,8 +0,0 @@
-[credentials-flag.htm]
-  type: testharness
-  [Access-Control-Allow-Credentials: True should be disallowed (async)]
-    expected: FAIL
-
-  [Access-Control-Allow-Credentials: TRUE should be disallowed (async)]
-    expected: FAIL
-

@@ -41,16 +41,13 @@
   [Content-Length%3A%20]
    expected: FAIL
 
-  [Input: "Content-Length: 42,42". Expected: 42.]
-    expected: FAIL
-
   [Input: "Content-Length: 42\\r\\nContent-Length: 42,42". Expected: 42.]
     expected: FAIL
 
-  [Input: "Content-Length: 30,30". Expected: 30.]
+  [Input: "Content-Length: 030, 30". Expected: network error.]
     expected: FAIL
 
-  [Input: "Content-Length: 30\\r\\nContent-Length: 30,30". Expected: 30.]
+  [Input: "Content-Length: 030,30". Expected: network error.]
     expected: FAIL
 
   [Input: "Content-Length: aaaah\\r\\nContent-Length: aaaah". Expected: 42.]