mirror of https://github.com/servo/servo.git
parent 9ac250c62c
commit 67aa11323b
3 changed files with 200 additions and 9 deletions
@@ -184,7 +184,7 @@ reason: \"certificate verify failed\" }]))";
}
}

enum LoadError {
pub enum LoadError {
    UnsupportedScheme(Url),
    Connection(Url, String),
    Cors(Url, String),
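Making LoadError public lets code outside this module, including the new unit tests below, match on individual failure variants. A minimal illustration, assuming only the variants visible in this hunk (the real enum may carry more):

// Illustrative only, not part of this commit: branching on the now-public variants.
fn describe_load_error(err: &LoadError) -> String {
    match *err {
        LoadError::UnsupportedScheme(ref url) => format!("unsupported scheme: {}", url),
        LoadError::Connection(ref url, ref reason) => format!("connection to {} failed: {}", url, reason),
        LoadError::Cors(ref url, ref reason) => format!("CORS failure for {}: {}", url, reason),
        _ => "other load error".to_owned(),
    }
}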
@@ -220,13 +220,8 @@ fn load<C, S>(mut load_data: LoadData,
// the source rather than rendering the contents of the URL.
let viewing_source = url.scheme == "view-source";
if viewing_source {
    let inner_url = replace_hosts(&inner_url(&load_data.url));
    doc_url = inner_url.clone();
    if &*inner_url.scheme != "http" && &*inner_url.scheme != "https" {
        return Err(LoadError::UnsupportedScheme(inner_url));
    } else {
        url = inner_url;
    }
    url = inner_url(&load_data.url);
    doc_url = url.clone();
}

// Loop to handle redirects.
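For context, inner_url (defined elsewhere in this file, not shown here) unwraps the URL embedded in a view-source: URL. A rough stand-in, assuming a simple prefix strip and re-parse with the url crate; the real helper may differ:

// Hypothetical sketch of the inner_url helper referenced above:
// "view-source:ftp://not-supported" -> "ftp://not-supported"
fn inner_url_sketch(url: &url::Url) -> url::Url {
    let stripped = url.as_str().trim_start_matches("view-source:");
    url::Url::parse(stripped).unwrap()
}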
@@ -270,6 +265,8 @@ fn load<C, S>(mut load_data: LoadData,
req.headers_mut().set(host);

// --- Set default accept header
if !req.headers().has::<Accept>() {
    let accept = Accept(vec![
        qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])),
@@ -280,6 +277,7 @@ fn load<C, S>(mut load_data: LoadData,
    req.headers_mut().set(accept);
}

// --- Fetch cookies
let (tx, rx) = ipc::channel().unwrap();
resource_mgr_chan.send(ControlMsg::GetCookiesForUrl(doc_url.clone(),
                                                    tx,
@@ -290,9 +288,11 @@ fn load<C, S>(mut load_data: LoadData,
    req.headers_mut().set_raw("Cookie".to_owned(), v);
}

// --- Set default accept encoding
if !req.headers().has::<AcceptEncoding>() {
    req.headers_mut().set_raw("Accept-Encoding".to_owned(), vec![b"gzip, deflate".to_vec()]);
}

if log_enabled!(log::LogLevel::Info) {
    info!("{}", load_data.method);
    for header in req.headers().iter() {
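The Accept-Encoding default above is written as a raw byte header. A typed equivalent with hyper's header API, sketched under the assumption of the hyper 0.x headers already used elsewhere in this file:

use hyper::header::{AcceptEncoding, Encoding, Headers, qitem};

// Sketch: the same "gzip, deflate" default expressed with typed headers.
fn set_default_accept_encoding(headers: &mut Headers) {
    if !headers.has::<AcceptEncoding>() {
        headers.set(AcceptEncoding(vec![qitem(Encoding::Gzip), qitem(Encoding::Deflate)]));
    }
}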
@@ -301,6 +301,7 @@ fn load<C, S>(mut load_data: LoadData,
    info!("{:?}", load_data.data);
}

// --- Start sending the request
// Avoid automatically sending request body if a redirect has occurred.
let writer = match load_data.data {
    Some(ref data) if iters == 1 => {
@@ -335,6 +336,7 @@ fn load<C, S>(mut load_data: LoadData,
    }
};

// --- Tell devtools we've made a request
// Send an HttpRequest message to devtools with a unique request_id
// TODO: Do this only if load_data has some pipeline_id, and send the pipeline_id in the message
let request_id = uuid::Uuid::new_v4().to_simple_string();
@@ -348,6 +350,7 @@ fn load<C, S>(mut load_data: LoadData,
        net_event))).unwrap();
}

// --- Finish writing the request and read the response
let response = match writer.send() {
    Ok(r) => r,
    Err(e) => {
@@ -363,6 +366,7 @@ fn load<C, S>(mut load_data: LoadData,
    }
}

// --- Update the resource manager that we've gotten a cookie
if let Some(cookies) = response.headers.get_raw("set-cookie") {
    for cookie in cookies {
        if let Ok(cookies) = String::from_utf8(cookie.clone()) {
@@ -395,7 +399,7 @@ fn load<C, S>(mut load_data: LoadData,
    }
}

// --- Loop if there's a redirect
if response.status.class() == StatusClass::Redirection {
    match response.headers.get::<Location>() {
        Some(&Location(ref new_url)) => {
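A Location header value may be relative to the current URL. A hedged sketch of resolving it, using the current url crate's join rather than necessarily the exact call this code makes:

// Sketch: resolve a possibly-relative redirect target against the request URL.
fn resolve_redirect(base: &url::Url, location: &str) -> Result<url::Url, url::ParseError> {
    base.join(location)
}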
@@ -441,10 +445,12 @@ fn load<C, S>(mut load_data: LoadData,
}

let mut adjusted_headers = response.headers.clone();

if viewing_source {
    adjusted_headers.set(ContentType(Mime(TopLevel::Text, SubLevel::Plain, vec![])));
}
let mut metadata: Metadata = Metadata::default(doc_url.clone());

metadata.set_content_type(match adjusted_headers.get() {
    Some(&ContentType(ref mime)) => Some(mime),
    None => None
@@ -453,6 +459,8 @@ fn load<C, S>(mut load_data: LoadData,
metadata.status = Some(response.status_raw().clone());

let mut encoding_str: Option<String> = None;

//TODO: This is now in hyper, just need to implement
//FIXME: Implement Content-Encoding Header https://github.com/hyperium/hyper/issues/391
if let Some(encodings) = response.headers.get_raw("content-encoding") {
    for encoding in encodings {
@@ -465,6 +473,7 @@ fn load<C, S>(mut load_data: LoadData,
    }
}

// --- Tell devtools that we got a response
// Send an HttpResponse message to devtools with the corresponding request_id
// TODO: Send this message only if load_data has a pipeline_id that is not None
if let Some(ref chan) = devtools_chan {
@@ -477,6 +486,7 @@ fn load<C, S>(mut load_data: LoadData,
        net_event_response))).unwrap();
}

// --- Stream the results depending on the encoding type.
match encoding_str {
    Some(encoding) => {
        if encoding == "gzip" {
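The body of the gzip branch is cut off in this view. As a rough sketch of the idea only, assuming a recent flate2 rather than whatever decoder this code actually uses, decompressing a gzip-encoded body looks like:

use std::io::Read;
use flate2::read::GzDecoder;

// Sketch only: drain a gzip-encoded response body into memory.
fn read_gzipped_body<R: Read>(raw: R) -> std::io::Result<Vec<u8>> {
    let mut body = Vec::new();
    GzDecoder::new(raw).read_to_end(&mut body)?;
    Ok(body)
}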
tests/unit/net/http_loader.rs (new file, 179 lines)
@@ -0,0 +1,179 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use net::http_loader::{load, LoadError};
use hyper::net::{NetworkStream, NetworkConnector};
use hyper;
use url::Url;
use std::io::{self, Read, Write, Cursor};
use std::fmt;
use std::net::SocketAddr;
use std::sync::{Arc, Mutex};
use ipc_channel::ipc;
use net_traits::LoadData;
use net::hsts::HSTSList;
use net::hsts::HSTSEntry;

#[derive(Clone)]
pub struct MockStream {
    pub read: Cursor<Vec<u8>>,
    pub write: Vec<u8>,
    #[cfg(feature = "timeouts")]
    pub read_timeout: Cell<Option<Duration>>,
    #[cfg(feature = "timeouts")]
    pub write_timeout: Cell<Option<Duration>>
}

impl fmt::Debug for MockStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "MockStream {{ read: {:?}, write: {:?} }}", self.read.get_ref(), self.write)
    }
}

impl PartialEq for MockStream {
    fn eq(&self, other: &MockStream) -> bool {
        self.read.get_ref() == other.read.get_ref() && self.write == other.write
    }
}

impl MockStream {
    pub fn new() -> MockStream {
        MockStream::with_input(b"")
    }

    #[cfg(not(feature = "timeouts"))]
    pub fn with_input(input: &[u8]) -> MockStream {
        MockStream {
            read: Cursor::new(input.to_vec()),
            write: vec![]
        }
    }

    #[cfg(feature = "timeouts")]
    pub fn with_input(input: &[u8]) -> MockStream {
        MockStream {
            read: Cursor::new(input.to_vec()),
            write: vec![],
            read_timeout: Cell::new(None),
            write_timeout: Cell::new(None),
        }
    }
}

impl Read for MockStream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.read.read(buf)
    }
}

impl Write for MockStream {
    fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
        Write::write(&mut self.write, msg)
    }

    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

impl NetworkStream for MockStream {
    fn peer_addr(&mut self) -> io::Result<SocketAddr> {
        Ok("127.0.0.1:1337".parse().unwrap())
    }

    #[cfg(feature = "timeouts")]
    fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.read_timeout.set(dur);
        Ok(())
    }

    #[cfg(feature = "timeouts")]
    fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.write_timeout.set(dur);
        Ok(())
    }
}

/// A wrapper around a `MockStream` that allows one to clone it and keep an independent copy to the
/// same underlying stream.
#[derive(Clone)]
pub struct CloneableMockStream {
    pub inner: Arc<Mutex<MockStream>>,
}

impl Write for CloneableMockStream {
    fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
        self.inner.lock().unwrap().write(msg)
    }

    fn flush(&mut self) -> io::Result<()> {
        self.inner.lock().unwrap().flush()
    }
}

impl Read for CloneableMockStream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.inner.lock().unwrap().read(buf)
    }
}

impl NetworkStream for CloneableMockStream {
    fn peer_addr(&mut self) -> io::Result<SocketAddr> {
        self.inner.lock().unwrap().peer_addr()
    }

    #[cfg(feature = "timeouts")]
    fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.inner.lock().unwrap().set_read_timeout(dur)
    }

    #[cfg(feature = "timeouts")]
    fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.inner.lock().unwrap().set_write_timeout(dur)
    }
}

impl CloneableMockStream {
    pub fn with_stream(stream: MockStream) -> CloneableMockStream {
        CloneableMockStream {
            inner: Arc::new(Mutex::new(stream)),
        }
    }
}

pub struct MockConnector;

impl NetworkConnector for MockConnector {
    type Stream = MockStream;

    fn connect(&self, _host: &str, _port: u16, _scheme: &str) -> hyper::Result<MockStream> {
        Ok(MockStream::new())
    }
}

#[test]
fn test_load_errors_when_scheme_is_not_http_or_https() {
    let url = Url::parse("ftp://not-supported").unwrap();
    let (cookies_chan, _) = ipc::channel().unwrap();
    let load_data = LoadData::new(url, None);
    let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() }));

    match load(load_data, cookies_chan, None, hsts_list, &MockConnector) {
        Err(LoadError::UnsupportedScheme(_)) => {}
        _ => panic!("expected ftp scheme to be unsupported")
    }
}

#[test]
fn test_load_errors_when_viewing_source_and_inner_url_scheme_is_not_http_or_https() {
    let url = Url::parse("view-source:ftp://not-supported").unwrap();
    let (cookies_chan, _) = ipc::channel().unwrap();
    let load_data = LoadData::new(url, None);
    let hsts_list = Arc::new(Mutex::new(HSTSList { entries: Vec::new() }));

    match load(load_data, cookies_chan, None, hsts_list, &MockConnector) {
        Err(LoadError::UnsupportedScheme(_)) => {}
        _ => panic!("expected ftp scheme to be unsupported")
    }
}
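MockConnector always hands back a fresh MockStream, so the two tests above only inspect the returned error. A hypothetical extension, not part of this commit, showing what CloneableMockStream is good for: a connector that shares one stream with the test so the bytes the loader writes can be inspected afterwards.

// Hypothetical: a connector whose "connections" all share one underlying MockStream.
struct RecordingConnector {
    stream: CloneableMockStream,
}

impl NetworkConnector for RecordingConnector {
    type Stream = CloneableMockStream;

    fn connect(&self, _host: &str, _port: u16, _scheme: &str) -> hyper::Result<CloneableMockStream> {
        // Clones share the same Arc<Mutex<MockStream>>, so the test keeps
        // visibility into whatever the loader writes on this stream.
        Ok(self.stream.clone())
    }
}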
@@ -10,9 +10,11 @@ extern crate net_traits;
extern crate url;
extern crate util;
extern crate time;
extern crate hyper;

#[cfg(test)] mod cookie;
#[cfg(test)] mod data_loader;
#[cfg(test)] mod mime_classifier;
#[cfg(test)] mod resource_task;
#[cfg(test)] mod hsts;
#[cfg(test)] mod http_loader;