Upgrade to rustc 551a74dddd84cf01440ee84148ebd18bc68bd7c8.
This commit is contained in:
parent 7b87085c18
commit ef8edd4e87
168 changed files with 2247 additions and 2408 deletions

@@ -22,7 +22,7 @@ git = "https://github.com/servo/rust-png"
 [dependencies]
 url = "0.2.16"
 time = "0.1.17"
-openssl="0.5.1"
+openssl="0.6.1"
 rustc-serialize = "0.3"
 cookie="*"
 regex = "0.1.14"

@@ -14,7 +14,6 @@ use hyper::http::RawStatus;
 use hyper::mime::{Mime, TopLevel, SubLevel};
 use util::resource_files::resources_dir_path;
 
-use std::borrow::IntoCow;
 use std::fs::PathExt;
 use std::sync::Arc;
 

@@ -26,7 +25,7 @@ pub fn factory(mut load_data: LoadData, start_chan: LoadConsumer, classifier: Ar
         content_type: Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, vec![]))),
         charset: Some("utf-8".to_string()),
         headers: None,
-        status: Some(RawStatus(200, "OK".into_cow())),
+        status: Some(RawStatus(200, "OK".into())),
     });
     chan.send(Done(Ok(()))).unwrap();
     return
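
Aside on the "OK".into_cow() -> "OK".into() change above: std::borrow::IntoCow was removed around this rustc, and the reason phrase of RawStatus (presumably a Cow<'static, str> in this hyper version) can be built from a string literal through the ordinary Into conversion. A minimal std-only sketch of that conversion:

    use std::borrow::Cow;

    fn main() {
        // A &'static str turns into a Cow<'static, str> via the blanket Into impl,
        // which is all that "OK".into() relies on once IntoCow is gone.
        let reason: Cow<'static, str> = "OK".into();
        assert_eq!(reason, Cow::Borrowed("OK"));
        println!("reason phrase: {}", reason);
    }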

@@ -9,12 +9,10 @@ use net_traits::CookieSource;
 use pub_domains::PUB_DOMAINS;
 
 use cookie_rs;
-use time::{Tm, now, at, Timespec};
+use time::{Tm, now, at, Duration};
 use url::Url;
 use std::borrow::ToOwned;
-use std::i64;
 use std::net::{Ipv4Addr, Ipv6Addr};
-use std::time::Duration;
 use std::str::FromStr;
 
 /// A stored cookie that wraps the definition in cookie-rs. This is used to implement

@@ -27,7 +25,7 @@ pub struct Cookie {
     pub persistent: bool,
     pub creation_time: Tm,
     pub last_access: Tm,
-    pub expiry_time: Tm,
+    pub expiry_time: Option<Tm>,
 }
 
 impl Cookie {

@@ -36,9 +34,11 @@ impl Cookie {
                    -> Option<Cookie> {
         // Step 3
         let (persistent, expiry_time) = match (&cookie.max_age, &cookie.expires) {
-            (&Some(max_age), _) => (true, at(now().to_timespec() + Duration::seconds(max_age as i64))),
-            (_, &Some(expires)) => (true, expires),
-            _ => (false, at(Timespec::new(i64::MAX, 0)))
+            (&Some(max_age), _) => {
+                (true, Some(at(now().to_timespec() + Duration::seconds(max_age as i64))))
+            }
+            (_, &Some(expires)) => (true, Some(expires)),
+            _ => (false, None)
         };
 
         let url_host = request.host().map(|host| host.serialize()).unwrap_or("".to_owned());
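
The reshaped match above replaces the far-future sentinel at(Timespec::new(i64::MAX, 0)) with an Option<Tm>, so a session cookie simply has no expiry time. A std-only stand-in for that shape (Unix seconds instead of time::Tm; the names are illustrative, not Servo's):

    // Simplified model of the persistent/expiry decision; expiry is plain Unix
    // seconds here rather than time::Tm, purely to keep the sketch dependency-free.
    struct StoredCookie {
        persistent: bool,
        expiry_time: Option<u64>,
    }

    fn classify(max_age: Option<u64>, expires: Option<u64>, now: u64) -> StoredCookie {
        match (max_age, expires) {
            // Max-Age wins: persistent, expiring max_age seconds from now.
            (Some(max_age), _) => StoredCookie { persistent: true, expiry_time: Some(now + max_age) },
            // Otherwise an explicit Expires makes it persistent.
            (_, Some(expires)) => StoredCookie { persistent: true, expiry_time: Some(expires) },
            // No expiry information at all: a session cookie, no sentinel needed.
            _ => StoredCookie { persistent: false, expiry_time: None },
        }
    }

    fn main() {
        let c = classify(Some(3600), None, 1_000_000);
        assert!(c.persistent && c.expiry_time == Some(1_003_600));
        let session = classify(None, None, 1_000_000);
        assert!(!session.persistent && session.expiry_time.is_none());
        println!("ok");
    }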

@@ -68,7 +68,7 @@ impl Cookie {
 
         // Step 7
         let mut path = cookie.path.unwrap_or("".to_owned());
-        if path.is_empty() || path.char_at(0) != '/' {
+        if path.is_empty() || path.as_bytes()[0] != b'/' {
             let url_path = request.serialize_path();
             let url_path = url_path.as_ref().map(|path| &**path);
             path = Cookie::default_path(url_path.unwrap_or("")).to_owned();

@@ -117,7 +117,7 @@ impl Cookie {
     pub fn path_match(request_path: &str, cookie_path: &str) -> bool {
         request_path == cookie_path ||
         ( request_path.starts_with(cookie_path) &&
-            ( request_path.ends_with("/") || request_path.char_at(cookie_path.len() - 1) == '/' )
+            ( request_path.ends_with("/") || request_path.as_bytes()[cookie_path.len() - 1] == b'/' )
         )
     }
 

@@ -127,7 +127,7 @@ impl Cookie {
             return true;
         }
         if string.ends_with(domain_string)
-            && string.char_at(string.len()-domain_string.len()-1) == '.'
+            && string.as_bytes()[string.len()-domain_string.len()-1] == b'.'
             && Ipv4Addr::from_str(string).is_err()
             && Ipv6Addr::from_str(string).is_err() {
             return true;
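
The three hunks above all swap str::char_at, which no longer exists at this rustc, for indexing into as_bytes(); for single-byte ASCII tests like '/' and '.' the comparisons are equivalent. A small std-only check with made-up inputs:

    fn main() {
        // Leading-slash test, as in the cookie path handling.
        let path = "/foo/bar";
        assert!(!path.is_empty() && path.as_bytes()[0] == b'/');

        // Domain-match test: the byte just before the matching suffix must be '.',
        // so "maps.example.org" matches the cookie domain "example.org".
        let (string, domain) = ("maps.example.org", "example.org");
        assert!(string.ends_with(domain)
                && string.as_bytes()[string.len() - domain.len() - 1] == b'.');
        println!("byte-index checks passed");
    }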

@@ -11,7 +11,8 @@ use rustc_serialize::base64::FromBase64;
 
 use hyper::mime::Mime;
 use std::sync::Arc;
-use url::{percent_decode, SchemeData};
+use url::percent_encoding::percent_decode;
+use url::SchemeData;
 
 pub fn factory(load_data: LoadData, senders: LoadConsumer, _classifier: Arc<MIMEClassifier>) {
     // NB: we don't spawn a new task.

@@ -39,7 +40,7 @@ pub fn load(load_data: LoadData, start_chan: LoadConsumer) {
         },
         None => ()
     }
-    let parts: Vec<&str> = scheme_data.splitn(1, ',').collect();
+    let parts: Vec<&str> = scheme_data.splitn(2, ',').collect();
     if parts.len() != 2 {
         start_sending(start_chan, metadata).send(Done(Err("invalid data uri".to_string()))).unwrap();
         return;
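
The splitn(1, ',') -> splitn(2, ',') change tracks the new std semantics: the count argument is now the maximum number of pieces returned rather than the number of splits performed, so cutting a data URI at its first comma still produces exactly two parts. A std-only illustration (the URI content is made up):

    fn main() {
        // scheme_data is what follows "data:" in a data URI; split the metadata
        // from the payload at the first comma only.
        let scheme_data = "text/plain;base64,aGVsbG8=,trailing";
        let parts: Vec<&str> = scheme_data.splitn(2, ',').collect();
        // splitn(2, ..) yields at most two pieces; later commas stay in the payload.
        assert_eq!(parts, ["text/plain;base64", "aGVsbG8=,trailing"]);
        println!("{:?}", parts);
    }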

@@ -101,7 +101,6 @@ pub trait CORSCache {
 
 /// A simple, vector-based CORS Cache
 #[derive(Clone)]
-#[unstable = "This might later be replaced with a HashMap-like entity, though that requires a separate Origin struct"]
 pub struct BasicCORSCache(Vec<CORSCacheEntry>);
 
 impl BasicCORSCache {

@@ -28,7 +28,6 @@ pub enum TerminationReason {
 
 /// The response body can still be pushed to after fetch
 /// This provides a way to store unfinished response bodies
-#[unstable = "I haven't yet decided exactly how the interface for this will be"]
 #[derive(Clone)]
 pub enum ResponseBody {
     Empty, // XXXManishearth is this necessary, or is Done(vec![]) enough?

@@ -36,14 +35,12 @@ pub enum ResponseBody {
     Done(Vec<u8>),
 }
 
-#[unstable = "I haven't yet decided exactly how the interface for this will be"]
 pub enum ResponseMsg {
     Chunk(Vec<u8>),
     Finished,
     Errored
 }
 
-#[unstable = "I haven't yet decided exactly how the interface for this will be"]
 pub struct ResponseLoader {
     response: Response,
     chan: Receiver<ResponseMsg>

@@ -8,6 +8,7 @@ use mime_classifier::MIMEClassifier;
 use resource_task::{start_sending, start_sending_sniffed, ProgressSender};
 
 use std::borrow::ToOwned;
+use std::error::Error;
 use std::fs::File;
 use std::io::Read;
 use std::path::PathBuf;

@@ -19,21 +19,21 @@ use hyper::mime::{Mime, TopLevel, SubLevel};
 use hyper::net::HttpConnector;
 use hyper::status::{StatusCode, StatusClass};
 use std::error::Error;
-use openssl::ssl::{SslContext, SslVerifyMode};
+use openssl::ssl::{SslContext, SSL_VERIFY_PEER};
 use std::io::{self, Read, Write};
 use std::sync::Arc;
 use std::sync::mpsc::{Sender, channel};
-use std::thunk::Invoke;
 use util::task::spawn_named;
 use util::resource_files::resources_dir_path;
 use util::opts;
 use url::{Url, UrlParser};
 
 use std::borrow::ToOwned;
+use std::boxed::FnBox;
 
 pub fn factory(cookies_chan: Sender<ControlMsg>)
-               -> Box<Invoke<(LoadData, LoadConsumer, Arc<MIMEClassifier>)> + Send> {
-    box move |(load_data, senders, classifier)| {
+               -> Box<FnBox(LoadData, LoadConsumer, Arc<MIMEClassifier>) + Send> {
+    box move |load_data, senders, classifier| {
         spawn_named("http_loader".to_owned(), move || load(load_data, senders, classifier, cookies_chan))
     }
 }
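
The Invoke -> FnBox migration above follows std::thunk being dropped in this rustc: a boxed one-shot closure is written as Box<FnBox(..) + Send> and invoked with ordinary arguments instead of a tuple. FnBox itself never stabilized; a rough present-day sketch of the same factory shape, using Box<dyn FnOnce(..)> and made-up stand-in types rather than Servo's, looks like this:

    // Stand-ins for LoadData / LoadConsumer, purely for illustration.
    struct LoadData { url: String }
    struct LoadConsumer;

    type Loader = Box<dyn FnOnce(LoadData, LoadConsumer) + Send>;

    fn from_factory(factory: fn(LoadData, LoadConsumer)) -> Loader {
        // The boxed closure just forwards its arguments to the plain fn,
        // mirroring the from_factory helpers touched in this commit.
        Box::new(move |load_data, consumer| factory(load_data, consumer))
    }

    fn main() {
        let loader = from_factory(|load_data, _consumer| {
            println!("loading {}", load_data.url);
        });
        // Box<dyn FnOnce> is callable directly in current Rust; at the time of this
        // commit the equivalent call went through FnBox::call_box with a tuple.
        loader(LoadData { url: "https://example.org/".to_string() }, LoadConsumer);
    }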

@@ -114,20 +114,20 @@ fn load(mut load_data: LoadData, start_chan: LoadConsumer, classifier: Arc<MIMEC
     info!("requesting {}", url.serialize());
 
     fn verifier(ssl: &mut SslContext) {
-        ssl.set_verify(SslVerifyMode::SslVerifyPeer, None);
+        ssl.set_verify(SSL_VERIFY_PEER, None);
         let mut certs = resources_dir_path();
         certs.push("certs");
-        ssl.set_CA_file(&certs);
+        ssl.set_CA_file(&certs).unwrap();
     };
 
-    let ssl_err_string = "[UnknownError { library: \"SSL routines\", \
+    let ssl_err_string = "Some(OpenSslErrors([UnknownError { library: \"SSL routines\", \
 function: \"SSL3_GET_SERVER_CERTIFICATE\", \
-reason: \"certificate verify failed\" }]";
+reason: \"certificate verify failed\" }]))";
 
     let mut connector = if opts::get().nossl {
         HttpConnector(None)
     } else {
-        HttpConnector(Some(box verifier as Box<FnMut(&mut SslContext)>))
+        HttpConnector(Some(box verifier as Box<FnMut(&mut SslContext) + Send>))
     };
 
     let mut req = match Request::with_connector(load_data.method.clone(), url.clone(), &mut connector) {

@@ -135,7 +135,8 @@ reason: \"certificate verify failed\" }]";
         Err(HttpError::HttpIoError(ref io_error)) if (
             io_error.kind() == io::ErrorKind::Other &&
             io_error.description() == "Error in OpenSSL" &&
-            io_error.detail() == Some(ssl_err_string.to_owned())
+            // FIXME: This incredibly hacky. Make it more robust, and at least test it.
+            format!("{:?}", io_error.cause()) == ssl_err_string
         ) => {
             let mut image = resources_dir_path();
             image.push("badcert.html");

@@ -6,9 +6,6 @@
 #![feature(box_syntax)]
 #![feature(collections)]
-#![feature(core)]
-#![feature(io)]
 #![cfg_attr(test, feature(net))]
-#![feature(path)]
 #![feature(path_ext)]
 #![feature(plugin)]
 #![feature(rustc_private)]

@@ -18,7 +15,7 @@
 #![plugin(regex_macros)]
 
 extern crate net_traits;
-extern crate "cookie" as cookie_rs;
+extern crate cookie as cookie_rs;
 extern crate collections;
 extern crate flate2;
 extern crate geom;

@@ -27,7 +24,7 @@ extern crate png;
 #[macro_use]
 extern crate log;
 extern crate openssl;
-extern crate "rustc-serialize" as rustc_serialize;
+extern crate rustc_serialize;
 extern crate util;
 extern crate time;
 extern crate url;

@@ -22,7 +22,7 @@ use hyper::header::{ContentType, Header, SetCookie, UserAgent};
 use hyper::mime::{Mime, TopLevel, SubLevel};
 
 use std::borrow::ToOwned;
-use std::boxed;
+use std::boxed::{self, FnBox};
 use std::collections::HashMap;
 use std::env;
 use std::fs::File;

@@ -30,7 +30,6 @@ use std::io::{BufReader, Read};
 use std::str::FromStr;
 use std::sync::Arc;
 use std::sync::mpsc::{channel, Receiver, Sender};
-use std::thunk::Invoke;
 
 static mut HOST_TABLE: Option<*mut HashMap<String, String>> = None;
 

@@ -48,7 +47,7 @@ pub fn global_init() {
 
     let mut lines = String::new();
     match file.read_to_string(&mut lines) {
-        Ok(()) => (),
+        Ok(_) => (),
         Err(_) => return,
     };
 
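
The Ok(()) -> Ok(_) change reflects Read::read_to_string now returning io::Result<usize> (the number of bytes appended) instead of io::Result<()>. A std-only illustration with an in-memory reader standing in for the hosts file:

    use std::io::Read;

    fn main() {
        // Any Read impl works; a byte slice stands in for the file here.
        let mut file: &[u8] = b"127.0.0.1 localhost\n";
        let mut lines = String::new();
        match file.read_to_string(&mut lines) {
            // The success arm now carries the byte count rather than ().
            Ok(n) => println!("read {} bytes", n),
            Err(_) => return,
        };
        assert_eq!(lines, "127.0.0.1 localhost\n");
    }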

@@ -239,8 +238,8 @@ impl ResourceManager {
         self.user_agent.as_ref().map(|ua| load_data.headers.set(UserAgent(ua.clone())));
 
         fn from_factory(factory: fn(LoadData, LoadConsumer, Arc<MIMEClassifier>))
-                        -> Box<Invoke<(LoadData, LoadConsumer, Arc<MIMEClassifier>)> + Send> {
-            box move |(load_data, senders, classifier)| {
+                        -> Box<FnBox(LoadData, LoadConsumer, Arc<MIMEClassifier>) + Send> {
+            box move |load_data, senders, classifier| {
                 factory(load_data, senders, classifier)
             }
         }

@@ -259,6 +258,6 @@ impl ResourceManager {
         };
         debug!("resource_task: loading url: {}", load_data.url.serialize());
 
-        loader.invoke((load_data, consumer, self.mime_classifier.clone()));
+        loader.call_box((load_data, consumer, self.mime_classifier.clone()));
     }
 }