auto merge of #1037 : kmcallister/servo/data-url, r=jdm
Commit 3690c375cd

16 changed files with 996 additions and 693 deletions
Makefile.in (32 changed lines)
@@ -245,35 +245,27 @@ CRATE_servo = $(S)src/components/main/servo.rc
DEPS_servo = $(CRATE_servo) $(SRC_servo) $(DONE_SUBMODULES) $(DONE_util) $(DONE_gfx) $(DONE_script) $(DONE_net) $(DONE_msg)

# rules that depend on having correct meta-target vars (DEPS_CLEAN, DEPS_servo, etc)
include $(S)mk/check.mk
include $(S)mk/clean.mk

.DEFAULT_GOAL := all
.PHONY: all
all: $(B)/src/compiler/rust/rust-auto-clean-stamp servo package

# Servo helper libraries

$(DONE_util): $(DEPS_util)
	@$(call E, compile: $@)
	$(Q)$(RUSTC) $(RFLAGS_util) --out-dir $(B)src/components/util $< && touch $@
SERVO_LIB_CRATES = util net msg gfx script

$(DONE_net): $(DEPS_net)
	@$(call E, compile: $@)
	$(Q)$(RUSTC) $(RFLAGS_net) --out-dir $(B)src/components/net $< && touch $@
define DEF_LIB_CRATE_RULES
$$(DONE_$(1)): $$(DEPS_$(1))
	@$$(call E, compile: $$@)
	$$(Q)$$(RUSTC) $$(RFLAGS_$(1)) --out-dir $$(B)src/components/$(1) $$< && touch $$@
endef

$(DONE_msg): $(DEPS_msg)
	@$(call E, compile: $@)
	$(Q)$(RUSTC) $(RFLAGS_msg) --out-dir $(B)src/components/msg $< && touch $@
$(foreach lib_crate,$(SERVO_LIB_CRATES),\
$(eval $(call DEF_LIB_CRATE_RULES,$(lib_crate))))

$(DONE_gfx): $(DEPS_gfx)
	@$(call E, compile: $@)
	$(Q)$(RUSTC) $(RFLAGS_gfx) --out-dir $(B)src/components/gfx $< && touch $@

$(DONE_script): $(DEPS_script)
	@$(call E, compile: $@)
	$(Q)$(RUSTC) $(RFLAGS_script) --out-dir $(B)src/components/script $< && touch $@
# rules that depend on having correct meta-target vars (DEPS_CLEAN, DEPS_servo, etc)
# and SERVO_LIB_CRATES
include $(S)mk/check.mk
include $(S)mk/clean.mk

BINDINGS_SRC = $(S)/src/components/script/dom/bindings/codegen
mk/check.mk (17 changed lines)
@@ -17,6 +17,21 @@ $(foreach submodule,$(SUBMODULES),\
$(eval $(call DEF_SUBMODULE_TEST_RULES,$(submodule))))

define DEF_LIB_CRATE_TEST_RULES
servo-test-$(1): $$(DEPS_$(1))
	@$$(call E, compile: servo-test-$(1))
	$$(Q)$$(RUSTC) $$(RFLAGS_$(1)) --test -o $$@ $$<

.PHONY: check-servo-$(1)
check-servo-$(1): servo-test-$(1)
	@$$(call E, check: $(1))
	$$(Q)./servo-test-$(1)
endef

$(foreach lib_crate,$(SERVO_LIB_CRATES),\
$(eval $(call DEF_LIB_CRATE_TEST_RULES,$(lib_crate))))

# Testing targets

servo-test: $(DEPS_servo)
@@ -50,7 +65,7 @@ check-all: $(DEPS_CHECK_TARGETS_ALL) check-servo check-content tidy
	@$(call E, check: all)

.PHONY: check-servo
check-servo: servo-test
check-servo: $(foreach lib_crate,$(SERVO_LIB_CRATES),check-servo-$(lib_crate)) servo-test
	@$(call E, check: servo)
	$(Q)./servo-test
@@ -47,5 +47,5 @@ clean-script:
clean-servo: clean-gfx clean-util clean-net clean-script clean-msg
	@$(call E, "cleaning servo")
	$(Q)rm -f servo servo-test libservo*.so
	$(Q)rm -f servo servo-test $(foreach lib_crate,$(SERVO_LIB_CRATES),servo-test-$(lib_crate)) libservo*.so
	$(Q)cd $(BINDINGS_SRC) && rm -f *.pkl
@@ -123,7 +123,7 @@ pub fn true_type_tag(a: char, b: char, c: char, d: char) -> u32 {
#[test]
fn test_true_type_tag() {
    assert!(true_type_tag('c', 'm', 'a', 'p') == 0x_63_6D_61_70_u32);
    assert_eq!(true_type_tag('c', 'm', 'a', 'p'), 0x_63_6D_61_70_u32);
}

#[test]
@@ -139,8 +139,8 @@ fn test_transform_compress_none() {
    let mode = CompressNone;

    for i in range(0, test_strs.len()) {
        (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert!(trimmed_str == test_strs[i])
        let (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert_eq!(&trimmed_str, &test_strs[i])
    }
}

@@ -163,15 +163,16 @@ fn test_transform_discard_newline() {
                                ~"foo bar baz",
                                ~"foobarbaz"];

    assert!(test_strs.len() == oracle_strs.len());
    assert_eq!(test_strs.len(), oracle_strs.len());
    let mode = DiscardNewline;

    for i in range(0, test_strs.len()) {
        (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert!(trimmed_str == oracle_strs[i])
        let (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert_eq!(&trimmed_str, &oracle_strs[i])
    }
}

/* FIXME: Fix and re-enable
#[test]
fn test_transform_compress_whitespace() {
    let test_strs : ~[~str] = ~[~" foo bar",
@@ -190,12 +191,12 @@ fn test_transform_compress_whitespace() {
                                ~"foo bar baz",
                                ~"foobarbaz\n\n"];

    assert!(test_strs.len() == oracle_strs.len());
    assert_eq!(test_strs.len(), oracle_strs.len());
    let mode = CompressWhitespace;

    for i in range(0, test_strs.len()) {
        (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert!(trimmed_str == oracle_strs[i])
        let (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert_eq!(&trimmed_str, &oracle_strs[i])
    }
}

@@ -217,17 +218,18 @@ fn test_transform_compress_whitespace_newline() {
                                ~"foo bar baz",
                                ~"foobarbaz "];

    assert!(test_strs.len() == oracle_strs.len());
    assert_eq!(test_strs.len(), oracle_strs.len());
    let mode = CompressWhitespaceNewline;

    for i in range(0, test_strs.len()) {
        (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert!(trimmed_str == oracle_strs[i])
        let (trimmed_str, _out) = transform_text(test_strs[i], mode, true);
        assert_eq!(&trimmed_str, &oracle_strs[i])
    }
}
*/

#[test]
fn test_transform_compress_whitespace_newline() {
fn test_transform_compress_whitespace_newline_no_incoming() {
    let test_strs : ~[~str] = ~[~" foo bar",
                                ~"\nfoo bar",
                                ~"foo bar ",
@@ -246,11 +248,11 @@ fn test_transform_compress_whitespace_newline() {
                                ~"foo bar baz",
                                ~"foobarbaz "];

    assert!(test_strs.len() == oracle_strs.len());
    assert_eq!(test_strs.len(), oracle_strs.len());
    let mode = CompressWhitespaceNewline;

    for i in range(0, test_strs.len()) {
        (trimmed_str, _out) = transform_text(test_strs[i], mode, false);
        assert!(trimmed_str == oracle_strs[i])
        let (trimmed_str, _out) = transform_text(test_strs[i], mode, false);
        assert_eq!(&trimmed_str, &oracle_strs[i])
    }
}
src/components/net/data_loader.rs (new file, 134 lines)
@@ -0,0 +1,134 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use resource_task::{Done, Payload, Metadata, LoadResponse, LoaderTask, start_sending};

use extra::url::Url;
use extra::base64::FromBase64;

use http::headers::test_utils::from_stream_with_str;
use http::headers::content_type::MediaType;

pub fn factory() -> LoaderTask {
    |url, start_chan| {
        // NB: we don't spawn a new task.
        // Hypothesis: data URLs are too small for parallel base64 etc. to be worth it.
        // Should be tested at some point.
        load(url, start_chan)
    }
}

fn load(url: Url, start_chan: Chan<LoadResponse>) {
    assert!("data" == url.scheme);

    let mut metadata = Metadata::default(url.clone());

    // Split out content type and data.
    let parts: ~[&str] = url.path.splitn_iter(',', 1).to_owned_vec();
    if parts.len() != 2 {
        start_sending(start_chan, metadata).send(Done(Err(())));
        return;
    }

    // ";base64" must come at the end of the content type, per RFC 2397.
    // rust-http will fail to parse it because there's no =value part.
    let mut is_base64 = false;
    let mut ct_str = parts[0];
    if ct_str.ends_with(";base64") {
        is_base64 = true;
        ct_str = ct_str.slice_to(ct_str.as_bytes().len() - 7);
    }

    // Parse the content type using rust-http.
    // FIXME: this can go into an infinite loop! (rust-http #25)
    let content_type: Option<MediaType> = from_stream_with_str(ct_str);
    metadata.set_content_type(&content_type);

    let progress_chan = start_sending(start_chan, metadata);

    if is_base64 {
        match parts[1].from_base64() {
            Err(*) => {
                progress_chan.send(Done(Err(())));
            }
            Ok(data) => {
                progress_chan.send(Payload(data));
                progress_chan.send(Done(Ok(())));
            }
        }
    } else {
        // FIXME: Since the %-decoded URL is already a str, we can't
        // handle UTF8-incompatible encodings.
        progress_chan.send(Payload(parts[1].as_bytes().into_owned()));
        progress_chan.send(Done(Ok(())));
    }
}

#[cfg(test)]
fn assert_parse(url: &'static str,
                content_type: Option<(~str, ~str)>,
                charset: Option<~str>,
                data: Option<~[u8]>) {
    use std::from_str::FromStr;
    use std::comm;

    let (start_port, start_chan) = comm::stream();
    load(FromStr::from_str(url).unwrap(), start_chan);

    let response = start_port.recv();
    assert_eq!(&response.metadata.content_type, &content_type);
    assert_eq!(&response.metadata.charset, &charset);

    let progress = response.progress_port.recv();

    match data {
        None => {
            assert_eq!(progress, Done(Err(())));
        }
        Some(dat) => {
            assert_eq!(progress, Payload(dat));
            assert_eq!(response.progress_port.recv(), Done(Ok(())));
        }
    }
}

#[test]
fn empty_invalid() {
    assert_parse("data:", None, None, None);
}

#[test]
fn plain() {
    assert_parse("data:,hello%20world", None, None, Some(bytes!("hello world").into_owned()));
}

#[test]
fn plain_ct() {
    assert_parse("data:text/plain,hello",
        Some((~"text", ~"plain")), None, Some(bytes!("hello").into_owned()));
}

#[test]
fn plain_charset() {
    assert_parse("data:text/plain;charset=latin1,hello",
        Some((~"text", ~"plain")), Some(~"latin1"), Some(bytes!("hello").into_owned()));
}

#[test]
fn base64() {
    assert_parse("data:;base64,C62+7w==", None, None, Some(~[0x0B, 0xAD, 0xBE, 0xEF]));
}

#[test]
fn base64_ct() {
    assert_parse("data:application/octet-stream;base64,C62+7w==",
        Some((~"application", ~"octet-stream")), None, Some(~[0x0B, 0xAD, 0xBE, 0xEF]));
}

#[test]
fn base64_charset() {
    assert_parse("data:text/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==",
        Some((~"text", ~"plain")), Some(~"koi8-r"),
        Some(~[0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4]));
}
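The core of data_loader.rs above is the RFC 2397 split: everything before the first ',' is the media type, optionally ending in ";base64", and everything after it is the payload. A minimal standalone sketch of that split, written in present-day Rust rather than the 2013 dialect used in this diff (the function name is illustrative, not Servo API):

// Split an RFC 2397 data-URL path into (media type, is_base64, payload).
// Percent-decoding and base64 decoding are deliberately left out; this
// only illustrates the parse performed by data_loader.rs.
fn split_data_url(path: &str) -> Option<(&str, bool, &str)> {
    let (mut mediatype, payload) = path.split_once(',')?;
    let is_base64 = mediatype.ends_with(";base64");
    if is_base64 {
        mediatype = &mediatype[..mediatype.len() - ";base64".len()];
    }
    Some((mediatype, is_base64, payload))
}

fn main() {
    assert_eq!(split_data_url("text/plain;charset=latin1,hello"),
               Some(("text/plain;charset=latin1", false, "hello")));
    assert_eq!(split_data_url(";base64,C62+7w=="),
               Some(("", true, "C62+7w==")));
    // No comma means there is no payload at all, matching the empty_invalid test.
    assert_eq!(split_data_url("no-comma-here"), None);
}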
@@ -2,7 +2,7 @@
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use resource_task::{Done, LoaderTask, Payload};
use resource_task::{Metadata, Payload, Done, LoaderTask, start_sending};

use std::io::{ReaderUtil, file_reader};
use std::task;
@@ -10,23 +10,23 @@ use std::task;
static READ_SIZE: uint = 1024;

pub fn factory() -> LoaderTask {
    let f: LoaderTask = |url, progress_chan| {
        assert!("file" == url.scheme);
        do task::spawn {
            // FIXME: Resolve bug prevents us from moving the path out of the URL.
            match file_reader(&Path(url.path)) {
                Ok(reader) => {
                    while !reader.eof() {
                        let data = reader.read_bytes(READ_SIZE);
                        progress_chan.send(Payload(data));
                    }
                    progress_chan.send(Done(Ok(())));
                }
                Err(*) => {
                    progress_chan.send(Done(Err(())));
                }
            };
        }
    };
    f
    let f: LoaderTask = |url, start_chan| {
        assert!("file" == url.scheme);
        let progress_chan = start_sending(start_chan, Metadata::default(url.clone()));
        do task::spawn {
            match file_reader(&Path(url.path)) {
                Ok(reader) => {
                    while !reader.eof() {
                        let data = reader.read_bytes(READ_SIZE);
                        progress_chan.send(Payload(data));
                    }
                    progress_chan.send(Done(Ok(())));
                }
                Err(*) => {
                    progress_chan.send(Done(Err(())));
                }
            };
        }
    };
    f
}
@@ -2,7 +2,7 @@
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use resource_task::{ProgressMsg, Payload, Done, UrlChange, LoaderTask};
use resource_task::{Metadata, Payload, Done, LoadResponse, LoaderTask, start_sending};

use std::cell::Cell;
use std::vec;
@@ -13,15 +13,15 @@ use http::headers::HeaderEnum;
use std::rt::io::Reader;

pub fn factory() -> LoaderTask {
    let f: LoaderTask = |url, progress_chan| {
    let f: LoaderTask = |url, start_chan| {
        let url = Cell::new(url);
        let progress_chan = Cell::new(progress_chan);
        spawn(|| load(url.take(), progress_chan.take()))
    };
    f
        let start_chan = Cell::new(start_chan);
        spawn(|| load(url.take(), start_chan.take()))
    };
    f
}

fn load(url: Url, progress_chan: Chan<ProgressMsg>) {
fn load(url: Url, start_chan: Chan<LoadResponse>) {
    assert!("http" == url.scheme);

    info!("requesting %s", url.to_str());
@@ -30,34 +30,33 @@ fn load(url: Url, progress_chan: Chan<ProgressMsg>) {
    let mut response = match request.read_response() {
        Ok(r) => r,
        Err(_) => {
            progress_chan.send(Done(Err(())));
            start_sending(start_chan, Metadata::default(url)).send(Done(Err(())));
            return;
        }
    };

    info!("got HTTP response %s, headers:", response.status.to_str())

    let is_redirect = 3 == (response.status.code() / 100);
    let mut redirect: Option<Url> = None;
    for header in response.headers.iter() {
        let name = header.header_name();
        let value = header.header_value();
        info!(" - %s: %s", name, value);
        if is_redirect && ("Location" == name) {
            redirect = Some(FromStr::from_str(value).expect("Failed to parse redirect URL"));
        }
    }
    // Dump headers, but only do the iteration if info!() is enabled.
    info!("got HTTP response %s, headers:", response.status.to_str());
    info!("%?",
        for header in response.headers.iter() {
            info!(" - %s: %s", header.header_name(), header.header_value());
        });

    // FIXME: detect redirect loops
    match redirect {
        Some(url) => {
            info!("redirecting to %s", url.to_str());
            progress_chan.send(UrlChange(url.clone()));
            return load(url, progress_chan);
    if 3 == (response.status.code() / 100) {
        match response.headers.location {
            Some(url) => {
                info!("redirecting to %s", url.to_str());
                return load(url, start_chan);
            }
            None => ()
        }
        None => ()
    }

    let mut metadata = Metadata::default(url);
    metadata.set_content_type(&response.headers.content_type);

    let progress_chan = start_sending(start_chan, metadata);
    loop {
        let mut buf = vec::with_capacity(1024);
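The FIXME above ("detect redirect loops") is left open by this commit: http_loader simply re-enters load() on every 3xx Location header. The usual guard is a hop limit plus a set of already-visited URLs; a library-agnostic sketch in present-day Rust, where `fetch` is a hypothetical stand-in for the request/response step and none of these names are Servo or rust-http API:

use std::collections::HashSet;

// Follow redirects until `fetch` reports no further Location, a URL repeats,
// or the hop budget is exhausted.
fn follow_redirects<F>(mut url: String, mut fetch: F, max_hops: usize) -> Result<String, String>
where
    F: FnMut(&str) -> Option<String>, // Some(location) on a 3xx, None when final
{
    let mut seen = HashSet::new();
    for _ in 0..max_hops {
        if !seen.insert(url.clone()) {
            return Err(format!("redirect loop at {}", url));
        }
        match fetch(&url) {
            Some(next) => url = next,
            None => return Ok(url), // final URL reached
        }
    }
    Err("too many redirects".to_string())
}

fn main() {
    // Toy chain: a -> b -> b (loop); the guard reports the loop instead of recursing forever.
    let result = follow_redirects("a".into(), |u| match u {
        "a" => Some("b".to_string()),
        "b" => Some("b".to_string()),
        _ => None,
    }, 10);
    assert!(result.is_err());
}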
File diff suppressed because it is too large
@@ -26,6 +26,7 @@ pub mod image {

pub mod file_loader;
pub mod http_loader;
pub mod data_loader;
pub mod image_cache_task;
pub mod local_image_cache;
pub mod resource_task;
@@ -6,50 +6,115 @@

use file_loader;
use http_loader;
use data_loader;

use std::cell::Cell;
use std::comm::{Chan, Port, SharedChan};
use std::comm;
use extra::url::Url;
use util::spawn_listener;
use http::headers::content_type::MediaType;

#[cfg(test)]
use std::from_str::FromStr;

pub enum ControlMsg {
    /// Request the data associated with a particular URL
    Load(Url, Chan<ProgressMsg>),
    Load(Url, Chan<LoadResponse>),
    Exit
}

/// Metadata about a loaded resource, such as is obtained from HTTP headers.
pub struct Metadata {
    /// Final URL after redirects.
    final_url: Url,

    /// MIME type / subtype.
    content_type: Option<(~str, ~str)>,

    /// Character set.
    charset: Option<~str>,
}

impl Metadata {
    /// Metadata with defaults for everything optional.
    pub fn default(url: Url) -> Metadata {
        Metadata {
            final_url: url,
            content_type: None,
            charset: None,
        }
    }

    /// Extract the parts of a MediaType that we care about.
    pub fn set_content_type(&mut self, content_type: &Option<MediaType>) {
        match *content_type {
            None => (),
            Some(MediaType { type_: ref type_,
                             subtype: ref subtype,
                             parameters: ref parameters }) => {
                self.content_type = Some((type_.clone(), subtype.clone()));
                for &(ref k, ref v) in parameters.iter() {
                    if "charset" == k.as_slice() {
                        self.charset = Some(v.clone());
                    }
                }
            }
        }
    }
}

/// Message sent in response to `Load`. Contains metadata, and a port
/// for receiving the data.
///
/// Even if loading fails immediately, we send one of these and the
/// progress_port will provide the error.
pub struct LoadResponse {
    /// Metadata, such as from HTTP headers.
    metadata: Metadata,
    /// Port for reading data.
    progress_port: Port<ProgressMsg>,
}

/// Messages sent in response to a `Load` message
#[deriving(Eq)]
pub enum ProgressMsg {
    /// URL changed due to a redirect. There can be zero or more of these,
    /// but they are guaranteed to arrive before messages of any other type.
    UrlChange(Url),
    /// Binary data - there may be multiple of these
    Payload(~[u8]),
    /// Indicates loading is complete, either successfully or not
    Done(Result<(), ()>)
}

/// For use by loaders in responding to a Load message.
pub fn start_sending(start_chan: Chan<LoadResponse>,
                     metadata: Metadata) -> Chan<ProgressMsg> {
    let (progress_port, progress_chan) = comm::stream();
    start_chan.send(LoadResponse {
        metadata: metadata,
        progress_port: progress_port,
    });
    progress_chan
}

/// Handle to a resource task
pub type ResourceTask = SharedChan<ControlMsg>;

pub type LoaderTask = ~fn(url: Url, Chan<LoadResponse>);

/**
Creates a task to load a specific resource

The ResourceManager delegates loading to a different type of loader task for
each URL scheme
*/
type LoaderTaskFactory = ~fn() -> ~fn(url: Url, Chan<ProgressMsg>);

pub type LoaderTask = ~fn(url: Url, Chan<ProgressMsg>);
type LoaderTaskFactory = extern "Rust" fn() -> LoaderTask;

/// Create a ResourceTask with the default loaders
pub fn ResourceTask() -> ResourceTask {
    let file_loader_factory: LoaderTaskFactory = file_loader::factory;
    let http_loader_factory: LoaderTaskFactory = http_loader::factory;
    let loaders = ~[
        (~"file", file_loader_factory),
        (~"http", http_loader_factory)
        (~"file", file_loader::factory),
        (~"http", http_loader::factory),
        (~"data", data_loader::factory),
    ];
    create_resource_task_with_loaders(loaders)
}
@@ -83,8 +148,8 @@ impl ResourceManager {
    fn start(&self) {
        loop {
            match self.from_client.recv() {
                Load(url, progress_chan) => {
                    self.load(url.clone(), progress_chan)
                Load(url, start_chan) => {
                    self.load(url.clone(), start_chan)
                }
                Exit => {
                    break
@@ -93,16 +158,15 @@ impl ResourceManager {
        }
    }

    fn load(&self, url: Url, progress_chan: Chan<ProgressMsg>) {

    fn load(&self, url: Url, start_chan: Chan<LoadResponse>) {
        match self.get_loader_factory(&url) {
            Some(loader_factory) => {
                debug!("resource_task: loading url: %s", url.to_str());
                loader_factory(url, progress_chan);
                loader_factory(url, start_chan);
            }
            None => {
                debug!("resource_task: no loader for scheme %s", url.scheme);
                progress_chan.send(Done(Err(())));
                start_sending(start_chan, Metadata::default(url)).send(Done(Err(())));
            }
        }
    }
@@ -130,27 +194,40 @@ fn test_exit() {
#[test]
fn test_bad_scheme() {
    let resource_task = ResourceTask();
    let progress = Port();
    resource_task.send(Load(url::from_str(~"bogus://whatever").get(), progress.chan()));
    match progress.recv() {
    let (start, start_chan) = comm::stream();
    resource_task.send(Load(FromStr::from_str("bogus://whatever").unwrap(), start_chan));
    let response = start.recv();
    match response.progress_port.recv() {
        Done(result) => { assert!(result.is_err()) }
        _ => fail
        _ => fail!("bleh")
    }
    resource_task.send(Exit);
}

#[test]
fn should_delegate_to_scheme_loader() {
    let payload = ~[1, 2, 3];
    let loader_factory = |_url: Url, progress_chan: Chan<ProgressMsg>| {
        progress_chan.send(Payload(payload.clone()));
#[cfg(test)]
static snicklefritz_payload: [u8, ..3] = [1, 2, 3];

#[cfg(test)]
fn snicklefritz_loader_factory() -> LoaderTask {
    let f: LoaderTask = |url: Url, start_chan: Chan<LoadResponse>| {
        let progress_chan = start_sending(start_chan, Metadata::default(url));
        progress_chan.send(Payload(snicklefritz_payload.into_owned()));
        progress_chan.send(Done(Ok(())));
    };
    let loader_factories = ~[(~"snicklefritz", loader_factory)];
    f
}

#[test]
fn should_delegate_to_scheme_loader() {
    let loader_factories = ~[(~"snicklefritz", snicklefritz_loader_factory)];
    let resource_task = create_resource_task_with_loaders(loader_factories);
    let progress = Port();
    resource_task.send(Load(url::from_str(~"snicklefritz://heya").get(), progress.chan()));
    assert!(progress.recv() == Payload(payload));
    let (start, start_chan) = comm::stream();
    resource_task.send(Load(FromStr::from_str("snicklefritz://heya").unwrap(), start_chan));

    let response = start.recv();
    let progress = response.progress_port;

    assert!(progress.recv() == Payload(snicklefritz_payload.into_owned()));
    assert!(progress.recv() == Done(Ok(())));
    resource_task.send(Exit);
}
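The new resource_task protocol above is the heart of the change: a Load request carries a reply channel, the loader answers exactly once with a LoadResponse (metadata plus a progress port), and then streams Payload chunks followed by Done. A minimal sketch of that handshake in present-day Rust using std::sync::mpsc, not the 2013 std::comm API used in the diff; the names mirror the diff but this is not Servo code:

use std::sync::mpsc::{channel, Receiver, Sender};

#[derive(Debug, PartialEq)]
enum ProgressMsg {
    Payload(Vec<u8>),
    Done(Result<(), ()>),
}

struct Metadata {
    final_url: String,
    content_type: Option<(String, String)>,
}

struct LoadResponse {
    metadata: Metadata,
    progress_port: Receiver<ProgressMsg>,
}

// Analogue of start_sending(): publish the metadata once and hand back the
// channel on which the loader will stream progress messages.
fn start_sending(start_chan: Sender<LoadResponse>, metadata: Metadata) -> Sender<ProgressMsg> {
    let (progress_chan, progress_port) = channel();
    start_chan.send(LoadResponse { metadata, progress_port }).unwrap();
    progress_chan
}

fn main() {
    let (start_chan, start_port) = channel();

    // A toy "loader" that immediately succeeds with one payload chunk.
    let progress_chan = start_sending(start_chan, Metadata {
        final_url: "data:,hello".to_string(),
        content_type: None,
    });
    progress_chan.send(ProgressMsg::Payload(b"hello".to_vec())).unwrap();
    progress_chan.send(ProgressMsg::Done(Ok(()))).unwrap();

    // Client side: exactly one LoadResponse, then drain the progress port.
    let response = start_port.recv().unwrap();
    println!("loaded {} (content type {:?})",
             response.metadata.final_url, response.metadata.content_type);
    while let Ok(msg) = response.progress_port.recv() {
        if msg == ProgressMsg::Done(Ok(())) { break; }
    }
}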
@@ -10,7 +10,7 @@ use std::comm::Port;
use std::task;
use newcss::stylesheet::Stylesheet;
use newcss::util::DataStream;
use servo_net::resource_task::{ResourceTask, ProgressMsg, Load, Payload, Done, UrlChange};
use servo_net::resource_task::{Load, LoadResponse, Payload, Done, ResourceTask};
use extra::url::Url;

/// Where a style sheet comes from.
@@ -55,21 +55,13 @@ fn data_stream(provenance: StylesheetProvenance, resource_task: ResourceTask) ->
    }
}

fn resource_port_to_data_stream(input_port: Port<ProgressMsg>) -> DataStream {
fn resource_port_to_data_stream(input_port: Port<LoadResponse>) -> DataStream {
    let progress_port = input_port.recv().progress_port;
    return || {
        // Can't just 'return' the value since we're inside a lambda
        let mut result = None;
        loop {
            match input_port.recv() {
                UrlChange(*) => (), // don't care that URL changed
                Payload(data) => {
                    result = Some(data);
                    break;
                }
                Done(*) => break
            }
        match progress_port.recv() {
            Payload(data) => Some(data),
            Done(*) => None
        }
        result
    }
}
@@ -25,7 +25,7 @@ use std::from_str::FromStr;
use hubbub::hubbub;
use servo_msg::constellation_msg::{ConstellationChan, SubpageId};
use servo_net::image_cache_task::ImageCacheTask;
use servo_net::resource_task::{ProgressMsg, Done, Load, Payload, UrlChange, ResourceTask};
use servo_net::resource_task::{Load, Payload, Done, ResourceTask};
use servo_util::tree::TreeNodeRef;
use servo_util::url::make_url;
use extra::url::Url;
@@ -170,10 +170,10 @@ fn js_script_listener(to_parent: SharedChan<HtmlDiscoveryMessage>,
    // TODO: change copy to move once we can move into closures
    resource_task.send(Load(url.clone(), input_chan));

    let progress_port = input_port.recv().progress_port;
    let mut buf = ~[];
    loop {
        match input_port.recv() {
            UrlChange(*) => (), // don't care that URL changed
        match progress_port.recv() {
            Payload(data) => {
                buf.push_all(data);
            }
@@ -331,25 +331,15 @@ pub fn parse_html(cx: *JSContext,
    }
    let js_chan = SharedChan::new(js_msg_chan);

    // Process any UrlChange messages before we build the parser, because the
    // tree handler functions need to know the final URL.
    let mut final_url = url.clone();
    // Wait for the LoadResponse so that the parser knows the final URL.
    let (input_port, input_chan) = comm::stream();
    resource_task.send(Load(url.clone(), input_chan));
    let mut progress_msg: ProgressMsg;
    loop {
        progress_msg = input_port.recv();
        match progress_msg {
            UrlChange(url) => {
                debug!("page URL changed to %s", url.to_str());
                final_url = url;
            }
            _ => break
        }
    }
    let load_response = input_port.recv();

    let url2 = final_url.clone();
    let url3 = final_url.clone();
    debug!("Fetched page; metadata is %?", load_response.metadata);

    let url2 = load_response.metadata.final_url.clone();
    let url3 = url2.clone();

    // Build the root node.
    let root = @HTMLHtmlElement { htmlelement: HTMLElement::new(HTMLHtmlElementTypeId, ~"html", document) };
@@ -573,11 +563,7 @@ pub fn parse_html(cx: *JSContext,

    debug!("loaded page");
    loop {
        // We already have a message from the earlier UrlChange processing.
        match progress_msg {
            UrlChange(*) => {
                fail!("got UrlChange message after others");
            }
        match load_response.progress_port.recv() {
            Payload(data) => {
                debug!("received data");
                parser.parse_chunk(data);
@@ -589,7 +575,6 @@ pub fn parse_html(cx: *JSContext,
                break;
            }
        }
        progress_msg = input_port.recv();
    }

    css_chan.send(CSSTaskExit);
@@ -598,7 +583,7 @@ pub fn parse_html(cx: *JSContext,
    HtmlParserResult {
        root: root,
        discovery_port: discovery_port,
        url: final_url,
        url: load_response.metadata.final_url,
    }
}
@@ -51,16 +51,18 @@ impl<K: Clone + Eq, V: Clone> Cache<K,V> for MonoCache<K,V> {

#[test]
fn test_monocache() {
    let cache = MonoCache::new(10);
    let mut cache = MonoCache::new(10);
    let one = @"one";
    let two = @"two";
    cache.insert(&1, one);
    cache.insert(1, one);

    assert!(cache.find(&1).is_some());
    assert!(cache.find(&2).is_none());
    /* FIXME: clarify behavior here:
    cache.find_or_create(&2, |_v| { two });
    assert!(cache.find(&2).is_some());
    assert!(cache.find(&1).is_none());
    */
}

pub struct HashCache<K, V> {
@@ -98,11 +100,11 @@ impl<K: Clone + Eq + Hash, V: Clone> Cache<K,V> for HashCache<K,V> {

#[test]
fn test_hashcache() {
    let cache = HashCache::new();
    let mut cache = HashCache::new();
    let one = @"one";
    let two = @"two";

    cache.insert(&1, one);
    cache.insert(1, one);
    assert!(cache.find(&1).is_some());
    assert!(cache.find(&2).is_none());

@@ -173,17 +175,17 @@ fn test_lru_cache() {
    let four = @"four";

    // Test normal insertion.
    let cache = LRUCache::new(2); // (_, _) (cache is empty)
    cache.insert(&1, one); // (1, _)
    cache.insert(&2, two); // (1, 2)
    cache.insert(&3, three); // (2, 3)
    let mut cache = LRUCache::new(2); // (_, _) (cache is empty)
    cache.insert(1, one); // (1, _)
    cache.insert(2, two); // (1, 2)
    cache.insert(3, three); // (2, 3)

    assert!(cache.find(&1).is_none()); // (2, 3) (no change)
    assert!(cache.find(&3).is_some()); // (2, 3)
    assert!(cache.find(&2).is_some()); // (3, 2)

    // Test that LRU works (this insertion should replace 3, not 2).
    cache.insert(&4, four); // (2, 4)
    cache.insert(4, four); // (2, 4)

    assert!(cache.find(&1).is_none()); // (2, 4) (no change)
    assert!(cache.find(&2).is_some()); // (4, 2)
@@ -191,7 +193,7 @@ fn test_lru_cache() {
    assert!(cache.find(&4).is_some()); // (2, 4) (no change)

    // Test find_or_create.
    do cache.find_or_create(&1) |_| { one } // (4, 1)
    do cache.find_or_create(&1) |_| { one }; // (4, 1)

    assert!(cache.find(&1).is_some()); // (4, 1) (no change)
    assert!(cache.find(&2).is_none()); // (4, 1) (no change)
@@ -200,12 +200,9 @@ pub fn time<T>(msg: &str, callback: &fn() -> T) -> T{
    return val;
}

#[cfg(test)]
mod test {
    // ensure that the order of the buckets matches the order of the enum categories
    #[test]
    fn check_order() {
        let buckets = ProfilerCategory::empty_buckets();
        assert!(buckets.len() == NumBuckets as uint);
    }
// ensure that the order of the buckets matches the order of the enum categories
#[test]
fn check_order() {
    let buckets = ProfilerCategory::empty_buckets();
    assert!(buckets.len() == NumBuckets as uint);
}
@@ -62,6 +62,12 @@ pub fn make_url(str_url: ~str, current_url: Option<Url>) -> Url {
            _ => str_url
        }
    },
    ~"data" => {
        // Drop whitespace within data: URLs, e.g. newlines within a base64
        // src="..." block. Whitespace intended as content should be
        // %-encoded or base64'd.
        str_url.iter().filter(|&c| !c.is_whitespace()).collect()
    },
    _ => str_url
    }
}
@@ -71,7 +77,10 @@ pub fn make_url(str_url: ~str, current_url: Option<Url>) -> Url {
    url::from_str(str_url).unwrap()
}

#[cfg(test)]
mod make_url_tests {
    use super::make_url;
    use std::os;

    #[test]
    fn should_create_absolute_file_url_if_current_url_is_none_and_str_url_looks_filey() {
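The make_url change above strips whitespace from data: URLs so that newlines inside a long base64 src attribute, like the test page below, do not break parsing. A small standalone check of that rule in present-day Rust (the helper name is illustrative, not the Servo function):

// Remove all whitespace before handing a data: URL to the URL parser.
fn strip_data_url_whitespace(s: &str) -> String {
    s.chars().filter(|c| !c.is_whitespace()).collect()
}

fn main() {
    assert_eq!(
        strip_data_url_whitespace("data:image/png;base64,iVBOR\n    w0KGgo"),
        "data:image/png;base64,iVBORw0KGgo"
    );
}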
src/test/html/data-url.html (new file, 87 lines)
@@ -0,0 +1,87 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
</head>
<body>
<img src="data:image/png;base64,
iVBORw0KGgoAAAANSUhEUgAAAM4AAADOCAMAAABBwc32AAAABGdBTUEAALGPC/xhBQAAAAFzUkdC
|
||||
AK7OHOkAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAwBQTFRF
|
||||
////+fn58fHx4ODgGhoaAAAAAwMDpKSk9vb29PT0xcXFFhYWAQEBZmZm+/v7TExMERER6Ojop6en
|
||||
Dw8PBQUFhISELi4uYGBg/v7+r6+vXFxcrq6uAgIC2NjYyMjIaGho7+/vGBgYtLS0dXV19fX15eXl
|
||||
a2trgYGBc3NzJycn+Pj4DAwMGxsb/Pz88PDw6urqvr6+Pj4+39/fEhISkpKS0dHRBAQEg4OD09PT
|
||||
uLi4cnJyBgYGe3t7Pz8/qampq6urfX19jo6OMjIyCQkJhYWFl5eXWFhY2dnZVFRUSEhI6+vrkJCQ
|
||||
CAgI4uLiKCgo0NDQh4eH/f39Wlpaubm5LCwsFxcXHR0dPDw8ZGRkhoaGFBQUrKysTU1NSUlJzs7O
|
||||
dHR0DQ0NZWVlxsbG3Nzct7e3UFBQ6enpJCQkIyMjRUVFXV1d0tLSwcHBDg4On5+fgICAgoKC+vr6
|
||||
7e3tS0tLV1dXNjY2ISEh9/f3i4uLw8PDoKCgCgoKmpqaeHh4ExMTb29vGRkZHBwc1dXVTk5OioqK
|
||||
PT09oaGhLy8vJSUlzMzMYWFhk5OT8/PzQEBAz8/PHh4ed3d3iIiIEBAQRkZGVlZW4+PjpqambGxs
|
||||
3t7ejY2N4eHhU1NTT09Po6OjHx8f5ubmtra2SkpKWVlZ1tbWlJSUICAgQkJCY2NjOjo6bm5uCwsL
|
||||
mZmZIiIiurq6enp6tbW17u7uUVFRycnJREREbW1ty8vLVVVV1NTUJiYm3d3dMzMzMDAwOTk58vLy
|
||||
NDQ0sbGxnZ2dx8fH5OTkmJiYODg4BwcHnJycwsLCpaWlQUFB5+fnkZGRlpaWX19fNzc3jIyMLS0t
|
||||
wMDAaWlpsrKyv7+/29vbvLy8xMTEZ2dnQ0NDKSkplZWVqqqqNTU1m5ubs7OzfHx8f39/ampqu7u7
|
||||
UlJSOzs7sLCw2traj4+PFRUVfn5+dnZ2R0dHra2toqKieXl5qKiocXFxMTExXl5eKysr7Ozsvb29
|
||||
zc3NiYmJW1tbKioqysrKYmJi19fXnp6ecHBw33iQwQAAAAFiS0dEAIgFHUgAAA0ySURBVHja7V1p
|
||||
YBRFFq4EeghkckACDBJkhEQkIYgLRJYsyJ0AATKwAUUuUQ6JQcYFEXDDpUCWAEPCpSDIIUcI10YO
|
||||
QcUrasIpuIoH7AZWEBddUGEXd9ndPmf6qJrp6q6e3h/1/Xo99eq996W7q6teHQGAgoKCgoKCgoKC
|
||||
goKCgoKCgoKCgoKCgoKCwhAiIgNyrdqMo07gMqqu3cFho160MyZWuohjGCa+vnTVICGxod3h4aIR
|
||||
y6CxS5CbMBzuEkuaOhgmqZnd8eHhbp5Bc15238NfMC34qxaJnNzS7gB1IDnFL94rMGjFyfcJMtM6
|
||||
leOZxstt0iXNtvfbHTYC7RKYB34liKntRQodAOiYIcrMgwB0kgp+LSp2znT+xu7A4ejCRpnQlRcf
|
||||
khgkdeveQ5KZnr1615bkPrxeFleYUd+MV6vgEh6jbDcr92VCIbMfW6N/jvxO/X9hgBjowEGgU0ZI
|
||||
OkyuJ3KwKPa3O3QYhkiB5vw2NBkWeUMlKcbu0GHorosEDMPsDt2Pug93ecQtiDGG6QwXDHgeHTFy
|
||||
lL10RrPRjOk8gBOHIcN1Pjb28SfGBeEznq0e1XsCK020l86TfDiJk/LBUwVINpN51afRdKZ4n3kw
|
||||
WhAjTQZkDr+TQg7y+k8VVF3T0CrPpknSdFvpZOt4NZqKus/p0E1y2Upnko4QZ4i6M3XozrKVja7m
|
||||
7HlR9/c6dAvtpTNbR4iNvLxq5BwdunNtYTGvVui/eOH8F8YKN68x9373ExrBF0ctuHehE1lpkWg3
|
||||
siiMbcIf2D9jn8WD0kHxEkRYPVot5V5qj/BuLfMtb17CS6XJXP3xDVbMgtcr6QjAylWrFzqYCWEb
|
||||
B60RfRf0hTfPE156WVKNXasoWZcqFRS/8gC0btrU9aLUPVx0NgR7+pNGvuqR6UZslJVt8srNbF6d
|
||||
FszQlnDRAa8hY3Bmz1PpRm31F25LV5W5lw9H0wlf9w35tRmxVKtcvF0s3AH5REZ2K0OYKkgPHQch
|
||||
7IRHUL4Lqu0Whte74bZWroUb6xs2NmAezH+GLxmh7m7c3tl6AdLanjYwc2HMLLogj/zAYM+6KzmY
|
||||
ub2bIHSeCh8dsE/TBOww9dlrWqI2mJNqxh4e6qq/N2mrTFqcN1BlMfOPlrOIml1Rkd0n5nXfVpXv
|
||||
vP2mbbsrVDbn+jrHTDrQt+LAQavooPqbh94gYDy2McL6ax7zxqEohfs73I+Idc82uHlnsUV0XoC6
|
||||
O/IMIfOeh6H237SIDXirBOLt7bbE7LugT/NRq+iAlhBvjX2GsPydd6O0fCC9p0NWvToAvJfJEERG
|
||||
j0fWqBxMn6bRet8yNgA0xg86OD6ooxgzgBT1CDzPylHpQaehoINhYguFh7GVymJru276JgfwcJdi
|
||||
Xv5DZWGWlWzaWcCGYRbly330UZTNtoZI+huvvhSzJN4SOkxBPZmn8dHyosTDm6Z0HRRlOG4oPnqz
|
||||
0hoiIhKKZM4+1pYnlZNMhESV4EeIhzkrA95cVTCNPeToVJNv0NQYJkskHIUp9CZ4ezbiRoePyQFv
|
||||
gyHFJSRn61OGY4eHi6H+BUkrEyHFXQmyAWCI9Y/bx5Kv+yCFzYmyAWC15XTiRE91IV+DQ6RXwHmn
|
||||
WU2nLELwdExb1JP8APtgT9G2Y8Xx4ydyLOAjZLY8h/iLk4+dCKTfThFnA8Bp8Y/4CXeRn8e7KeJx
|
||||
Lzy8MZpBzpntlUHoCF+Ws5zY/lNWivC/RH8izyZLNL1TuNzFyRsFuRc8vKEQK8nH70HSEZpq/hv6
|
||||
maC9QixZTphL6kMjxNYzR+o+cdMwzma4dABwI1dT8U3bUk56W9QdJRUtfKmaGBfX5+cCjU259OsX
|
||||
3NUZfDogYhGCDv8Un+CkbaJqeuDz4Bi5ikw/9BOF+zJp9PglfxWJTwd8BVd3bmDL6ldy4grpTioU
|
||||
CnIJsPGqHvavhZ+LhD/cHgN06sHVz3NlF3ixUPzMLFCpEEjDpyYpTeZt5n6tbi1cfWaAzp/h6s9x
|
||||
ZR0E+QzfIa1VW6VCoqejTn4VtGyxq1eeII/Za4DOPqh2NJ/sHCtexX3ujfxQnQYZSOLt8SA+KxyE
|
||||
XjsWnfS/wLU7CMXz0c6OEGrdahD2G50FuHTeatAIrrxVzA7WvYjK5b3oJsOGfaKTtNadfS9JxQg6
|
||||
sw6o8Ndvnk1CxCobjW7YXQjTiCOYL7is7qMl+GSTIAg6GLjnitxbxALtLYwhOpH9rcL21e/GywtN
|
||||
0xlepHLnOavKF3Qhu85tscx0xk7VfTdLZ+hmiMdRd8lfoq1E2Rx8W2Z6jrrUJJ0qRBrgjEwnoQE5
|
||||
Msk+xej9NFE6ma1Qb8XfFHpPXgFkcHS9MoAuJOmkDUL6naLUzKkhMZnw8jp1BNeI3p1SN8rzt2rV
|
||||
RbuAaWiXPM4nSocZiBprXtCoJn1vmo7W/zqydJiriOcNsrJ3n2k62r0rZB82FoXwdQm9tZq9TNM5
|
||||
qxncP0CaDrMQugBHs+Ukw0dg2tfdXNXROk+cDvMIzHFDldLJQaGD1YN6pxVmXyNPJ0251GKLQ6uS
|
||||
sIXYjLw3V75OuFLdf0LQid8pR3dflYNBQpkYhGSQs1P0hxsaivUr6hXOesc7LyNW3bBoHYIO4ZUs
|
||||
iiSEuv+rf/g2E3mDFL3Qc5ricUTZNFsmtz3TMB1Y4ysgV66l3UaT8QNBNqMUk8nMaON0QAVclxkp
|
||||
VzqvLXcuJsZmhmqmV/0dxaHTEUGnUNa+REK3nU4hxOYJ9faH9qomEyuTcwTBR9b/R2zD6kxkRHpU
|
||||
m1rpaIJOLoLO3/0aWQkIlXME2KRD8iodTNCZgYjVMVocsu8ayqDwqXk60yETbV/GKHAe7rzNXhn8
|
||||
67ySkfuxh2+7fun6jR8ZNJqaISLiVJCPuX44/Otfjxi2UeE1w0PC492WGY4ggAGSOYObmct+akes
|
||||
01a0o9BYEBA67xionBH3NdlF/JGnSNHJx6+773GiXDi45hKi4ynArmvBfgQ3KTpgCXZdAhkcNVZg
|
||||
B4Gisxu7bm0y2wJk6G2SjYzODPzKpYR3ljcrw48BRcdrYO0iuf40h6ifzbKR0THwtDGJRFuD0abZ
|
||||
yOmkVOJX/5ngane3Af9B6Og6iEENs9vRZNibZMB/EDqeUvz6kw1Hr8WURHz/QeiA+odwq3churXi
|
||||
5q2vzlRF48aApANSPtBdsezwpNzL5Hs5HKlc3UGo4VB9Cb39W+saeJSusfAwk9iJCl+Z7xL3UKza
|
||||
g/IP68gAUF2pdJa30rxNBVzqRSwkWwANWqmfhRcjyDrQDEOOWMgmStsYDBYfbc+8ybNrjOwdKr78
|
||||
fZb/9XhU+z5dMmBTJ96HvKs32Efw8u4v+EGM85/YJi9wB2S0H/n0E9yUyK1Krf1r2Cb1wgP9WlTJ
|
||||
MwlDME2e9a/2dLaetBw2sHPmY5rUjSah29XbmCavhTb5k1V0dJyPVY5n0TMmtMkcIukoCNaifbaX
|
||||
VqrhHdZzMDQbpozwxreA81/YL2d8fHm5+hk/zH7sUi7yYgssi0KSav5TezfXqMeHGeXl0fHsj/Ef
|
||||
WsQmgM3KpHgP4SQPHyd/h2WIn2a7wYv7VWcAhe/UHwBuKzzfEn6czm3BwmtXuQTvMvFL3FBhM826
|
||||
neNaKLpuidIKrnHcS4TTY+TT72fEC9VpNR0x7JhEisKxf2Keb/lwegYfcRWkE4JVJ1eF8WHrqvT8
|
||||
L+HXdH7/0CsYdvge2gjxYojSaJ/w0Xld6blUeNq28BerMewc4JswYSeYSzUJ3CZ8dNRTNNkb2Huz
|
||||
RZh0HNpAd66/SFgrt4jbBZSqmSdZo9eMWbTV9HszvqnI818Uzu6kw8jNYwulCplrfefy1CaZBTqM
|
||||
EIG0b4zp2ZOB4+SxAUEt3Lx+oBJR1T+RRHqHGBJebgB8dd2psd789YigGOc03x74GS3p9bqdRKYJ
|
||||
EneCK5P/zZ3QlEhouZcOtH3nhzuCFDQ/65yYXVPn7vekLqSneunRYxc/mBWsykBB9f4mO/U8sMRx
|
||||
g9GBq+Ucaus4QpxZbweJAJrroYOBq/bSOUGYTpq9dMzOyamRYS+dEYTpMLHmYzIB4aBcJvrpOz4T
|
||||
Wflr+z+W/vnDW7bS+YkL4cvnufT+j8bpcP2ZdnHc8DwhnMMcLa4869jaQhgg3DbMpkzgcCcmLe0/
|
||||
trJhO8L+eZeGhun0kEykE04Rm4GOLBwClmXSzOCOGFxiryw963eyx+6Sjoswv1HCAsRW8rE14kZj
|
||||
PUKySaoFQKR4WN5lu0OHgt+pcIJvaQObO4ZtlC0dq1rrb82F/xvyPJ9dI7WrjSzYwXXJdUEcL4U9
|
||||
NR086mezCYA60hz4BUGxGXsfv7A7cAQ+WeDfHSv2FvpyTdVIkUFCNZASCkxP6Z9rRJzVPxi3D0Iz
|
||||
d41veAeI+U3h0BPhGJxJdgeIB+9VNubt4rHNwvaS7eIHnx/xfWZ3gJhgb8Iv0m7dCO5cEIe0r8QV
|
||||
w74u9vZlDGB/08A6jU8dDPPfQNFHe+ztN5tGk3U14TtpnoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKC
|
||||
goLCLvwPHShx71gaA20AAAAldEVYdGRhdGU6Y3JlYXRlADIwMTMtMDktMTJUMTc6MDI6MTgtMDc6
|
||||
MDDNBiM8AAAAJXRFWHRkYXRlOm1vZGlmeQAyMDEzLTA5LTEyVDE3OjAyOjE4LTA3OjAwvFubgAAA
|
||||
AABJRU5ErkJggg==" />
</body>
</html>