Upgrade Rust.

Jack Moffitt 2014-08-04 23:02:30 -06:00
parent 62c9a779a9
commit 7a4321d649
81 changed files with 265 additions and 236 deletions

View file

@@ -45,6 +45,10 @@ ifdef CFG_ENABLE_DEBUG
 CFG_RUSTC_SELF_FLAGS += -g
 endif
+# FIXME: Remove once were on a Rust version without the old url crate
+# https://github.com/rust-lang/rust/issues/16140
+CFG_RUSTC_FLAGS += --extern url=$(B)/src/support/url/rust-url/liburl.rlib
 export CFG_RUSTC
 export CFG_RUSTC_FLAGS
 export CFG_LOCAL_RUSTC
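With this change the in-tree `url_` alias goes away: rust-url is built on its own and every rustc invocation receives it through `--extern url=<path to liburl.rlib>`, so downstream crates can simply write `extern crate url;` (several hunks below drop the `= "url_"` renaming). A minimal sketch of the idea with a hypothetical rlib path; a modern Cargo project would get the same effect from a `[dependencies]` entry:

```rust
// Compiled along the lines of:
//   rustc --extern url=target/deps/liburl.rlib main.rs
// so the bare crate name `url` resolves to the prebuilt rlib.
extern crate url;

fn main() {
    // Illustrative use of the crate under the modern rust-url API.
    let parsed = url::Url::parse("https://servo.org/index.html").unwrap();
    println!("host: {:?}", parsed.host_str());
}
```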

configure (vendored)
View file

@@ -409,10 +409,7 @@ CFG_BUILD_DIR="${CFG_BUILD_HOME}${CFG_TARGET}/"
 make_dir "${CFG_BUILD_DIR}"
 SNAPSHOT_VERSION=$(cat ${CFG_SRC_DIR}/src/compiler/rust-snapshot-hash | rev | cut -d/ -f1 | rev)
 SNAPSHOT_HASH=$(cat ${CFG_SRC_DIR}/src/compiler/rust-snapshot-hash | cut -d/ -f1)
-if [ $CFG_OSTYPE = "linux-androideabi" ]
-then
-CFG_ENABLE_TREE_RUST=1 # We don't yet have Android snapshots
-fi
 if [ -z "$CFG_ENABLE_TREE_RUST" -a -z "$CFG_LOCAL_RUST_ROOT" ]
 then
 if ! [ -f ${CFG_BUILD_DIR}/rust_snapshot/${SNAPSHOT_VERSION}-${DEFAULT_TARGET}/bin/rustc -a -f ${CFG_BUILD_DIR}/src/compiler/rust-snapshot-hash-stamp -a -z "$(diff ${CFG_BUILD_DIR}/src/compiler/rust-snapshot-hash-stamp ${CFG_SRC_DIR}/src/compiler/rust-snapshot-hash)" ]

View file

@@ -20,7 +20,7 @@ $(eval $(call DEF_SUBMODULE_TEST_RULES,$(submodule))))
 define DEF_LIB_CRATE_TEST_RULES
 servo-test-$(1): $$(DEPS_$(1))
 @$$(call E, compile: servo-test-$(1))
-$$(Q)$$(RUSTC) $$(RFLAGS_$(1)) --test -o $$@ $$<
+$$(Q)$$(RUSTC) $(strip $(CFG_RUSTC_FLAGS)) $$(RFLAGS_$(1)) --test -o $$@ $$<
 .PHONY: check-servo-$(1)
 check-servo-$(1): servo-test-$(1)

View file

@@ -4,7 +4,7 @@
 RUSTDOC_HTML_IN_HEADER = $(S)/src/etc/rustdoc-style.html
-RUSTDOC_FLAGS = --html-in-header $(RUSTDOC_HTML_IN_HEADER)
+RUSTDOC_FLAGS = --extern url=$(B)/src/support/url/rust-url/liburl.rlib --html-in-header $(RUSTDOC_HTML_IN_HEADER)
 RUSTDOC_DEPS = $(RUSTDOC_HTML_IN_HEADER)
 # FIXME(#2924) These crates make rustdoc fail for undetermined reasons.
@@ -36,15 +36,24 @@ $(eval $(call DEF_SERVO_DOC_RULES,$(lib_crate))))
 define DEF_SUBMODULES_DOC_RULES
+ifeq (,$(filter $(1),$(DOC_BLACKLISTED)))
 .PHONY: doc-$(1)
 doc-$(1): $$(DONE_DEPS_$(1)) $$(ROUGH_DEPS_$(1)) $$(RUSTC_DEP_$(1))
 @$$(call E, rustdoc: $(1))
 $$(Q) \
-RUSTDOC_FLAGS="$$(ENV_RLDFLAGS_$(1))" \
+RUSTDOC_FLAGS="$$(ENV_RLDFLAGS_$(1)) $$(RUSTDOC_FLAGS)" \
 RUSTDOC_TARGET="$$(CFG_BUILD_HOME)/doc" \
 $$(ENV_EXT_DEPS_$(1)) \
 $$(MAKE) -C $$(B)src/$$(PATH_$(1)) doc
+else
+.PHONY: doc-$(1)
+doc-$(1): $$(DONE_DEPS_$(1)) $$(ROUGH_DEPS_$(1)) $$(RUSTC_DEP_$(1))
+@echo SKIPPED: blacklisted rustdoc: $$@
+endif
 endef
 # Only Rust submodules

View file

@@ -1 +1 @@
-5e4a171bd551433e8d58114744c4efbc4bc90ae4/rust-0.12.0-pre
+9de20198aedb3c3419ee503755e04bcc198d3a94/rust-0.12.0-pre

View file

@@ -35,7 +35,7 @@ extern crate servo_util = "util";
 extern crate libc;
 extern crate time;
-extern crate url = "url_";
+extern crate url;
 #[cfg(target_os="macos")]
 extern crate core_graphics;

View file

@@ -434,7 +434,7 @@ impl<LTF: LayoutTaskFactory, STF: ScriptTaskFactory> Constellation<LTF, STF> {
 });
 idx.map(|idx| {
 debug!("removing pending frame change for failed pipeline");
-force_pipeline_exit(&self.pending_frames.get(idx).after.pipeline);
+force_pipeline_exit(&self.pending_frames[idx].after.pipeline);
 self.pending_frames.remove(idx)
 });
 if idx.is_none() {

View file

@@ -162,7 +162,7 @@ impl WindowMethods<Application> for Window {
 {
 let mut event_queue = self.event_queue.borrow_mut();
 if !event_queue.is_empty() {
-return event_queue.shift().unwrap();
+return event_queue.remove(0).unwrap();
 }
 }
@@ -174,7 +174,7 @@ impl WindowMethods<Application> for Window {
 if self.glfw_window.should_close() {
 QuitWindowEvent
 } else {
-self.event_queue.borrow_mut().shift().unwrap_or(IdleWindowEvent)
+self.event_queue.borrow_mut().remove(0).unwrap_or(IdleWindowEvent)
 }
 }

View file

@@ -161,12 +161,12 @@ impl WindowMethods<Application> for Window {
 fn recv(&self) -> WindowEvent {
 if !self.event_queue.borrow_mut().is_empty() {
-return self.event_queue.borrow_mut().shift().unwrap();
+return self.event_queue.borrow_mut().remove(0).unwrap();
 }
 glut::check_loop();
-self.event_queue.borrow_mut().shift().unwrap_or(IdleWindowEvent)
+self.event_queue.borrow_mut().remove(0).unwrap_or(IdleWindowEvent)
 }
 /// Sets the ready state.
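Both windowing back ends (GLFW above, GLUT here) stop using the removed `Vec::shift` and pop the front of the event queue with `remove(0)` instead. A small modern-Rust sketch of the same front-of-queue pattern; the event type is illustrative, and today `VecDeque::pop_front`, which returns an `Option` much like the old `shift`, is the usual tool:

```rust
use std::collections::VecDeque;

#[derive(Debug)]
enum WindowEvent {
    Idle,
    Quit,
}

// Counterpart of `event_queue.remove(0).unwrap_or(IdleWindowEvent)`:
// take the oldest queued event, or fall back to an idle event.
fn next_event(queue: &mut VecDeque<WindowEvent>) -> WindowEvent {
    queue.pop_front().unwrap_or(WindowEvent::Idle)
}

fn main() {
    let mut queue = VecDeque::new();
    queue.push_back(WindowEvent::Quit);
    println!("{:?}", next_event(&mut queue)); // Quit
    println!("{:?}", next_event(&mut queue)); // Idle
}
```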

View file

@@ -4,7 +4,7 @@
 use libc::{calloc, c_int, size_t};
 use std::mem;
-use std::str;
+use std::string;
 use std::c_vec::CVec;
 use string::{cef_string_userfree_utf16_alloc, cef_string_utf16_set};
 use types::{cef_command_line_t, cef_string_t, cef_string_userfree_t, cef_string_utf16_t};
@@ -30,7 +30,7 @@ pub fn command_line_init(argc: c_int, argv: *const *const u8) {
 unsafe {
 let mut a: Vec<String> = vec!();
 for i in range(0u, argc as uint) {
-a.push(str::raw::from_c_str(*argv.offset(i as int) as *const i8));
+a.push(string::raw::from_buf(*argv.offset(i as int) as *const u8));
 }
 let cl = command_line_new();
 (*cl).argc = argc;
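This hunk, and several later ones in the gfx and script crates, replace the removed `str::raw::from_c_str` with `string::raw::from_buf`, which copies a NUL-terminated `*const u8` into an owned `String`. A modern-Rust sketch of the same conversion using today's `CStr` API; the function and variable names are illustrative:

```rust
use std::ffi::CStr;
use std::os::raw::c_char;

/// Copy a NUL-terminated C string into an owned Rust String,
/// replacing any invalid UTF-8 with U+FFFD.
unsafe fn string_from_c(ptr: *const c_char) -> String {
    CStr::from_ptr(ptr).to_string_lossy().into_owned()
}

fn main() {
    let raw = b"--enable-gpu\0";
    let arg = unsafe { string_from_c(raw.as_ptr() as *const c_char) };
    assert_eq!(arg, "--enable-gpu");
}
```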

View file

@@ -19,7 +19,6 @@ use render_context::RenderContext;
 use text::glyph::CharIndex;
 use text::TextRun;
-use std::collections::Deque;
 use collections::dlist::DList;
 use collections::dlist;
 use geom::{Point2D, Rect, SideOffsets2D, Size2D};
@@ -222,7 +221,7 @@ impl StackingContext {
 }
 let mut new_list = DisplayList::new();
-new_list.list.push_back(item);
+new_list.list.push(item);
 stacking_context.positioned_descendants.push((z_index, new_list))
 }
 }
@@ -320,7 +319,7 @@ impl DisplayList {
 /// Appends the given item to the display list.
 pub fn push(&mut self, item: DisplayItem) {
-self.list.push_back(item)
+self.list.push(item)
 }
 /// Appends the given display list to this display list, consuming the other display list in
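This upgrade drops the `Deque` trait import and renames `DList::push_back` to `push` for the display list. Today's standard library keeps the explicit names on `std::collections::LinkedList`; a minimal sketch of appending single items and whole lists, with an illustrative item type:

```rust
use std::collections::LinkedList;

#[derive(Debug)]
struct DisplayItem {
    z_index: i32,
}

fn main() {
    // Counterpart of DisplayList::push: append one item at the back.
    let mut list: LinkedList<DisplayItem> = LinkedList::new();
    list.push_back(DisplayItem { z_index: 0 });

    // Counterpart of appending another display list, consuming its items.
    let mut other: LinkedList<DisplayItem> = LinkedList::new();
    other.push_back(DisplayItem { z_index: 1 });
    list.append(&mut other);

    println!("{} items", list.len()); // 2
}
```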

View file

@@ -6,7 +6,6 @@ use display_list::{BorderDisplayItemClass, ClipDisplayItem, ClipDisplayItemClass
 use display_list::{DisplayList, ImageDisplayItemClass, LineDisplayItemClass};
 use display_list::{PseudoDisplayItemClass, SolidColorDisplayItemClass, TextDisplayItemClass};
-use std::collections::Deque;
 use collections::dlist::DList;
 use geom::rect::Rect;
 use servo_util::geometry::Au;
@@ -36,7 +35,7 @@ impl DisplayListOptimizer {
 for item in display_list.iter() {
 match self.process_display_item(item) {
 None => {}
-Some(display_item) => result.push_back(display_item),
+Some(display_item) => result.push(display_item),
 }
 }
 DisplayList {

View file

@@ -4,7 +4,7 @@
 use geom::{Point2D, Rect, Size2D};
 use std::mem;
-use std::str;
+use std::string;
 use std::rc::Rc;
 use std::cell::RefCell;
 use servo_util::cache::{Cache, HashCache};
@@ -53,7 +53,7 @@ pub trait FontTableTagConversions {
 impl FontTableTagConversions for FontTableTag {
 fn tag_to_str(&self) -> String {
 unsafe {
-let reversed = str::raw::from_buf_len(mem::transmute(self), 4);
+let reversed = string::raw::from_buf_len(mem::transmute(self), 4);
 return String::from_chars([reversed.as_slice().char_at(3),
 reversed.as_slice().char_at(2),
 reversed.as_slice().char_at(1),
@@ -179,7 +179,7 @@ impl FontGroup {
 assert!(self.fonts.len() > 0);
 // TODO(Issue #177): Actually fall back through the FontGroup when a font is unsuitable.
-TextRun::new(&mut *self.fonts.get(0).borrow_mut(), text.clone())
+TextRun::new(&mut *self.fonts[0].borrow_mut(), text.clone())
 }
 }

View file

@@ -29,7 +29,7 @@ extern crate servo_util = "util";
 extern crate servo_msg = "msg";
 extern crate style;
 extern crate sync;
-extern crate url = "url_";
+extern crate url;
 // Eventually we would like the shaper to be pluggable, as many operating systems have their own
 // shapers. For now, however, this is a hard dependency.

View file

@@ -27,7 +27,7 @@ use freetype::tt_os2::TT_OS2;
 use std::mem;
 use std::ptr;
-use std::str;
+use std::string;
 use sync::Arc;
@@ -120,10 +120,10 @@ impl FontHandleMethods for FontHandle {
 self.font_data.clone()
 }
 fn family_name(&self) -> String {
-unsafe { str::raw::from_c_str(&*(*self.face).family_name) }
+unsafe { string::raw::from_buf(&*(*self.face).family_name as *const i8 as *const u8) }
 }
 fn face_name(&self) -> String {
-unsafe { str::raw::from_c_str(&*FT_Get_Postscript_Name(self.face)) }
+unsafe { string::raw::from_buf(&*FT_Get_Postscript_Name(self.face) as *const i8 as *const u8) }
 }
 fn is_italic(&self) -> bool {
 unsafe { (*self.face).style_flags & FT_STYLE_FLAG_ITALIC != 0 }

View file

@@ -17,9 +17,9 @@ use fontconfig::fontconfig::{
 };
 use libc;
-use libc::{c_int, c_char};
+use libc::c_int;
 use std::ptr;
-use std::str;
+use std::string;
 pub fn get_available_families(callback: |String|) {
 unsafe {
@@ -32,7 +32,7 @@ pub fn get_available_families(callback: |String|) {
 let mut FC_FAMILY_C = "family".to_c_str();
 let FC_FAMILY = FC_FAMILY_C.as_mut_ptr();
 while FcPatternGetString(*font, FC_FAMILY, v, &mut family) == FcResultMatch {
-let family_name = str::raw::from_c_str(family as *const c_char);
+let family_name = string::raw::from_buf(family as *const i8 as *const u8);
 callback(family_name);
 v += 1;
 }
@@ -75,7 +75,7 @@ pub fn get_variations_for_family(family_name: &str, callback: |String|) {
 let FC_FILE = FC_FILE_C.as_mut_ptr();
 let mut file: *mut FcChar8 = ptr::mut_null();
 let file = if FcPatternGetString(*font, FC_FILE, 0, &mut file) == FcResultMatch {
-str::raw::from_c_str(file as *const libc::c_char)
+string::raw::from_buf(file as *const i8 as *const u8)
 } else {
 fail!();
 };

View file

@@ -379,7 +379,7 @@ impl<'a> DetailedGlyphStore {
 .expect("Invalid index not found in detailed glyph lookup table!");
 assert!(i + (detail_offset as uint) < self.detail_buffer.len());
-self.detail_buffer.get(i + (detail_offset as uint))
+&self.detail_buffer[i + (detail_offset as uint)]
 }
 fn ensure_sorted(&mut self) {
@@ -451,7 +451,7 @@ pub enum GlyphInfo<'a> {
 impl<'a> GlyphInfo<'a> {
 pub fn id(self) -> GlyphId {
 match self {
-SimpleGlyphInfo(store, entry_i) => store.entry_buffer.get(entry_i.to_uint()).id(),
+SimpleGlyphInfo(store, entry_i) => store.entry_buffer[entry_i.to_uint()].id(),
 DetailGlyphInfo(store, entry_i, detail_j) => {
 store.detail_store.get_detailed_glyph_with_index(entry_i, detail_j).id
 }
@@ -462,7 +462,7 @@ impl<'a> GlyphInfo<'a> {
 // FIXME: Resolution conflicts with IteratorUtil trait so adding trailing _
 pub fn advance(self) -> Au {
 match self {
-SimpleGlyphInfo(store, entry_i) => store.entry_buffer.get(entry_i.to_uint()).advance(),
+SimpleGlyphInfo(store, entry_i) => store.entry_buffer[entry_i.to_uint()].advance(),
 DetailGlyphInfo(store, entry_i, detail_j) => {
 store.detail_store.get_detailed_glyph_with_index(entry_i, detail_j).advance
 }
@@ -560,7 +560,7 @@ impl<'a> GlyphStore {
 self.detail_store.add_detailed_glyphs_for_entry(i, glyph);
 GlyphEntry::complex(data.cluster_start, data.ligature_start, 1)
 }
-}.adapt_character_flags_of_entry(*self.entry_buffer.get(i.to_uint()));
+}.adapt_character_flags_of_entry(self.entry_buffer[i.to_uint()]);
 *self.entry_buffer.get_mut(i.to_uint()) = entry;
 }
@@ -586,7 +586,7 @@ impl<'a> GlyphStore {
 first_glyph_data.ligature_start,
 glyph_count)
 }
-}.adapt_character_flags_of_entry(*self.entry_buffer.get(i.to_uint()));
+}.adapt_character_flags_of_entry(self.entry_buffer[i.to_uint()]);
 debug!("Adding multiple glyphs[idx={}, count={}]: {:?}", i, glyph_count, entry);
@@ -633,56 +633,56 @@ impl<'a> GlyphStore {
 // getter methods
 pub fn char_is_space(&self, i: CharIndex) -> bool {
 assert!(i < self.char_len());
-self.entry_buffer.get(i.to_uint()).char_is_space()
+self.entry_buffer[i.to_uint()].char_is_space()
 }
 pub fn char_is_tab(&self, i: CharIndex) -> bool {
 assert!(i < self.char_len());
-self.entry_buffer.get(i.to_uint()).char_is_tab()
+self.entry_buffer[i.to_uint()].char_is_tab()
 }
 pub fn char_is_newline(&self, i: CharIndex) -> bool {
 assert!(i < self.char_len());
-self.entry_buffer.get(i.to_uint()).char_is_newline()
+self.entry_buffer[i.to_uint()].char_is_newline()
 }
 pub fn is_ligature_start(&self, i: CharIndex) -> bool {
 assert!(i < self.char_len());
-self.entry_buffer.get(i.to_uint()).is_ligature_start()
+self.entry_buffer[i.to_uint()].is_ligature_start()
 }
 pub fn is_cluster_start(&self, i: CharIndex) -> bool {
 assert!(i < self.char_len());
-self.entry_buffer.get(i.to_uint()).is_cluster_start()
+self.entry_buffer[i.to_uint()].is_cluster_start()
 }
 pub fn can_break_before(&self, i: CharIndex) -> BreakType {
 assert!(i < self.char_len());
-self.entry_buffer.get(i.to_uint()).can_break_before()
+self.entry_buffer[i.to_uint()].can_break_before()
 }
 // setter methods
 pub fn set_char_is_space(&mut self, i: CharIndex) {
 assert!(i < self.char_len());
-let entry = *self.entry_buffer.get(i.to_uint());
+let entry = self.entry_buffer[i.to_uint()];
 *self.entry_buffer.get_mut(i.to_uint()) = entry.set_char_is_space();
 }
 pub fn set_char_is_tab(&mut self, i: CharIndex) {
 assert!(i < self.char_len());
-let entry = *self.entry_buffer.get(i.to_uint());
+let entry = self.entry_buffer[i.to_uint()];
 *self.entry_buffer.get_mut(i.to_uint()) = entry.set_char_is_tab();
 }
 pub fn set_char_is_newline(&mut self, i: CharIndex) {
 assert!(i < self.char_len());
-let entry = *self.entry_buffer.get(i.to_uint());
+let entry = self.entry_buffer[i.to_uint()];
 *self.entry_buffer.get_mut(i.to_uint()) = entry.set_char_is_newline();
 }
 pub fn set_can_break_before(&mut self, i: CharIndex, t: BreakType) {
 assert!(i < self.char_len());
-let entry = *self.entry_buffer.get(i.to_uint());
+let entry = self.entry_buffer[i.to_uint()];
 *self.entry_buffer.get_mut(i.to_uint()) = entry.set_can_break_before(t);
 }
 }
@@ -738,12 +738,12 @@ impl<'a> Iterator<(CharIndex, GlyphInfo<'a>)> for GlyphIterator<'a> {
 self.char_range.next().and_then(|i| {
 self.char_index = i;
 assert!(i < self.store.char_len());
-let entry = self.store.entry_buffer.get(i.to_uint());
+let entry = self.store.entry_buffer[i.to_uint()];
 if entry.is_simple() {
 Some((self.char_index, SimpleGlyphInfo(self.store, i)))
 } else {
 // Fall back to the slow path.
-self.next_complex_glyph(entry, i)
+self.next_complex_glyph(&entry, i)
 }
 })
 }
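The glyph-store hunks above replace the old `Vec::get(i)` accessor, which returned a plain reference and panicked when out of bounds, with `Index` sugar (`buffer[i]`). A tiny modern-Rust sketch of the distinction as it stands today, where `get` instead returns an `Option` for fallible access; the buffer contents are illustrative:

```rust
fn main() {
    let entry_buffer = vec![10u32, 20, 30];

    // Indexing panics on an out-of-bounds index, like the old `.get(i)` did.
    let by_index: u32 = entry_buffer[1];

    // Today's `.get(i)` returns Option<&T> for checked access instead.
    let checked: Option<&u32> = entry_buffer.get(99);

    assert_eq!(by_index, 20);
    assert!(checked.is_none());
}
```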

View file

@@ -303,7 +303,7 @@ impl Shaper {
 char_byte_span.begin(), char_byte_span.length(), glyph_span.begin());
 while char_byte_span.end() != byte_max &&
-*byteToGlyph.get(char_byte_span.end() as uint) == NO_GLYPH {
+byteToGlyph[char_byte_span.end() as uint] == NO_GLYPH {
 debug!("Extending char byte span to include byte offset={} with no associated \
 glyph", char_byte_span.end());
 let range = text.char_range_at(char_byte_span.end() as uint);
@@ -315,8 +315,8 @@ impl Shaper {
 // in cases where one char made several glyphs and left some unassociated chars.
 let mut max_glyph_idx = glyph_span.end();
 for i in char_byte_span.each_index() {
-if *byteToGlyph.get(i as uint) > NO_GLYPH {
-max_glyph_idx = cmp::max(*byteToGlyph.get(i as uint) as int + 1, max_glyph_idx);
+if byteToGlyph[i as uint] > NO_GLYPH {
+max_glyph_idx = cmp::max(byteToGlyph[i as uint] as int + 1, max_glyph_idx);
 }
 }
@@ -375,7 +375,7 @@ impl Shaper {
 let mut covered_byte_span = char_byte_span.clone();
 // extend, clipping at end of text range.
 while covered_byte_span.end() < byte_max
-&& *byteToGlyph.get(covered_byte_span.end() as uint) == NO_GLYPH {
+&& byteToGlyph[covered_byte_span.end() as uint] == NO_GLYPH {
 let range = text.char_range_at(covered_byte_span.end() as uint);
 drop(range.ch);
 covered_byte_span.extend_to(range.next as int);

View file

@@ -28,7 +28,6 @@ use wrapper::ThreadSafeLayoutNode;
 use style::ComputedValues;
 use style::computed_values::{clear, position};
-use collections::Deque;
 use collections::dlist::DList;
 use geom::{Size2D, Point2D, Rect};
 use gfx::color;
@@ -334,8 +333,10 @@ impl CandidateBSizeIterator {
 status: InitialCandidateBSizeStatus,
 }
 }
-pub fn next<'a>(&'a mut self) -> Option<(MaybeAuto, &'a mut Au)> {
+}
+impl Iterator<MaybeAuto> for CandidateBSizeIterator {
+fn next(&mut self) -> Option<MaybeAuto> {
 self.status = match self.status {
 InitialCandidateBSizeStatus => TryingBSizeCandidateBSizeStatus,
 TryingBSizeCandidateBSizeStatus => {
@@ -360,12 +361,12 @@ impl CandidateBSizeIterator {
 };
 match self.status {
-TryingBSizeCandidateBSizeStatus => Some((self.block_size, &mut self.candidate_value)),
+TryingBSizeCandidateBSizeStatus => Some(self.block_size),
 TryingMaxCandidateBSizeStatus => {
-Some((Specified(self.max_block_size.unwrap()), &mut self.candidate_value))
+Some(Specified(self.max_block_size.unwrap()))
 }
 TryingMinCandidateBSizeStatus => {
-Some((Specified(self.min_block_size), &mut self.candidate_value))
+Some(Specified(self.min_block_size))
 }
 FoundCandidateBSizeStatus => None,
 InitialCandidateBSizeStatus => fail!(),
@@ -970,8 +971,8 @@ impl BlockFlow {
 let mut candidate_block_size_iterator = CandidateBSizeIterator::new(self.fragment.style(),
 None);
-for (candidate_block_size, new_candidate_block_size) in candidate_block_size_iterator {
-*new_candidate_block_size = match candidate_block_size {
+for candidate_block_size in candidate_block_size_iterator {
+candidate_block_size_iterator.candidate_value = match candidate_block_size {
 Auto => block_size,
 Specified(value) => value
 }
@@ -1086,8 +1087,8 @@ impl BlockFlow {
 // Calculate content block-size, taking `min-block-size` and `max-block-size` into account.
 let mut candidate_block_size_iterator = CandidateBSizeIterator::new(self.fragment.style(), None);
-for (candidate_block_size, new_candidate_block_size) in candidate_block_size_iterator {
-*new_candidate_block_size = match candidate_block_size {
+for candidate_block_size in candidate_block_size_iterator {
+candidate_block_size_iterator.candidate_value = match candidate_block_size {
 Auto => content_block_size,
 Specified(value) => value,
 }
@@ -1230,7 +1231,7 @@ impl BlockFlow {
 let mut candidate_block_size_iterator =
 CandidateBSizeIterator::new(style, Some(containing_block_block_size));
-for (block_size_used_val, new_candidate_block_size) in candidate_block_size_iterator {
+for block_size_used_val in candidate_block_size_iterator {
 solution =
 Some(BSizeConstraintSolution::solve_vertical_constraints_abs_nonreplaced(
 block_size_used_val,
@@ -1242,7 +1243,7 @@ impl BlockFlow {
 available_block_size,
 static_b_offset));
-*new_candidate_block_size = solution.unwrap().block_size
+candidate_block_size_iterator.candidate_value = solution.unwrap().block_size
 }
 }
 }
@@ -1294,7 +1295,7 @@ impl BlockFlow {
 background_color: color::rgba(1.0, 1.0, 1.0, 0.0),
 scroll_policy: scroll_policy,
 };
-self.base.layers.push_back(new_layer)
+self.base.layers.push(new_layer)
 }
 /// Return the block-start outer edge of the hypothetical box for an absolute flow.
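The block.rs hunks turn `CandidateBSizeIterator` into a real `Iterator` that yields only the candidate value, while the caller writes the resolved size back into the iterator's `candidate_value` field after each step. A compilable modern-Rust sketch of that shape; the type, field names, and values are simplified stand-ins rather than Servo's definitions:

```rust
/// Yields candidate sizes; the caller records the resolved value back
/// into `candidate_value` after inspecting each candidate.
struct CandidateIter {
    candidates: Vec<Option<u32>>, // None plays the role of `Auto`
    index: usize,
    candidate_value: u32,
}

impl Iterator for CandidateIter {
    type Item = Option<u32>;

    fn next(&mut self) -> Option<Option<u32>> {
        let item = self.candidates.get(self.index).copied();
        self.index += 1;
        item
    }
}

fn main() {
    let mut iter = CandidateIter {
        candidates: vec![None, Some(80), Some(120)],
        index: 0,
        candidate_value: 0,
    };
    // Mirrors `for candidate in iterator { iterator.candidate_value = ... }`;
    // a while-let keeps the iterator available for the write-back.
    while let Some(candidate) = iter.next() {
        iter.candidate_value = candidate.unwrap_or(100); // 100 stands in for the Auto fallback
    }
    assert_eq!(iter.candidate_value, 120);
}
```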

View file

@@ -8,16 +8,20 @@
 use flow::{Flow, base, mut_base};
 use flow_ref::FlowRef;
+use std::kinds::marker::ContravariantLifetime;
 use std::mem;
 use std::ptr;
 pub type Link = Option<FlowRef>;
+// FIXME: use TraitObject instead of duplicating the type
 #[allow(raw_pointer_deriving)]
 #[deriving(Clone)]
-pub struct Rawlink {
-vtable: *const (),
+pub struct Rawlink<'a> {
+vtable: *mut (),
 obj: *mut (),
+marker: ContravariantLifetime<'a>,
 }
 /// Doubly-linked list of Flows.
@@ -38,17 +42,17 @@ pub struct FlowListIterator<'a> {
 /// Double-ended mutable FlowList iterator
 pub struct MutFlowListIterator<'a> {
-_list: &'a mut FlowList,
-head: Rawlink,
+head: Rawlink<'a>,
 nelem: uint,
 }
-impl Rawlink {
+impl<'a> Rawlink<'a> {
 /// Like Option::None for Rawlink
-pub fn none() -> Rawlink {
+pub fn none() -> Rawlink<'static> {
 Rawlink {
-vtable: ptr::null(),
+vtable: ptr::mut_null(),
 obj: ptr::mut_null(),
+marker: ContravariantLifetime,
 }
 }
@@ -57,7 +61,7 @@ impl Rawlink {
 unsafe { mem::transmute(n) }
 }
-pub unsafe fn resolve_mut(&self) -> Option<&mut Flow> {
+pub unsafe fn resolve_mut(&self) -> Option<&'a mut Flow> {
 if self.obj.is_null() {
 None
 } else {
@@ -201,14 +205,14 @@ impl FlowList {
 /// Provide a forward iterator with mutable references
 #[inline]
 pub fn mut_iter<'a>(&'a mut self) -> MutFlowListIterator<'a> {
+let len = self.len();
 let head_raw = match self.list_head {
 Some(ref mut h) => Rawlink::some(h.get()),
 None => Rawlink::none(),
 };
 MutFlowListIterator {
-nelem: self.len(),
+nelem: len,
 head: head_raw,
-_list: self
 }
 }
 }
@@ -269,7 +273,10 @@ impl<'a> Iterator<&'a mut Flow> for MutFlowListIterator<'a> {
 self.head = match mut_base(next).next_sibling {
 Some(ref mut node) => {
 let x: &mut Flow = node.get_mut();
-Rawlink::some(x)
+// NOTE: transmute needed here to break the link
+// between x and next so that it is no longer
+// borrowed.
+mem::transmute(Rawlink::some(x))
 }
 None => Rawlink::none(),
 };
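`Rawlink` gains a lifetime parameter so `resolve_mut` can return `&'a mut Flow` tied to the list borrow rather than an unbounded reference; in 2014 the marker was `ContravariantLifetime`, whose modern counterpart is `PhantomData`. A minimal modern-Rust sketch of a raw-pointer link carrying a lifetime marker; the names are illustrative:

```rust
use std::marker::PhantomData;
use std::ptr;

/// A raw, lifetime-tagged link to a node, analogous to Rawlink<'a>.
struct RawLink<'a, T> {
    obj: *mut T,
    marker: PhantomData<&'a mut T>, // ties resolved references to 'a
}

impl<'a, T> RawLink<'a, T> {
    fn none() -> RawLink<'static, T> {
        RawLink { obj: ptr::null_mut(), marker: PhantomData }
    }

    fn some(obj: &'a mut T) -> RawLink<'a, T> {
        RawLink { obj, marker: PhantomData }
    }

    /// Resolve back to a mutable reference bounded by 'a, or None if empty.
    unsafe fn resolve_mut(&self) -> Option<&'a mut T> {
        self.obj.as_mut()
    }
}

fn main() {
    let mut value = 7u32;
    {
        let link = RawLink::some(&mut value);
        if let Some(v) = unsafe { link.resolve_mut() } {
            *v += 1;
        }
    }
    println!("{}", value); // 8

    let empty: RawLink<'static, u32> = RawLink::none();
    assert!(unsafe { empty.resolve_mut() }.is_none());
}
```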

View file

@@ -13,6 +13,7 @@ use std::mem;
 use std::ptr;
 use std::sync::atomics::SeqCst;
+// FIXME: This should probably be a wrapper on TraitObject.
 #[unsafe_no_drop_flag]
 pub struct FlowRef {
 vtable: *const u8,

View file

@@ -1130,7 +1130,7 @@ impl Fragment {
 UnscannedTextFragment(_) => fail!("Unscanned text fragments should have been scanned by now!"),
 ScannedTextFragment(ref text_fragment_info) => {
 let mut new_line_pos = self.new_line_pos.clone();
-let cur_new_line_pos = new_line_pos.shift().unwrap();
+let cur_new_line_pos = new_line_pos.remove(0).unwrap();
 let inline_start_range = Range::new(text_fragment_info.range.begin(), cur_new_line_pos);
 let inline_end_range = Range::new(text_fragment_info.range.begin() + cur_new_line_pos + CharIndex(1),

View file

@@ -719,7 +719,7 @@ impl InlineFragments {
 /// A convenience function to return the fragment at a given index.
 pub fn get<'a>(&'a self, index: uint) -> &'a Fragment {
-self.fragments.get(index)
+&self.fragments[index]
 }
 /// A convenience function to return a mutable reference to the fragment at a given index.

View file

@@ -32,7 +32,7 @@ extern crate collections;
 extern crate green;
 extern crate libc;
 extern crate sync;
-extern crate url = "url_";
+extern crate url;
 pub mod block;
 pub mod construct;

View file

@@ -422,7 +422,7 @@ impl LayoutTask {
 AddStylesheetMsg(sheet) => self.handle_add_stylesheet(sheet),
 ReflowMsg(data) => {
 profile(time::LayoutPerformCategory, self.time_profiler_chan.clone(), || {
-self.handle_reflow(data);
+self.handle_reflow(&*data);
 });
 }
 QueryMsg(query) => {
@@ -617,8 +617,10 @@ impl LayoutTask {
 /// The high-level routine that performs layout tasks.
 fn handle_reflow(&mut self, data: &Reflow) {
 // FIXME: Isolate this transmutation into a "bridge" module.
+// FIXME(rust#16366): The following line had to be moved because of a
+// rustc bug. It should be in the next unsafe block.
+let mut node: JS<Node> = unsafe { JS::from_trusted_node_address(data.document_root) };
 let node: &mut LayoutNode = unsafe {
-let mut node: JS<Node> = JS::from_trusted_node_address(data.document_root);
 mem::transmute(&mut node)
 };
@@ -673,7 +675,7 @@ impl LayoutTask {
 let mut applicable_declarations = ApplicableDeclarations::new();
 let mut applicable_declarations_cache = ApplicableDeclarationsCache::new();
 let mut style_sharing_candidate_cache = StyleSharingCandidateCache::new();
-drop(node.recalc_style_for_subtree(self.stylist,
+drop(node.recalc_style_for_subtree(&*self.stylist,
 &mut layout_ctx,
 font_context_opt.take_unwrap(),
 &mut applicable_declarations,

View file

@@ -218,7 +218,9 @@ fn recalc_style_for_node(unsafe_layout_node: UnsafeLayoutNode,
 let layout_context = unsafe { &mut **proxy.user_data() };
 // Get a real layout node.
-let node: LayoutNode = layout_node_from_unsafe_layout_node(&unsafe_layout_node);
+let node: LayoutNode = unsafe {
+layout_node_from_unsafe_layout_node(&unsafe_layout_node)
+};
 // Initialize layout data.
 //
@@ -309,7 +311,9 @@ fn construct_flows(mut unsafe_layout_node: UnsafeLayoutNode,
 let layout_context = unsafe { &mut **proxy.user_data() };
 // Get a real layout node.
-let node: LayoutNode = layout_node_from_unsafe_layout_node(&unsafe_layout_node);
+let node: LayoutNode = unsafe {
+layout_node_from_unsafe_layout_node(&unsafe_layout_node)
+};
 // Construct flows for this node.
 {

View file

@@ -203,7 +203,7 @@ impl Flow for TableFlow {
 debug!("table until the previous row has {} column(s) and this row has {} column(s)",
 num_cols, num_child_cols);
 for i in range(num_cols, num_child_cols) {
-self.col_inline_sizes.push( *kid_col_inline_sizes.get(i) );
+self.col_inline_sizes.push((*kid_col_inline_sizes)[i]);
 }
 },
 AutoLayout => {
@@ -217,9 +217,9 @@ impl Flow for TableFlow {
 num_cols, num_child_cols);
 for i in range(num_cols, num_child_cols) {
 self.col_inline_sizes.push(Au::new(0));
-let new_kid_min = *kid.col_min_inline_sizes().get(i);
+let new_kid_min = kid.col_min_inline_sizes()[i];
 self.col_min_inline_sizes.push( new_kid_min );
-let new_kid_pref = *kid.col_pref_inline_sizes().get(i);
+let new_kid_pref = kid.col_pref_inline_sizes()[i];
 self.col_pref_inline_sizes.push( new_kid_pref );
 min_inline_size = min_inline_size + new_kid_min;
 pref_inline_size = pref_inline_size + new_kid_pref;

View file

@@ -158,10 +158,10 @@ impl Flow for TableRowGroupFlow {
 let num_child_cols = kid.col_min_inline_sizes().len();
 for i in range(num_cols, num_child_cols) {
 self.col_inline_sizes.push(Au::new(0));
-let new_kid_min = *kid.col_min_inline_sizes().get(i);
-self.col_min_inline_sizes.push(*kid.col_min_inline_sizes().get(i));
-let new_kid_pref = *kid.col_pref_inline_sizes().get(i);
-self.col_pref_inline_sizes.push(*kid.col_pref_inline_sizes().get(i));
+let new_kid_min = kid.col_min_inline_sizes()[i];
+self.col_min_inline_sizes.push(kid.col_min_inline_sizes()[i]);
+let new_kid_pref = kid.col_pref_inline_sizes()[i];
+self.col_pref_inline_sizes.push(kid.col_pref_inline_sizes()[i]);
 min_inline_size = min_inline_size + new_kid_min;
 pref_inline_size = pref_inline_size + new_kid_pref;
 }

View file

@@ -207,9 +207,9 @@ impl TextRunScanner {
 let mut new_ranges: Vec<Range<CharIndex>> = vec![];
 let mut char_total = CharIndex(0);
 for i in range(0, transformed_strs.len() as int) {
-let added_chars = CharIndex(transformed_strs.get(i as uint).as_slice().char_len() as int);
+let added_chars = CharIndex(transformed_strs[i as uint].as_slice().char_len() as int);
 new_ranges.push(Range::new(char_total, added_chars));
-run_str.push_str(transformed_strs.get(i as uint).as_slice());
+run_str.push_str(transformed_strs[i as uint].as_slice());
 char_total = char_total + added_chars;
 }
@@ -219,7 +219,7 @@ impl TextRunScanner {
 let clump = self.clump;
 let run = if clump.length() != CharIndex(0) && run_str.len() > 0 {
 Some(Arc::new(box TextRun::new(
-&mut *fontgroup.fonts.get(0).borrow_mut(),
+&mut *fontgroup.fonts[0].borrow_mut(),
 run_str.to_string())))
 } else {
 None
@@ -229,21 +229,21 @@ impl TextRunScanner {
 debug!("TextRunScanner: pushing fragment(s) in range: {}", self.clump);
 for i in clump.each_index() {
 let logical_offset = i - self.clump.begin();
-let range = new_ranges.get(logical_offset.to_uint());
+let range = new_ranges[logical_offset.to_uint()];
 if range.length() == CharIndex(0) {
 debug!("Elided an `UnscannedTextFragment` because it was zero-length after \
 compression; {}", in_fragments[i.to_uint()]);
 continue
 }
-let new_text_fragment_info = ScannedTextFragmentInfo::new(run.get_ref().clone(), *range);
+let new_text_fragment_info = ScannedTextFragmentInfo::new(run.get_ref().clone(), range);
 let old_fragment = &in_fragments[i.to_uint()];
-let new_metrics = new_text_fragment_info.run.metrics_for_range(range);
+let new_metrics = new_text_fragment_info.run.metrics_for_range(&range);
 let bounding_box_size = LogicalSize::from_physical(
 old_fragment.style.writing_mode, new_metrics.bounding_box.size);
 let mut new_fragment = old_fragment.transform(
 bounding_box_size, ScannedTextFragment(new_text_fragment_info));
-new_fragment.new_line_pos = new_line_positions.get(logical_offset.to_uint()).new_line_pos.clone();
+new_fragment.new_line_pos = new_line_positions[logical_offset.to_uint()].new_line_pos.clone();
 out_fragments.push(new_fragment)
 }
 }
@@ -263,7 +263,7 @@ impl TextRunScanner {
 pub fn font_metrics_for_style(font_context: &mut FontContext, font_style: &FontStyle)
 -> FontMetrics {
 let fontgroup = font_context.get_layout_font_group_for_style(font_style);
-fontgroup.fonts.get(0).borrow().metrics.clone()
+fontgroup.fonts[0].borrow().metrics.clone()
 }
 /// Converts a computed style to a font style used for rendering.

View file

@@ -322,7 +322,7 @@ impl<'a> Iterator<LayoutNode<'a>> for LayoutTreeIterator<'a> {
 if self.index >= self.nodes.len() {
 None
 } else {
-let v = self.nodes.get(self.index).clone();
+let v = self.nodes[self.index].clone();
 self.index += 1;
 Some(v)
 }
@@ -768,9 +768,9 @@ pub fn layout_node_to_unsafe_layout_node(node: &LayoutNode) -> UnsafeLayoutNode
 }
 }
-pub fn layout_node_from_unsafe_layout_node(node: &UnsafeLayoutNode) -> LayoutNode {
-unsafe {
-let (node, _) = *node;
-mem::transmute(node)
-}
+// FIXME(#3044): This should be updated to use a real lifetime instead of
+// faking one.
+pub unsafe fn layout_node_from_unsafe_layout_node(node: &UnsafeLayoutNode) -> LayoutNode<'static> {
+let (node, _) = *node;
+mem::transmute(node)
 }
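`layout_node_from_unsafe_layout_node` is now an `unsafe fn`, so its callers in parallel.rs (earlier in this commit) take on the obligation with their own `unsafe { ... }` blocks instead of the helper hiding the unsafety. A tiny modern-Rust sketch of that shift; the function below is a stand-in, not Servo's:

```rust
/// Reconstitute a reference from a raw address. Callers must guarantee the
/// address points at a live, 'static u32, which is why the fn itself is unsafe.
unsafe fn node_from_address(addr: usize) -> &'static u32 {
    &*(addr as *const u32)
}

fn main() {
    static VALUE: u32 = 42;
    let addr = &VALUE as *const u32 as usize;
    // The proof obligation now sits at the call site, as in the parallel.rs hunks.
    let node = unsafe { node_from_address(addr) };
    println!("{}", node);
}
```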

View file

@@ -26,7 +26,7 @@ extern crate gfx;
 extern crate libc;
 extern crate native;
 extern crate rustrt;
-extern crate url = "url_";
+extern crate url;
 #[cfg(not(test))]
 use compositing::{CompositorChan, CompositorTask, Constellation};
@@ -55,7 +55,7 @@ use std::os;
 #[cfg(not(test))]
 use std::task::TaskBuilder;
 #[cfg(not(test), target_os="android")]
-use std::str;
+use std::string;
 #[cfg(not(test))]
 use url::{Url, UrlParser};
@@ -78,7 +78,7 @@ pub extern "C" fn android_start(argc: int, argv: *const *const u8) -> int {
 let mut args: Vec<String> = vec!();
 for i in range(0u, argc as uint) {
 unsafe {
-args.push(str::raw::from_c_str(*argv.offset(i as int) as *const i8));
+args.push(string::raw::from_buf(*argv.offset(i as int) as *const u8));
 }
 }

View file

@@ -11,7 +11,7 @@ extern crate layers;
 extern crate serialize;
 extern crate servo_util = "util";
 extern crate std;
-extern crate url = "url_";
+extern crate url;
 #[cfg(target_os="macos")]
 extern crate core_foundation;

View file

@@ -10,7 +10,7 @@ use serialize::base64::FromBase64;
 use http::headers::test_utils::from_stream_with_str;
 use http::headers::content_type::MediaType;
-use url::{percent_decode, OtherSchemeData};
+use url::{percent_decode, NonRelativeSchemeData};
 pub fn factory() -> LoaderTask {
@@ -30,7 +30,7 @@ fn load(load_data: LoadData, start_chan: Sender<LoadResponse>) {
 // Split out content type and data.
 let mut scheme_data = match url.scheme_data {
-OtherSchemeData(scheme_data) => scheme_data,
+NonRelativeSchemeData(scheme_data) => scheme_data,
 _ => fail!("Expected a non-relative scheme URL.")
 };
 match url.query {
@@ -49,7 +49,7 @@ fn load(load_data: LoadData, start_chan: Sender<LoadResponse>) {
 // ";base64" must come at the end of the content type, per RFC 2397.
 // rust-http will fail to parse it because there's no =value part.
 let mut is_base64 = false;
-let mut ct_str = *parts.get(0);
+let mut ct_str = parts[0];
 if ct_str.ends_with(";base64") {
 is_base64 = true;
 ct_str = ct_str.slice_to(ct_str.as_bytes().len() - 7);
@@ -61,7 +61,7 @@ fn load(load_data: LoadData, start_chan: Sender<LoadResponse>) {
 metadata.set_content_type(&content_type);
 let progress_chan = start_sending(start_chan, metadata);
-let bytes = percent_decode(parts.get(1).as_bytes());
+let bytes = percent_decode(parts[1].as_bytes());
 if is_base64 {
 // FIXME(#2909): Its unclear what to do with non-alphabet characters,

View file

@@ -18,7 +18,7 @@ extern crate serialize;
 extern crate servo_util = "util";
 extern crate stb_image;
 extern crate sync;
-extern crate url = "url_";
+extern crate url;
 /// Image handling.
 ///

View file

@@ -165,12 +165,12 @@ impl CORSRequest {
 .eq_ignore_ascii_case(name))
 .map(|h| h.header_value())
 }
-let methods_string = match find_header(response.headers, "Access-Control-Allow-Methods") {
+let methods_string = match find_header(&*response.headers, "Access-Control-Allow-Methods") {
 Some(s) => s,
 _ => return error
 };
 let methods = methods_string.as_slice().split(',');
-let headers_string = match find_header(response.headers, "Access-Control-Allow-Headers") {
+let headers_string = match find_header(&*response.headers, "Access-Control-Allow-Headers") {
 Some(s) => s,
 _ => return error
 };
@@ -197,7 +197,7 @@ impl CORSRequest {
 }
 }
 // Substep 7, 8
-let max_age: uint = find_header(response.headers, "Access-Control-Max-Age")
+let max_age: uint = find_header(&*response.headers, "Access-Control-Max-Age")
 .and_then(|h| FromStr::from_str(h.as_slice())).unwrap_or(0);
 // Substep 9: Impose restrictions on max-age, if any (unimplemented)
 // Substeps 10-12: Add a cache (partially implemented, XXXManishearth)

View file

@@ -140,7 +140,8 @@ pub struct CallSetup {
 impl CallSetup {
 /// Performs the setup needed to make a call.
 pub fn new<T: CallbackContainer>(callback: &T, handling: ExceptionHandling) -> CallSetup {
-let global = global_object_for_js_object(callback.callback()).root();
+let global = global_object_for_js_object(callback.callback());
+let global = global.root();
 let cx = global.root_ref().get_cx();
 CallSetup {
 cx: cx,

View file

@@ -2175,7 +2175,8 @@ class CGCallGenerator(CGThing):
 if static:
 glob = ""
 else:
-glob = " let global = global_object_for_js_object(this.reflector().get_jsobject()).root();\n"
+glob = " let global = global_object_for_js_object(this.reflector().get_jsobject());\n"\
+" let global = global.root();\n"
 self.cgRoot.append(CGGeneric(
 "let result = match result {\n"
@@ -3915,7 +3916,8 @@ class CGClassConstructHook(CGAbstractExternMethod):
 def definition_body(self):
 preamble = CGGeneric("""\
-let global = global_object_for_js_object(JS_CALLEE(cx, vp).to_object()).root();
+let global = global_object_for_js_object(JS_CALLEE(cx, vp).to_object());
+let global = global.root();
 """)
 nativeName = MakeNativeName(self._ctor.identifier.name)
 callGenerator = CGMethodCall(["&global.root_ref()"], nativeName, True,
@@ -4271,10 +4273,10 @@ class CGDictionary(CGThing):
 return string.Template(
 "impl<'a, 'b> ${selfName}<'a, 'b> {\n"
-" pub fn empty() -> ${selfName} {\n"
+" pub fn empty() -> ${selfName}<'a, 'b> {\n"
 " ${selfName}::new(ptr::mut_null(), NullValue()).unwrap()\n"
 " }\n"
-" pub fn new(cx: *mut JSContext, val: JSVal) -> Result<${selfName}, ()> {\n"
+" pub fn new(cx: *mut JSContext, val: JSVal) -> Result<${selfName}<'a, 'b>, ()> {\n"
 " let object = if val.is_null_or_undefined() {\n"
 " ptr::mut_null()\n"
 " } else if val.is_object() {\n"

View file

@@ -21,7 +21,7 @@ use js::{JSPROP_GETTER, JSPROP_ENUMERATE, JSPROP_READONLY, JSRESOLVE_QUALIFIED};
 use libc;
 use std::mem;
 use std::ptr;
-use std::str;
+use std::string;
 use std::mem::size_of;
 static JSPROXYSLOT_EXPANDO: u32 = 0;
@@ -99,7 +99,7 @@ pub extern fn delete_(cx: *mut JSContext, proxy: *mut JSObject, id: jsid,
 pub fn _obj_toString(cx: *mut JSContext, className: *const libc::c_char) -> *mut JSString {
 unsafe {
-let name = str::raw::from_c_str(className);
+let name = string::raw::from_buf(className as *const i8 as *const u8);
 let nchars = "[object ]".len() + name.len();
 let chars: *mut jschar = JS_malloc(cx, (nchars + 1) as libc::size_t * (size_of::<jschar>() as libc::size_t)) as *mut jschar;
 if chars.is_null() {

View file

@@ -670,7 +670,8 @@ pub fn global_object_for_js_object(obj: *mut JSObject) -> GlobalField {
 /// Get the `JSContext` for the `JSRuntime` associated with the thread
 /// this object is on.
 fn cx_for_dom_reflector(obj: *mut JSObject) -> *mut JSContext {
-let global = global_object_for_js_object(obj).root();
+let global = global_object_for_js_object(obj);
+let global = global.root();
 global.root_ref().get_cx()
 }
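Several bindings hunks (here, in callback.rs, and in the generated code from CodegenRust.py) split `global_object_for_js_object(...).root()` into two statements so the intermediate value is bound to a variable and outlives the rooted borrow taken from it. A minimal modern-Rust sketch of the same "bind the temporary first" pattern; the types are illustrative rather than the DOM binding types:

```rust
struct Global {
    name: String,
}

impl Global {
    /// Borrow a view into this value; the borrow cannot outlive `self`.
    fn root(&self) -> &str {
        &self.name
    }
}

fn make_global() -> Global {
    Global { name: "window".to_string() }
}

fn main() {
    // `let rooted = make_global().root();` would not compile: the temporary
    // returned by make_global() is dropped at the end of that statement.
    let global = make_global(); // bind the owner first...
    let rooted = global.root(); // ...then borrow from it.
    println!("{}", rooted);
}
```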

View file

@@ -35,7 +35,7 @@ impl BrowserContext {
 }
 pub fn active_document(&self) -> Temporary<Document> {
-Temporary::new(self.history.get(self.active_index).document.clone())
+Temporary::new(self.history[self.active_index].document.clone())
 }
 pub fn active_window(&self) -> Temporary<Window> {
View file

@@ -43,7 +43,7 @@ impl<'a> ClientRectListMethods for JSRef<'a, ClientRectList> {
 fn Item(&self, index: u32) -> Option<Temporary<ClientRect>> {
 let rects = &self.rects;
 if index < rects.len() as u32 {
-Some(Temporary::new(rects.get(index as uint).clone()))
+Some(Temporary::new(rects[index as uint].clone()))
 } else {
 None
 }

View file

@@ -173,7 +173,7 @@ impl<'a> DocumentHelpers for JSRef<'a, Document> {
 let elem: Option<&JSRef<Element>> = ElementCast::to_ref(&node);
 match elem {
 Some(elem) => {
-if &*elements.get(head).root() == elem {
+if &*(*elements)[head].root() == elem {
 head = head + 1;
 }
 if new_node == &node || head == elements.len() {
@@ -365,7 +365,7 @@ impl<'a> DocumentMethods for JSRef<'a, Document> {
 fn GetElementById(&self, id: DOMString) -> Option<Temporary<Element>> {
 match self.idmap.deref().borrow().find_equiv(&id) {
 None => None,
-Some(ref elements) => Some(Temporary::new(elements.get(0).clone())),
+Some(ref elements) => Some(Temporary::new((*elements)[0].clone())),
 }
 }

View file

@@ -323,7 +323,7 @@ impl<'a> AttributeHandlers for JSRef<'a, Element> {
 }
 };
-self.deref().attrs.borrow().get(idx).root().set_value(set_type, value);
+(*self.deref().attrs.borrow())[idx].root().set_value(set_type, value);
 }
 fn parse_attribute(&self, namespace: &Namespace, local_name: &str,
@@ -352,7 +352,7 @@ impl<'a> AttributeHandlers for JSRef<'a, Element> {
 }
 if namespace == namespace::Null {
-let removed_raw_value = self.deref().attrs.borrow().get(idx).root().Value();
+let removed_raw_value = (*self.deref().attrs.borrow())[idx].root().Value();
 vtable_for(NodeCast::from_ref(self))
 .before_remove_attr(local_name.to_string(), removed_raw_value);
 }

View file

@@ -70,7 +70,7 @@ impl<'a> FormDataMethods for JSRef<'a, FormData> {
 fn Get(&self, name: DOMString) -> Option<FileOrString> {
 if self.data.deref().borrow().contains_key_equiv(&name) {
-match self.data.deref().borrow().get(&name).get(0).clone() {
+match self.data.deref().borrow().get(&name)[0].clone() {
 StringData(ref s) => Some(eString(s.clone())),
 FileData(ref f) => {
 Some(eFile(f.clone()))

View file

@ -370,11 +370,11 @@ impl<'a> PrivateNodeHelpers for JSRef<'a, Node> {
} }
} }
pub trait NodeHelpers { pub trait NodeHelpers<'m, 'n> {
fn ancestors(&self) -> AncestorIterator; fn ancestors(&self) -> AncestorIterator<'n>;
fn children(&self) -> AbstractNodeChildrenIterator; fn children(&self) -> AbstractNodeChildrenIterator<'n>;
fn child_elements(&self) -> ChildElementIterator; fn child_elements(&self) -> ChildElementIterator<'m, 'n>;
fn following_siblings(&self) -> AbstractNodeChildrenIterator; fn following_siblings(&self) -> AbstractNodeChildrenIterator<'n>;
fn is_in_doc(&self) -> bool; fn is_in_doc(&self) -> bool;
fn is_inclusive_ancestor_of(&self, parent: &JSRef<Node>) -> bool; fn is_inclusive_ancestor_of(&self, parent: &JSRef<Node>) -> bool;
fn is_parent_of(&self, child: &JSRef<Node>) -> bool; fn is_parent_of(&self, child: &JSRef<Node>) -> bool;
@ -412,9 +412,9 @@ pub trait NodeHelpers {
fn dump_indent(&self, indent: uint); fn dump_indent(&self, indent: uint);
fn debug_str(&self) -> String; fn debug_str(&self) -> String;
fn traverse_preorder<'a>(&'a self) -> TreeIterator<'a>; fn traverse_preorder(&self) -> TreeIterator<'n>;
fn sequential_traverse_postorder<'a>(&'a self) -> TreeIterator<'a>; fn sequential_traverse_postorder(&self) -> TreeIterator<'n>;
fn inclusively_following_siblings<'a>(&'a self) -> AbstractNodeChildrenIterator<'a>; fn inclusively_following_siblings(&self) -> AbstractNodeChildrenIterator<'n>;
fn to_trusted_node_address(&self) -> TrustedNodeAddress; fn to_trusted_node_address(&self) -> TrustedNodeAddress;
@ -427,7 +427,7 @@ pub trait NodeHelpers {
fn remove_self(&self); fn remove_self(&self);
} }
impl<'a> NodeHelpers for JSRef<'a, Node> { impl<'m, 'n> NodeHelpers<'m, 'n> for JSRef<'n, Node> {
/// Dumps the subtree rooted at this node, for debugging. /// Dumps the subtree rooted at this node, for debugging.
fn dump(&self) { fn dump(&self) {
self.dump_indent(0); self.dump_indent(0);
@ -550,20 +550,20 @@ impl<'a> NodeHelpers for JSRef<'a, Node> {
} }
/// Iterates over this node and all its descendants, in preorder. /// Iterates over this node and all its descendants, in preorder.
fn traverse_preorder<'a>(&'a self) -> TreeIterator<'a> { fn traverse_preorder(&self) -> TreeIterator<'n> {
let mut nodes = vec!(); let mut nodes = vec!();
gather_abstract_nodes(self, &mut nodes, false); gather_abstract_nodes(self, &mut nodes, false);
TreeIterator::new(nodes) TreeIterator::new(nodes)
} }
/// Iterates over this node and all its descendants, in postorder. /// Iterates over this node and all its descendants, in postorder.
fn sequential_traverse_postorder<'a>(&'a self) -> TreeIterator<'a> { fn sequential_traverse_postorder(&self) -> TreeIterator<'n> {
let mut nodes = vec!(); let mut nodes = vec!();
gather_abstract_nodes(self, &mut nodes, true); gather_abstract_nodes(self, &mut nodes, true);
TreeIterator::new(nodes) TreeIterator::new(nodes)
} }
fn inclusively_following_siblings<'a>(&'a self) -> AbstractNodeChildrenIterator<'a> { fn inclusively_following_siblings(&self) -> AbstractNodeChildrenIterator<'n> {
AbstractNodeChildrenIterator { AbstractNodeChildrenIterator {
current_node: Some(self.clone()), current_node: Some(self.clone()),
} }
@ -573,7 +573,7 @@ impl<'a> NodeHelpers for JSRef<'a, Node> {
self == parent || parent.ancestors().any(|ancestor| &ancestor == self) self == parent || parent.ancestors().any(|ancestor| &ancestor == self)
} }
fn following_siblings(&self) -> AbstractNodeChildrenIterator { fn following_siblings(&self) -> AbstractNodeChildrenIterator<'n> {
AbstractNodeChildrenIterator { AbstractNodeChildrenIterator {
current_node: self.next_sibling().root().map(|next| next.deref().clone()), current_node: self.next_sibling().root().map(|next| next.deref().clone()),
} }
@ -659,7 +659,7 @@ impl<'a> NodeHelpers for JSRef<'a, Node> {
Ok(NodeList::new_simple_list(&window.root_ref(), nodes)) Ok(NodeList::new_simple_list(&window.root_ref(), nodes))
} }
fn ancestors(&self) -> AncestorIterator { fn ancestors(&self) -> AncestorIterator<'n> {
AncestorIterator { AncestorIterator {
current: self.parent_node.get().map(|node| (*node.root()).clone()), current: self.parent_node.get().map(|node| (*node.root()).clone()),
} }
@ -677,13 +677,13 @@ impl<'a> NodeHelpers for JSRef<'a, Node> {
self.owner_doc().root().is_html_document self.owner_doc().root().is_html_document
} }
fn children(&self) -> AbstractNodeChildrenIterator { fn children(&self) -> AbstractNodeChildrenIterator<'n> {
AbstractNodeChildrenIterator { AbstractNodeChildrenIterator {
current_node: self.first_child.get().map(|node| (*node.root()).clone()), current_node: self.first_child.get().map(|node| (*node.root()).clone()),
} }
} }
fn child_elements(&self) -> ChildElementIterator { fn child_elements(&self) -> ChildElementIterator<'m, 'n> {
self.children() self.children()
.filter(|node| { .filter(|node| {
node.is_element() node.is_element()
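The NodeHelpers changes above move the iterator lifetimes onto the trait itself (`NodeHelpers<'m, 'n> for JSRef<'n, Node>`), so each method can return an iterator tied to the node it came from without repeating `<'a>` on every signature. A toy sketch of that shape, with `Node` and a slice iterator standing in for Servo's real types:

    struct Node {
        children: Vec<String>,
    }

    // The trait carries the lifetime; methods return borrows tagged with it.
    trait NodeHelpers<'n> {
        fn children(&self) -> std::slice::Iter<'n, String>;
    }

    impl<'n> NodeHelpers<'n> for &'n Node {
        fn children(&self) -> std::slice::Iter<'n, String> {
            let node: &'n Node = *self;
            node.children.iter()
        }
    }

    fn main() {
        let node = Node { children: vec!["a".to_string(), "b".to_string()] };
        let node_ref: &Node = &node;
        assert_eq!(node_ref.children().count(), 2);
    }
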
@ -858,7 +858,8 @@ impl<'a> Iterator<JSRef<'a, Node>> for TreeIterator<'a> {
if self.index >= self.nodes.len() { if self.index >= self.nodes.len() {
None None
} else { } else {
let v = self.nodes.get(self.index).clone(); let v = self.nodes[self.index];
let v = v.clone();
self.index += 1; self.index += 1;
Some(v) Some(v)
} }
@ -886,7 +887,7 @@ impl NodeIterator {
} }
} }
fn next_child<'b>(&self, node: &JSRef<'b, Node>) -> Option<JSRef<Node>> { fn next_child<'b>(&self, node: &JSRef<'b, Node>) -> Option<JSRef<'b, Node>> {
if !self.include_descendants_of_void && node.is_element() { if !self.include_descendants_of_void && node.is_element() {
let elem: &JSRef<Element> = ElementCast::to_ref(node).unwrap(); let elem: &JSRef<Element> = ElementCast::to_ref(node).unwrap();
if elem.deref().is_void() { if elem.deref().is_void() {
@ -901,7 +902,7 @@ impl NodeIterator {
} }
impl<'a> Iterator<JSRef<'a, Node>> for NodeIterator { impl<'a> Iterator<JSRef<'a, Node>> for NodeIterator {
fn next(&mut self) -> Option<JSRef<Node>> { fn next(&mut self) -> Option<JSRef<'a, Node>> {
self.current_node = match self.current_node.as_ref().map(|node| node.root()) { self.current_node = match self.current_node.as_ref().map(|node| node.root()) {
None => { None => {
if self.include_start { if self.include_start {
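Relatedly, `next_child` and `next` now spell out that the returned `JSRef` borrows for the lifetime the surrounding trait advertises. The underlying idea in miniature (all names here are placeholders):

    struct Node {
        name: String,
    }

    // The returned slice borrows from `node`, so its lifetime is written out
    // explicitly instead of being left to elision.
    fn first_char<'b>(node: &'b Node) -> Option<&'b str> {
        node.name.get(0..1)
    }

    fn main() {
        let node = Node { name: "div".to_string() };
        assert_eq!(first_char(&node), Some("d"));
    }
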
@ -1090,9 +1091,11 @@ impl Node {
return Err(HierarchyRequest); return Err(HierarchyRequest);
} }
match child { match child {
Some(ref child) if child.inclusively_following_siblings() Some(ref child) => {
.any(|child| child.is_doctype()) => { if child.inclusively_following_siblings()
return Err(HierarchyRequest); .any(|child| child.is_doctype()) {
return Err(HierarchyRequest)
}
} }
_ => (), _ => (),
} }
@ -1109,9 +1112,11 @@ impl Node {
return Err(HierarchyRequest); return Err(HierarchyRequest);
} }
match child { match child {
Some(ref child) if child.inclusively_following_siblings() Some(ref child) => {
.any(|child| child.is_doctype()) => { if child.inclusively_following_siblings()
return Err(HierarchyRequest); .any(|child| child.is_doctype()) {
return Err(HierarchyRequest)
}
} }
_ => (), _ => (),
} }
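Both hunks above trade a `match` guard for an `if` inside the arm, presumably to keep the newer compiler happy about the method calls inside the guard. The two forms are otherwise equivalent; a compilable sketch with a toy `Child` type:

    struct Child {
        is_doctype: bool,
    }

    fn check(child: Option<&Child>) -> Result<(), &'static str> {
        match child {
            // Formerly: `Some(ref child) if child.is_doctype => return Err(...)`.
            // The condition now lives in the arm body instead of a guard.
            Some(child) => {
                if child.is_doctype {
                    return Err("HierarchyRequest");
                }
            }
            None => (),
        }
        Ok(())
    }

    fn main() {
        assert!(check(Some(&Child { is_doctype: true })).is_err());
        assert!(check(Some(&Child { is_doctype: false })).is_ok());
        assert!(check(None).is_ok());
    }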

@ -59,7 +59,7 @@ impl<'a> NodeListMethods for JSRef<'a, NodeList> {
fn Item(&self, index: u32) -> Option<Temporary<Node>> { fn Item(&self, index: u32) -> Option<Temporary<Node>> {
match self.list_type { match self.list_type {
_ if index >= self.Length() => None, _ if index >= self.Length() => None,
Simple(ref elems) => Some(Temporary::new(elems.get(index as uint).clone())), Simple(ref elems) => Some(Temporary::new(elems[index as uint].clone())),
Children(ref node) => { Children(ref node) => {
let node = node.root(); let node = node.root();
node.deref().children().nth(index as uint) node.deref().children().nth(index as uint)

@ -72,7 +72,7 @@ impl<'a> URLSearchParamsMethods for JSRef<'a, URLSearchParams> {
} }
fn Get(&self, name: DOMString) -> Option<DOMString> { fn Get(&self, name: DOMString) -> Option<DOMString> {
self.data.deref().borrow().find_equiv(&name).map(|v| v.get(0).clone()) self.data.deref().borrow().find_equiv(&name).map(|v| v[0].clone())
} }
fn Has(&self, name: DOMString) -> bool { fn Has(&self, name: DOMString) -> bool {

@ -346,14 +346,8 @@ pub fn parse_html(page: &Page,
let comment: &JSRef<Node> = NodeCast::from_ref(&*comment); let comment: &JSRef<Node> = NodeCast::from_ref(&*comment);
unsafe { comment.to_hubbub_node() } unsafe { comment.to_hubbub_node() }
}, },
create_doctype: |doctype: Box<hubbub::Doctype>| { create_doctype: |box hubbub::Doctype { name: name, public_id: public_id, system_id: system_id, ..}: Box<hubbub::Doctype>| {
debug!("create doctype"); debug!("create doctype");
let box hubbub::Doctype {
name: name,
public_id: public_id,
system_id: system_id,
force_quirks: _
} = doctype;
// NOTE: tmp vars are workaround for lifetime issues. Both required. // NOTE: tmp vars are workaround for lifetime issues. Both required.
let tmp_borrow = doc_cell.borrow(); let tmp_borrow = doc_cell.borrow();
let tmp = &*tmp_borrow; let tmp = &*tmp_borrow;
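The `create_doctype` callback now destructures the boxed `Doctype` directly in the closure's argument using a `box` pattern, a feature available on the nightlies of that era. Written out step by step with a stand-in `Doctype` struct, and without the `box` pattern so it compiles on current Rust:

    #[allow(dead_code)] // force_quirks is intentionally ignored via `..` below
    struct Doctype {
        name: String,
        public_id: Option<String>,
        system_id: Option<String>,
        force_quirks: bool,
    }

    fn main() {
        let doctype = Box::new(Doctype {
            name: "html".to_string(),
            public_id: None,
            system_id: None,
            force_quirks: false,
        });

        // Equivalent of binding `box hubbub::Doctype { name, public_id, system_id, .. }`
        // in argument position: move out of the Box, then destructure.
        let Doctype { name, public_id, system_id, .. } = *doctype;
        println!("doctype {} ({:?}, {:?})", name, public_id, system_id);
    }
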
@ -410,16 +404,19 @@ pub fn parse_html(page: &Page,
// Handle CSS style sheets from <link> elements // Handle CSS style sheets from <link> elements
ElementNodeTypeId(HTMLLinkElementTypeId) => { ElementNodeTypeId(HTMLLinkElementTypeId) => {
match (rel, href) { match (rel, href) {
(Some(ref rel), Some(ref href)) if rel.as_slice().split(HTML_SPACE_CHARACTERS.as_slice()) (Some(ref rel), Some(ref href)) => {
.any(|s| { if rel.as_slice()
s.as_slice().eq_ignore_ascii_case("stylesheet") .split(HTML_SPACE_CHARACTERS.as_slice())
}) => { .any(|s| {
debug!("found CSS stylesheet: {:s}", *href); s.as_slice().eq_ignore_ascii_case("stylesheet")
match UrlParser::new().base_url(base_url).parse(href.as_slice()) { }) {
Ok(url) => css_chan2.send(CSSTaskNewFile( debug!("found CSS stylesheet: {:s}", *href);
UrlProvenance(url, resource_task.clone()))), match UrlParser::new().base_url(base_url).parse(href.as_slice()) {
Err(e) => debug!("Parsing url {:s} failed: {:s}", *href, e) Ok(url) => css_chan2.send(CSSTaskNewFile(
}; UrlProvenance(url, resource_task.clone()))),
Err(e) => debug!("Parsing url {:s} failed: {:?}", *href, e)
};
}
} }
_ => {} _ => {}
} }
@ -502,7 +499,7 @@ pub fn parse_html(page: &Page,
match UrlParser::new().base_url(base_url) match UrlParser::new().base_url(base_url)
.parse(src.deref().value().as_slice()) { .parse(src.deref().value().as_slice()) {
Ok(new_url) => js_chan2.send(JSTaskNewFile(new_url)), Ok(new_url) => js_chan2.send(JSTaskNewFile(new_url)),
Err(e) => debug!("Parsing url {:s} failed: {:s}", src.deref().Value(), e) Err(e) => debug!("Parsing url {:s} failed: {:?}", src.deref().Value(), e)
}; };
} }
None => { None => {
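Both url-parsing hunks switch the error from `{:s}` to `{:?}`: the parser evidently reports a structured error value rather than a `String`, so it is printed with the debug formatter. A sketch with a stand-in `ParseError` enum, using today's `#[derive(Debug)]` spelling (the 2014 toolchain named these traits differently):

    #[derive(Debug)]
    enum ParseError {
        EmptyHost,
    }

    fn parse(input: &str) -> Result<String, ParseError> {
        if input.is_empty() {
            Err(ParseError::EmptyHost)
        } else {
            Ok(input.to_string())
        }
    }

    fn main() {
        match parse("") {
            Ok(url) => println!("parsed {}", url),
            Err(e) => println!("Parsing url failed: {:?}", e),
        }
    }
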
@ -554,7 +551,7 @@ pub fn parse_html(page: &Page,
} }
} }
fn build_parser(node: hubbub::NodeDataPtr) -> hubbub::Parser { fn build_parser<'a>(node: hubbub::NodeDataPtr) -> hubbub::Parser<'a> {
let mut parser = hubbub::Parser::new("UTF-8", false); let mut parser = hubbub::Parser::new("UTF-8", false);
parser.set_document_node(node); parser.set_document_node(node);
parser.enable_scripting(true); parser.enable_scripting(true);
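`build_parser` now declares a lifetime parameter because `hubbub::Parser` itself became lifetime-parameterized; the lifetime is not tied to any argument, so the caller picks it. A minimal sketch of that signature shape (the `Parser` struct here is a placeholder, not the hubbub binding):

    struct Parser<'a> {
        document: Option<&'a str>,
    }

    // No argument mentions 'a; it is chosen at the call site.
    fn build_parser<'a>() -> Parser<'a> {
        Parser { document: None }
    }

    fn main() {
        let doc = String::from("<html></html>");
        let mut parser = build_parser();
        parser.document = Some(&doc);
        assert!(parser.document.is_some());
    }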

@ -173,7 +173,7 @@ impl ScriptLayoutChan for OpaqueScriptLayoutChannel {
fn sender(&self) -> Sender<Msg> { fn sender(&self) -> Sender<Msg> {
let &OpaqueScriptLayoutChannel((ref sender, _)) = self; let &OpaqueScriptLayoutChannel((ref sender, _)) = self;
(*sender.as_ref::<Sender<Msg>>().unwrap()).clone() (*sender.downcast_ref::<Sender<Msg>>().unwrap()).clone()
} }
fn receiver(self) -> Receiver<Msg> { fn receiver(self) -> Receiver<Msg> {
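Here and in the script task below, `as_ref::<T>()` becomes `downcast_ref::<T>()`: the upgrade renames the checked downcast on `Any`, but the call pattern is unchanged. A small sketch, written with today's `dyn Any` spelling:

    use std::any::Any;

    fn main() {
        // A boxed channel-like value erased to Any, recovered by downcasting.
        let chan: Box<dyn Any> = Box::new(42u32);
        match chan.downcast_ref::<u32>() {
            Some(n) => assert_eq!(*n, 42),
            None => panic!("not a u32"),
        }
    }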

@ -41,7 +41,7 @@ extern crate servo_util = "util";
extern crate style; extern crate style;
extern crate sync; extern crate sync;
extern crate servo_msg = "msg"; extern crate servo_msg = "msg";
extern crate url = "url_"; extern crate url;
extern crate gfx; extern crate gfx;
pub mod cors; pub mod cors;

@ -452,7 +452,7 @@ impl ScriptTask {
let new_page = { let new_page = {
let window_size = parent_page.window_size.deref().get(); let window_size = parent_page.window_size.deref().get();
Page::new(new_pipeline_id, Some(subpage_id), Page::new(new_pipeline_id, Some(subpage_id),
LayoutChan(layout_chan.as_ref::<Sender<layout_interface::Msg>>().unwrap().clone()), LayoutChan(layout_chan.downcast_ref::<Sender<layout_interface::Msg>>().unwrap().clone()),
window_size, window_size,
parent_page.resource_task.deref().clone(), parent_page.resource_task.deref().clone(),
self.constellation_chan.clone(), self.constellation_chan.clone(),

@ -11,7 +11,7 @@
extern crate geom; extern crate geom;
extern crate servo_msg = "msg"; extern crate servo_msg = "msg";
extern crate servo_net = "net"; extern crate servo_net = "net";
extern crate url = "url_"; extern crate url;
extern crate std; extern crate std;
extern crate serialize; extern crate serialize;

@ -994,42 +994,42 @@ mod tests {
#[test] #[test]
fn test_rule_ordering_same_specificity(){ fn test_rule_ordering_same_specificity(){
let rules_list = get_mock_rules(["a.intro", "img.sidebar"]); let rules_list = get_mock_rules(["a.intro", "img.sidebar"]);
let rule1 = rules_list.get(0).get(0).clone(); let rule1 = rules_list[0][0].clone();
let rule2 = rules_list.get(1).get(0).clone(); let rule2 = rules_list[1][0].clone();
assert!(rule1.property < rule2.property, "The rule that comes later should win."); assert!(rule1.property < rule2.property, "The rule that comes later should win.");
} }
#[test] #[test]
fn test_get_id_name(){ fn test_get_id_name(){
let rules_list = get_mock_rules([".intro", "#top"]); let rules_list = get_mock_rules([".intro", "#top"]);
assert_eq!(SelectorMap::get_id_name(rules_list.get(0).get(0)), None); assert_eq!(SelectorMap::get_id_name(&rules_list[0][0]), None);
assert_eq!(SelectorMap::get_id_name(rules_list.get(1).get(0)), Some("top".to_string())); assert_eq!(SelectorMap::get_id_name(&rules_list[1][0]), Some("top".to_string()));
} }
#[test] #[test]
fn test_get_class_name(){ fn test_get_class_name(){
let rules_list = get_mock_rules([".intro.foo", "#top"]); let rules_list = get_mock_rules([".intro.foo", "#top"]);
assert_eq!(SelectorMap::get_class_name(rules_list.get(0).get(0)), Some("intro".to_string())); assert_eq!(SelectorMap::get_class_name(&rules_list[0][0]), Some("intro".to_string()));
assert_eq!(SelectorMap::get_class_name(rules_list.get(1).get(0)), None); assert_eq!(SelectorMap::get_class_name(&rules_list[1][0]), None);
} }
#[test] #[test]
fn test_get_element_name(){ fn test_get_element_name(){
let rules_list = get_mock_rules(["img.foo", "#top", "IMG", "ImG"]); let rules_list = get_mock_rules(["img.foo", "#top", "IMG", "ImG"]);
assert_eq!(SelectorMap::get_element_name(rules_list.get(0).get(0)), Some("img".to_string())); assert_eq!(SelectorMap::get_element_name(&rules_list[0][0]), Some("img".to_string()));
assert_eq!(SelectorMap::get_element_name(rules_list.get(1).get(0)), None); assert_eq!(SelectorMap::get_element_name(&rules_list[1][0]), None);
assert_eq!(SelectorMap::get_element_name(rules_list.get(2).get(0)), Some("img".to_string())); assert_eq!(SelectorMap::get_element_name(&rules_list[2][0]), Some("img".to_string()));
assert_eq!(SelectorMap::get_element_name(rules_list.get(3).get(0)), Some("img".to_string())); assert_eq!(SelectorMap::get_element_name(&rules_list[3][0]), Some("img".to_string()));
} }
#[test] #[test]
fn test_insert(){ fn test_insert(){
let rules_list = get_mock_rules([".intro.foo", "#top"]); let rules_list = get_mock_rules([".intro.foo", "#top"]);
let mut selector_map = SelectorMap::new(); let mut selector_map = SelectorMap::new();
selector_map.insert(rules_list.get(1).get(0).clone()); selector_map.insert(rules_list[1][0].clone());
assert_eq!(1, selector_map.id_hash.find_equiv(&("top")).unwrap().get(0).property.source_order); assert_eq!(1, selector_map.id_hash.find_equiv(&("top")).unwrap()[0].property.source_order);
selector_map.insert(rules_list.get(0).get(0).clone()); selector_map.insert(rules_list[0][0].clone());
assert_eq!(0, selector_map.class_hash.find_equiv(&("intro")).unwrap().get(0).property.source_order); assert_eq!(0, selector_map.class_hash.find_equiv(&("intro")).unwrap()[0].property.source_order);
assert!(selector_map.class_hash.find_equiv(&("foo")).is_none()); assert!(selector_map.class_hash.find_equiv(&("foo")).is_none());
} }
} }
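The selector-map tests read the mock rule lists with chained indexing instead of chained `.get()`, taking a reference explicitly where one is needed. In miniature, with plain integers standing in for rules:

    fn main() {
        let rules_list: Vec<Vec<u32>> = vec![vec![10, 11], vec![20]];

        let rule1 = rules_list[0][0];      // was rules_list.get(0).get(0)
        let rule_ref = &rules_list[1][0];  // pass &rules_list[i][j] where a reference is expected

        assert_eq!(rule1, 10);
        assert_eq!(*rule_ref, 20);
    }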

@ -19,7 +19,7 @@ extern crate geom;
extern crate num; extern crate num;
extern crate serialize; extern crate serialize;
extern crate sync; extern crate sync;
extern crate url = "url_"; extern crate url;
extern crate cssparser; extern crate cssparser;
extern crate encoding; extern crate encoding;

@ -138,7 +138,7 @@ impl<K: Clone + PartialEq, V: Clone> LRUCache<K,V> {
let entry = self.entries.remove(pos); let entry = self.entries.remove(pos);
self.entries.push(entry.unwrap()); self.entries.push(entry.unwrap());
} }
self.entries.get(last_index).ref1().clone() self.entries[last_index].ref1().clone()
} }
pub fn iter<'a>(&'a self) -> Items<'a,(K,V)> { pub fn iter<'a>(&'a self) -> Items<'a,(K,V)> {
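The LRU hunk reads the freshly touched entry back by indexing the last slot; `.ref1()` from the old tuple API corresponds to `.1` today, and today's `Vec::remove` returns the element directly where the 2014 API returned an `Option` (hence the `unwrap` in the diff). A compilable sketch with `(String, u32)` standing in for `(K, V)`:

    fn touch(entries: &mut Vec<(String, u32)>, pos: usize) -> u32 {
        let entry = entries.remove(pos);
        entries.push(entry);
        let last_index = entries.len() - 1;
        entries[last_index].1
    }

    fn main() {
        let mut entries = vec![("a".to_string(), 1), ("b".to_string(), 2)];
        assert_eq!(touch(&mut entries, 0), 1);
        assert_eq!(entries[1].0, "a"); // the touched entry moved to the back
    }
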
@ -206,8 +206,8 @@ impl<K:Clone+PartialEq+Hash,V:Clone> SimpleHashCache<K,V> {
#[inline] #[inline]
pub fn find_equiv<'a,Q:Hash+Equiv<K>>(&'a self, key: &Q) -> Option<&'a V> { pub fn find_equiv<'a,Q:Hash+Equiv<K>>(&'a self, key: &Q) -> Option<&'a V> {
let bucket_index = self.bucket_for_key(key); let bucket_index = self.bucket_for_key(key);
match self.entries.get(bucket_index) { match self.entries[bucket_index] {
&Some((ref existing_key, ref value)) if key.equiv(existing_key) => Some(value), Some((ref existing_key, ref value)) if key.equiv(existing_key) => Some(value),
_ => None, _ => None,
} }
} }
@ -221,8 +221,8 @@ impl<K:Clone+PartialEq+Hash,V:Clone> Cache<K,V> for SimpleHashCache<K,V> {
fn find(&mut self, key: &K) -> Option<V> { fn find(&mut self, key: &K) -> Option<V> {
let bucket_index = self.bucket_for_key(key); let bucket_index = self.bucket_for_key(key);
match self.entries.get(bucket_index) { match self.entries[bucket_index] {
&Some((ref existing_key, ref value)) if existing_key == key => Some((*value).clone()), Some((ref existing_key, ref value)) if existing_key == key => Some((*value).clone()),
_ => None, _ => None,
} }
} }
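In `SimpleHashCache`, matching on `self.entries[bucket_index]` matches a place rather than the `&Option<...>` that `.get()` used to hand back, so the arm patterns drop their leading `&` while the `ref` bindings keep the match by-reference. Sketched with a concrete `(String, u32)` entry type:

    fn find(entries: &[Option<(String, u32)>], bucket_index: usize, key: &str) -> Option<u32> {
        match entries[bucket_index] {
            Some((ref existing_key, ref value)) if existing_key.as_str() == key => Some(*value),
            _ => None,
        }
    }

    fn main() {
        let entries = vec![Some(("width".to_string(), 800u32)), None];
        assert_eq!(find(&entries, 0, "width"), Some(800));
        assert_eq!(find(&entries, 1, "width"), None);
    }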

@ -198,7 +198,7 @@ impl TimeProfiler {
if data_len > 0 { if data_len > 0 {
let (mean, median, min, max) = let (mean, median, min, max) =
(data.iter().map(|&x|x).sum() / (data_len as f64), (data.iter().map(|&x|x).sum() / (data_len as f64),
*data.get(data_len / 2), (*data)[data_len / 2],
data.iter().fold(f64::INFINITY, |a, &b| a.min(b)), data.iter().fold(f64::INFINITY, |a, &b| a.min(b)),
data.iter().fold(-f64::INFINITY, |a, &b| a.max(b))); data.iter().fold(-f64::INFINITY, |a, &b| a.max(b)));
println!("{:-35s}: {:15.4f} {:15.4f} {:15.4f} {:15.4f} {:15u}", println!("{:-35s}: {:15.4f} {:15.4f} {:15.4f} {:15.4f} {:15u}",

@ -222,7 +222,7 @@ impl<QueueData: Send, WorkData: Send> WorkQueue<QueueData, WorkData> {
for i in range(0, thread_count) { for i in range(0, thread_count) {
for j in range(0, thread_count) { for j in range(0, thread_count) {
if i != j { if i != j {
threads.get_mut(i).other_deques.push(infos.get(j).thief.clone()) threads.get_mut(i).other_deques.push(infos[j].thief.clone())
} }
} }
assert!(threads.get(i).other_deques.len() == thread_count - 1) assert!(threads.get(i).other_deques.len() == thread_count - 1)
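The work-queue setup keeps `get_mut(i)` for the mutable side but reads the sibling info with `infos[j]`. A toy version of that wiring, with strings standing in for the stealable deque handles (today's `get_mut` returns an `Option`, hence the `unwrap`):

    fn main() {
        let infos = vec!["thief-0".to_string(), "thief-1".to_string(), "thief-2".to_string()];
        let thread_count = infos.len();
        let mut threads: Vec<Vec<String>> = vec![Vec::new(); thread_count];

        for i in 0..thread_count {
            for j in 0..thread_count {
                if i != j {
                    // Mutable access via get_mut, shared reads via indexing.
                    threads.get_mut(i).unwrap().push(infos[j].clone());
                }
            }
            assert_eq!(threads[i].len(), thread_count - 1);
        }
    }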

@@ -1 +1 @@
-Subproject commit af4cd2acbc87e37681baf23c66fccd9aa6bae20f
+Subproject commit 1b406788b5581e3a446326f1396ca50e628dd810

@@ -1 +1 @@
-Subproject commit 2e6d5b189435f9f9118af5c2f06af3711a01a084
+Subproject commit 6d742cf825d0266562a9f0c9735ce95e89d053db

@@ -1 +1 @@
-Subproject commit 746fb521517eb58340c63c760c05e994391b0e34
+Subproject commit 5aa1d323ab85c7f3c3a3a1585da53a54cf301b98

@@ -1 +1 @@
-Subproject commit aa718dcd3a14277ecfd51af16d5e352c6efa124b
+Subproject commit 79904fb42ff8a0e888f70fae336fbf6c11f1e6c8

@@ -1 +1 @@
-Subproject commit 41fa53d78296d35e8190dcf1514a58e9ca98c02f
+Subproject commit 6fdfe1254a29f1c742cc25545313e3463161e621

@@ -1 +1 @@
-Subproject commit 494309421daed6f1de77db350a36ac1a8faaef51
+Subproject commit 1f88c3efac0833ff038fdcba736000e9be17260f

@@ -1 +1 @@
-Subproject commit 64c3ee2ea7898686420b8a5fe170fe41555a5509
+Subproject commit 918bae60d4703e3db7f306b6c4a09de0670b9472

@@ -1 +1 @@
-Subproject commit 876c98d1d803325a6e1e13bf71ce0b001ac39a94
+Subproject commit 5ba3f58283d5ad6307a654f8dd72cf866755b3b5

@@ -1 +1 @@
-Subproject commit 8fb37cc40e977977b82e42460eccbf4edffb87a3
+Subproject commit b41f144a3a8b6388d0956f341bcffa5bbaecc899

@@ -1 +1 @@
-Subproject commit 454d3b9f7b5e046ac61d26dee5aafb50cd44f602
+Subproject commit 12180ed055f9df2b28cee4bbbf26fc68b3c405f3

@@ -1 +1 @@
-Subproject commit 3722106a0d3af88da702cb21fdc997f23b4e5b86
+Subproject commit ee4c20f4b6bee1b360cb708fe24fdc8350cb80ec

@@ -1 +1 @@
-Subproject commit 4a89d9e01f178c7dd441ddae472526c9a6641d38
+Subproject commit 5c2bf7ad9e0893755800ae0ee25b196a43df529b

@@ -1 +1 @@
-Subproject commit 99036a8596ca78b9c36de815bd4b39f050b5e6fd
+Subproject commit 0e6502e5b1bc795dbc4927bdb4616d9d7d686271

@@ -1 +1 @@
-Subproject commit 6b5af96a1e95db3c4083a411855a592ac23a6fb4
+Subproject commit b773ce447af68ed01e219639f593f28113b76731

@@ -1 +1 @@
-Subproject commit a5cd4ef464ad86d81ecfb982ed18b9b9f8a0d38a
+Subproject commit 4e5740761b2a143e7d3290a58ccabc89200a4763

@@ -1 +1 @@
-Subproject commit ddcc9bae5a9e6e6ea449044f102e41310239ada3
+Subproject commit 6bcf4146652a9868bc64c2835f7770c8a7007e93

@@ -1 +1 @@
-Subproject commit 7379a686661109407abfa808a8f85000344c1bea
+Subproject commit 869ec5657a628a72cbe0fcf9969e6a28eba4bf18

@@ -1 +1 @@
-Subproject commit b9de1811d78554659f0f720166331ca7fa73f8c8
+Subproject commit ae080410feafcc52632859b5f5916f44630712fe

@@ -1 +1 @@
-Subproject commit 6e60003076ad9db70ecf1b46bb8d6c271a773601
+Subproject commit 3104cf76fc0ceffec78bcbf430faabd524864d2c

@@ -1 +1 @@
-Subproject commit 89631a6db2c4c87cfd9c17634e3337833c555e1f
+Subproject commit cf5cd961f5d952501c21ce64d1fcc293144a96f6

@@ -1 +1 @@
-Subproject commit 826fb1a9f8a3a72349c7feaad4ea67cbb4170c5f
+Subproject commit ef968ec053aa7cce7b5e0c422cac23a6d249ef7e

@ -12,11 +12,11 @@ SRC=$(shell find $(VPATH)/src -type f)
.PHONY: all .PHONY: all
all: liburl.dummy all: liburl.dummy
liburl.dummy: src/url.rs $(SRC) $(EXT_DEPS) liburl.dummy: src/lib.rs $(SRC) $(EXT_DEPS)
$(RUSTC) $(RUSTFLAGS) $< --out-dir . --crate-type rlib $(RUSTC) $(RUSTFLAGS) $< --out-dir . --crate-type rlib
touch $@ touch $@
url-test: src/url.rs $(SRC) url-test: src/lib.rs $(SRC)
$(RUSTC) $(RUSTFLAGS) $< -o $@ --test $(RUSTC) $(RUSTFLAGS) $< -o $@ --test
.PHONY: check .PHONY: check
@ -26,7 +26,7 @@ check: url-test
.PHONY: doc .PHONY: doc
doc: $(RUSTDOC_TARGET)/url/index.html doc: $(RUSTDOC_TARGET)/url/index.html
$(RUSTDOC_TARGET)/url/index.html: src/url.rs $(SRC) $(EXT_DEPS) $(RUSTDOC_TARGET)/url/index.html: src/lib.rs $(SRC) $(EXT_DEPS)
$(RUSTDOC) $(RUSTDOC_FLAGS) $< -o $(RUSTDOC_TARGET) $(RUSTDOC) $(RUSTDOC_FLAGS) $< -o $(RUSTDOC_TARGET)
.PHONY: clean .PHONY: clean

@@ -1 +1 @@
-Subproject commit 05e77706496b5de1c457a0868d84dc56e0cb8d30
+Subproject commit 678bb4d52638b1cfdab78ef8e521566c9240fb1a

@ -113,17 +113,17 @@ fn parse_lists(file: &String, servo_args: &[String], render_mode: RenderMode) ->
3 => { 3 => {
TestLine { TestLine {
conditions: "", conditions: "",
kind: *parts.get(0), kind: parts[0],
file_left: *parts.get(1), file_left: parts[1],
file_right: *parts.get(2), file_right: parts[2],
} }
}, },
4 => { 4 => {
TestLine { TestLine {
conditions: *parts.get(0), conditions: parts[0],
kind: *parts.get(1), kind: parts[1],
file_left: *parts.get(2), file_left: parts[2],
file_right: *parts.get(3), file_right: parts[3],
} }
}, },
_ => { _ => {