Replace most ~"string"s with "string".to_owned().

This commit is contained in:
Ms2ger 2014-05-04 09:39:07 +02:00
parent 660f7a016e
commit 25542e3f7e
35 changed files with 206 additions and 206 deletions

View file

@ -54,11 +54,11 @@ impl FontContext {
// TODO: Allow users to specify these. // TODO: Allow users to specify these.
let mut generic_fonts = HashMap::with_capacity(5); let mut generic_fonts = HashMap::with_capacity(5);
generic_fonts.insert(~"serif", ~"Times New Roman"); generic_fonts.insert("serif".to_owned(), "Times New Roman".to_owned());
generic_fonts.insert(~"sans-serif", ~"Arial"); generic_fonts.insert("sans-serif".to_owned(), "Arial".to_owned());
generic_fonts.insert(~"cursive", ~"Apple Chancery"); generic_fonts.insert("cursive".to_owned(), "Apple Chancery".to_owned());
generic_fonts.insert(~"fantasy", ~"Papyrus"); generic_fonts.insert("fantasy".to_owned(), "Papyrus".to_owned());
generic_fonts.insert(~"monospace", ~"Menlo"); generic_fonts.insert("monospace".to_owned(), "Menlo".to_owned());
FontContext { FontContext {
instance_cache: LRUCache::new(10), instance_cache: LRUCache::new(10),

View file

@ -130,7 +130,7 @@ impl FontListHandle {
} }
pub fn get_last_resort_font_families() -> ~[~str] { pub fn get_last_resort_font_families() -> ~[~str] {
~[~"Roboto"] ~["Roboto".to_owned()]
} }
} }

View file

@ -132,7 +132,7 @@ impl FontListHandle {
} }
pub fn get_last_resort_font_families() -> ~[~str] { pub fn get_last_resort_font_families() -> ~[~str] {
~[~"Arial"] ~["Arial".to_owned()]
} }
} }

View file

@ -59,6 +59,6 @@ impl FontListHandle {
} }
pub fn get_last_resort_font_families() -> ~[~str] { pub fn get_last_resort_font_families() -> ~[~str] {
~[~"Arial Unicode MS",~"Arial"] ~["Arial Unicode MS".to_owned(),"Arial".to_owned()]
} }
} }

View file

@ -139,13 +139,13 @@ fn test_true_type_tag() {
#[test] #[test]
fn test_transform_compress_none() { fn test_transform_compress_none() {
let test_strs : ~[~str] = ~[~" foo bar", let test_strs : ~[~str] = ~[" foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo\n bar", "foo\n bar".to_owned(),
~"foo \nbar", "foo \nbar".to_owned(),
~" foo bar \nbaz", " foo bar \nbaz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz\n\n"]; "foobarbaz\n\n".to_owned()];
let mode = CompressNone; let mode = CompressNone;
for i in range(0, test_strs.len()) { for i in range(0, test_strs.len()) {
@ -158,21 +158,21 @@ fn test_transform_compress_none() {
#[test] #[test]
fn test_transform_discard_newline() { fn test_transform_discard_newline() {
let test_strs : ~[~str] = ~[~" foo bar", let test_strs : ~[~str] = ~[" foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo\n bar", "foo\n bar".to_owned(),
~"foo \nbar", "foo \nbar".to_owned(),
~" foo bar \nbaz", " foo bar \nbaz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz\n\n"]; "foobarbaz\n\n".to_owned()];
let oracle_strs : ~[~str] = ~[~" foo bar", let oracle_strs : ~[~str] = ~[" foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo bar", "foo bar".to_owned(),
~"foo bar", "foo bar".to_owned(),
~" foo bar baz", " foo bar baz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz"]; "foobarbaz".to_owned()];
assert_eq!(test_strs.len(), oracle_strs.len()); assert_eq!(test_strs.len(), oracle_strs.len());
let mode = DiscardNewline; let mode = DiscardNewline;
@ -187,21 +187,21 @@ fn test_transform_discard_newline() {
/* FIXME: Fix and re-enable /* FIXME: Fix and re-enable
#[test] #[test]
fn test_transform_compress_whitespace() { fn test_transform_compress_whitespace() {
let test_strs : ~[~str] = ~[~" foo bar", let test_strs : ~[~str] = ~[" foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo\n bar", "foo\n bar".to_owned(),
~"foo \nbar", "foo \nbar".to_owned(),
~" foo bar \nbaz", " foo bar \nbaz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz\n\n"]; "foobarbaz\n\n".to_owned()];
let oracle_strs : ~[~str] = ~[~" foo bar", let oracle_strs : ~[~str] = ~[" foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo\n bar", "foo\n bar".to_owned(),
~"foo \nbar", "foo \nbar".to_owned(),
~" foo bar \nbaz", " foo bar \nbaz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz\n\n"]; "foobarbaz\n\n".to_owned()];
assert_eq!(test_strs.len(), oracle_strs.len()); assert_eq!(test_strs.len(), oracle_strs.len());
let mode = CompressWhitespace; let mode = CompressWhitespace;
@ -215,21 +215,21 @@ fn test_transform_compress_whitespace() {
#[test] #[test]
fn test_transform_compress_whitespace_newline() { fn test_transform_compress_whitespace_newline() {
let test_strs : ~[~str] = ~[~" foo bar", let test_strs : ~[~str] = ~[" foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo\n bar", "foo\n bar".to_owned(),
~"foo \nbar", "foo \nbar".to_owned(),
~" foo bar \nbaz", " foo bar \nbaz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz\n\n"]; "foobarbaz\n\n".to_owned()];
let oracle_strs : ~[~str] = ~[~"foo bar", let oracle_strs : ~[~str] = ~["foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo bar", "foo bar".to_owned(),
~"foo bar", "foo bar".to_owned(),
~" foo bar baz", " foo bar baz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz "]; "foobarbaz ".to_owned()];
assert_eq!(test_strs.len(), oracle_strs.len()); assert_eq!(test_strs.len(), oracle_strs.len());
let mode = CompressWhitespaceNewline; let mode = CompressWhitespaceNewline;
@ -244,23 +244,23 @@ fn test_transform_compress_whitespace_newline() {
#[test] #[test]
fn test_transform_compress_whitespace_newline_no_incoming() { fn test_transform_compress_whitespace_newline_no_incoming() {
let test_strs : ~[~str] = ~[~" foo bar", let test_strs : ~[~str] = ~[" foo bar".to_owned(),
~"\nfoo bar", "\nfoo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo\n bar", "foo\n bar".to_owned(),
~"foo \nbar", "foo \nbar".to_owned(),
~" foo bar \nbaz", " foo bar \nbaz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz\n\n"]; "foobarbaz\n\n".to_owned()];
let oracle_strs : ~[~str] = ~[~" foo bar", let oracle_strs : ~[~str] = ~[" foo bar".to_owned(),
~" foo bar", " foo bar".to_owned(),
~"foo bar ", "foo bar ".to_owned(),
~"foo bar", "foo bar".to_owned(),
~"foo bar", "foo bar".to_owned(),
~" foo bar baz", " foo bar baz".to_owned(),
~"foo bar baz", "foo bar baz".to_owned(),
~"foobarbaz "]; "foobarbaz ".to_owned()];
assert_eq!(test_strs.len(), oracle_strs.len()); assert_eq!(test_strs.len(), oracle_strs.len());
let mode = CompressWhitespaceNewline; let mode = CompressWhitespaceNewline;

View file

@ -1696,11 +1696,11 @@ impl Flow for BlockFlow {
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = if self.is_float() { let txt = if self.is_float() {
~"FloatFlow: " "FloatFlow: ".to_owned()
} else if self.is_root() { } else if self.is_root() {
~"RootFlow: " "RootFlow: ".to_owned()
} else { } else {
~"BlockFlow: " "BlockFlow: ".to_owned()
}; };
txt.append(self.box_.debug_str()) txt.append(self.box_.debug_str())
} }

View file

@ -667,7 +667,7 @@ impl<'a> FlowConstructor<'a> {
whitespace_style)) whitespace_style))
=> { => {
// Instantiate the whitespace box. // Instantiate the whitespace box.
let box_info = UnscannedTextBox(UnscannedTextBoxInfo::from_text(~" ")); let box_info = UnscannedTextBox(UnscannedTextBoxInfo::from_text(" ".to_owned()));
let fragment = Box::from_opaque_node_and_style(whitespace_node, let fragment = Box::from_opaque_node_and_style(whitespace_node,
whitespace_style.clone(), whitespace_style.clone(),
box_info); box_info);

View file

@ -272,7 +272,7 @@ pub trait Flow {
/// Returns a debugging string describing this flow. /// Returns a debugging string describing this flow.
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
~"???" "???".to_owned()
} }
} }

View file

@ -296,7 +296,7 @@ impl Flow for TableFlow {
} }
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = ~"TableFlow: "; let txt = "TableFlow: ".to_owned();
txt.append(self.block_flow.box_.debug_str()) txt.append(self.block_flow.box_.debug_str())
} }
} }

View file

@ -66,7 +66,7 @@ impl Flow for TableCaptionFlow {
} }
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = ~"TableCaptionFlow: "; let txt = "TableCaptionFlow: ".to_owned();
txt.append(self.block_flow.box_.debug_str()) txt.append(self.block_flow.box_.debug_str())
} }
} }

View file

@ -115,7 +115,7 @@ impl Flow for TableCellFlow {
} }
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = ~"TableCellFlow: "; let txt = "TableCellFlow: ".to_owned();
txt.append(self.block_flow.box_.debug_str()) txt.append(self.block_flow.box_.debug_str())
} }
} }

View file

@ -83,7 +83,7 @@ impl Flow for TableColGroupFlow {
} }
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = ~"TableColGroupFlow: "; let txt = "TableColGroupFlow: ".to_owned();
txt.append(match self.box_ { txt.append(match self.box_ {
Some(ref rb) => rb.debug_str(), Some(ref rb) => rb.debug_str(),
None => "".to_owned(), None => "".to_owned(),

View file

@ -222,7 +222,7 @@ impl Flow for TableRowFlow {
} }
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = ~"TableRowFlow: "; let txt = "TableRowFlow: ".to_owned();
txt.append(self.block_flow.box_.debug_str()) txt.append(self.block_flow.box_.debug_str())
} }
} }

View file

@ -204,7 +204,7 @@ impl Flow for TableRowGroupFlow {
} }
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = ~"TableRowGroupFlow: "; let txt = "TableRowGroupFlow: ".to_owned();
txt.append(self.block_flow.box_.debug_str()) txt.append(self.block_flow.box_.debug_str())
} }
} }

View file

@ -196,9 +196,9 @@ impl Flow for TableWrapperFlow {
fn debug_str(&self) -> ~str { fn debug_str(&self) -> ~str {
let txt = if self.is_float() { let txt = if self.is_float() {
~"TableWrapperFlow(Float): " "TableWrapperFlow(Float): ".to_owned()
} else { } else {
~"TableWrapperFlow: " "TableWrapperFlow: ".to_owned()
}; };
txt.append(self.block_flow.box_.debug_str()) txt.append(self.block_flow.box_.debug_str())
} }

View file

@ -337,7 +337,7 @@ impl Window {
alert.run(); alert.run();
let value = alert.prompt_value(); let value = alert.prompt_value();
if "" == value { // To avoid crashing on Linux. if "" == value { // To avoid crashing on Linux.
self.event_queue.borrow_mut().push(LoadUrlWindowEvent(~"http://purple.com/")) self.event_queue.borrow_mut().push(LoadUrlWindowEvent("http://purple.com/".to_owned()))
} else { } else {
self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone())) self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone()))
} }

View file

@ -64,7 +64,7 @@ impl WindowMethods<Application> for Window {
fn new(_: &Application) -> Rc<Window> { fn new(_: &Application) -> Rc<Window> {
// Create the GLUT window. // Create the GLUT window.
glut::init_window_size(800, 600); glut::init_window_size(800, 600);
let glut_window = glut::create_window(~"Servo"); let glut_window = glut::create_window("Servo".to_owned());
// Create our window object. // Create our window object.
let window = Window { let window = Window {
@ -268,7 +268,7 @@ impl Window {
alert.run(); alert.run();
let value = alert.prompt_value(); let value = alert.prompt_value();
if "" == value { // To avoid crashing on Linux. if "" == value { // To avoid crashing on Linux.
self.event_queue.borrow_mut().push(LoadUrlWindowEvent(~"http://purple.com/")) self.event_queue.borrow_mut().push(LoadUrlWindowEvent("http://purple.com/".to_owned()))
} else { } else {
self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone())) self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone()))
} }

View file

@ -192,7 +192,7 @@ fn run(opts: opts::Opts) {
let url = if filename.starts_with("data:") { let url = if filename.starts_with("data:") {
// As a hack for easier command-line testing, // As a hack for easier command-line testing,
// assume that data URLs are not URL-encoded. // assume that data URLs are not URL-encoded.
Url::new(~"data", None, "".to_owned(), None, Url::new("data".to_owned(), None, "".to_owned(), None,
filename.slice_from(5).to_owned(), Vec::new(), None) filename.slice_from(5).to_owned(), Vec::new(), None)
} else { } else {
parse_url(*filename, None) parse_url(*filename, None)

View file

@ -108,13 +108,13 @@ fn plain() {
#[test] #[test]
fn plain_ct() { fn plain_ct() {
assert_parse("data:text/plain,hello", assert_parse("data:text/plain,hello",
Some((~"text", ~"plain")), None, Some(bytes!("hello").iter().map(|&x| x).collect())); Some(("text".to_owned(), "plain".to_owned())), None, Some(bytes!("hello").iter().map(|&x| x).collect()));
} }
#[test] #[test]
fn plain_charset() { fn plain_charset() {
assert_parse("data:text/plain;charset=latin1,hello", assert_parse("data:text/plain;charset=latin1,hello",
Some((~"text", ~"plain")), Some(~"latin1"), Some(bytes!("hello").iter().map(|&x| x).collect())); Some(("text".to_owned(), "plain".to_owned())), Some("latin1".to_owned()), Some(bytes!("hello").iter().map(|&x| x).collect()));
} }
#[test] #[test]
@ -125,12 +125,12 @@ fn base64() {
#[test] #[test]
fn base64_ct() { fn base64_ct() {
assert_parse("data:application/octet-stream;base64,C62+7w==", assert_parse("data:application/octet-stream;base64,C62+7w==",
Some((~"application", ~"octet-stream")), None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF))); Some(("application".to_owned(), "octet-stream".to_owned())), None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));
} }
#[test] #[test]
fn base64_charset() { fn base64_charset() {
assert_parse("data:text/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==", assert_parse("data:text/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==",
Some((~"text", ~"plain")), Some(~"koi8-r"), Some(("text".to_owned(), "plain".to_owned())), Some("koi8-r".to_owned()),
Some(vec!(0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4))); Some(vec!(0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4)));
} }

View file

@ -127,9 +127,9 @@ type LoaderTaskFactory = extern "Rust" fn() -> LoaderTask;
/// Create a ResourceTask with the default loaders /// Create a ResourceTask with the default loaders
pub fn ResourceTask() -> ResourceTask { pub fn ResourceTask() -> ResourceTask {
let loaders = ~[ let loaders = ~[
(~"file", file_loader::factory), ("file".to_owned(), file_loader::factory),
(~"http", http_loader::factory), ("http".to_owned(), http_loader::factory),
(~"data", data_loader::factory), ("data".to_owned(), data_loader::factory),
]; ];
create_resource_task_with_loaders(loaders) create_resource_task_with_loaders(loaders)
} }
@ -236,7 +236,7 @@ fn snicklefritz_loader_factory() -> LoaderTask {
#[test] #[test]
fn should_delegate_to_scheme_loader() { fn should_delegate_to_scheme_loader() {
let loader_factories = ~[(~"snicklefritz", snicklefritz_loader_factory)]; let loader_factories = ~[("snicklefritz".to_owned(), snicklefritz_loader_factory)];
let resource_task = create_resource_task_with_loaders(loader_factories); let resource_task = create_resource_task_with_loaders(loader_factories);
let (start_chan, start) = channel(); let (start_chan, start) = channel();
resource_task.send(Load(FromStr::from_str("snicklefritz://heya").unwrap(), start_chan)); resource_task.send(Load(FromStr::from_str("snicklefritz://heya").unwrap(), start_chan));

View file

@ -82,7 +82,7 @@ pub fn _obj_toString(cx: *JSContext, className: *libc::c_char) -> *JSString {
return ptr::null(); return ptr::null();
} }
let result = ~"[object " + name + "]"; let result = "[object ".to_owned() + name + "]";
for (i, c) in result.chars().enumerate() { for (i, c) in result.chars().enumerate() {
*chars.offset(i as int) = c as jschar; *chars.offset(i as int) = c as jschar;
} }

View file

@ -212,16 +212,16 @@ impl Document {
Some(string) => string.clone(), Some(string) => string.clone(),
None => match is_html_document { None => match is_html_document {
// http://dom.spec.whatwg.org/#dom-domimplementation-createhtmldocument // http://dom.spec.whatwg.org/#dom-domimplementation-createhtmldocument
HTMLDocument => ~"text/html", HTMLDocument => "text/html".to_owned(),
// http://dom.spec.whatwg.org/#concept-document-content-type // http://dom.spec.whatwg.org/#concept-document-content-type
NonHTMLDocument => ~"application/xml" NonHTMLDocument => "application/xml".to_owned()
} }
}, },
url: Untraceable::new(url), url: Untraceable::new(url),
// http://dom.spec.whatwg.org/#concept-document-quirks // http://dom.spec.whatwg.org/#concept-document-quirks
quirks_mode: Untraceable::new(NoQuirks), quirks_mode: Untraceable::new(NoQuirks),
// http://dom.spec.whatwg.org/#concept-document-encoding // http://dom.spec.whatwg.org/#concept-document-encoding
encoding_name: ~"utf-8", encoding_name: "utf-8".to_owned(),
is_html_document: is_html_document == HTMLDocument, is_html_document: is_html_document == HTMLDocument,
} }
} }
@ -346,8 +346,8 @@ impl<'a> DocumentMethods for JSRef<'a, Document> {
// http://dom.spec.whatwg.org/#dom-document-compatmode // http://dom.spec.whatwg.org/#dom-document-compatmode
fn CompatMode(&self) -> DOMString { fn CompatMode(&self) -> DOMString {
match *self.quirks_mode { match *self.quirks_mode {
NoQuirks => ~"CSS1Compat", NoQuirks => "CSS1Compat".to_owned(),
LimitedQuirks | FullQuirks => ~"BackCompat" LimitedQuirks | FullQuirks => "BackCompat".to_owned()
} }
} }
@ -585,7 +585,7 @@ impl<'a> DocumentMethods for JSRef<'a, Document> {
assert!(title_node.AppendChild(NodeCast::from_mut_ref(&mut *new_text)).is_ok()); assert!(title_node.AppendChild(NodeCast::from_mut_ref(&mut *new_text)).is_ok());
}, },
None => { None => {
let mut new_title = HTMLTitleElement::new(~"title", self).root(); let mut new_title = HTMLTitleElement::new("title".to_owned(), self).root();
let new_title: &mut JSRef<Node> = NodeCast::from_mut_ref(&mut *new_title); let new_title: &mut JSRef<Node> = NodeCast::from_mut_ref(&mut *new_title);
let mut new_text = self.CreateTextNode(title.clone()).root(); let mut new_text = self.CreateTextNode(title.clone()).root();

View file

@ -88,27 +88,27 @@ impl<'a> DOMExceptionMethods for JSRef<'a, DOMException> {
// http://dom.spec.whatwg.org/#error-names-0 // http://dom.spec.whatwg.org/#error-names-0
fn Message(&self) -> DOMString { fn Message(&self) -> DOMString {
match self.code { match self.code {
IndexSizeError => ~"The index is not in the allowed range.", IndexSizeError => "The index is not in the allowed range.".to_owned(),
HierarchyRequestError => ~"The operation would yield an incorrect node tree.", HierarchyRequestError => "The operation would yield an incorrect node tree.".to_owned(),
WrongDocumentError => ~"The object is in the wrong document.", WrongDocumentError => "The object is in the wrong document.".to_owned(),
InvalidCharacterError => ~"The string contains invalid characters.", InvalidCharacterError => "The string contains invalid characters.".to_owned(),
NoModificationAllowedError => ~"The object can not be modified.", NoModificationAllowedError => "The object can not be modified.".to_owned(),
NotFoundError => ~"The object can not be found here.", NotFoundError => "The object can not be found here.".to_owned(),
NotSupportedError => ~"The operation is not supported.", NotSupportedError => "The operation is not supported.".to_owned(),
InvalidStateError => ~"The object is in an invalid state.", InvalidStateError => "The object is in an invalid state.".to_owned(),
SyntaxError => ~"The string did not match the expected pattern.", SyntaxError => "The string did not match the expected pattern.".to_owned(),
InvalidModificationError => ~"The object can not be modified in this way.", InvalidModificationError => "The object can not be modified in this way.".to_owned(),
NamespaceError => ~"The operation is not allowed by Namespaces in XML.", NamespaceError => "The operation is not allowed by Namespaces in XML.".to_owned(),
InvalidAccessError => ~"The object does not support the operation or argument.", InvalidAccessError => "The object does not support the operation or argument.".to_owned(),
SecurityError => ~"The operation is insecure.", SecurityError => "The operation is insecure.".to_owned(),
NetworkError => ~"A network error occurred.", NetworkError => "A network error occurred.".to_owned(),
AbortError => ~"The operation was aborted.", AbortError => "The operation was aborted.".to_owned(),
URLMismatchError => ~"The given URL does not match another URL.", URLMismatchError => "The given URL does not match another URL.".to_owned(),
QuotaExceededError => ~"The quota has been exceeded.", QuotaExceededError => "The quota has been exceeded.".to_owned(),
TimeoutError => ~"The operation timed out.", TimeoutError => "The operation timed out.".to_owned(),
InvalidNodeTypeError => ~"The supplied node is incorrect or has an incorrect ancestor for this operation.", InvalidNodeTypeError => "The supplied node is incorrect or has an incorrect ancestor for this operation.".to_owned(),
DataCloneError => ~"The object can not be cloned.", DataCloneError => "The object can not be cloned.".to_owned(),
EncodingError => ~"The encoding operation (either encoded or decoding) failed." EncodingError => "The encoding operation (either encoded or decoding) failed.".to_owned()
} }
} }
} }

View file

@ -129,18 +129,18 @@ impl<'a> DOMImplementationMethods for JSRef<'a, DOMImplementation> {
{ {
// Step 3. // Step 3.
let mut doc_type = DocumentType::new(~"html", None, None, &*doc).root(); let mut doc_type = DocumentType::new("html".to_owned(), None, None, &*doc).root();
assert!(doc_node.AppendChild(NodeCast::from_mut_ref(&mut *doc_type)).is_ok()); assert!(doc_node.AppendChild(NodeCast::from_mut_ref(&mut *doc_type)).is_ok());
} }
{ {
// Step 4. // Step 4.
let mut doc_html = NodeCast::from_unrooted(HTMLHtmlElement::new(~"html", &*doc)).root(); let mut doc_html = NodeCast::from_unrooted(HTMLHtmlElement::new("html".to_owned(), &*doc)).root();
assert!(doc_node.AppendChild(&mut *doc_html).is_ok()); assert!(doc_node.AppendChild(&mut *doc_html).is_ok());
{ {
// Step 5. // Step 5.
let mut doc_head = NodeCast::from_unrooted(HTMLHeadElement::new(~"head", &*doc)).root(); let mut doc_head = NodeCast::from_unrooted(HTMLHeadElement::new("head".to_owned(), &*doc)).root();
assert!(doc_html.AppendChild(&mut *doc_head).is_ok()); assert!(doc_html.AppendChild(&mut *doc_head).is_ok());
// Step 6. // Step 6.
@ -148,7 +148,7 @@ impl<'a> DOMImplementationMethods for JSRef<'a, DOMImplementation> {
None => (), None => (),
Some(title_str) => { Some(title_str) => {
// Step 6.1. // Step 6.1.
let mut doc_title = NodeCast::from_unrooted(HTMLTitleElement::new(~"title", &*doc)).root(); let mut doc_title = NodeCast::from_unrooted(HTMLTitleElement::new("title".to_owned(), &*doc)).root();
assert!(doc_head.AppendChild(&mut *doc_title).is_ok()); assert!(doc_head.AppendChild(&mut *doc_title).is_ok());
// Step 6.2. // Step 6.2.
@ -159,7 +159,7 @@ impl<'a> DOMImplementationMethods for JSRef<'a, DOMImplementation> {
} }
// Step 7. // Step 7.
let mut doc_body = HTMLBodyElement::new(~"body", &*doc).root(); let mut doc_body = HTMLBodyElement::new("body".to_owned(), &*doc).root();
assert!(doc_html.AppendChild(NodeCast::from_mut_ref(&mut *doc_body)).is_ok()); assert!(doc_html.AppendChild(NodeCast::from_mut_ref(&mut *doc_body)).is_ok());
} }

View file

@ -48,10 +48,10 @@ impl<'a> DOMParserMethods for JSRef<'a, DOMParser> {
let owner = self.owner.root(); let owner = self.owner.root();
match ty { match ty {
Text_html => { Text_html => {
Ok(Document::new(&owner.root_ref(), None, HTMLDocument, Some(~"text/html"))) Ok(Document::new(&owner.root_ref(), None, HTMLDocument, Some("text/html".to_owned())))
} }
Text_xml => { Text_xml => {
Ok(Document::new(&owner.root_ref(), None, NonHTMLDocument, Some(~"text/xml"))) Ok(Document::new(&owner.root_ref(), None, NonHTMLDocument, Some("text/xml".to_owned())))
} }
_ => { _ => {
Err(FailureUnknown) Err(FailureUnknown)

View file

@ -571,7 +571,7 @@ impl<'a> ElementMethods for JSRef<'a, Element> {
} }
// Step 8. // Step 8.
if namespace == namespace::XMLNS && "xmlns" != name && Some(~"xmlns") != prefix { if namespace == namespace::XMLNS && "xmlns" != name && Some("xmlns".to_owned()) != prefix {
return Err(NamespaceError); return Err(NamespaceError);
} }

View file

@ -55,7 +55,7 @@ impl<'a> FormDataMethods for JSRef<'a, FormData> {
fn Append(&mut self, name: DOMString, value: &JSRef<Blob>, filename: Option<DOMString>) { fn Append(&mut self, name: DOMString, value: &JSRef<Blob>, filename: Option<DOMString>) {
let blob = BlobData { let blob = BlobData {
blob: value.unrooted(), blob: value.unrooted(),
name: filename.unwrap_or(~"default") name: filename.unwrap_or("default".to_owned())
}; };
self.data.insert(name.clone(), blob); self.data.insert(name.clone(), blob);
} }

View file

@ -25,7 +25,7 @@ pub fn serialize(iterator: &mut NodeIterator) -> ~str {
for node in *iterator { for node in *iterator {
while open_elements.len() > iterator.depth { while open_elements.len() > iterator.depth {
html.push_str(~"</" + open_elements.pop().unwrap().as_slice() + ">"); html.push_str("</".to_owned() + open_elements.pop().unwrap().as_slice() + ">");
} }
html.push_str( html.push_str(
match node.type_id() { match node.type_id() {
@ -60,13 +60,13 @@ pub fn serialize(iterator: &mut NodeIterator) -> ~str {
); );
} }
while open_elements.len() > 0 { while open_elements.len() > 0 {
html.push_str(~"</" + open_elements.pop().unwrap().as_slice() + ">"); html.push_str("</".to_owned() + open_elements.pop().unwrap().as_slice() + ">");
} }
html html
} }
fn serialize_comment(comment: &JSRef<Comment>) -> ~str { fn serialize_comment(comment: &JSRef<Comment>) -> ~str {
~"<!--" + comment.deref().characterdata.data + "-->" "<!--".to_owned() + comment.deref().characterdata.data + "-->"
} }
fn serialize_text(text: &JSRef<Text>) -> ~str { fn serialize_text(text: &JSRef<Text>) -> ~str {
@ -88,15 +88,15 @@ fn serialize_text(text: &JSRef<Text>) -> ~str {
} }
fn serialize_processing_instruction(processing_instruction: &JSRef<ProcessingInstruction>) -> ~str { fn serialize_processing_instruction(processing_instruction: &JSRef<ProcessingInstruction>) -> ~str {
~"<?" + processing_instruction.deref().target + " " + processing_instruction.deref().characterdata.data + "?>" "<?".to_owned() + processing_instruction.deref().target + " " + processing_instruction.deref().characterdata.data + "?>"
} }
fn serialize_doctype(doctype: &JSRef<DocumentType>) -> ~str { fn serialize_doctype(doctype: &JSRef<DocumentType>) -> ~str {
~"<!DOCTYPE" + doctype.deref().name + ">" "<!DOCTYPE".to_owned() + doctype.deref().name + ">"
} }
fn serialize_elem(elem: &JSRef<Element>, open_elements: &mut Vec<~str>) -> ~str { fn serialize_elem(elem: &JSRef<Element>, open_elements: &mut Vec<~str>) -> ~str {
let mut rv = ~"<" + elem.deref().local_name; let mut rv = "<".to_owned() + elem.deref().local_name;
for attr in elem.deref().attrs.iter() { for attr in elem.deref().attrs.iter() {
let attr = attr.root(); let attr = attr.root();
rv.push_str(serialize_attr(&*attr)); rv.push_str(serialize_attr(&*attr));
@ -125,18 +125,18 @@ fn serialize_elem(elem: &JSRef<Element>, open_elements: &mut Vec<~str>) -> ~str
fn serialize_attr(attr: &JSRef<Attr>) -> ~str { fn serialize_attr(attr: &JSRef<Attr>) -> ~str {
let attr_name = if attr.deref().namespace == namespace::XML { let attr_name = if attr.deref().namespace == namespace::XML {
~"xml:" + attr.deref().local_name.clone() "xml:".to_owned() + attr.deref().local_name.clone()
} else if attr.deref().namespace == namespace::XMLNS && } else if attr.deref().namespace == namespace::XMLNS &&
attr.deref().local_name.as_slice() == "xmlns" { attr.deref().local_name.as_slice() == "xmlns" {
~"xmlns" "xmlns".to_owned()
} else if attr.deref().namespace == namespace::XMLNS { } else if attr.deref().namespace == namespace::XMLNS {
~"xmlns:" + attr.deref().local_name.clone() "xmlns:".to_owned() + attr.deref().local_name.clone()
} else if attr.deref().namespace == namespace::XLink { } else if attr.deref().namespace == namespace::XLink {
~"xlink:" + attr.deref().local_name.clone() "xlink:".to_owned() + attr.deref().local_name.clone()
} else { } else {
attr.deref().name.clone() attr.deref().name.clone()
}; };
~" " + attr_name + "=\"" + escape(attr.deref().value, true) + "\"" " ".to_owned() + attr_name + "=\"" + escape(attr.deref().value, true) + "\""
} }
fn escape(string: &str, attr_mode: bool) -> ~str { fn escape(string: &str, attr_mode: bool) -> ~str {

View file

@ -49,7 +49,7 @@ pub trait NavigatorMethods {
impl<'a> NavigatorMethods for JSRef<'a, Navigator> { impl<'a> NavigatorMethods for JSRef<'a, Navigator> {
fn DoNotTrack(&self) -> DOMString { fn DoNotTrack(&self) -> DOMString {
~"unspecified" "unspecified".to_owned()
} }
fn Vendor(&self) -> DOMString { fn Vendor(&self) -> DOMString {
@ -61,7 +61,7 @@ impl<'a> NavigatorMethods for JSRef<'a, Navigator> {
} }
fn Product(&self) -> DOMString { fn Product(&self) -> DOMString {
~"Gecko" "Gecko".to_owned()
} }
fn ProductSub(&self) -> DOMString { fn ProductSub(&self) -> DOMString {
@ -85,11 +85,11 @@ impl<'a> NavigatorMethods for JSRef<'a, Navigator> {
} }
fn AppName(&self) -> DOMString { fn AppName(&self) -> DOMString {
~"Netscape" // Like Gecko/Webkit "Netscape".to_owned() // Like Gecko/Webkit
} }
fn GetAppCodeName(&self) -> Fallible<DOMString> { fn GetAppCodeName(&self) -> Fallible<DOMString> {
Ok(~"Mozilla") // Like Gecko/Webkit Ok("Mozilla".to_owned()) // Like Gecko/Webkit
} }
fn GetAppVersion(&self) -> Fallible<DOMString> { fn GetAppVersion(&self) -> Fallible<DOMString> {

View file

@ -1387,19 +1387,19 @@ impl<'a> NodeMethods for JSRef<'a, Node> {
let elem: &JSRef<Element> = ElementCast::to_ref(self).unwrap(); let elem: &JSRef<Element> = ElementCast::to_ref(self).unwrap();
elem.TagName() elem.TagName()
} }
TextNodeTypeId => ~"#text", TextNodeTypeId => "#text".to_owned(),
ProcessingInstructionNodeTypeId => { ProcessingInstructionNodeTypeId => {
let processing_instruction: &JSRef<ProcessingInstruction> = let processing_instruction: &JSRef<ProcessingInstruction> =
ProcessingInstructionCast::to_ref(self).unwrap(); ProcessingInstructionCast::to_ref(self).unwrap();
processing_instruction.Target() processing_instruction.Target()
} }
CommentNodeTypeId => ~"#comment", CommentNodeTypeId => "#comment".to_owned(),
DoctypeNodeTypeId => { DoctypeNodeTypeId => {
let doctype: &JSRef<DocumentType> = DocumentTypeCast::to_ref(self).unwrap(); let doctype: &JSRef<DocumentType> = DocumentTypeCast::to_ref(self).unwrap();
doctype.deref().name.clone() doctype.deref().name.clone()
}, },
DocumentFragmentNodeTypeId => ~"#document-fragment", DocumentFragmentNodeTypeId => "#document-fragment".to_owned(),
DocumentNodeTypeId => ~"#document" DocumentNodeTypeId => "#document".to_owned()
} }
} }

View file

@ -1002,7 +1002,7 @@ impl ScriptTask {
// "load" event as soon as we've finished executing all scripts parsed during // "load" event as soon as we've finished executing all scripts parsed during
// the initial load. // the initial load.
let mut event = Event::new(&*window).root(); let mut event = Event::new(&*window).root();
event.InitEvent(~"load", false, false); event.InitEvent("load".to_owned(), false, false);
let doctarget: &JSRef<EventTarget> = EventTargetCast::from_ref(&*document); let doctarget: &JSRef<EventTarget> = EventTargetCast::from_ref(&*document);
let wintarget: &JSRef<EventTarget> = EventTargetCast::from_ref(&*window); let wintarget: &JSRef<EventTarget> = EventTargetCast::from_ref(&*window);
let _ = wintarget.dispatch_event_with_target(Some((*doctarget).clone()), let _ = wintarget.dispatch_event_with_target(Some((*doctarget).clone()),
@ -1069,7 +1069,7 @@ impl ScriptTask {
// http://dev.w3.org/csswg/cssom-view/#resizing-viewports // http://dev.w3.org/csswg/cssom-view/#resizing-viewports
// https://dvcs.w3.org/hg/dom3events/raw-file/tip/html/DOM3-Events.html#event-type-resize // https://dvcs.w3.org/hg/dom3events/raw-file/tip/html/DOM3-Events.html#event-type-resize
let mut uievent = UIEvent::new(&*window).root(); let mut uievent = UIEvent::new(&*window).root();
uievent.InitUIEvent(~"resize", false, false, uievent.InitUIEvent("resize".to_owned(), false, false,
Some((*window).clone()), 0i32); Some((*window).clone()), 0i32);
let event: &mut JSRef<Event> = EventCast::from_mut_ref(&mut *uievent); let event: &mut JSRef<Event> = EventCast::from_mut_ref(&mut *uievent);

View file

@@ -980,23 +980,23 @@ mod tests {
fn test_get_id_name(){ fn test_get_id_name(){
let rules_list = get_mock_rules([".intro", "#top"]); let rules_list = get_mock_rules([".intro", "#top"]);
assert_eq!(SelectorMap::get_id_name(&rules_list[0][0]), None); assert_eq!(SelectorMap::get_id_name(&rules_list[0][0]), None);
assert_eq!(SelectorMap::get_id_name(&rules_list[1][0]), Some(~"top")); assert_eq!(SelectorMap::get_id_name(&rules_list[1][0]), Some("top".to_owned()));
} }
#[test] #[test]
fn test_get_class_name(){ fn test_get_class_name(){
let rules_list = get_mock_rules([".intro.foo", "#top"]); let rules_list = get_mock_rules([".intro.foo", "#top"]);
assert_eq!(SelectorMap::get_class_name(&rules_list[0][0]), Some(~"intro")); assert_eq!(SelectorMap::get_class_name(&rules_list[0][0]), Some("intro".to_owned()));
assert_eq!(SelectorMap::get_class_name(&rules_list[1][0]), None); assert_eq!(SelectorMap::get_class_name(&rules_list[1][0]), None);
} }
#[test] #[test]
fn test_get_element_name(){ fn test_get_element_name(){
let rules_list = get_mock_rules(["img.foo", "#top", "IMG", "ImG"]); let rules_list = get_mock_rules(["img.foo", "#top", "IMG", "ImG"]);
assert_eq!(SelectorMap::get_element_name(&rules_list[0][0]), Some(~"img")); assert_eq!(SelectorMap::get_element_name(&rules_list[0][0]), Some("img".to_owned()));
assert_eq!(SelectorMap::get_element_name(&rules_list[1][0]), None); assert_eq!(SelectorMap::get_element_name(&rules_list[1][0]), None);
assert_eq!(SelectorMap::get_element_name(&rules_list[2][0]), Some(~"img")); assert_eq!(SelectorMap::get_element_name(&rules_list[2][0]), Some("img".to_owned()));
assert_eq!(SelectorMap::get_element_name(&rules_list[3][0]), Some(~"img")); assert_eq!(SelectorMap::get_element_name(&rules_list[3][0]), Some("img".to_owned()));
} }
#[test] #[test]

View file

@@ -603,7 +603,7 @@ mod tests {
assert!(parse("") == None) assert!(parse("") == None)
assert!(parse("e") == Some(~[Selector{ assert!(parse("e") == Some(~[Selector{
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[LocalNameSelector(~"e")], simple_selectors: ~[LocalNameSelector("e".to_owned())],
next: None, next: None,
}), }),
pseudo_element: None, pseudo_element: None,
@@ -611,7 +611,7 @@ mod tests {
}])) }]))
assert!(parse(".foo") == Some(~[Selector{ assert!(parse(".foo") == Some(~[Selector{
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[ClassSelector(~"foo")], simple_selectors: ~[ClassSelector("foo".to_owned())],
next: None, next: None,
}), }),
pseudo_element: None, pseudo_element: None,
@@ -619,7 +619,7 @@ mod tests {
}])) }]))
assert!(parse("#bar") == Some(~[Selector{ assert!(parse("#bar") == Some(~[Selector{
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[IDSelector(~"bar")], simple_selectors: ~[IDSelector("bar".to_owned())],
next: None, next: None,
}), }),
pseudo_element: None, pseudo_element: None,
@@ -627,9 +627,9 @@ mod tests {
}])) }]))
assert!(parse("e.foo#bar") == Some(~[Selector{ assert!(parse("e.foo#bar") == Some(~[Selector{
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[LocalNameSelector(~"e"), simple_selectors: ~[LocalNameSelector("e".to_owned()),
ClassSelector(~"foo"), ClassSelector("foo".to_owned()),
IDSelector(~"bar")], IDSelector("bar".to_owned())],
next: None, next: None,
}), }),
pseudo_element: None, pseudo_element: None,
@@ -637,10 +637,10 @@ mod tests {
}])) }]))
assert!(parse("e.foo #bar") == Some(~[Selector{ assert!(parse("e.foo #bar") == Some(~[Selector{
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[IDSelector(~"bar")], simple_selectors: ~[IDSelector("bar".to_owned())],
next: Some((~CompoundSelector { next: Some((~CompoundSelector {
simple_selectors: ~[LocalNameSelector(~"e"), simple_selectors: ~[LocalNameSelector("e".to_owned()),
ClassSelector(~"foo")], ClassSelector("foo".to_owned())],
next: None, next: None,
}, Descendant)), }, Descendant)),
}), }),
@@ -653,8 +653,8 @@ mod tests {
assert!(parse_ns("[Foo]", &namespaces) == Some(~[Selector{ assert!(parse_ns("[Foo]", &namespaces) == Some(~[Selector{
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[AttrExists(AttrSelector { simple_selectors: ~[AttrExists(AttrSelector {
name: ~"Foo", name: "Foo".to_owned(),
lower_name: ~"foo", lower_name: "foo".to_owned(),
namespace: SpecificNamespace(namespace::Null), namespace: SpecificNamespace(namespace::Null),
})], })],
next: None, next: None,
@@ -668,8 +668,8 @@ mod tests {
assert!(parse_ns("[Foo]", &namespaces) == Some(~[Selector{ assert!(parse_ns("[Foo]", &namespaces) == Some(~[Selector{
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[AttrExists(AttrSelector { simple_selectors: ~[AttrExists(AttrSelector {
name: ~"Foo", name: "Foo".to_owned(),
lower_name: ~"foo", lower_name: "foo".to_owned(),
namespace: SpecificNamespace(namespace::Null), namespace: SpecificNamespace(namespace::Null),
})], })],
next: None, next: None,
@@ -682,7 +682,7 @@ mod tests {
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[ simple_selectors: ~[
NamespaceSelector(namespace::MathML), NamespaceSelector(namespace::MathML),
LocalNameSelector(~"e"), LocalNameSelector("e".to_owned()),
], ],
next: None, next: None,
}), }),
@@ -702,7 +702,7 @@ mod tests {
compound_selectors: Arc::new(CompoundSelector { compound_selectors: Arc::new(CompoundSelector {
simple_selectors: ~[], simple_selectors: ~[],
next: Some((~CompoundSelector { next: Some((~CompoundSelector {
simple_selectors: ~[LocalNameSelector(~"div")], simple_selectors: ~[LocalNameSelector("div".to_owned())],
next: None, next: None,
}, Descendant)), }, Descendant)),
}), }),

View file

@@ -533,50 +533,50 @@ pub mod tests {
#[test] #[test]
pub fn test_inline() { pub fn test_inline() {
let mut v = SmallVec16::new(); let mut v = SmallVec16::new();
v.push(~"hello"); v.push("hello".to_owned());
v.push(~"there"); v.push("there".to_owned());
assert_eq!(v.as_slice(), &[~"hello", ~"there"]); assert_eq!(v.as_slice(), &["hello".to_owned(), "there".to_owned()]);
} }
#[test] #[test]
pub fn test_spill() { pub fn test_spill() {
let mut v = SmallVec2::new(); let mut v = SmallVec2::new();
v.push(~"hello"); v.push("hello".to_owned());
v.push(~"there"); v.push("there".to_owned());
v.push(~"burma"); v.push("burma".to_owned());
v.push(~"shave"); v.push("shave".to_owned());
assert_eq!(v.as_slice(), &[~"hello", ~"there", ~"burma", ~"shave"]); assert_eq!(v.as_slice(), &["hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned()]);
} }
#[test] #[test]
pub fn test_double_spill() { pub fn test_double_spill() {
let mut v = SmallVec2::new(); let mut v = SmallVec2::new();
v.push(~"hello"); v.push("hello".to_owned());
v.push(~"there"); v.push("there".to_owned());
v.push(~"burma"); v.push("burma".to_owned());
v.push(~"shave"); v.push("shave".to_owned());
v.push(~"hello"); v.push("hello".to_owned());
v.push(~"there"); v.push("there".to_owned());
v.push(~"burma"); v.push("burma".to_owned());
v.push(~"shave"); v.push("shave".to_owned());
assert_eq!(v.as_slice(), &[ assert_eq!(v.as_slice(), &[
~"hello", ~"there", ~"burma", ~"shave", ~"hello", ~"there", ~"burma", ~"shave", "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(), "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(),
]); ]);
} }
#[test] #[test]
pub fn test_smallvec0() { pub fn test_smallvec0() {
let mut v = SmallVec0::new(); let mut v = SmallVec0::new();
v.push(~"hello"); v.push("hello".to_owned());
v.push(~"there"); v.push("there".to_owned());
v.push(~"burma"); v.push("burma".to_owned());
v.push(~"shave"); v.push("shave".to_owned());
v.push(~"hello"); v.push("hello".to_owned());
v.push(~"there"); v.push("there".to_owned());
v.push(~"burma"); v.push("burma".to_owned());
v.push(~"shave"); v.push("shave".to_owned());
assert_eq!(v.as_slice(), &[ assert_eq!(v.as_slice(), &[
~"hello", ~"there", ~"burma", ~"shave", ~"hello", ~"there", ~"burma", ~"shave", "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(), "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(),
]); ]);
} }
} }

View file

@@ -27,12 +27,12 @@ pub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url
// Assume we've been given a file path. If it's absolute just return // Assume we've been given a file path. If it's absolute just return
// it, otherwise make it absolute with the cwd. // it, otherwise make it absolute with the cwd.
if str_url.starts_with("/") { if str_url.starts_with("/") {
~"file://" + str_url "file://".to_owned() + str_url
} else { } else {
let mut path = os::getcwd(); let mut path = os::getcwd();
path.push(str_url); path.push(str_url);
// FIXME (#1094): not the right way to transform a path // FIXME (#1094): not the right way to transform a path
~"file://" + path.display().to_str() "file://".to_owned() + path.display().to_str()
} }
} else { } else {
let base_url = base_url.unwrap(); let base_url = base_url.unwrap();
@@ -45,7 +45,7 @@ pub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url
if str_url.starts_with("//") { if str_url.starts_with("//") {
new_url.scheme + ":" + str_url new_url.scheme + ":" + str_url
} else if base_url.path.is_empty() || str_url.starts_with("/") { } else if base_url.path.is_empty() || str_url.starts_with("/") {
new_url.path = ~"/"; new_url.path = "/".to_owned();
new_url.to_str() + str_url.trim_left_chars(&'/') new_url.to_str() + str_url.trim_left_chars(&'/')
} else if str_url.starts_with("#") { } else if str_url.starts_with("#") {
new_url.to_str() + str_url new_url.to_str() + str_url
@@ -67,7 +67,7 @@ pub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url
let mut path = os::self_exe_path().expect("can't get exe path"); let mut path = os::self_exe_path().expect("can't get exe path");
path.push("../src/test/html/failure.html"); path.push("../src/test/html/failure.html");
// FIXME (#1094): not the right way to transform a path // FIXME (#1094): not the right way to transform a path
~"file://" + path.display().to_str() "file://".to_owned() + path.display().to_str()
} }
// TODO: handle the rest of the about: pages // TODO: handle the rest of the about: pages
_ => str_url _ => str_url