Mirror of https://github.com/servo/servo.git (synced 2025-06-24 09:04:33 +01:00)

commit 25542e3f7e (parent 660f7a016e)

    Replace most ~"string"s with "string".to_owned().

35 changed files with 206 additions and 206 deletions
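For readers unfamiliar with the pre-1.0 Rust syntax being removed: `~"..."` allocated an owned string (`~str`), and the replacement spells the same allocation as a method call on a string literal. Below is a minimal, illustrative sketch in today's Rust of the idiom the commit adopts, where `.to_owned()` turns a borrowed `&str` into an owned `String`; the `generic_fonts` map only mirrors the first hunk and is not part of the commit itself.

use std::collections::HashMap;

fn main() {
    // `.to_owned()` converts a borrowed &str literal into an owned String,
    // the modern counterpart of the owned ~str that ~"..." used to produce.
    let mut generic_fonts: HashMap<String, String> = HashMap::with_capacity(5);
    generic_fonts.insert("serif".to_owned(), "Times New Roman".to_owned());
    generic_fonts.insert("monospace".to_owned(), "Menlo".to_owned());

    // Owned values can still be looked up by a borrowed key.
    assert_eq!(generic_fonts.get("serif").map(String::as_str), Some("Times New Roman"));
}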
@@ -54,11 +54,11 @@ impl FontContext {
         // TODO: Allow users to specify these.
         let mut generic_fonts = HashMap::with_capacity(5);
-        generic_fonts.insert(~"serif", ~"Times New Roman");
-        generic_fonts.insert(~"sans-serif", ~"Arial");
-        generic_fonts.insert(~"cursive", ~"Apple Chancery");
-        generic_fonts.insert(~"fantasy", ~"Papyrus");
-        generic_fonts.insert(~"monospace", ~"Menlo");
+        generic_fonts.insert("serif".to_owned(), "Times New Roman".to_owned());
+        generic_fonts.insert("sans-serif".to_owned(), "Arial".to_owned());
+        generic_fonts.insert("cursive".to_owned(), "Apple Chancery".to_owned());
+        generic_fonts.insert("fantasy".to_owned(), "Papyrus".to_owned());
+        generic_fonts.insert("monospace".to_owned(), "Menlo".to_owned());

         FontContext {
             instance_cache: LRUCache::new(10),

@@ -130,7 +130,7 @@ impl FontListHandle {
     }

     pub fn get_last_resort_font_families() -> ~[~str] {
-        ~[~"Roboto"]
+        ~["Roboto".to_owned()]
     }
 }

@@ -132,7 +132,7 @@ impl FontListHandle {
     }

     pub fn get_last_resort_font_families() -> ~[~str] {
-        ~[~"Arial"]
+        ~["Arial".to_owned()]
     }
 }

@@ -59,6 +59,6 @@ impl FontListHandle {
     }

     pub fn get_last_resort_font_families() -> ~[~str] {
-        ~[~"Arial Unicode MS",~"Arial"]
+        ~["Arial Unicode MS".to_owned(),"Arial".to_owned()]
     }
 }

@@ -139,13 +139,13 @@ fn test_true_type_tag() {
 #[test]
 fn test_transform_compress_none() {

-    let test_strs : ~[~str] = ~[~" foo bar",
-                                ~"foo bar ",
-                                ~"foo\n bar",
-                                ~"foo \nbar",
-                                ~" foo bar \nbaz",
-                                ~"foo bar baz",
-                                ~"foobarbaz\n\n"];
+    let test_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                "foo bar ".to_owned(),
+                                "foo\n bar".to_owned(),
+                                "foo \nbar".to_owned(),
+                                " foo bar \nbaz".to_owned(),
+                                "foo bar baz".to_owned(),
+                                "foobarbaz\n\n".to_owned()];
     let mode = CompressNone;

     for i in range(0, test_strs.len()) {

@@ -158,21 +158,21 @@ fn test_transform_compress_none() {
 #[test]
 fn test_transform_discard_newline() {

-    let test_strs : ~[~str] = ~[~" foo bar",
-                                ~"foo bar ",
-                                ~"foo\n bar",
-                                ~"foo \nbar",
-                                ~" foo bar \nbaz",
-                                ~"foo bar baz",
-                                ~"foobarbaz\n\n"];
+    let test_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                "foo bar ".to_owned(),
+                                "foo\n bar".to_owned(),
+                                "foo \nbar".to_owned(),
+                                " foo bar \nbaz".to_owned(),
+                                "foo bar baz".to_owned(),
+                                "foobarbaz\n\n".to_owned()];

-    let oracle_strs : ~[~str] = ~[~" foo bar",
-                                  ~"foo bar ",
-                                  ~"foo bar",
-                                  ~"foo bar",
-                                  ~" foo bar baz",
-                                  ~"foo bar baz",
-                                  ~"foobarbaz"];
+    let oracle_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                  "foo bar ".to_owned(),
+                                  "foo bar".to_owned(),
+                                  "foo bar".to_owned(),
+                                  " foo bar baz".to_owned(),
+                                  "foo bar baz".to_owned(),
+                                  "foobarbaz".to_owned()];

     assert_eq!(test_strs.len(), oracle_strs.len());
     let mode = DiscardNewline;

@@ -187,21 +187,21 @@ fn test_transform_discard_newline() {
 /* FIXME: Fix and re-enable
 #[test]
 fn test_transform_compress_whitespace() {
-    let test_strs : ~[~str] = ~[~" foo bar",
-                                ~"foo bar ",
-                                ~"foo\n bar",
-                                ~"foo \nbar",
-                                ~" foo bar \nbaz",
-                                ~"foo bar baz",
-                                ~"foobarbaz\n\n"];
+    let test_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                "foo bar ".to_owned(),
+                                "foo\n bar".to_owned(),
+                                "foo \nbar".to_owned(),
+                                " foo bar \nbaz".to_owned(),
+                                "foo bar baz".to_owned(),
+                                "foobarbaz\n\n".to_owned()];

-    let oracle_strs : ~[~str] = ~[~" foo bar",
-                                  ~"foo bar ",
-                                  ~"foo\n bar",
-                                  ~"foo \nbar",
-                                  ~" foo bar \nbaz",
-                                  ~"foo bar baz",
-                                  ~"foobarbaz\n\n"];
+    let oracle_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                  "foo bar ".to_owned(),
+                                  "foo\n bar".to_owned(),
+                                  "foo \nbar".to_owned(),
+                                  " foo bar \nbaz".to_owned(),
+                                  "foo bar baz".to_owned(),
+                                  "foobarbaz\n\n".to_owned()];

     assert_eq!(test_strs.len(), oracle_strs.len());
     let mode = CompressWhitespace;

@@ -215,21 +215,21 @@ fn test_transform_compress_whitespace() {

 #[test]
 fn test_transform_compress_whitespace_newline() {
-    let test_strs : ~[~str] = ~[~" foo bar",
-                                ~"foo bar ",
-                                ~"foo\n bar",
-                                ~"foo \nbar",
-                                ~" foo bar \nbaz",
-                                ~"foo bar baz",
-                                ~"foobarbaz\n\n"];
+    let test_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                "foo bar ".to_owned(),
+                                "foo\n bar".to_owned(),
+                                "foo \nbar".to_owned(),
+                                " foo bar \nbaz".to_owned(),
+                                "foo bar baz".to_owned(),
+                                "foobarbaz\n\n".to_owned()];

-    let oracle_strs : ~[~str] = ~[~"foo bar",
-                                  ~"foo bar ",
-                                  ~"foo bar",
-                                  ~"foo bar",
-                                  ~" foo bar baz",
-                                  ~"foo bar baz",
-                                  ~"foobarbaz "];
+    let oracle_strs : ~[~str] = ~["foo bar".to_owned(),
+                                  "foo bar ".to_owned(),
+                                  "foo bar".to_owned(),
+                                  "foo bar".to_owned(),
+                                  " foo bar baz".to_owned(),
+                                  "foo bar baz".to_owned(),
+                                  "foobarbaz ".to_owned()];

     assert_eq!(test_strs.len(), oracle_strs.len());
     let mode = CompressWhitespaceNewline;

@@ -244,23 +244,23 @@ fn test_transform_compress_whitespace_newline() {

 #[test]
 fn test_transform_compress_whitespace_newline_no_incoming() {
-    let test_strs : ~[~str] = ~[~" foo bar",
-                                ~"\nfoo bar",
-                                ~"foo bar ",
-                                ~"foo\n bar",
-                                ~"foo \nbar",
-                                ~" foo bar \nbaz",
-                                ~"foo bar baz",
-                                ~"foobarbaz\n\n"];
+    let test_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                "\nfoo bar".to_owned(),
+                                "foo bar ".to_owned(),
+                                "foo\n bar".to_owned(),
+                                "foo \nbar".to_owned(),
+                                " foo bar \nbaz".to_owned(),
+                                "foo bar baz".to_owned(),
+                                "foobarbaz\n\n".to_owned()];

-    let oracle_strs : ~[~str] = ~[~" foo bar",
-                                  ~" foo bar",
-                                  ~"foo bar ",
-                                  ~"foo bar",
-                                  ~"foo bar",
-                                  ~" foo bar baz",
-                                  ~"foo bar baz",
-                                  ~"foobarbaz "];
+    let oracle_strs : ~[~str] = ~[" foo bar".to_owned(),
+                                  " foo bar".to_owned(),
+                                  "foo bar ".to_owned(),
+                                  "foo bar".to_owned(),
+                                  "foo bar".to_owned(),
+                                  " foo bar baz".to_owned(),
+                                  "foo bar baz".to_owned(),
+                                  "foobarbaz ".to_owned()];

     assert_eq!(test_strs.len(), oracle_strs.len());
     let mode = CompressWhitespaceNewline;

@@ -1696,11 +1696,11 @@ impl Flow for BlockFlow {

     fn debug_str(&self) -> ~str {
         let txt = if self.is_float() {
-            ~"FloatFlow: "
+            "FloatFlow: ".to_owned()
         } else if self.is_root() {
-            ~"RootFlow: "
+            "RootFlow: ".to_owned()
         } else {
-            ~"BlockFlow: "
+            "BlockFlow: ".to_owned()
         };
         txt.append(self.box_.debug_str())
     }

@@ -667,7 +667,7 @@ impl<'a> FlowConstructor<'a> {
                                   whitespace_style))
                     => {
                 // Instantiate the whitespace box.
-                let box_info = UnscannedTextBox(UnscannedTextBoxInfo::from_text(~" "));
+                let box_info = UnscannedTextBox(UnscannedTextBoxInfo::from_text(" ".to_owned()));
                 let fragment = Box::from_opaque_node_and_style(whitespace_node,
                                                                whitespace_style.clone(),
                                                                box_info);

@@ -272,7 +272,7 @@ pub trait Flow {

     /// Returns a debugging string describing this flow.
     fn debug_str(&self) -> ~str {
-        ~"???"
+        "???".to_owned()
     }
 }

@@ -296,7 +296,7 @@ impl Flow for TableFlow {
     }

     fn debug_str(&self) -> ~str {
-        let txt = ~"TableFlow: ";
+        let txt = "TableFlow: ".to_owned();
         txt.append(self.block_flow.box_.debug_str())
     }
 }

@@ -66,7 +66,7 @@ impl Flow for TableCaptionFlow {
     }

     fn debug_str(&self) -> ~str {
-        let txt = ~"TableCaptionFlow: ";
+        let txt = "TableCaptionFlow: ".to_owned();
         txt.append(self.block_flow.box_.debug_str())
     }
 }

@@ -115,7 +115,7 @@ impl Flow for TableCellFlow {
     }

     fn debug_str(&self) -> ~str {
-        let txt = ~"TableCellFlow: ";
+        let txt = "TableCellFlow: ".to_owned();
         txt.append(self.block_flow.box_.debug_str())
     }
 }

@@ -83,7 +83,7 @@ impl Flow for TableColGroupFlow {
     }

     fn debug_str(&self) -> ~str {
-        let txt = ~"TableColGroupFlow: ";
+        let txt = "TableColGroupFlow: ".to_owned();
         txt.append(match self.box_ {
             Some(ref rb) => rb.debug_str(),
             None => "".to_owned(),

@@ -222,7 +222,7 @@ impl Flow for TableRowFlow {
     }

     fn debug_str(&self) -> ~str {
-        let txt = ~"TableRowFlow: ";
+        let txt = "TableRowFlow: ".to_owned();
         txt.append(self.block_flow.box_.debug_str())
     }
 }

@@ -204,7 +204,7 @@ impl Flow for TableRowGroupFlow {
     }

     fn debug_str(&self) -> ~str {
-        let txt = ~"TableRowGroupFlow: ";
+        let txt = "TableRowGroupFlow: ".to_owned();
         txt.append(self.block_flow.box_.debug_str())
     }
 }

@@ -196,9 +196,9 @@ impl Flow for TableWrapperFlow {

     fn debug_str(&self) -> ~str {
         let txt = if self.is_float() {
-            ~"TableWrapperFlow(Float): "
+            "TableWrapperFlow(Float): ".to_owned()
         } else {
-            ~"TableWrapperFlow: "
+            "TableWrapperFlow: ".to_owned()
         };
         txt.append(self.block_flow.box_.debug_str())
     }

@@ -337,7 +337,7 @@ impl Window {
                 alert.run();
                 let value = alert.prompt_value();
                 if "" == value { // To avoid crashing on Linux.
-                    self.event_queue.borrow_mut().push(LoadUrlWindowEvent(~"http://purple.com/"))
+                    self.event_queue.borrow_mut().push(LoadUrlWindowEvent("http://purple.com/".to_owned()))
                 } else {
                     self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone()))
                 }

@@ -64,7 +64,7 @@ impl WindowMethods<Application> for Window {
     fn new(_: &Application) -> Rc<Window> {
         // Create the GLUT window.
         glut::init_window_size(800, 600);
-        let glut_window = glut::create_window(~"Servo");
+        let glut_window = glut::create_window("Servo".to_owned());

         // Create our window object.
         let window = Window {

@@ -268,7 +268,7 @@ impl Window {
                 alert.run();
                 let value = alert.prompt_value();
                 if "" == value { // To avoid crashing on Linux.
-                    self.event_queue.borrow_mut().push(LoadUrlWindowEvent(~"http://purple.com/"))
+                    self.event_queue.borrow_mut().push(LoadUrlWindowEvent("http://purple.com/".to_owned()))
                 } else {
                     self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone()))
                 }

@@ -192,7 +192,7 @@ fn run(opts: opts::Opts) {
         let url = if filename.starts_with("data:") {
             // As a hack for easier command-line testing,
             // assume that data URLs are not URL-encoded.
-            Url::new(~"data", None, "".to_owned(), None,
+            Url::new("data".to_owned(), None, "".to_owned(), None,
                      filename.slice_from(5).to_owned(), Vec::new(), None)
         } else {
             parse_url(*filename, None)

@@ -108,13 +108,13 @@ fn plain() {
 #[test]
 fn plain_ct() {
     assert_parse("data:text/plain,hello",
-        Some((~"text", ~"plain")), None, Some(bytes!("hello").iter().map(|&x| x).collect()));
+        Some(("text".to_owned(), "plain".to_owned())), None, Some(bytes!("hello").iter().map(|&x| x).collect()));
 }

 #[test]
 fn plain_charset() {
     assert_parse("data:text/plain;charset=latin1,hello",
-        Some((~"text", ~"plain")), Some(~"latin1"), Some(bytes!("hello").iter().map(|&x| x).collect()));
+        Some(("text".to_owned(), "plain".to_owned())), Some("latin1".to_owned()), Some(bytes!("hello").iter().map(|&x| x).collect()));
 }

 #[test]

@@ -125,12 +125,12 @@ fn base64() {
 #[test]
 fn base64_ct() {
     assert_parse("data:application/octet-stream;base64,C62+7w==",
-        Some((~"application", ~"octet-stream")), None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));
+        Some(("application".to_owned(), "octet-stream".to_owned())), None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));
 }

 #[test]
 fn base64_charset() {
     assert_parse("data:text/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==",
-        Some((~"text", ~"plain")), Some(~"koi8-r"),
+        Some(("text".to_owned(), "plain".to_owned())), Some("koi8-r".to_owned()),
         Some(vec!(0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4)));
 }

@@ -127,9 +127,9 @@ type LoaderTaskFactory = extern "Rust" fn() -> LoaderTask;
 /// Create a ResourceTask with the default loaders
 pub fn ResourceTask() -> ResourceTask {
     let loaders = ~[
-        (~"file", file_loader::factory),
-        (~"http", http_loader::factory),
-        (~"data", data_loader::factory),
+        ("file".to_owned(), file_loader::factory),
+        ("http".to_owned(), http_loader::factory),
+        ("data".to_owned(), data_loader::factory),
     ];
     create_resource_task_with_loaders(loaders)
 }

@@ -236,7 +236,7 @@ fn snicklefritz_loader_factory() -> LoaderTask {

 #[test]
 fn should_delegate_to_scheme_loader() {
-    let loader_factories = ~[(~"snicklefritz", snicklefritz_loader_factory)];
+    let loader_factories = ~[("snicklefritz".to_owned(), snicklefritz_loader_factory)];
     let resource_task = create_resource_task_with_loaders(loader_factories);
     let (start_chan, start) = channel();
     resource_task.send(Load(FromStr::from_str("snicklefritz://heya").unwrap(), start_chan));

@@ -82,7 +82,7 @@ pub fn _obj_toString(cx: *JSContext, className: *libc::c_char) -> *JSString {
             return ptr::null();
         }

-        let result = ~"[object " + name + "]";
+        let result = "[object ".to_owned() + name + "]";
         for (i, c) in result.chars().enumerate() {
             *chars.offset(i as int) = c as jschar;
         }

@@ -212,16 +212,16 @@ impl Document {
                 Some(string) => string.clone(),
                 None => match is_html_document {
                     // http://dom.spec.whatwg.org/#dom-domimplementation-createhtmldocument
-                    HTMLDocument => ~"text/html",
+                    HTMLDocument => "text/html".to_owned(),
                     // http://dom.spec.whatwg.org/#concept-document-content-type
-                    NonHTMLDocument => ~"application/xml"
+                    NonHTMLDocument => "application/xml".to_owned()
                 }
             },
             url: Untraceable::new(url),
             // http://dom.spec.whatwg.org/#concept-document-quirks
             quirks_mode: Untraceable::new(NoQuirks),
             // http://dom.spec.whatwg.org/#concept-document-encoding
-            encoding_name: ~"utf-8",
+            encoding_name: "utf-8".to_owned(),
             is_html_document: is_html_document == HTMLDocument,
         }
     }

@@ -346,8 +346,8 @@ impl<'a> DocumentMethods for JSRef<'a, Document> {
     // http://dom.spec.whatwg.org/#dom-document-compatmode
     fn CompatMode(&self) -> DOMString {
         match *self.quirks_mode {
-            NoQuirks => ~"CSS1Compat",
-            LimitedQuirks | FullQuirks => ~"BackCompat"
+            NoQuirks => "CSS1Compat".to_owned(),
+            LimitedQuirks | FullQuirks => "BackCompat".to_owned()
         }
     }

@@ -585,7 +585,7 @@ impl<'a> DocumentMethods for JSRef<'a, Document> {
                 assert!(title_node.AppendChild(NodeCast::from_mut_ref(&mut *new_text)).is_ok());
             },
             None => {
-                let mut new_title = HTMLTitleElement::new(~"title", self).root();
+                let mut new_title = HTMLTitleElement::new("title".to_owned(), self).root();
                 let new_title: &mut JSRef<Node> = NodeCast::from_mut_ref(&mut *new_title);

                 let mut new_text = self.CreateTextNode(title.clone()).root();

@@ -88,27 +88,27 @@ impl<'a> DOMExceptionMethods for JSRef<'a, DOMException> {
     // http://dom.spec.whatwg.org/#error-names-0
     fn Message(&self) -> DOMString {
         match self.code {
-            IndexSizeError => ~"The index is not in the allowed range.",
-            HierarchyRequestError => ~"The operation would yield an incorrect node tree.",
-            WrongDocumentError => ~"The object is in the wrong document.",
-            InvalidCharacterError => ~"The string contains invalid characters.",
-            NoModificationAllowedError => ~"The object can not be modified.",
-            NotFoundError => ~"The object can not be found here.",
-            NotSupportedError => ~"The operation is not supported.",
-            InvalidStateError => ~"The object is in an invalid state.",
-            SyntaxError => ~"The string did not match the expected pattern.",
-            InvalidModificationError => ~"The object can not be modified in this way.",
-            NamespaceError => ~"The operation is not allowed by Namespaces in XML.",
-            InvalidAccessError => ~"The object does not support the operation or argument.",
-            SecurityError => ~"The operation is insecure.",
-            NetworkError => ~"A network error occurred.",
-            AbortError => ~"The operation was aborted.",
-            URLMismatchError => ~"The given URL does not match another URL.",
-            QuotaExceededError => ~"The quota has been exceeded.",
-            TimeoutError => ~"The operation timed out.",
-            InvalidNodeTypeError => ~"The supplied node is incorrect or has an incorrect ancestor for this operation.",
-            DataCloneError => ~"The object can not be cloned.",
-            EncodingError => ~"The encoding operation (either encoded or decoding) failed."
+            IndexSizeError => "The index is not in the allowed range.".to_owned(),
+            HierarchyRequestError => "The operation would yield an incorrect node tree.".to_owned(),
+            WrongDocumentError => "The object is in the wrong document.".to_owned(),
+            InvalidCharacterError => "The string contains invalid characters.".to_owned(),
+            NoModificationAllowedError => "The object can not be modified.".to_owned(),
+            NotFoundError => "The object can not be found here.".to_owned(),
+            NotSupportedError => "The operation is not supported.".to_owned(),
+            InvalidStateError => "The object is in an invalid state.".to_owned(),
+            SyntaxError => "The string did not match the expected pattern.".to_owned(),
+            InvalidModificationError => "The object can not be modified in this way.".to_owned(),
+            NamespaceError => "The operation is not allowed by Namespaces in XML.".to_owned(),
+            InvalidAccessError => "The object does not support the operation or argument.".to_owned(),
+            SecurityError => "The operation is insecure.".to_owned(),
+            NetworkError => "A network error occurred.".to_owned(),
+            AbortError => "The operation was aborted.".to_owned(),
+            URLMismatchError => "The given URL does not match another URL.".to_owned(),
+            QuotaExceededError => "The quota has been exceeded.".to_owned(),
+            TimeoutError => "The operation timed out.".to_owned(),
+            InvalidNodeTypeError => "The supplied node is incorrect or has an incorrect ancestor for this operation.".to_owned(),
+            DataCloneError => "The object can not be cloned.".to_owned(),
+            EncodingError => "The encoding operation (either encoded or decoding) failed.".to_owned()
         }
     }
 }

@@ -129,18 +129,18 @@ impl<'a> DOMImplementationMethods for JSRef<'a, DOMImplementation> {

         {
             // Step 3.
-            let mut doc_type = DocumentType::new(~"html", None, None, &*doc).root();
+            let mut doc_type = DocumentType::new("html".to_owned(), None, None, &*doc).root();
             assert!(doc_node.AppendChild(NodeCast::from_mut_ref(&mut *doc_type)).is_ok());
         }

         {
             // Step 4.
-            let mut doc_html = NodeCast::from_unrooted(HTMLHtmlElement::new(~"html", &*doc)).root();
+            let mut doc_html = NodeCast::from_unrooted(HTMLHtmlElement::new("html".to_owned(), &*doc)).root();
             assert!(doc_node.AppendChild(&mut *doc_html).is_ok());

             {
                 // Step 5.
-                let mut doc_head = NodeCast::from_unrooted(HTMLHeadElement::new(~"head", &*doc)).root();
+                let mut doc_head = NodeCast::from_unrooted(HTMLHeadElement::new("head".to_owned(), &*doc)).root();
                 assert!(doc_html.AppendChild(&mut *doc_head).is_ok());

                 // Step 6.

@@ -148,7 +148,7 @@ impl<'a> DOMImplementationMethods for JSRef<'a, DOMImplementation> {
                     None => (),
                     Some(title_str) => {
                         // Step 6.1.
-                        let mut doc_title = NodeCast::from_unrooted(HTMLTitleElement::new(~"title", &*doc)).root();
+                        let mut doc_title = NodeCast::from_unrooted(HTMLTitleElement::new("title".to_owned(), &*doc)).root();
                         assert!(doc_head.AppendChild(&mut *doc_title).is_ok());

                         // Step 6.2.

@@ -159,7 +159,7 @@ impl<'a> DOMImplementationMethods for JSRef<'a, DOMImplementation> {
             }

             // Step 7.
-            let mut doc_body = HTMLBodyElement::new(~"body", &*doc).root();
+            let mut doc_body = HTMLBodyElement::new("body".to_owned(), &*doc).root();
             assert!(doc_html.AppendChild(NodeCast::from_mut_ref(&mut *doc_body)).is_ok());
         }

@@ -48,10 +48,10 @@ impl<'a> DOMParserMethods for JSRef<'a, DOMParser> {
         let owner = self.owner.root();
         match ty {
             Text_html => {
-                Ok(Document::new(&owner.root_ref(), None, HTMLDocument, Some(~"text/html")))
+                Ok(Document::new(&owner.root_ref(), None, HTMLDocument, Some("text/html".to_owned())))
             }
             Text_xml => {
-                Ok(Document::new(&owner.root_ref(), None, NonHTMLDocument, Some(~"text/xml")))
+                Ok(Document::new(&owner.root_ref(), None, NonHTMLDocument, Some("text/xml".to_owned())))
             }
             _ => {
                 Err(FailureUnknown)

@@ -571,7 +571,7 @@ impl<'a> ElementMethods for JSRef<'a, Element> {
         }

         // Step 8.
-        if namespace == namespace::XMLNS && "xmlns" != name && Some(~"xmlns") != prefix {
+        if namespace == namespace::XMLNS && "xmlns" != name && Some("xmlns".to_owned()) != prefix {
             return Err(NamespaceError);
         }

@@ -55,7 +55,7 @@ impl<'a> FormDataMethods for JSRef<'a, FormData> {
     fn Append(&mut self, name: DOMString, value: &JSRef<Blob>, filename: Option<DOMString>) {
         let blob = BlobData {
             blob: value.unrooted(),
-            name: filename.unwrap_or(~"default")
+            name: filename.unwrap_or("default".to_owned())
         };
         self.data.insert(name.clone(), blob);
     }

@@ -25,7 +25,7 @@ pub fn serialize(iterator: &mut NodeIterator) -> ~str {

     for node in *iterator {
         while open_elements.len() > iterator.depth {
-            html.push_str(~"</" + open_elements.pop().unwrap().as_slice() + ">");
+            html.push_str("</".to_owned() + open_elements.pop().unwrap().as_slice() + ">");
         }
         html.push_str(
             match node.type_id() {

@@ -60,13 +60,13 @@ pub fn serialize(iterator: &mut NodeIterator) -> ~str {
         );
     }
     while open_elements.len() > 0 {
-        html.push_str(~"</" + open_elements.pop().unwrap().as_slice() + ">");
+        html.push_str("</".to_owned() + open_elements.pop().unwrap().as_slice() + ">");
     }
     html
 }

 fn serialize_comment(comment: &JSRef<Comment>) -> ~str {
-    ~"<!--" + comment.deref().characterdata.data + "-->"
+    "<!--".to_owned() + comment.deref().characterdata.data + "-->"
 }

 fn serialize_text(text: &JSRef<Text>) -> ~str {

@@ -88,15 +88,15 @@ fn serialize_text(text: &JSRef<Text>) -> ~str {
 }

 fn serialize_processing_instruction(processing_instruction: &JSRef<ProcessingInstruction>) -> ~str {
-    ~"<?" + processing_instruction.deref().target + " " + processing_instruction.deref().characterdata.data + "?>"
+    "<?".to_owned() + processing_instruction.deref().target + " " + processing_instruction.deref().characterdata.data + "?>"
 }

 fn serialize_doctype(doctype: &JSRef<DocumentType>) -> ~str {
-    ~"<!DOCTYPE" + doctype.deref().name + ">"
+    "<!DOCTYPE".to_owned() + doctype.deref().name + ">"
 }

 fn serialize_elem(elem: &JSRef<Element>, open_elements: &mut Vec<~str>) -> ~str {
-    let mut rv = ~"<" + elem.deref().local_name;
+    let mut rv = "<".to_owned() + elem.deref().local_name;
     for attr in elem.deref().attrs.iter() {
         let attr = attr.root();
         rv.push_str(serialize_attr(&*attr));

@@ -125,18 +125,18 @@ fn serialize_elem(elem: &JSRef<Element>, open_elements: &mut Vec<~str>) -> ~str

 fn serialize_attr(attr: &JSRef<Attr>) -> ~str {
     let attr_name = if attr.deref().namespace == namespace::XML {
-        ~"xml:" + attr.deref().local_name.clone()
+        "xml:".to_owned() + attr.deref().local_name.clone()
     } else if attr.deref().namespace == namespace::XMLNS &&
               attr.deref().local_name.as_slice() == "xmlns" {
-        ~"xmlns"
+        "xmlns".to_owned()
     } else if attr.deref().namespace == namespace::XMLNS {
-        ~"xmlns:" + attr.deref().local_name.clone()
+        "xmlns:".to_owned() + attr.deref().local_name.clone()
     } else if attr.deref().namespace == namespace::XLink {
-        ~"xlink:" + attr.deref().local_name.clone()
+        "xlink:".to_owned() + attr.deref().local_name.clone()
     } else {
         attr.deref().name.clone()
     };
-    ~" " + attr_name + "=\"" + escape(attr.deref().value, true) + "\""
+    " ".to_owned() + attr_name + "=\"" + escape(attr.deref().value, true) + "\""
 }

 fn escape(string: &str, attr_mode: bool) -> ~str {

@@ -49,7 +49,7 @@ pub trait NavigatorMethods {

 impl<'a> NavigatorMethods for JSRef<'a, Navigator> {
     fn DoNotTrack(&self) -> DOMString {
-        ~"unspecified"
+        "unspecified".to_owned()
     }

     fn Vendor(&self) -> DOMString {

@@ -61,7 +61,7 @@ impl<'a> NavigatorMethods for JSRef<'a, Navigator> {
     }

     fn Product(&self) -> DOMString {
-        ~"Gecko"
+        "Gecko".to_owned()
     }

     fn ProductSub(&self) -> DOMString {

@@ -85,11 +85,11 @@ impl<'a> NavigatorMethods for JSRef<'a, Navigator> {
     }

     fn AppName(&self) -> DOMString {
-        ~"Netscape" // Like Gecko/Webkit
+        "Netscape".to_owned() // Like Gecko/Webkit
     }

     fn GetAppCodeName(&self) -> Fallible<DOMString> {
-        Ok(~"Mozilla") // Like Gecko/Webkit
+        Ok("Mozilla".to_owned()) // Like Gecko/Webkit
     }

     fn GetAppVersion(&self) -> Fallible<DOMString> {

@@ -1387,19 +1387,19 @@ impl<'a> NodeMethods for JSRef<'a, Node> {
                 let elem: &JSRef<Element> = ElementCast::to_ref(self).unwrap();
                 elem.TagName()
             }
-            TextNodeTypeId => ~"#text",
+            TextNodeTypeId => "#text".to_owned(),
             ProcessingInstructionNodeTypeId => {
                 let processing_instruction: &JSRef<ProcessingInstruction> =
                     ProcessingInstructionCast::to_ref(self).unwrap();
                 processing_instruction.Target()
             }
-            CommentNodeTypeId => ~"#comment",
+            CommentNodeTypeId => "#comment".to_owned(),
             DoctypeNodeTypeId => {
                 let doctype: &JSRef<DocumentType> = DocumentTypeCast::to_ref(self).unwrap();
                 doctype.deref().name.clone()
             },
-            DocumentFragmentNodeTypeId => ~"#document-fragment",
-            DocumentNodeTypeId => ~"#document"
+            DocumentFragmentNodeTypeId => "#document-fragment".to_owned(),
+            DocumentNodeTypeId => "#document".to_owned()
         }
     }

@@ -1002,7 +1002,7 @@ impl ScriptTask {
         // "load" event as soon as we've finished executing all scripts parsed during
         // the initial load.
         let mut event = Event::new(&*window).root();
-        event.InitEvent(~"load", false, false);
+        event.InitEvent("load".to_owned(), false, false);
         let doctarget: &JSRef<EventTarget> = EventTargetCast::from_ref(&*document);
         let wintarget: &JSRef<EventTarget> = EventTargetCast::from_ref(&*window);
         let _ = wintarget.dispatch_event_with_target(Some((*doctarget).clone()),

@@ -1069,7 +1069,7 @@ impl ScriptTask {
         // http://dev.w3.org/csswg/cssom-view/#resizing-viewports
         // https://dvcs.w3.org/hg/dom3events/raw-file/tip/html/DOM3-Events.html#event-type-resize
         let mut uievent = UIEvent::new(&*window).root();
-        uievent.InitUIEvent(~"resize", false, false,
+        uievent.InitUIEvent("resize".to_owned(), false, false,
                             Some((*window).clone()), 0i32);
         let event: &mut JSRef<Event> = EventCast::from_mut_ref(&mut *uievent);

@@ -980,23 +980,23 @@ mod tests {
     fn test_get_id_name(){
         let rules_list = get_mock_rules([".intro", "#top"]);
         assert_eq!(SelectorMap::get_id_name(&rules_list[0][0]), None);
-        assert_eq!(SelectorMap::get_id_name(&rules_list[1][0]), Some(~"top"));
+        assert_eq!(SelectorMap::get_id_name(&rules_list[1][0]), Some("top".to_owned()));
     }

     #[test]
     fn test_get_class_name(){
         let rules_list = get_mock_rules([".intro.foo", "#top"]);
-        assert_eq!(SelectorMap::get_class_name(&rules_list[0][0]), Some(~"intro"));
+        assert_eq!(SelectorMap::get_class_name(&rules_list[0][0]), Some("intro".to_owned()));
         assert_eq!(SelectorMap::get_class_name(&rules_list[1][0]), None);
     }

     #[test]
     fn test_get_element_name(){
         let rules_list = get_mock_rules(["img.foo", "#top", "IMG", "ImG"]);
-        assert_eq!(SelectorMap::get_element_name(&rules_list[0][0]), Some(~"img"));
+        assert_eq!(SelectorMap::get_element_name(&rules_list[0][0]), Some("img".to_owned()));
         assert_eq!(SelectorMap::get_element_name(&rules_list[1][0]), None);
-        assert_eq!(SelectorMap::get_element_name(&rules_list[2][0]), Some(~"img"));
-        assert_eq!(SelectorMap::get_element_name(&rules_list[3][0]), Some(~"img"));
+        assert_eq!(SelectorMap::get_element_name(&rules_list[2][0]), Some("img".to_owned()));
+        assert_eq!(SelectorMap::get_element_name(&rules_list[3][0]), Some("img".to_owned()));
     }

     #[test]

@@ -603,7 +603,7 @@ mod tests {
         assert!(parse("") == None)
         assert!(parse("e") == Some(~[Selector{
             compound_selectors: Arc::new(CompoundSelector {
-                simple_selectors: ~[LocalNameSelector(~"e")],
+                simple_selectors: ~[LocalNameSelector("e".to_owned())],
                 next: None,
             }),
             pseudo_element: None,

@@ -611,7 +611,7 @@ mod tests {
         }]))
         assert!(parse(".foo") == Some(~[Selector{
             compound_selectors: Arc::new(CompoundSelector {
-                simple_selectors: ~[ClassSelector(~"foo")],
+                simple_selectors: ~[ClassSelector("foo".to_owned())],
                 next: None,
             }),
             pseudo_element: None,

@@ -619,7 +619,7 @@ mod tests {
         }]))
         assert!(parse("#bar") == Some(~[Selector{
             compound_selectors: Arc::new(CompoundSelector {
-                simple_selectors: ~[IDSelector(~"bar")],
+                simple_selectors: ~[IDSelector("bar".to_owned())],
                 next: None,
             }),
             pseudo_element: None,

@@ -627,9 +627,9 @@ mod tests {
         }]))
         assert!(parse("e.foo#bar") == Some(~[Selector{
             compound_selectors: Arc::new(CompoundSelector {
-                simple_selectors: ~[LocalNameSelector(~"e"),
-                                    ClassSelector(~"foo"),
-                                    IDSelector(~"bar")],
+                simple_selectors: ~[LocalNameSelector("e".to_owned()),
+                                    ClassSelector("foo".to_owned()),
+                                    IDSelector("bar".to_owned())],
                 next: None,
             }),
             pseudo_element: None,

@@ -637,10 +637,10 @@ mod tests {
         }]))
         assert!(parse("e.foo #bar") == Some(~[Selector{
             compound_selectors: Arc::new(CompoundSelector {
-                simple_selectors: ~[IDSelector(~"bar")],
+                simple_selectors: ~[IDSelector("bar".to_owned())],
                 next: Some((~CompoundSelector {
-                    simple_selectors: ~[LocalNameSelector(~"e"),
-                                        ClassSelector(~"foo")],
+                    simple_selectors: ~[LocalNameSelector("e".to_owned()),
+                                        ClassSelector("foo".to_owned())],
                     next: None,
                 }, Descendant)),
             }),

@@ -653,8 +653,8 @@ mod tests {
         assert!(parse_ns("[Foo]", &namespaces) == Some(~[Selector{
             compound_selectors: Arc::new(CompoundSelector {
                 simple_selectors: ~[AttrExists(AttrSelector {
-                    name: ~"Foo",
-                    lower_name: ~"foo",
+                    name: "Foo".to_owned(),
+                    lower_name: "foo".to_owned(),
                     namespace: SpecificNamespace(namespace::Null),
                 })],
                 next: None,

@@ -668,8 +668,8 @@ mod tests {
         assert!(parse_ns("[Foo]", &namespaces) == Some(~[Selector{
             compound_selectors: Arc::new(CompoundSelector {
                 simple_selectors: ~[AttrExists(AttrSelector {
-                    name: ~"Foo",
-                    lower_name: ~"foo",
+                    name: "Foo".to_owned(),
+                    lower_name: "foo".to_owned(),
                     namespace: SpecificNamespace(namespace::Null),
                 })],
                 next: None,

@@ -682,7 +682,7 @@ mod tests {
             compound_selectors: Arc::new(CompoundSelector {
                 simple_selectors: ~[
                     NamespaceSelector(namespace::MathML),
-                    LocalNameSelector(~"e"),
+                    LocalNameSelector("e".to_owned()),
                 ],
                 next: None,
             }),

@@ -702,7 +702,7 @@ mod tests {
             compound_selectors: Arc::new(CompoundSelector {
                 simple_selectors: ~[],
                 next: Some((~CompoundSelector {
-                    simple_selectors: ~[LocalNameSelector(~"div")],
+                    simple_selectors: ~[LocalNameSelector("div".to_owned())],
                     next: None,
                 }, Descendant)),
             }),

@@ -533,50 +533,50 @@ pub mod tests {
     #[test]
     pub fn test_inline() {
         let mut v = SmallVec16::new();
-        v.push(~"hello");
-        v.push(~"there");
-        assert_eq!(v.as_slice(), &[~"hello", ~"there"]);
+        v.push("hello".to_owned());
+        v.push("there".to_owned());
+        assert_eq!(v.as_slice(), &["hello".to_owned(), "there".to_owned()]);
     }

     #[test]
     pub fn test_spill() {
         let mut v = SmallVec2::new();
-        v.push(~"hello");
-        v.push(~"there");
-        v.push(~"burma");
-        v.push(~"shave");
-        assert_eq!(v.as_slice(), &[~"hello", ~"there", ~"burma", ~"shave"]);
+        v.push("hello".to_owned());
+        v.push("there".to_owned());
+        v.push("burma".to_owned());
+        v.push("shave".to_owned());
+        assert_eq!(v.as_slice(), &["hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned()]);
     }

     #[test]
     pub fn test_double_spill() {
         let mut v = SmallVec2::new();
-        v.push(~"hello");
-        v.push(~"there");
-        v.push(~"burma");
-        v.push(~"shave");
-        v.push(~"hello");
-        v.push(~"there");
-        v.push(~"burma");
-        v.push(~"shave");
+        v.push("hello".to_owned());
+        v.push("there".to_owned());
+        v.push("burma".to_owned());
+        v.push("shave".to_owned());
+        v.push("hello".to_owned());
+        v.push("there".to_owned());
+        v.push("burma".to_owned());
+        v.push("shave".to_owned());
         assert_eq!(v.as_slice(), &[
-            ~"hello", ~"there", ~"burma", ~"shave", ~"hello", ~"there", ~"burma", ~"shave",
+            "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(), "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(),
         ]);
     }

     #[test]
     pub fn test_smallvec0() {
         let mut v = SmallVec0::new();
-        v.push(~"hello");
-        v.push(~"there");
-        v.push(~"burma");
-        v.push(~"shave");
-        v.push(~"hello");
-        v.push(~"there");
-        v.push(~"burma");
-        v.push(~"shave");
+        v.push("hello".to_owned());
+        v.push("there".to_owned());
+        v.push("burma".to_owned());
+        v.push("shave".to_owned());
+        v.push("hello".to_owned());
+        v.push("there".to_owned());
+        v.push("burma".to_owned());
+        v.push("shave".to_owned());
         assert_eq!(v.as_slice(), &[
-            ~"hello", ~"there", ~"burma", ~"shave", ~"hello", ~"there", ~"burma", ~"shave",
+            "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(), "hello".to_owned(), "there".to_owned(), "burma".to_owned(), "shave".to_owned(),
         ]);
     }
 }

@@ -27,12 +27,12 @@ pub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url
             // Assume we've been given a file path. If it's absolute just return
             // it, otherwise make it absolute with the cwd.
             if str_url.starts_with("/") {
-                ~"file://" + str_url
+                "file://".to_owned() + str_url
             } else {
                 let mut path = os::getcwd();
                 path.push(str_url);
                 // FIXME (#1094): not the right way to transform a path
-                ~"file://" + path.display().to_str()
+                "file://".to_owned() + path.display().to_str()
             }
         } else {
             let base_url = base_url.unwrap();

@@ -45,7 +45,7 @@ pub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url
             if str_url.starts_with("//") {
                 new_url.scheme + ":" + str_url
             } else if base_url.path.is_empty() || str_url.starts_with("/") {
-                new_url.path = ~"/";
+                new_url.path = "/".to_owned();
                 new_url.to_str() + str_url.trim_left_chars(&'/')
             } else if str_url.starts_with("#") {
                 new_url.to_str() + str_url

@@ -67,7 +67,7 @@ pub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url
             let mut path = os::self_exe_path().expect("can't get exe path");
             path.push("../src/test/html/failure.html");
             // FIXME (#1094): not the right way to transform a path
-            ~"file://" + path.display().to_str()
+            "file://".to_owned() + path.display().to_str()
         }
         // TODO: handle the rest of the about: pages
         _ => str_url