about summary refs log tree commit diff
path: root/src/libsyntax/parse
diff options
context:
space:
mode:
author    Huon Wilson <dbau.pp+github@gmail.com>  2013-06-13 03:02:55 +1000
committer Huon Wilson <dbau.pp+github@gmail.com>  2013-06-13 10:20:52 +1000
commit    096f6f56a8178bd7f4b69a2ea909838e782766fb (patch)
tree      37513f0d39fdfb4d5a702a72abd7423c93c51cdf /src/libsyntax/parse
parent    641910dc1340b7786fd758282bac88639a58ddcd (diff)
download  rust-096f6f56a8178bd7f4b69a2ea909838e782766fb.tar.gz
          rust-096f6f56a8178bd7f4b69a2ea909838e782766fb.zip
Use @str instead of @~str in libsyntax and librustc. Fixes #5048.
This almost removes the StringRef wrapper, since all strings are
Equiv-alent now. Removes a lot of `/* bad */ copy *`'s, and converts
several things to be &'static str (the lint table and the intrinsics
table).

There are many instances of .to_managed(), unfortunately.
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs      |  4
-rw-r--r--  src/libsyntax/parse/comments.rs  |  4
-rw-r--r--  src/libsyntax/parse/common.rs    |  2
-rw-r--r--  src/libsyntax/parse/lexer.rs     | 28
-rw-r--r--  src/libsyntax/parse/mod.rs       | 80
-rw-r--r--  src/libsyntax/parse/obsolete.rs  |  2
-rw-r--r--  src/libsyntax/parse/parser.rs    | 37
-rw-r--r--  src/libsyntax/parse/token.rs     | 38
8 files changed, 89 insertions, 106 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index ed9a83d6b1e..ddcad5c3e8f 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -46,7 +46,7 @@ impl parser_attr for Parser {
               }
               token::DOC_COMMENT(s) => {
                 let attr = ::attr::mk_sugared_doc_attr(
-                    copy *self.id_to_str(s),
+                    self.id_to_str(s),
                     self.span.lo,
                     self.span.hi
                 );
@@ -119,7 +119,7 @@ impl parser_attr for Parser {
               }
               token::DOC_COMMENT(s) => {
                 let attr = ::attr::mk_sugared_doc_attr(
-                    copy *self.id_to_str(s),
+                    self.id_to_str(s),
                     self.span.lo,
                     self.span.hi
                 );
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index a6933c16483..5c56ea6c446 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -320,10 +320,10 @@ pub struct lit {
 // probably not a good thing.
 pub fn gather_comments_and_literals(span_diagnostic:
                                     @diagnostic::span_handler,
-                                    path: ~str,
+                                    path: @str,
                                     srdr: @io::Reader)
                                  -> (~[cmnt], ~[lit]) {
-    let src = @str::from_bytes(srdr.read_whole_stream());
+    let src = str::from_bytes(srdr.read_whole_stream()).to_managed();
     let cm = CodeMap::new();
     let filemap = cm.new_filemap(path, src);
     let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 6027d3b07f2..0956fa7225f 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -158,7 +158,7 @@ impl Parser {
             self.fatal(
                 fmt!(
                     "expected `%s`, found `%s`",
-                    *self.id_to_str(kw.to_ident()),
+                    self.id_to_str(kw.to_ident()),
                     self.this_token_to_str()
                 )
             );
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 474e93ed11a..d71e2763b5c 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -40,7 +40,7 @@ pub struct TokenAndSpan {tok: token::Token, sp: span}
 
 pub struct StringReader {
     span_diagnostic: @span_handler,
-    src: @~str,
+    src: @str,
     // The absolute offset within the codemap of the next character to read
     pos: BytePos,
     // The absolute offset within the codemap of the last character read(curr)
@@ -176,7 +176,7 @@ pub fn with_str_from<T>(rdr: @mut StringReader, start: BytePos, f: &fn(s: &str)
 pub fn bump(rdr: &mut StringReader) {
     rdr.last_pos = rdr.pos;
     let current_byte_offset = byte_offset(rdr, rdr.pos).to_uint();
-    if current_byte_offset < (*rdr.src).len() {
+    if current_byte_offset < (rdr.src).len() {
         assert!(rdr.curr != -1 as char);
         let last_char = rdr.curr;
         let next = rdr.src.char_range_at(current_byte_offset);
@@ -202,7 +202,7 @@ pub fn is_eof(rdr: @mut StringReader) -> bool {
 }
 pub fn nextch(rdr: @mut StringReader) -> char {
     let offset = byte_offset(rdr, rdr.pos).to_uint();
-    if offset < (*rdr.src).len() {
+    if offset < (rdr.src).len() {
         return rdr.src.char_at(offset);
     } else { return -1 as char; }
 }
@@ -801,9 +801,9 @@ mod test {
     }
 
     // open a string reader for the given string
-    fn setup(teststr: ~str) -> Env {
+    fn setup(teststr: @str) -> Env {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(~"zebra.rs", @teststr);
+        let fm = cm.new_filemap(@"zebra.rs", teststr);
         let span_handler =
             diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
         Env {
@@ -813,7 +813,7 @@ mod test {
 
     #[test] fn t1 () {
         let Env {string_reader} =
-            setup(~"/* my source file */ \
+            setup(@"/* my source file */ \
                     fn main() { io::println(~\"zebra\"); }\n");
         let id = str_to_ident("fn");
         let tok1 = string_reader.next_token();
@@ -849,14 +849,14 @@ mod test {
     }
 
     #[test] fn doublecolonparsing () {
-        let env = setup (~"a b");
+        let env = setup (@"a b");
         check_tokenization (env,
                            ~[mk_ident("a",false),
                              mk_ident("b",false)]);
     }
 
     #[test] fn dcparsing_2 () {
-        let env = setup (~"a::b");
+        let env = setup (@"a::b");
         check_tokenization (env,
                            ~[mk_ident("a",true),
                              token::MOD_SEP,
@@ -864,7 +864,7 @@ mod test {
     }
 
     #[test] fn dcparsing_3 () {
-        let env = setup (~"a ::b");
+        let env = setup (@"a ::b");
         check_tokenization (env,
                            ~[mk_ident("a",false),
                              token::MOD_SEP,
@@ -872,7 +872,7 @@ mod test {
     }
 
     #[test] fn dcparsing_4 () {
-        let env = setup (~"a:: b");
+        let env = setup (@"a:: b");
         check_tokenization (env,
                            ~[mk_ident("a",true),
                              token::MOD_SEP,
@@ -880,28 +880,28 @@ mod test {
     }
 
     #[test] fn character_a() {
-        let env = setup(~"'a'");
+        let env = setup(@"'a'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok,token::LIT_INT('a' as i64, ast::ty_char));
     }
 
     #[test] fn character_space() {
-        let env = setup(~"' '");
+        let env = setup(@"' '");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_INT(' ' as i64, ast::ty_char));
     }
 
     #[test] fn character_escaped() {
-        let env = setup(~"'\n'");
+        let env = setup(@"'\n'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_INT('\n' as i64, ast::ty_char));
     }
 
     #[test] fn lifetime_name() {
-        let env = setup(~"'abc");
+        let env = setup(@"'abc");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         let id = token::str_to_ident("abc");
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 559bca34f21..5edd2ec4d47 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -82,38 +82,38 @@ pub fn parse_crate_from_file(
 }
 
 pub fn parse_crate_from_source_str(
-    name: ~str,
-    source: @~str,
+    name: @str,
+    source: @str,
     cfg: ast::crate_cfg,
     sess: @mut ParseSess
 ) -> @ast::crate {
     let p = new_parser_from_source_str(
         sess,
         /*bad*/ copy cfg,
-        /*bad*/ copy name,
+        name,
         source
     );
     maybe_aborted(p.parse_crate_mod(),p)
 }
 
 pub fn parse_expr_from_source_str(
-    name: ~str,
-    source: @~str,
+    name: @str,
+    source: @str,
     cfg: ast::crate_cfg,
     sess: @mut ParseSess
 ) -> @ast::expr {
     let p = new_parser_from_source_str(
         sess,
         cfg,
-        /*bad*/ copy name,
+        name,
         source
     );
     maybe_aborted(p.parse_expr(), p)
 }
 
 pub fn parse_item_from_source_str(
-    name: ~str,
-    source: @~str,
+    name: @str,
+    source: @str,
     cfg: ast::crate_cfg,
     attrs: ~[ast::attribute],
     sess: @mut ParseSess
@@ -121,30 +121,30 @@ pub fn parse_item_from_source_str(
     let p = new_parser_from_source_str(
         sess,
         cfg,
-        /*bad*/ copy name,
+        name,
         source
     );
     maybe_aborted(p.parse_item(attrs),p)
 }
 
 pub fn parse_meta_from_source_str(
-    name: ~str,
-    source: @~str,
+    name: @str,
+    source: @str,
     cfg: ast::crate_cfg,
     sess: @mut ParseSess
 ) -> @ast::meta_item {
     let p = new_parser_from_source_str(
         sess,
         cfg,
-        /*bad*/ copy name,
+        name,
         source
     );
     maybe_aborted(p.parse_meta_item(),p)
 }
 
 pub fn parse_stmt_from_source_str(
-    name: ~str,
-    source: @~str,
+    name: @str,
+    source: @str,
     cfg: ast::crate_cfg,
     attrs: ~[ast::attribute],
     sess: @mut ParseSess
@@ -152,22 +152,22 @@ pub fn parse_stmt_from_source_str(
     let p = new_parser_from_source_str(
         sess,
         cfg,
-        /*bad*/ copy name,
+        name,
         source
     );
     maybe_aborted(p.parse_stmt(attrs),p)
 }
 
 pub fn parse_tts_from_source_str(
-    name: ~str,
-    source: @~str,
+    name: @str,
+    source: @str,
     cfg: ast::crate_cfg,
     sess: @mut ParseSess
 ) -> ~[ast::token_tree] {
     let p = new_parser_from_source_str(
         sess,
         cfg,
-        /*bad*/ copy name,
+        name,
         source
     );
     *p.quote_depth += 1u;
@@ -182,8 +182,8 @@ pub fn parse_tts_from_source_str(
 // result.
 pub fn parse_from_source_str<T>(
     f: &fn(&Parser) -> T,
-    name: ~str, ss: codemap::FileSubstr,
-    source: @~str,
+    name: @str, ss: codemap::FileSubstr,
+    source: @str,
     cfg: ast::crate_cfg,
     sess: @mut ParseSess
 ) -> T {
@@ -213,8 +213,8 @@ pub fn next_node_id(sess: @mut ParseSess) -> node_id {
 // Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: @mut ParseSess,
                                   cfg: ast::crate_cfg,
-                                  name: ~str,
-                                  source: @~str)
+                                  name: @str,
+                                  source: @str)
                                -> Parser {
     filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
 }
@@ -223,9 +223,9 @@ pub fn new_parser_from_source_str(sess: @mut ParseSess,
 // is specified as a substring of another file.
 pub fn new_parser_from_source_substr(sess: @mut ParseSess,
                                   cfg: ast::crate_cfg,
-                                  name: ~str,
+                                  name: @str,
                                   ss: codemap::FileSubstr,
-                                  source: @~str)
+                                  source: @str)
                                -> Parser {
     filemap_to_parser(sess,substring_to_filemap(sess,source,name,ss),cfg)
 }
@@ -275,7 +275,7 @@ pub fn new_parser_from_tts(sess: @mut ParseSess,
 pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
     -> @FileMap {
     match io::read_whole_file_str(path) {
-        Ok(src) => string_to_filemap(sess, @src, path.to_str()),
+        Ok(src) => string_to_filemap(sess, src.to_managed(), path.to_str().to_managed()),
         Err(e) => {
             match spanopt {
                 Some(span) => sess.span_diagnostic.span_fatal(span, e),
@@ -287,14 +287,14 @@ pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
 
 // given a session and a string, add the string to
 // the session's codemap and return the new filemap
-pub fn string_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str)
+pub fn string_to_filemap(sess: @mut ParseSess, source: @str, path: @str)
     -> @FileMap {
     sess.cm.new_filemap(path, source)
 }
 
 // given a session and a string and a path and a FileSubStr, add
 // the string to the CodeMap and return the new FileMap
-pub fn substring_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str,
+pub fn substring_to_filemap(sess: @mut ParseSess, source: @str, path: @str,
                            filesubstr: FileSubstr) -> @FileMap {
     sess.cm.new_filemap_w_substr(path,filesubstr,source)
 }
@@ -349,7 +349,7 @@ mod test {
     use util::parser_testing::{string_to_stmt, strs_to_idents};
 
     // map a string to tts, return the tt without its parsesess
-    fn string_to_tts_only(source_str : @~str) -> ~[ast::token_tree] {
+    fn string_to_tts_only(source_str : @str) -> ~[ast::token_tree] {
         let (tts,_ps) = string_to_tts_and_sess(source_str);
         tts
     }
@@ -368,7 +368,7 @@ mod test {
     }
 
     #[test] fn path_exprs_1 () {
-        assert_eq!(string_to_expr(@~"a"),
+        assert_eq!(string_to_expr(@"a"),
                    @ast::expr{id:1,
                               node:ast::expr_path(@ast::Path {span:sp(0,1),
                                                               global:false,
@@ -379,7 +379,7 @@ mod test {
     }
 
     #[test] fn path_exprs_2 () {
-        assert_eq!(string_to_expr(@~"::a::b"),
+        assert_eq!(string_to_expr(@"::a::b"),
                    @ast::expr{id:1,
                                node:ast::expr_path(
                                    @ast::Path {span:sp(0,6),
@@ -394,11 +394,11 @@ mod test {
     // marked as `#[should_fail]`.
     /*#[should_fail]
     #[test] fn bad_path_expr_1() {
-        string_to_expr(@~"::abc::def::return");
+        string_to_expr(@"::abc::def::return");
     }*/
 
     #[test] fn string_to_tts_1 () {
-        let (tts,_ps) = string_to_tts_and_sess(@~"fn a (b : int) { b; }");
+        let (tts,_ps) = string_to_tts_and_sess(@"fn a (b : int) { b; }");
         assert_eq!(to_json_str(@tts),
                    ~"[\
                 [\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\
@@ -427,7 +427,7 @@ mod test {
     }
 
     #[test] fn ret_expr() {
-        assert_eq!(string_to_expr(@~"return d"),
+        assert_eq!(string_to_expr(@"return d"),
                    @ast::expr{id:2,
                               node:ast::expr_ret(
                                   Some(@ast::expr{id:1,
@@ -443,7 +443,7 @@ mod test {
     }
 
     #[test] fn parse_stmt_1 () {
-        assert_eq!(string_to_stmt(@~"b;"),
+        assert_eq!(string_to_stmt(@"b;"),
                    @spanned{
                        node: ast::stmt_expr(@ast::expr{
                            id: 1,
@@ -465,7 +465,7 @@ mod test {
     }
 
     #[test] fn parse_ident_pat () {
-        let parser = string_to_parser(@~"b");
+        let parser = string_to_parser(@"b");
         assert_eq!(parser.parse_pat(),
                    @ast::pat{id:1, // fixme
                              node: ast::pat_ident(ast::bind_infer,
@@ -482,7 +482,7 @@ mod test {
     }
 
     #[test] fn parse_arg () {
-        let parser = string_to_parser(@~"b : int");
+        let parser = string_to_parser(@"b : int");
         assert_eq!(parser.parse_arg_general(true),
                    ast::arg{
                        is_mutbl: false,
@@ -515,7 +515,7 @@ mod test {
     #[test] fn parse_fundecl () {
         // this test depends on the intern order of "fn" and "int", and on the
         // assignment order of the node_ids.
-        assert_eq!(string_to_item(@~"fn a (b : int) { b; }"),
+        assert_eq!(string_to_item(@"fn a (b : int) { b; }"),
                   Some(
                       @ast::item{ident:str_to_ident("a"),
                             attrs:~[],
@@ -585,12 +585,12 @@ mod test {
 
     #[test] fn parse_exprs () {
         // just make sure that they parse....
-        string_to_expr(@~"3 + 4");
-        string_to_expr(@~"a::z.froob(b,@(987+3))");
+        string_to_expr(@"3 + 4");
+        string_to_expr(@"a::z.froob(b,@(987+3))");
     }
 
     #[test] fn attrs_fix_bug () {
-        string_to_item(@~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+        string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                    -> Result<@Writer, ~str> {
     #[cfg(windows)]
     fn wb() -> c_int {
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 61b7f1403e6..cc7b7fab07e 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -259,7 +259,7 @@ impl Parser {
                                    -> bool {
         match *token {
             token::IDENT(sid, _) => {
-                str::eq_slice(*self.id_to_str(sid), ident)
+                str::eq_slice(self.id_to_str(sid), ident)
             }
             _ => false
         }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 8dd80be4f9c..47c0827eb23 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -96,7 +96,6 @@ use core::iterator::IteratorUtil;
 use core::either::Either;
 use core::either;
 use core::hashmap::HashSet;
-use core::str;
 use core::vec;
 
 #[deriving(Eq)]
@@ -263,7 +262,7 @@ pub struct Parser {
     /// extra detail when the same error is seen twice
     obsolete_set: @mut HashSet<ObsoleteSyntax>,
     /// Used to determine the path to externally loaded source files
-    mod_path_stack: @mut ~[~str],
+    mod_path_stack: @mut ~[@str],
 
 }
 
@@ -333,7 +332,7 @@ impl Parser {
     }
     pub fn get_id(&self) -> node_id { next_node_id(self.sess) }
 
-    pub fn id_to_str(&self, id: ident) -> @~str {
+    pub fn id_to_str(&self, id: ident) -> @str {
         get_ident_interner().get(id.name)
     }
 
@@ -2886,7 +2885,7 @@ impl Parser {
         loop {
             match *self.token {
                 token::LIFETIME(lifetime) => {
-                    if str::eq_slice(*self.id_to_str(lifetime), "static") {
+                    if "static" == self.id_to_str(lifetime) {
                         result.push(RegionTyParamBound);
                     } else {
                         self.span_err(*self.span,
@@ -2898,11 +2897,11 @@ impl Parser {
                     let obsolete_bound = match *self.token {
                         token::MOD_SEP => false,
                         token::IDENT(sid, _) => {
-                            match *self.id_to_str(sid) {
-                                ~"send" |
-                                ~"copy" |
-                                ~"const" |
-                                ~"owned" => {
+                            match self.id_to_str(sid).as_slice() {
+                                "send" |
+                                "copy" |
+                                "const" |
+                                "owned" => {
                                     self.obsolete(
                                         *self.span,
                                         ObsoleteLowerCaseKindBounds);
@@ -3364,7 +3363,7 @@ impl Parser {
             }
             if fields.len() == 0 {
                 self.fatal(fmt!("Unit-like struct should be written as `struct %s;`",
-                                *get_ident_interner().get(class_name.name)));
+                                get_ident_interner().get(class_name.name)));
             }
             self.bump();
         } else if *self.token == token::LPAREN {
@@ -3580,8 +3579,8 @@ impl Parser {
         let file_path = match ::attr::first_attr_value_str_by_name(
             attrs, "path") {
 
-            Some(d) => copy *d,
-            None => copy *default_path
+            Some(d) => d,
+            None => default_path
         };
         self.mod_path_stack.push(file_path)
     }
@@ -3599,13 +3598,13 @@ impl Parser {
         let prefix = prefix.dir_path();
         let mod_path_stack = &*self.mod_path_stack;
         let mod_path = Path(".").push_many(*mod_path_stack);
-        let default_path = *token::interner_get(id.name) + ".rs";
+        let default_path = token::interner_get(id.name).to_owned() + ".rs";
         let file_path = match ::attr::first_attr_value_str_by_name(
             outer_attrs, "path") {
             Some(d) => {
-                let path = Path(copy *d);
+                let path = Path(d);
                 if !path.is_absolute {
-                    mod_path.push(copy *d)
+                    mod_path.push(d)
                 } else {
                     path
                 }
@@ -3637,9 +3636,9 @@ impl Parser {
         let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
         return (ast::item_mod(m0), mod_attrs);
 
-        fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str {
+        fn cdir_path_opt(default: @str, attrs: ~[ast::attribute]) -> @str {
             match ::attr::first_attr_value_str_by_name(attrs, "path") {
-                Some(d) => copy *d,
+                Some(d) => d,
                 None => default
             }
         }
@@ -4263,7 +4262,7 @@ impl Parser {
 
         let first_ident = self.parse_ident();
         let mut path = ~[first_ident];
-        debug!("parsed view_path: %s", *self.id_to_str(first_ident));
+        debug!("parsed view_path: %s", self.id_to_str(first_ident));
         match *self.token {
           token::EQ => {
             // x = foo::bar
@@ -4528,7 +4527,7 @@ impl Parser {
                                config: copy self.cfg })
     }
 
-    pub fn parse_str(&self) -> @~str {
+    pub fn parse_str(&self) -> @str {
         match *self.token {
             token::LIT_STR(s) => {
                 self.bump();
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 91605db77b5..e7bc67340f0 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -21,10 +21,8 @@ use core::cast;
 use core::char;
 use core::cmp::Equiv;
 use core::local_data;
-use core::str;
 use core::rand;
 use core::rand::RngUtil;
-use core::to_bytes;
 
 #[deriving(Encodable, Decodable, Eq)]
 pub enum binop {
@@ -180,28 +178,28 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
       }
       LIT_INT_UNSUFFIXED(i) => { i.to_str() }
       LIT_FLOAT(ref s, t) => {
-        let mut body = copy *ident_to_str(s);
+        let mut body = ident_to_str(s).to_owned();
         if body.ends_with(".") {
             body += "0";  // `10.f` is not a float literal
         }
         body + ast_util::float_ty_to_str(t)
       }
       LIT_FLOAT_UNSUFFIXED(ref s) => {
-        let mut body = copy *ident_to_str(s);
+        let mut body = ident_to_str(s).to_owned();
         if body.ends_with(".") {
             body += "0";  // `10.f` is not a float literal
         }
         body
       }
-      LIT_STR(ref s) => { ~"\"" + ident_to_str(s).escape_default() + "\"" }
+      LIT_STR(ref s) => { fmt!("\"%s\"", ident_to_str(s).escape_default()) }
 
       /* Name components */
-      IDENT(s, _) => copy *in.get(s.name),
-      LIFETIME(s) => fmt!("'%s", *in.get(s.name)),
+      IDENT(s, _) => in.get(s.name).to_owned(),
+      LIFETIME(s) => fmt!("'%s", in.get(s.name)),
       UNDERSCORE => ~"_",
 
       /* Other */
-      DOC_COMMENT(ref s) => copy *ident_to_str(s),
+      DOC_COMMENT(ref s) => ident_to_str(s).to_owned(),
       EOF => ~"<eof>",
       INTERPOLATED(ref nt) => {
         match nt {
@@ -350,20 +348,6 @@ pub mod special_idents {
     pub static type_self: ident = ident { name: 34, ctxt: 0};    // `Self`
 }
 
-pub struct StringRef<'self>(&'self str);
-
-impl<'self> Equiv<@~str> for StringRef<'self> {
-    #[inline(always)]
-    fn equiv(&self, other: &@~str) -> bool { str::eq_slice(**self, **other) }
-}
-
-impl<'self> to_bytes::IterBytes for StringRef<'self> {
-    #[inline(always)]
-    fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool {
-        (**self).iter_bytes(lsb0, f)
-    }
-}
-
 /**
  * Maps a token to a record specifying the corresponding binary
  * operator
@@ -403,14 +387,14 @@ impl ident_interner {
     pub fn gensym(&self, val: &str) -> Name {
         self.interner.gensym(val)
     }
-    pub fn get(&self, idx: Name) -> @~str {
+    pub fn get(&self, idx: Name) -> @str {
         self.interner.get(idx)
     }
     // is this really something that should be exposed?
     pub fn len(&self) -> uint {
         self.interner.len()
     }
-    pub fn find_equiv<Q:Hash + IterBytes + Equiv<@~str>>(&self, val: &Q)
+    pub fn find_equiv<Q:Hash + IterBytes + Equiv<@str>>(&self, val: &Q)
                                                      -> Option<Name> {
         self.interner.find_equiv(val)
     }
@@ -542,12 +526,12 @@ pub fn gensym(str : &str) -> Name {
 }
 
 // map an interned representation back to a string
-pub fn interner_get(name : Name) -> @~str {
+pub fn interner_get(name : Name) -> @str {
     get_ident_interner().get(name)
 }
 
 // maps an identifier to the string that it corresponds to
-pub fn ident_to_str(id : &ast::ident) -> @~str {
+pub fn ident_to_str(id : &ast::ident) -> @str {
     interner_get(id.name)
 }
 
@@ -715,6 +699,6 @@ mod test {
     #[test] fn t1() {
         let a = fresh_name("ghi");
         io::println(fmt!("interned name: %u,\ntextual name: %s\n",
-                         a,*interner_get(a)));
+                         a,interner_get(a)));
     }
 }