about summary refs log tree commit diff
path: root/src/comp/syntax/parse
diff options
context:
space:
mode:
Diffstat (limited to 'src/comp/syntax/parse')
-rw-r--r--  src/comp/syntax/parse/eval.rs    41
-rw-r--r--  src/comp/syntax/parse/lexer.rs  139
-rw-r--r--  src/comp/syntax/parse/parser.rs 473
-rw-r--r--  src/comp/syntax/parse/token.rs  122
4 files changed, 357 insertions, 418 deletions
diff --git a/src/comp/syntax/parse/eval.rs b/src/comp/syntax/parse/eval.rs
index ee7c5a3d338..6535ecfe586 100644
--- a/src/comp/syntax/parse/eval.rs
+++ b/src/comp/syntax/parse/eval.rs
@@ -19,15 +19,14 @@ tag eval_mode { mode_depend; mode_parse; }
 type ctx =
     @{p: parser,
       mode: eval_mode,
-      mutable deps: [istr],
+      mutable deps: [str],
       sess: parser::parse_sess,
       mutable chpos: uint,
       mutable byte_pos: uint,
       cfg: ast::crate_cfg};
 
 fn eval_crate_directives(cx: ctx, cdirs: &[@ast::crate_directive],
-                         prefix: &istr,
-                         view_items: &mutable [@ast::view_item],
+                         prefix: &str, view_items: &mutable [@ast::view_item],
                          items: &mutable [@ast::item]) {
     for sub_cdir: @ast::crate_directive in cdirs {
         eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
@@ -35,34 +34,27 @@ fn eval_crate_directives(cx: ctx, cdirs: &[@ast::crate_directive],
 }
 
 fn eval_crate_directives_to_mod(cx: ctx, cdirs: &[@ast::crate_directive],
-                                prefix: &istr) -> ast::_mod {
+                                prefix: &str) -> ast::_mod {
     let view_items: [@ast::view_item] = [];
     let items: [@ast::item] = [];
     eval_crate_directives(cx, cdirs, prefix, view_items, items);
     ret {view_items: view_items, items: items};
 }
 
-fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &istr,
+fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &str,
                         view_items: &mutable [@ast::view_item],
                         items: &mutable [@ast::item]) {
     alt cdir.node {
       ast::cdir_src_mod(id, file_opt, attrs) {
-        let file_path = id + ~".rs";
-        alt file_opt {
-          some(f) {
-            file_path = f;
-          }
-          none. { }
-        }
-        let full_path = if std::fs::path_is_absolute(file_path) {
-            file_path
-        } else {
-            prefix + std::fs::path_sep() + file_path
-        };
+        let file_path = id + ".rs";
+        alt file_opt { some(f) { file_path = f; } none. { } }
+        let full_path =
+            if std::fs::path_is_absolute(file_path) {
+                file_path
+            } else { prefix + std::fs::path_sep() + file_path };
         if cx.mode == mode_depend { cx.deps += [full_path]; ret; }
         let p0 =
-            new_parser_from_file(cx.sess, cx.cfg,
-                                 full_path, cx.chpos,
+            new_parser_from_file(cx.sess, cx.cfg, full_path, cx.chpos,
                                  cx.byte_pos, SOURCE_FILE);
         let inner_attrs = parse_inner_attrs_and_next(p0);
         let mod_attrs = attrs + inner_attrs.inner;
@@ -79,18 +71,11 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &istr,
       }
       ast::cdir_dir_mod(id, dir_opt, cdirs, attrs) {
         let path = id;
-        alt dir_opt {
-          some(d) {
-            path = d;
-          }
-          none. { }
-        }
+        alt dir_opt { some(d) { path = d; } none. { } }
         let full_path =
             if std::fs::path_is_absolute(path) {
                 path
-            } else {
-            prefix + std::fs::path_sep() + path
-        };
+            } else { prefix + std::fs::path_sep() + path };
         let m0 = eval_crate_directives_to_mod(cx, cdirs, full_path);
         let i =
             @{ident: id,
diff --git a/src/comp/syntax/parse/lexer.rs b/src/comp/syntax/parse/lexer.rs
index e479377f3d7..f0d5bfeb729 100644
--- a/src/comp/syntax/parse/lexer.rs
+++ b/src/comp/syntax/parse/lexer.rs
@@ -19,29 +19,29 @@ type reader =
         fn next() -> char;
         fn init();
         fn bump();
-        fn get_str_from(uint) -> istr;
-        fn get_interner() -> @interner::interner<istr>;
+        fn get_str_from(uint) -> str;
+        fn get_interner() -> @interner::interner<str>;
         fn get_chpos() -> uint;
         fn get_byte_pos() -> uint;
         fn get_col() -> uint;
         fn get_filemap() -> codemap::filemap;
-        fn err(&istr);
+        fn err(&str);
     };
 
-fn new_reader(cm: &codemap::codemap, src: &istr, filemap: codemap::filemap,
-              itr: @interner::interner<istr>) -> reader {
+fn new_reader(cm: &codemap::codemap, src: &str, filemap: codemap::filemap,
+              itr: @interner::interner<str>) -> reader {
     obj reader(cm: codemap::codemap,
-               src: istr,
+               src: str,
                len: uint,
                mutable col: uint,
                mutable pos: uint,
                mutable ch: char,
                mutable chpos: uint,
-               mutable strs: [istr],
+               mutable strs: [str],
                fm: codemap::filemap,
-               itr: @interner::interner<istr>) {
+               itr: @interner::interner<str>) {
         fn is_eof() -> bool { ret ch == -1 as char; }
-        fn get_str_from(start: uint) -> istr {
+        fn get_str_from(start: uint) -> str {
             // I'm pretty skeptical about this subtraction. What if there's a
             // multi-byte character before the mark?
             ret str::slice(src, start - 1u, pos - 1u);
@@ -74,16 +74,14 @@ fn new_reader(cm: &codemap::codemap, src: &istr, filemap: codemap::filemap,
                 ch = next.ch;
             } else { ch = -1 as char; }
         }
-        fn get_interner() -> @interner::interner<istr> { ret itr; }
+        fn get_interner() -> @interner::interner<str> { ret itr; }
         fn get_col() -> uint { ret col; }
         fn get_filemap() -> codemap::filemap { ret fm; }
-        fn err(m: &istr) {
-            codemap::emit_error(
-                some(ast_util::mk_sp(chpos, chpos)),
-                m, cm);
+        fn err(m: &str) {
+            codemap::emit_error(some(ast_util::mk_sp(chpos, chpos)), m, cm);
         }
     }
-    let strs: [istr] = [];
+    let strs: [str] = [];
     let rd =
         reader(cm, src, str::byte_len(src), 0u, 0u, -1 as char,
                filemap.start_pos.ch, strs, filemap, itr);
@@ -148,9 +146,7 @@ fn consume_any_line_comment(rdr: &reader) {
 fn consume_block_comment(rdr: &reader) {
     let level: int = 1;
     while level > 0 {
-        if rdr.is_eof() {
-            rdr.err(~"unterminated block comment"); fail;
-        }
+        if rdr.is_eof() { rdr.err("unterminated block comment"); fail; }
         if rdr.curr() == '/' && rdr.next() == '*' {
             rdr.bump();
             rdr.bump();
@@ -168,15 +164,15 @@ fn consume_block_comment(rdr: &reader) {
     be consume_whitespace_and_comments(rdr);
 }
 
-fn digits_to_string(s: &istr) -> int {
+fn digits_to_string(s: &str) -> int {
     let accum_int: int = 0;
     for c: u8 in s { accum_int *= 10; accum_int += dec_digit_val(c as char); }
     ret accum_int;
 }
 
-fn scan_exponent(rdr: &reader) -> option::t<istr> {
+fn scan_exponent(rdr: &reader) -> option::t<str> {
     let c = rdr.curr();
-    let rslt = ~"";
+    let rslt = "";
     if c == 'e' || c == 'E' {
         rslt += str::unsafe_from_bytes([c as u8]);
         rdr.bump();
@@ -188,13 +184,13 @@ fn scan_exponent(rdr: &reader) -> option::t<istr> {
         let exponent = scan_dec_digits(rdr);
         if str::byte_len(exponent) > 0u {
             ret some(rslt + exponent);
-        } else { rdr.err(~"scan_exponent: bad fp literal"); fail; }
-    } else { ret none::<istr>; }
+        } else { rdr.err("scan_exponent: bad fp literal"); fail; }
+    } else { ret none::<str>; }
 }
 
-fn scan_dec_digits(rdr: &reader) -> istr {
+fn scan_dec_digits(rdr: &reader) -> str {
     let c = rdr.curr();
-    let rslt: istr = ~"";
+    let rslt: str = "";
     while is_dec_digit(c) || c == '_' {
         if c != '_' { rslt += str::unsafe_from_bytes([c as u8]); }
         rdr.bump();
@@ -205,7 +201,7 @@ fn scan_dec_digits(rdr: &reader) -> istr {
 
 fn scan_number(c: char, rdr: &reader) -> token::token {
     let accum_int = 0;
-    let dec_str: istr = ~"";
+    let dec_str: str = "";
     let is_dec_integer: bool = false;
     let n = rdr.next();
     if c == '0' && n == 'x' {
@@ -276,7 +272,7 @@ fn scan_number(c: char, rdr: &reader) -> token::token {
 
         rdr.bump();
         let dec_part = scan_dec_digits(rdr);
-        let float_str = dec_str + ~"." + dec_part;
+        let float_str = dec_str + "." + dec_part;
         c = rdr.curr();
         let exponent_str = scan_exponent(rdr);
         alt exponent_str { some(s) { float_str += s; } none. { } }
@@ -302,17 +298,15 @@ fn scan_number(c: char, rdr: &reader) -> token::token {
 
             }
         } else {
-            ret token::LIT_FLOAT(interner::intern::<istr>(
-                *rdr.get_interner(),
-                float_str));
+            ret token::LIT_FLOAT(interner::intern::<str>(*rdr.get_interner(),
+                                                         float_str));
         }
     }
     let maybe_exponent = scan_exponent(rdr);
     alt maybe_exponent {
       some(s) {
-        ret token::LIT_FLOAT(interner::intern::<istr>(
-            *rdr.get_interner(),
-            dec_str + s));
+        ret token::LIT_FLOAT(interner::intern::<str>(*rdr.get_interner(),
+                                                     dec_str + s));
       }
       none. { ret token::LIT_INT(accum_int); }
     }
@@ -324,8 +318,7 @@ fn scan_numeric_escape(rdr: &reader, n_hex_digits: uint) -> char {
         let n = rdr.curr();
         rdr.bump();
         if !is_hex_digit(n) {
-            rdr.err(
-                    #fmt["illegal numeric character escape: %d", n as int]);
+            rdr.err(#fmt["illegal numeric character escape: %d", n as int]);
             fail;
         }
         accum_int *= 16;
@@ -344,7 +337,7 @@ fn next_token(rdr: &reader) -> {tok: token::token, chpos: uint, bpos: uint} {
 }
 
 fn next_token_inner(rdr: &reader) -> token::token {
-    let accum_str = ~"";
+    let accum_str = "";
     let c = rdr.curr();
     if is_alpha(c) || c == '_' {
         while is_alnum(c) || c == '_' {
@@ -352,11 +345,10 @@ fn next_token_inner(rdr: &reader) -> token::token {
             rdr.bump();
             c = rdr.curr();
         }
-        if str::eq(accum_str, ~"_") { ret token::UNDERSCORE; }
+        if str::eq(accum_str, "_") { ret token::UNDERSCORE; }
         let is_mod_name = c == ':' && rdr.next() == ':';
-        ret token::IDENT(interner::intern::<istr>(
-            *rdr.get_interner(),
-            accum_str), is_mod_name);
+        ret token::IDENT(interner::intern::<str>(*rdr.get_interner(),
+                                                 accum_str), is_mod_name);
     }
     if is_dec_digit(c) { ret scan_number(c, rdr); }
     fn binop(rdr: &reader, op: token::binop) -> token::token {
@@ -369,6 +361,7 @@ fn next_token_inner(rdr: &reader) -> token::token {
     alt c {
 
 
+
       // One-byte tokens.
       '?' {
         rdr.bump();
@@ -408,6 +401,7 @@ fn next_token_inner(rdr: &reader) -> token::token {
       }
 
 
+
       // Multi-byte tokens.
       '=' {
         rdr.bump();
@@ -468,15 +462,13 @@ fn next_token_inner(rdr: &reader) -> token::token {
               'u' { c2 = scan_numeric_escape(rdr, 4u); }
               'U' { c2 = scan_numeric_escape(rdr, 8u); }
               c2 {
-                rdr.err(
-                    #fmt["unknown character escape: %d",
-                                         c2 as int]);
+                rdr.err(#fmt["unknown character escape: %d", c2 as int]);
                 fail;
               }
             }
         }
         if rdr.curr() != '\'' {
-            rdr.err(~"unterminated character constant");
+            rdr.err("unterminated character constant");
             fail;
         }
         rdr.bump(); // advance curr past token
@@ -509,9 +501,7 @@ fn next_token_inner(rdr: &reader) -> token::token {
                     str::push_char(accum_str, scan_numeric_escape(rdr, 8u));
                   }
                   c2 {
-                    rdr.err(
-                        #fmt["unknown string escape: %d",
-                                             c2 as int]);
+                    rdr.err(#fmt["unknown string escape: %d", c2 as int]);
                     fail;
                   }
                 }
@@ -520,9 +510,8 @@ fn next_token_inner(rdr: &reader) -> token::token {
             }
         }
         rdr.bump();
-        ret token::LIT_STR(interner::intern::<istr>(
-            *rdr.get_interner(),
-            accum_str));
+        ret token::LIT_STR(interner::intern::<str>(*rdr.get_interner(),
+                                                   accum_str));
       }
       '-' {
         if rdr.next() == '>' {
@@ -549,11 +538,7 @@ fn next_token_inner(rdr: &reader) -> token::token {
       '/' { ret binop(rdr, token::SLASH); }
       '^' { ret binop(rdr, token::CARET); }
       '%' { ret binop(rdr, token::PERCENT); }
-      c {
-        rdr.err(
-            #fmt["unkown start of token: %d", c as int]);
-        fail;
-      }
+      c { rdr.err(#fmt["unkown start of token: %d", c as int]); fail; }
     }
 }
 
@@ -564,10 +549,10 @@ tag cmnt_style {
     blank_line; // Just a manual blank line "\n\n", for layout
 }
 
-type cmnt = {style: cmnt_style, lines: [istr], pos: uint};
+type cmnt = {style: cmnt_style, lines: [str], pos: uint};
 
-fn read_to_eol(rdr: &reader) -> istr {
-    let val = ~"";
+fn read_to_eol(rdr: &reader) -> str {
+    let val = "";
     while rdr.curr() != '\n' && !rdr.is_eof() {
         str::push_char(val, rdr.curr());
         rdr.bump();
@@ -576,7 +561,7 @@ fn read_to_eol(rdr: &reader) -> istr {
     ret val;
 }
 
-fn read_one_line_comment(rdr: &reader) -> istr {
+fn read_one_line_comment(rdr: &reader) -> str {
     let val = read_to_eol(rdr);
     assert (val[0] == '/' as u8 && val[1] == '/' as u8);
     ret val;
@@ -594,7 +579,7 @@ fn consume_non_eol_whitespace(rdr: &reader) {
 
 fn push_blank_line_comment(rdr: &reader, comments: &mutable [cmnt]) {
     log ">>> blank-line comment";
-    let v: [istr] = [];
+    let v: [str] = [];
     comments += [{style: blank_line, lines: v, pos: rdr.get_chpos()}];
 }
 
@@ -611,7 +596,7 @@ fn consume_whitespace_counting_blank_lines(rdr: &reader,
 fn read_line_comments(rdr: &reader, code_to_the_left: bool) -> cmnt {
     log ">>> line comments";
     let p = rdr.get_chpos();
-    let lines: [istr] = [];
+    let lines: [str] = [];
     while rdr.curr() == '/' && rdr.next() == '/' {
         let line = read_one_line_comment(rdr);
         log line;
@@ -624,52 +609,52 @@ fn read_line_comments(rdr: &reader, code_to_the_left: bool) -> cmnt {
          pos: p};
 }
 
-fn all_whitespace(s: &istr, begin: uint, end: uint) -> bool {
+fn all_whitespace(s: &str, begin: uint, end: uint) -> bool {
     let i: uint = begin;
     while i != end { if !is_whitespace(s[i] as char) { ret false; } i += 1u; }
     ret true;
 }
 
-fn trim_whitespace_prefix_and_push_line(lines: &mutable [istr], s: &istr,
+fn trim_whitespace_prefix_and_push_line(lines: &mutable [str], s: &str,
                                         col: uint) {
     let s1;
     if all_whitespace(s, 0u, col) {
         if col < str::byte_len(s) {
             s1 = str::slice(s, col, str::byte_len(s));
-        } else { s1 = ~""; }
+        } else { s1 = ""; }
     } else { s1 = s; }
-    log ~"pushing line: " + s1;
+    log "pushing line: " + s1;
     lines += [s1];
 }
 
 fn read_block_comment(rdr: &reader, code_to_the_left: bool) -> cmnt {
     log ">>> block comment";
     let p = rdr.get_chpos();
-    let lines: [istr] = [];
+    let lines: [str] = [];
     let col: uint = rdr.get_col();
     rdr.bump();
     rdr.bump();
-    let curr_line = ~"/*";
+    let curr_line = "/*";
     let level: int = 1;
     while level > 0 {
         log #fmt["=== block comment level %d", level];
-        if rdr.is_eof() { rdr.err(~"unterminated block comment"); fail; }
+        if rdr.is_eof() { rdr.err("unterminated block comment"); fail; }
         if rdr.curr() == '\n' {
             trim_whitespace_prefix_and_push_line(lines, curr_line, col);
-            curr_line = ~"";
+            curr_line = "";
             rdr.bump();
         } else {
             str::push_char(curr_line, rdr.curr());
             if rdr.curr() == '/' && rdr.next() == '*' {
                 rdr.bump();
                 rdr.bump();
-                curr_line += ~"*";
+                curr_line += "*";
                 level += 1;
             } else {
                 if rdr.curr() == '*' && rdr.next() == '/' {
                     rdr.bump();
                     rdr.bump();
-                    curr_line += ~"/";
+                    curr_line += "/";
                     level -= 1;
                 } else { rdr.bump(); }
             }
@@ -717,16 +702,14 @@ fn is_lit(t: &token::token) -> bool {
         }
 }
 
-type lit = {lit: istr, pos: uint};
+type lit = {lit: str, pos: uint};
 
-fn gather_comments_and_literals(cm: &codemap::codemap, path: &istr,
+fn gather_comments_and_literals(cm: &codemap::codemap, path: &str,
                                 srdr: io::reader) ->
    {cmnts: [cmnt], lits: [lit]} {
     let src = str::unsafe_from_bytes(srdr.read_whole_stream());
-    let itr = @interner::mk::<istr>(str::hash, str::eq);
-    let rdr = new_reader(cm, src,
-                         codemap::new_filemap(
-                             path, 0u, 0u), itr);
+    let itr = @interner::mk::<str>(str::hash, str::eq);
+    let rdr = new_reader(cm, src, codemap::new_filemap(path, 0u, 0u), itr);
     let comments: [cmnt] = [];
     let literals: [lit] = [];
     let first_read: bool = true;
@@ -748,7 +731,7 @@ fn gather_comments_and_literals(cm: &codemap::codemap, path: &istr,
         if is_lit(tok.tok) {
             literals += [{lit: rdr.get_str_from(tok.bpos), pos: tok.chpos}];
         }
-        log ~"tok: " + token::to_str(rdr, tok.tok);
+        log "tok: " + token::to_str(rdr, tok.tok);
         first_read = false;
     }
     ret {cmnts: comments, lits: literals};
diff --git a/src/comp/syntax/parse/parser.rs b/src/comp/syntax/parse/parser.rs
index 7be1d78d730..6c40115ecda 100644
--- a/src/comp/syntax/parse/parser.rs
+++ b/src/comp/syntax/parse/parser.rs
@@ -37,8 +37,8 @@ type parser =
         fn bump();
         fn swap(token::token, uint, uint);
         fn look_ahead(uint) -> token::token;
-        fn fatal(&istr) -> ! ;
-        fn warn(&istr);
+        fn fatal(&str) -> ! ;
+        fn warn(&str);
         fn restrict(restriction);
         fn get_restriction() -> restriction;
         fn get_file_type() -> file_type;
@@ -49,22 +49,21 @@ type parser =
         fn get_last_lo_pos() -> uint;
         fn get_last_hi_pos() -> uint;
         fn get_prec_table() -> @[op_spec];
-        fn get_str(token::str_num) -> istr;
+        fn get_str(token::str_num) -> str;
         fn get_reader() -> lexer::reader;
         fn get_filemap() -> codemap::filemap;
-        fn get_bad_expr_words() -> hashmap<istr, ()>;
+        fn get_bad_expr_words() -> hashmap<str, ()>;
         fn get_chpos() -> uint;
         fn get_byte_pos() -> uint;
         fn get_id() -> node_id;
         fn get_sess() -> parse_sess;
     };
 
-fn new_parser_from_file(sess: parse_sess, cfg: &ast::crate_cfg, path: &istr,
+fn new_parser_from_file(sess: parse_sess, cfg: &ast::crate_cfg, path: &str,
                         chpos: uint, byte_pos: uint, ftype: file_type) ->
    parser {
     let src = io::read_whole_file_str(path);
-    let filemap = codemap::new_filemap(
-        path, chpos, byte_pos);
+    let filemap = codemap::new_filemap(path, chpos, byte_pos);
     sess.cm.files += [filemap];
     let itr = @interner::mk(str::hash, str::eq);
     let rdr = lexer::new_reader(sess.cm, src, filemap, itr);
@@ -83,7 +82,7 @@ fn new_parser(sess: parse_sess, cfg: &ast::crate_cfg, rdr: lexer::reader,
                      mutable restr: restriction,
                      rdr: lexer::reader,
                      precs: @[op_spec],
-                     bad_words: hashmap<istr, ()>) {
+                     bad_words: hashmap<str, ()>) {
         fn peek() -> token::token { ret tok; }
         fn bump() {
             last_tok_span = tok_span;
@@ -109,14 +108,12 @@ fn new_parser(sess: parse_sess, cfg: &ast::crate_cfg, rdr: lexer::reader,
             }
             ret buffer[distance - 1u].tok;
         }
-        fn fatal(m: &istr) -> ! {
-            codemap::emit_error(some(self.get_span()),
-                                m, sess.cm);
+        fn fatal(m: &str) -> ! {
+            codemap::emit_error(some(self.get_span()), m, sess.cm);
             fail;
         }
-        fn warn(m: &istr) {
-            codemap::emit_warning(some(self.get_span()),
-                                  m, sess.cm);
+        fn warn(m: &str) {
+            codemap::emit_warning(some(self.get_span()), m, sess.cm);
         }
         fn restrict(r: restriction) { restr = r; }
         fn get_restriction() -> restriction { ret restr; }
@@ -128,12 +125,12 @@ fn new_parser(sess: parse_sess, cfg: &ast::crate_cfg, rdr: lexer::reader,
         fn get_file_type() -> file_type { ret ftype; }
         fn get_cfg() -> ast::crate_cfg { ret cfg; }
         fn get_prec_table() -> @[op_spec] { ret precs; }
-        fn get_str(i: token::str_num) -> istr {
+        fn get_str(i: token::str_num) -> str {
             ret interner::get(*rdr.get_interner(), i);
         }
         fn get_reader() -> lexer::reader { ret rdr; }
         fn get_filemap() -> codemap::filemap { ret rdr.get_filemap(); }
-        fn get_bad_expr_words() -> hashmap<istr, ()> { ret bad_words; }
+        fn get_bad_expr_words() -> hashmap<str, ()> { ret bad_words; }
         fn get_chpos() -> uint { ret rdr.get_chpos(); }
         fn get_byte_pos() -> uint { ret rdr.get_byte_pos(); }
         fn get_id() -> node_id { ret next_node_id(sess); }
@@ -148,48 +145,48 @@ fn new_parser(sess: parse_sess, cfg: &ast::crate_cfg, rdr: lexer::reader,
 // These are the words that shouldn't be allowed as value identifiers,
 // because, if used at the start of a line, they will cause the line to be
 // interpreted as a specific kind of statement, which would be confusing.
-fn bad_expr_word_table() -> hashmap<istr, ()> {
+fn bad_expr_word_table() -> hashmap<str, ()> {
     let words = new_str_hash();
-    words.insert(~"mod", ());
-    words.insert(~"if", ());
-    words.insert(~"else", ());
-    words.insert(~"while", ());
-    words.insert(~"do", ());
-    words.insert(~"alt", ());
-    words.insert(~"for", ());
-    words.insert(~"each", ());
-    words.insert(~"break", ());
-    words.insert(~"cont", ());
-    words.insert(~"put", ());
-    words.insert(~"ret", ());
-    words.insert(~"be", ());
-    words.insert(~"fail", ());
-    words.insert(~"type", ());
-    words.insert(~"resource", ());
-    words.insert(~"check", ());
-    words.insert(~"assert", ());
-    words.insert(~"claim", ());
-    words.insert(~"prove", ());
-    words.insert(~"native", ());
-    words.insert(~"fn", ());
-    words.insert(~"lambda", ());
-    words.insert(~"pure", ());
-    words.insert(~"iter", ());
-    words.insert(~"block", ());
-    words.insert(~"import", ());
-    words.insert(~"export", ());
-    words.insert(~"let", ());
-    words.insert(~"const", ());
-    words.insert(~"log", ());
-    words.insert(~"log_err", ());
-    words.insert(~"tag", ());
-    words.insert(~"obj", ());
-    words.insert(~"copy", ());
+    words.insert("mod", ());
+    words.insert("if", ());
+    words.insert("else", ());
+    words.insert("while", ());
+    words.insert("do", ());
+    words.insert("alt", ());
+    words.insert("for", ());
+    words.insert("each", ());
+    words.insert("break", ());
+    words.insert("cont", ());
+    words.insert("put", ());
+    words.insert("ret", ());
+    words.insert("be", ());
+    words.insert("fail", ());
+    words.insert("type", ());
+    words.insert("resource", ());
+    words.insert("check", ());
+    words.insert("assert", ());
+    words.insert("claim", ());
+    words.insert("prove", ());
+    words.insert("native", ());
+    words.insert("fn", ());
+    words.insert("lambda", ());
+    words.insert("pure", ());
+    words.insert("iter", ());
+    words.insert("block", ());
+    words.insert("import", ());
+    words.insert("export", ());
+    words.insert("let", ());
+    words.insert("const", ());
+    words.insert("log", ());
+    words.insert("log_err", ());
+    words.insert("tag", ());
+    words.insert("obj", ());
+    words.insert("copy", ());
     ret words;
 }
 
 fn unexpected(p: &parser, t: token::token) -> ! {
-    let s: istr = ~"unexpected token: ";
+    let s: str = "unexpected token: ";
     s += token::to_str(p.get_reader(), t);
     p.fatal(s);
 }
@@ -198,9 +195,9 @@ fn expect(p: &parser, t: token::token) {
     if p.peek() == t {
         p.bump();
     } else {
-        let s: istr = ~"expecting ";
+        let s: str = "expecting ";
         s += token::to_str(p.get_reader(), t);
-        s += ~", found ";
+        s += ", found ";
         s += token::to_str(p.get_reader(), p.peek());
         p.fatal(s);
     }
@@ -214,9 +211,9 @@ fn expect_gt(p: &parser) {
     } else if p.peek() == token::BINOP(token::ASR) {
         p.swap(token::BINOP(token::LSR), p.get_lo_pos() + 1u, p.get_hi_pos());
     } else {
-        let s: istr = ~"expecting ";
+        let s: str = "expecting ";
         s += token::to_str(p.get_reader(), token::GT);
-        s += ~", found ";
+        s += ", found ";
         s += token::to_str(p.get_reader(), p.peek());
         p.fatal(s);
     }
@@ -228,11 +225,8 @@ fn spanned<@T>(lo: uint, hi: uint, node: &T) -> spanned<T> {
 
 fn parse_ident(p: &parser) -> ast::ident {
     alt p.peek() {
-      token::IDENT(i, _) {
-        p.bump();
-        ret p.get_str(i);
-      }
-      _ { p.fatal(~"expecting ident"); }
+      token::IDENT(i, _) { p.bump(); ret p.get_str(i); }
+      _ { p.fatal("expecting ident"); }
     }
 }
 
@@ -245,14 +239,14 @@ fn eat(p: &parser, tok: &token::token) -> bool {
     ret if p.peek() == tok { p.bump(); true } else { false };
 }
 
-fn is_word(p: &parser, word: &istr) -> bool {
+fn is_word(p: &parser, word: &str) -> bool {
     ret alt p.peek() {
           token::IDENT(sid, false) { str::eq(word, p.get_str(sid)) }
           _ { false }
         };
 }
 
-fn eat_word(p: &parser, word: &istr) -> bool {
+fn eat_word(p: &parser, word: &str) -> bool {
     alt p.peek() {
       token::IDENT(sid, false) {
         if str::eq(word, p.get_str(sid)) {
@@ -264,10 +258,10 @@ fn eat_word(p: &parser, word: &istr) -> bool {
     }
 }
 
-fn expect_word(p: &parser, word: &istr) {
+fn expect_word(p: &parser, word: &str) {
     if !eat_word(p, word) {
-        p.fatal(~"expecting " + word + ~", found " +
-                token::to_str(p.get_reader(), p.peek()));
+        p.fatal("expecting " + word + ", found " +
+                    token::to_str(p.get_reader(), p.peek()));
     }
 }
 
@@ -276,7 +270,7 @@ fn check_bad_word(p: &parser) {
       token::IDENT(sid, false) {
         let w = p.get_str(sid);
         if p.get_bad_expr_words().contains_key(w) {
-            p.fatal(~"found " + w + ~" in expression position");
+            p.fatal("found " + w + " in expression position");
         }
       }
       _ { }
@@ -294,7 +288,7 @@ fn parse_ty_fn(proto: ast::proto, p: &parser) -> ast::ty_ {
         let mode = ast::val;
         if p.peek() == token::BINOP(token::AND) {
             p.bump();
-            mode = ast::alias(eat_word(p, ~"mutable"));
+            mode = ast::alias(eat_word(p, "mutable"));
         }
         let t = parse_ty(p, false);
         ret spanned(lo, t.span.hi, {mode: mode, ty: t});
@@ -323,11 +317,11 @@ fn parse_ty_fn(proto: ast::proto, p: &parser) -> ast::ty_ {
 }
 
 fn parse_proto(p: &parser) -> ast::proto {
-    if eat_word(p, ~"iter") {
+    if eat_word(p, "iter") {
         ret ast::proto_iter;
-    } else if eat_word(p, ~"fn") {
+    } else if eat_word(p, "fn") {
         ret ast::proto_fn;
-    } else if eat_word(p, ~"block") {
+    } else if eat_word(p, "block") {
         ret ast::proto_block;
     } else { unexpected(p, p.peek()); }
 }
@@ -377,8 +371,7 @@ fn parse_ty_field(p: &parser) -> ast::ty_field {
 fn ident_index(p: &parser, args: &[ast::arg], i: &ast::ident) -> uint {
     let j = 0u;
     for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; }
-    p.fatal(~"Unbound variable " +
-            i + ~" in constraint arg");
+    p.fatal("Unbound variable " + i + " in constraint arg");
 }
 
 fn parse_type_constr_arg(p: &parser) -> @ast::ty_constr_arg {
@@ -467,7 +460,7 @@ fn parse_ty_postfix(orig_t: ast::ty_, p: &parser, colons_before_params: bool)
                                            idents: pth.node.idents,
                                            types: seq}), ann));
       }
-      _ { p.fatal(~"type parameter instantiation only allowed for paths"); }
+      _ { p.fatal("type parameter instantiation only allowed for paths"); }
     }
 }
 
@@ -484,43 +477,43 @@ fn parse_ty(p: &parser, colons_before_params: bool) -> @ast::ty {
     let t: ast::ty_;
     // FIXME: do something with this
 
-    if eat_word(p, ~"bool") {
+    if eat_word(p, "bool") {
         t = ast::ty_bool;
-    } else if eat_word(p, ~"int") {
+    } else if eat_word(p, "int") {
         t = ast::ty_int;
-    } else if eat_word(p, ~"uint") {
+    } else if eat_word(p, "uint") {
         t = ast::ty_uint;
-    } else if eat_word(p, ~"float") {
+    } else if eat_word(p, "float") {
         t = ast::ty_float;
-    } else if eat_word(p, ~"str") {
+    } else if eat_word(p, "str") {
         t = ast::ty_istr;
-    } else if eat_word(p, ~"istr") {
+    } else if eat_word(p, "istr") {
         t = ast::ty_istr;
-    } else if eat_word(p, ~"char") {
+    } else if eat_word(p, "char") {
         t = ast::ty_char;
         /*
             } else if (eat_word(p, "task")) {
                 t = ast::ty_task;
         */
-    } else if eat_word(p, ~"i8") {
+    } else if eat_word(p, "i8") {
         t = ast::ty_machine(ast::ty_i8);
-    } else if eat_word(p, ~"i16") {
+    } else if eat_word(p, "i16") {
         t = ast::ty_machine(ast::ty_i16);
-    } else if eat_word(p, ~"i32") {
+    } else if eat_word(p, "i32") {
         t = ast::ty_machine(ast::ty_i32);
-    } else if eat_word(p, ~"i64") {
+    } else if eat_word(p, "i64") {
         t = ast::ty_machine(ast::ty_i64);
-    } else if eat_word(p, ~"u8") {
+    } else if eat_word(p, "u8") {
         t = ast::ty_machine(ast::ty_u8);
-    } else if eat_word(p, ~"u16") {
+    } else if eat_word(p, "u16") {
         t = ast::ty_machine(ast::ty_u16);
-    } else if eat_word(p, ~"u32") {
+    } else if eat_word(p, "u32") {
         t = ast::ty_machine(ast::ty_u32);
-    } else if eat_word(p, ~"u64") {
+    } else if eat_word(p, "u64") {
         t = ast::ty_machine(ast::ty_u64);
-    } else if eat_word(p, ~"f32") {
+    } else if eat_word(p, "f32") {
         t = ast::ty_machine(ast::ty_f32);
-    } else if eat_word(p, ~"f64") {
+    } else if eat_word(p, "f64") {
         t = ast::ty_machine(ast::ty_f64);
     } else if p.peek() == token::LPAREN {
         p.bump();
@@ -567,19 +560,19 @@ fn parse_ty(p: &parser, colons_before_params: bool) -> @ast::ty {
         t = ast::ty_vec(parse_mt(p));
         hi = p.get_hi_pos();
         expect(p, token::RBRACKET);
-    } else if eat_word(p, ~"fn") {
+    } else if eat_word(p, "fn") {
         t = parse_ty_fn(ast::proto_fn, p);
         alt t { ast::ty_fn(_, _, out, _, _) { hi = out.span.hi; } }
-    } else if eat_word(p, ~"block") {
+    } else if eat_word(p, "block") {
         t = parse_ty_fn(ast::proto_block, p);
         alt t { ast::ty_fn(_, _, out, _, _) { hi = out.span.hi; } }
-    } else if eat_word(p, ~"iter") {
+    } else if eat_word(p, "iter") {
         t = parse_ty_fn(ast::proto_iter, p);
         alt t { ast::ty_fn(_, _, out, _, _) { hi = out.span.hi; } }
-    } else if eat_word(p, ~"obj") {
+    } else if eat_word(p, "obj") {
         t = parse_ty_obj(p, hi);
-    } else if eat_word(p, ~"mutable") {
-        p.warn(~"ignoring deprecated 'mutable' type constructor");
+    } else if eat_word(p, "mutable") {
+        p.warn("ignoring deprecated 'mutable' type constructor");
         let typ = parse_ty(p, false);
         t = typ.node;
         hi = typ.span.hi;
@@ -587,13 +580,13 @@ fn parse_ty(p: &parser, colons_before_params: bool) -> @ast::ty {
         let path = parse_path(p);
         t = ast::ty_path(path, p.get_id());
         hi = path.span.hi;
-    } else { p.fatal(~"expecting type"); }
+    } else { p.fatal("expecting type"); }
     ret parse_ty_postfix(t, p, colons_before_params);
 }
 
 fn parse_arg_mode(p: &parser) -> ast::mode {
     if eat(p, token::BINOP(token::AND)) {
-        ast::alias(eat_word(p, ~"mutable"))
+        ast::alias(eat_word(p, "mutable"))
     } else if eat(p, token::BINOP(token::MINUS)) {
         ast::move
     } else { ast::val }
@@ -685,9 +678,9 @@ fn parse_seq<T>(bra: token::token, ket: token::token,
 fn parse_lit(p: &parser) -> ast::lit {
     let sp = p.get_span();
     let lit: ast::lit_ = ast::lit_nil;
-    if eat_word(p, ~"true") {
+    if eat_word(p, "true") {
         lit = ast::lit_bool(true);
-    } else if eat_word(p, ~"false") {
+    } else if eat_word(p, "false") {
         lit = ast::lit_bool(false);
     } else {
         alt p.peek() {
@@ -706,10 +699,7 @@ fn parse_lit(p: &parser) -> ast::lit {
             lit = ast::lit_mach_float(tm, p.get_str(s));
           }
           token::LIT_CHAR(c) { p.bump(); lit = ast::lit_char(c); }
-          token::LIT_STR(s) {
-            p.bump();
-            lit = ast::lit_str(p.get_str(s));
-          }
+          token::LIT_STR(s) { p.bump(); lit = ast::lit_str(p.get_str(s)); }
           token::LPAREN. {
             p.bump();
             expect(p, token::RPAREN);
@@ -777,7 +767,7 @@ fn parse_path_and_ty_param_substs(p: &parser) -> ast::path {
 }
 
 fn parse_mutability(p: &parser) -> ast::mutability {
-    if eat_word(p, ~"mutable") {
+    if eat_word(p, "mutable") {
         if p.peek() == token::QUES { p.bump(); ret ast::maybe_mut; }
         ret ast::mut;
     }
@@ -825,12 +815,12 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
         } else { ret mk_expr(p, lo, hi, ast::expr_tup(es)); }
     } else if p.peek() == token::LBRACE {
         p.bump();
-        if is_word(p, ~"mutable") ||
+        if is_word(p, "mutable") ||
                is_plain_ident(p) && p.look_ahead(1u) == token::COLON {
             let fields = [parse_field(p, token::COLON)];
             let base = none;
             while p.peek() != token::RBRACE {
-                if eat_word(p, ~"with") { base = some(parse_expr(p)); break; }
+                if eat_word(p, "with") { base = some(parse_expr(p)); break; }
                 expect(p, token::COMMA);
                 fields += [parse_field(p, token::COLON)];
             }
@@ -843,27 +833,27 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
             let blk = parse_block_tail(p, lo, ast::checked);
             ret mk_expr(p, blk.span.lo, blk.span.hi, ast::expr_block(blk));
         }
-    } else if eat_word(p, ~"if") {
+    } else if eat_word(p, "if") {
         ret parse_if_expr(p);
-    } else if eat_word(p, ~"for") {
+    } else if eat_word(p, "for") {
         ret parse_for_expr(p);
-    } else if eat_word(p, ~"while") {
+    } else if eat_word(p, "while") {
         ret parse_while_expr(p);
-    } else if eat_word(p, ~"do") {
+    } else if eat_word(p, "do") {
         ret parse_do_while_expr(p);
-    } else if eat_word(p, ~"alt") {
+    } else if eat_word(p, "alt") {
         ret parse_alt_expr(p);
         /*
             } else if (eat_word(p, "spawn")) {
                 ret parse_spawn_expr(p);
         */
-    } else if eat_word(p, ~"fn") {
+    } else if eat_word(p, "fn") {
         ret parse_fn_expr(p, ast::proto_fn);
-    } else if eat_word(p, ~"block") {
+    } else if eat_word(p, "block") {
         ret parse_fn_expr(p, ast::proto_block);
-    } else if eat_word(p, ~"lambda") {
+    } else if eat_word(p, "lambda") {
         ret parse_fn_expr(p, ast::proto_closure);
-    } else if eat_word(p, ~"unchecked") {
+    } else if eat_word(p, "unchecked") {
         expect(p, token::LBRACE);
         let blk = parse_block_tail(p, lo, ast::unchecked);
         ret mk_expr(p, blk.span.lo, blk.span.hi, ast::expr_block(blk));
@@ -894,14 +884,12 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
           token::LIT_STR(s) {
             let sp = p.get_span();
             p.bump();
-            let lit =
-                @{node: ast::lit_str(p.get_str(s)),
-                  span: sp};
+            let lit = @{node: ast::lit_str(p.get_str(s)), span: sp};
             ex = ast::expr_lit(lit);
           }
           _ { ex = ast::expr_uniq(parse_expr(p)); }
         }
-    } else if eat_word(p, ~"obj") {
+    } else if eat_word(p, "obj") {
         // Anonymous object
 
         // Only make people type () if they're actually adding new fields
@@ -916,7 +904,7 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
         let inner_obj: option::t<@ast::expr> = none;
         expect(p, token::LBRACE);
         while p.peek() != token::RBRACE {
-            if eat_word(p, ~"with") {
+            if eat_word(p, "with") {
                 inner_obj = some(parse_expr(p));
             } else { meths += [parse_method(p)]; }
         }
@@ -930,7 +918,7 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
         // "spanned".
         let ob = {fields: fields, methods: meths, inner_obj: inner_obj};
         ex = ast::expr_anon_obj(ob);
-    } else if eat_word(p, ~"bind") {
+    } else if eat_word(p, "bind") {
         let e = parse_expr_res(p, RESTRICT_NO_CALL_EXPRS);
         fn parse_expr_opt(p: &parser) -> option::t<@ast::expr> {
             alt p.peek() {
@@ -947,25 +935,25 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
         let ex_ext = parse_syntax_ext(p);
         hi = ex_ext.span.hi;
         ex = ex_ext.node;
-    } else if eat_word(p, ~"fail") {
+    } else if eat_word(p, "fail") {
         if can_begin_expr(p.peek()) {
             let e = parse_expr(p);
             hi = e.span.hi;
             ex = ast::expr_fail(some(e));
         } else { ex = ast::expr_fail(none); }
-    } else if eat_word(p, ~"log") {
+    } else if eat_word(p, "log") {
         let e = parse_expr(p);
         ex = ast::expr_log(1, e);
         hi = e.span.hi;
-    } else if eat_word(p, ~"log_err") {
+    } else if eat_word(p, "log_err") {
         let e = parse_expr(p);
         ex = ast::expr_log(0, e);
         hi = e.span.hi;
-    } else if eat_word(p, ~"assert") {
+    } else if eat_word(p, "assert") {
         let e = parse_expr(p);
         ex = ast::expr_assert(e);
         hi = e.span.hi;
-    } else if eat_word(p, ~"check") {
+    } else if eat_word(p, "check") {
         /* Should be a predicate (pure boolean function) applied to
            arguments that are all either slot variables or literals.
            but the typechecker enforces that. */
@@ -973,7 +961,7 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
         let e = parse_expr(p);
         hi = e.span.hi;
         ex = ast::expr_check(ast::checked, e);
-    } else if eat_word(p, ~"claim") {
+    } else if eat_word(p, "claim") {
         /* Same rules as check, except that if check-claims
          is enabled (a command-line flag), then the parser turns
         claims into check */
@@ -981,19 +969,19 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
         let e = parse_expr(p);
         hi = e.span.hi;
         ex = ast::expr_check(ast::unchecked, e);
-    } else if eat_word(p, ~"ret") {
+    } else if eat_word(p, "ret") {
         if can_begin_expr(p.peek()) {
             let e = parse_expr(p);
             hi = e.span.hi;
             ex = ast::expr_ret(some(e));
         } else { ex = ast::expr_ret(none); }
-    } else if eat_word(p, ~"break") {
+    } else if eat_word(p, "break") {
         ex = ast::expr_break;
         hi = p.get_hi_pos();
-    } else if eat_word(p, ~"cont") {
+    } else if eat_word(p, "cont") {
         ex = ast::expr_cont;
         hi = p.get_hi_pos();
-    } else if eat_word(p, ~"put") {
+    } else if eat_word(p, "put") {
         alt p.peek() {
           token::SEMI. { ex = ast::expr_put(none); }
           _ {
@@ -1002,19 +990,19 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
             ex = ast::expr_put(some(e));
           }
         }
-    } else if eat_word(p, ~"be") {
+    } else if eat_word(p, "be") {
         let e = parse_expr(p);
 
         // FIXME: Is this the right place for this check?
         if /*check*/ast_util::is_call_expr(e) {
             hi = e.span.hi;
             ex = ast::expr_be(e);
-        } else { p.fatal(~"Non-call expression in tail call"); }
-    } else if eat_word(p, ~"copy") {
+        } else { p.fatal("Non-call expression in tail call"); }
+    } else if eat_word(p, "copy") {
         let e = parse_expr(p);
         ex = ast::expr_copy(e);
         hi = e.span.hi;
-    } else if eat_word(p, ~"self") {
+    } else if eat_word(p, "self") {
         expect(p, token::DOT);
         // The rest is a call expression.
         let f: @ast::expr = parse_self_method(p);
@@ -1024,8 +1012,8 @@ fn parse_bottom_expr(p: &parser) -> @ast::expr {
         hi = es.span.hi;
         ex = ast::expr_call(f, es.node);
     } else if p.peek() == token::MOD_SEP ||
-                  is_ident(p.peek()) && !is_word(p, ~"true") &&
-                      !is_word(p, ~"false") {
+                  is_ident(p.peek()) && !is_word(p, "true") &&
+                      !is_word(p, "false") {
         check_bad_word(p);
         let pth = parse_path_and_ty_param_substs(p);
         hi = pth.span.hi;
@@ -1047,7 +1035,7 @@ fn parse_syntax_ext(p: &parser) -> @ast::expr {
 fn parse_syntax_ext_naked(p: &parser, lo: uint) -> @ast::expr {
     let pth = parse_path(p);
     if vec::len(pth.node.idents) == 0u {
-        p.fatal(~"expected a syntax expander name");
+        p.fatal("expected a syntax expander name");
     }
     //temporary for a backwards-compatible cycle:
     let es =
@@ -1105,9 +1093,7 @@ fn parse_dot_or_call_expr_with(p: &parser, e: @ast::expr) -> @ast::expr {
               token::IDENT(i, _) {
                 hi = p.get_hi_pos();
                 p.bump();
-                e = mk_expr(p, lo, hi,
-                            ast::expr_field(
-                                e, p.get_str(i)));
+                e = mk_expr(p, lo, hi, ast::expr_field(e, p.get_str(i)));
               }
               t { unexpected(p, t); }
             }
@@ -1119,8 +1105,8 @@ fn parse_dot_or_call_expr_with(p: &parser, e: @ast::expr) -> @ast::expr {
 }
 
 fn parse_prefix_expr(p: &parser) -> @ast::expr {
-    if eat_word(p, ~"mutable") {
-        p.warn(~"ignoring deprecated 'mutable' prefix operator");
+    if eat_word(p, "mutable") {
+        p.warn("ignoring deprecated 'mutable' prefix operator");
     }
     let lo = p.get_lo_pos();
     let hi = p.get_hi_pos();
@@ -1223,7 +1209,7 @@ fn parse_more_binops(p: &parser, lhs: @ast::expr, min_prec: int) ->
             ret parse_more_binops(p, bin, min_prec);
         }
     }
-    if as_prec > min_prec && eat_word(p, ~"as") {
+    if as_prec > min_prec && eat_word(p, "as") {
         let rhs = parse_ty(p, true);
         let _as =
             mk_expr(p, lhs.span.lo, rhs.span.hi, ast::expr_cast(lhs, rhs));
@@ -1286,7 +1272,7 @@ fn parse_if_expr_1(p: &parser) ->
     let thn = parse_block(p);
     let els: option::t<@ast::expr> = none;
     let hi = thn.span.hi;
-    if eat_word(p, ~"else") {
+    if eat_word(p, "else") {
         let elexpr = parse_else_expr(p);
         els = some(elexpr);
         hi = elexpr.span.hi;
@@ -1295,7 +1281,7 @@ fn parse_if_expr_1(p: &parser) ->
 }
 
 fn parse_if_expr(p: &parser) -> @ast::expr {
-    if eat_word(p, ~"check") {
+    if eat_word(p, "check") {
         let q = parse_if_expr_1(p);
         ret mk_expr(p, q.lo, q.hi, ast::expr_if_check(q.cond, q.then, q.els));
     } else {
@@ -1321,7 +1307,7 @@ fn parse_fn_block_expr(p: &parser) -> @ast::expr {
 }
 
 fn parse_else_expr(p: &parser) -> @ast::expr {
-    if eat_word(p, ~"if") {
+    if eat_word(p, "if") {
         ret parse_if_expr(p);
     } else {
         let blk = parse_block(p);
@@ -1331,9 +1317,9 @@ fn parse_else_expr(p: &parser) -> @ast::expr {
 
 fn parse_for_expr(p: &parser) -> @ast::expr {
     let lo = p.get_last_lo_pos();
-    let is_each = eat_word(p, ~"each");
+    let is_each = eat_word(p, "each");
     let decl = parse_local(p, false);
-    expect_word(p, ~"in");
+    expect_word(p, "in");
     let seq = parse_expr(p);
     let body = parse_block(p);
     let hi = body.span.hi;
@@ -1353,7 +1339,7 @@ fn parse_while_expr(p: &parser) -> @ast::expr {
 fn parse_do_while_expr(p: &parser) -> @ast::expr {
     let lo = p.get_last_lo_pos();
     let body = parse_block(p);
-    expect_word(p, ~"while");
+    expect_word(p, "while");
     let cond = parse_expr(p);
     let hi = cond.span.hi;
     ret mk_expr(p, lo, hi, ast::expr_do_while(body, cond));
@@ -1367,9 +1353,7 @@ fn parse_alt_expr(p: &parser) -> @ast::expr {
     while p.peek() != token::RBRACE {
         let pats = parse_pats(p);
         let guard = none;
-        if eat_word(p, ~"when") {
-            guard = some(parse_expr(p));
-        }
+        if eat_word(p, "when") { guard = some(parse_expr(p)); }
         let blk = parse_block(p);
         arms += [{pats: pats, guard: guard, body: blk}];
     }
@@ -1402,6 +1386,7 @@ fn parse_initializer(p: &parser) -> option::t<ast::initializer> {
       }
 
 
+
       // Now that the the channel is the first argument to receive,
       // combining it with an initializer doesn't really make sense.
       // case (token::RECV) {
@@ -1447,8 +1432,8 @@ fn parse_pat(p: &parser) -> @ast::pat {
             if p.peek() == token::UNDERSCORE {
                 p.bump();
                 if p.peek() != token::RBRACE {
-                    p.fatal(~"expecting }, found " +
-                            token::to_str(p.get_reader(), p.peek()));
+                    p.fatal("expecting }, found " +
+                                token::to_str(p.get_reader(), p.peek()));
                 }
                 etc = true;
                 break;
@@ -1461,8 +1446,7 @@ fn parse_pat(p: &parser) -> @ast::pat {
                 subpat = parse_pat(p);
             } else {
                 if p.get_bad_expr_words().contains_key(fieldname) {
-                    p.fatal(~"found " + fieldname
-                            + ~" in binding position");
+                    p.fatal("found " + fieldname + " in binding position");
                 }
                 subpat =
                     @{id: p.get_id(),
@@ -1501,17 +1485,15 @@ fn parse_pat(p: &parser) -> @ast::pat {
           token::LIT_STR(s) {
             let sp = p.get_span();
             p.bump();
-            let lit =
-                @{node: ast::lit_str(p.get_str(s)),
-                  span: sp};
+            let lit = @{node: ast::lit_str(p.get_str(s)), span: sp};
             hi = lit.span.hi;
             pat = ast::pat_lit(lit);
           }
-          _ { p.fatal(~"expected string literal"); }
+          _ { p.fatal("expected string literal"); }
         }
       }
       tok {
-        if !is_ident(tok) || is_word(p, ~"true") || is_word(p, ~"false") {
+        if !is_ident(tok) || is_word(p, "true") || is_word(p, "false") {
             let lit = parse_lit(p);
             hi = lit.span.hi;
             pat = ast::pat_lit(@lit);
@@ -1580,7 +1562,7 @@ fn parse_crate_stmt(p: &parser) -> @ast::stmt {
 
 fn parse_source_stmt(p: &parser) -> @ast::stmt {
     let lo = p.get_lo_pos();
-    if eat_word(p, ~"let") {
+    if eat_word(p, "let") {
         let decl = parse_let(p);
         ret @spanned(lo, decl.span.hi, ast::stmt_decl(decl, p.get_id()));
     } else {
@@ -1600,7 +1582,7 @@ fn parse_source_stmt(p: &parser) -> @ast::stmt {
         if vec::len(item_attrs) > 0u {
             alt maybe_item {
               some(_) {/* fallthrough */ }
-              _ { ret p.fatal(~"expected item"); }
+              _ { ret p.fatal("expected item"); }
             }
         }
 
@@ -1616,7 +1598,7 @@ fn parse_source_stmt(p: &parser) -> @ast::stmt {
             let e = parse_expr(p);
             ret @spanned(lo, e.span.hi, ast::stmt_expr(e, p.get_id()));
           }
-          _ { p.fatal(~"expected statement"); }
+          _ { p.fatal("expected statement"); }
         }
     }
 }
@@ -1677,6 +1659,7 @@ fn stmt_ends_with_semi(stmt: &ast::stmt) -> bool {
       }
 
 
+
       // We should not be calling this on a cdir.
       ast::stmt_crate_directive(cdir) {
         fail;
@@ -1686,10 +1669,9 @@ fn stmt_ends_with_semi(stmt: &ast::stmt) -> bool {
 
 fn parse_block(p: &parser) -> ast::blk {
     let lo = p.get_lo_pos();
-    if eat_word(p, ~"unchecked") {
+    if eat_word(p, "unchecked") {
         be parse_block_tail(p, lo, ast::unchecked);
-    }
-    else {
+    } else {
         expect(p, token::LBRACE);
         be parse_block_tail(p, lo, ast::checked);
     }
@@ -1716,8 +1698,8 @@ fn parse_block_tail(p: &parser, lo: uint, s: ast::check_mode) -> ast::blk {
                   token::RBRACE. { expr = some(e); }
                   t {
                     if stmt_ends_with_semi(*stmt) {
-                        p.fatal(~"expected ';' or '}' after " +
-                                    ~"expression but found " +
+                        p.fatal("expected ';' or '}' after " +
+                                    "expression but found " +
                                     token::to_str(p.get_reader(), t));
                     }
                     stmts += [stmt];
@@ -1935,7 +1917,7 @@ fn parse_mod_items(p: &parser, term: token::token,
         alt parse_item(p, attrs) {
           some(i) { items += [i]; }
           _ {
-            p.fatal(~"expected item but found " +
+            p.fatal("expected item but found " +
                         token::to_str(p.get_reader(), p.peek()));
           }
         }
@@ -1985,10 +1967,7 @@ fn parse_item_native_fn(p: &parser, attrs: &[ast::attribute]) ->
     let t = parse_fn_header(p);
     let decl = parse_fn_decl(p, ast::impure_fn, ast::il_normal);
     let link_name = none;
-    if p.peek() == token::EQ {
-        p.bump();
-        link_name = some(parse_str(p));
-    }
+    if p.peek() == token::EQ { p.bump(); link_name = some(parse_str(p)); }
     let hi = p.get_hi_pos();
     expect(p, token::SEMI);
     ret @{ident: t.ident,
@@ -2000,15 +1979,14 @@ fn parse_item_native_fn(p: &parser, attrs: &[ast::attribute]) ->
 
 fn parse_native_item(p: &parser, attrs: &[ast::attribute]) ->
    @ast::native_item {
-    if eat_word(p, ~"type") {
+    if eat_word(p, "type") {
         ret parse_item_native_type(p, attrs);
-    } else if eat_word(p, ~"fn") {
+    } else if eat_word(p, "fn") {
         ret parse_item_native_fn(p, attrs);
     } else { unexpected(p, p.peek()); }
 }
 
-fn parse_native_mod_items(p: &parser, native_name: &istr,
-                          abi: ast::native_abi,
+fn parse_native_mod_items(p: &parser, native_name: &str, abi: ast::native_abi,
                           first_item_attrs: &[ast::attribute]) ->
    ast::native_mod {
     // Shouldn't be any view items since we've already parsed an item attr
@@ -2032,20 +2010,20 @@ fn parse_native_mod_items(p: &parser, native_name: &istr,
 fn parse_item_native_mod(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
     let lo = p.get_last_lo_pos();
     let abi = ast::native_abi_cdecl;
-    if !is_word(p, ~"mod") {
+    if !is_word(p, "mod") {
         let t = parse_str(p);
-        if str::eq(t, ~"cdecl") {
-        } else if str::eq(t, ~"rust") {
+        if str::eq(t, "cdecl") {
+        } else if str::eq(t, "rust") {
             abi = ast::native_abi_rust;
-        } else if str::eq(t, ~"llvm") {
+        } else if str::eq(t, "llvm") {
             abi = ast::native_abi_llvm;
-        } else if str::eq(t, ~"rust-intrinsic") {
+        } else if str::eq(t, "rust-intrinsic") {
             abi = ast::native_abi_rust_intrinsic;
-        } else if str::eq(t, ~"x86stdcall") {
+        } else if str::eq(t, "x86stdcall") {
             abi = ast::native_abi_x86stdcall;
-        } else { p.fatal(~"unsupported abi: " + t); }
+        } else { p.fatal("unsupported abi: " + t); }
     }
-    expect_word(p, ~"mod");
+    expect_word(p, "mod");
     let id = parse_ident(p);
     let native_name;
     if p.peek() == token::EQ {
@@ -2087,8 +2065,7 @@ fn parse_item_tag(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
     // Newtype syntax
     if p.peek() == token::EQ {
         if p.get_bad_expr_words().contains_key(id) {
-            p.fatal(~"found " + id
-                    + ~" in tag constructor position");
+            p.fatal("found " + id + " in tag constructor position");
         }
         p.bump();
         let ty = parse_ty(p, false);
@@ -2125,13 +2102,12 @@ fn parse_item_tag(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
             }
             expect(p, token::SEMI);
             p.get_id();
-            let vr = {name: p.get_str(name),
-                      args: args, id: p.get_id()};
+            let vr = {name: p.get_str(name), args: args, id: p.get_id()};
             variants += [spanned(vlo, vhi, vr)];
           }
           token::RBRACE. {/* empty */ }
           _ {
-            p.fatal(~"expected name of variant or '}' but found " +
+            p.fatal("expected name of variant or '}' but found " +
                         token::to_str(p.get_reader(), tok));
           }
         }
@@ -2142,42 +2118,42 @@ fn parse_item_tag(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
 }
 
 fn parse_auth(p: &parser) -> ast::_auth {
-    if eat_word(p, ~"unsafe") {
+    if eat_word(p, "unsafe") {
         ret ast::auth_unsafe;
     } else { unexpected(p, p.peek()); }
 }
 
 fn parse_item(p: &parser, attrs: &[ast::attribute]) -> option::t<@ast::item> {
-    if eat_word(p, ~"const") {
+    if eat_word(p, "const") {
         ret some(parse_item_const(p, attrs));
-    } else if eat_word(p, ~"inline") {
-        expect_word(p, ~"fn");
+    } else if eat_word(p, "inline") {
+        expect_word(p, "fn");
         ret some(parse_item_fn_or_iter(p, ast::impure_fn, ast::proto_fn,
                                        attrs, ast::il_inline));
-    } else if is_word(p, ~"fn") && p.look_ahead(1u) != token::LPAREN {
+    } else if is_word(p, "fn") && p.look_ahead(1u) != token::LPAREN {
         p.bump();
         ret some(parse_item_fn_or_iter(p, ast::impure_fn, ast::proto_fn,
                                        attrs, ast::il_normal));
-    } else if eat_word(p, ~"pure") {
-        expect_word(p, ~"fn");
+    } else if eat_word(p, "pure") {
+        expect_word(p, "fn");
         ret some(parse_item_fn_or_iter(p, ast::pure_fn, ast::proto_fn, attrs,
                                        ast::il_normal));
-    } else if eat_word(p, ~"iter") {
+    } else if eat_word(p, "iter") {
         ret some(parse_item_fn_or_iter(p, ast::impure_fn, ast::proto_iter,
                                        attrs, ast::il_normal));
-    } else if eat_word(p, ~"mod") {
+    } else if eat_word(p, "mod") {
         ret some(parse_item_mod(p, attrs));
-    } else if eat_word(p, ~"native") {
+    } else if eat_word(p, "native") {
         ret some(parse_item_native_mod(p, attrs));
     }
-    if eat_word(p, ~"type") {
+    if eat_word(p, "type") {
         ret some(parse_item_type(p, attrs));
-    } else if eat_word(p, ~"tag") {
+    } else if eat_word(p, "tag") {
         ret some(parse_item_tag(p, attrs));
-    } else if is_word(p, ~"obj") && p.look_ahead(1u) != token::LPAREN {
+    } else if is_word(p, "obj") && p.look_ahead(1u) != token::LPAREN {
         p.bump();
         ret some(parse_item_obj(p, attrs));
-    } else if eat_word(p, ~"resource") {
+    } else if eat_word(p, "resource") {
         ret some(parse_item_res(p, attrs));
     } else { ret none; }
 }
@@ -2297,18 +2273,19 @@ fn parse_rest_import_name(p: &parser, first: &ast::ident,
         alt p.peek() {
           token::SEMI. { break; }
           token::MOD_SEP. {
-            if glob { p.fatal(~"cannot path into a glob"); }
+            if glob { p.fatal("cannot path into a glob"); }
             if option::is_some(from_idents) {
-                p.fatal(~"cannot path into import list");
+                p.fatal("cannot path into import list");
             }
             p.bump();
           }
-          _ { p.fatal(~"expecting '::' or ';'"); }
+          _ { p.fatal("expecting '::' or ';'"); }
         }
         alt p.peek() {
           token::IDENT(_, _) { identifiers += [parse_ident(p)]; }
 
 
+
           //the lexer can't tell the different kinds of stars apart ) :
           token::BINOP(token::STAR.) {
             glob = true;
@@ -2316,6 +2293,7 @@ fn parse_rest_import_name(p: &parser, first: &ast::ident,
           }
 
 
+
           token::LBRACE. {
             fn parse_import_ident(p: &parser) -> ast::import_ident {
                 let lo = p.get_lo_pos();
@@ -2327,22 +2305,23 @@ fn parse_rest_import_name(p: &parser, first: &ast::ident,
                 parse_seq(token::LBRACE, token::RBRACE, some(token::COMMA),
                           parse_import_ident, p).node;
             if vec::is_empty(from_idents_) {
-                p.fatal(~"at least one import is required");
+                p.fatal("at least one import is required");
             }
             from_idents = some(from_idents_);
           }
 
 
+
           _ {
-            p.fatal(~"expecting an identifier, or '*'");
+            p.fatal("expecting an identifier, or '*'");
           }
         }
     }
     alt def_ident {
       some(i) {
-        if glob { p.fatal(~"globbed imports can't be renamed"); }
+        if glob { p.fatal("globbed imports can't be renamed"); }
         if option::is_some(from_idents) {
-            p.fatal(~"can't rename import list");
+            p.fatal("can't rename import list");
         }
         ret ast::view_item_import(i, identifiers, p.get_id());
       }
@@ -2367,10 +2346,9 @@ fn parse_full_import_name(p: &parser, def_ident: &ast::ident) ->
     alt p.peek() {
       token::IDENT(i, _) {
         p.bump();
-        ret parse_rest_import_name(
-            p, p.get_str(i), some(def_ident));
+        ret parse_rest_import_name(p, p.get_str(i), some(def_ident));
       }
-      _ { p.fatal(~"expecting an identifier"); }
+      _ { p.fatal("expecting an identifier"); }
     }
 }
 
@@ -2383,13 +2361,10 @@ fn parse_import(p: &parser) -> ast::view_item_ {
             p.bump();
             ret parse_full_import_name(p, p.get_str(i));
           }
-          _ {
-            ret parse_rest_import_name(
-                p, p.get_str(i), none);
-          }
+          _ { ret parse_rest_import_name(p, p.get_str(i), none); }
         }
       }
-      _ { p.fatal(~"expecting an identifier"); }
+      _ { p.fatal("expecting an identifier"); }
     }
 }
 
@@ -2403,11 +2378,11 @@ fn parse_export(p: &parser) -> ast::view_item_ {
 fn parse_view_item(p: &parser) -> @ast::view_item {
     let lo = p.get_lo_pos();
     let the_item =
-        if eat_word(p, ~"use") {
+        if eat_word(p, "use") {
             parse_use(p)
-        } else if eat_word(p, ~"import") {
+        } else if eat_word(p, "import") {
             parse_import(p)
-        } else if eat_word(p, ~"export") { parse_export(p) } else { fail };
+        } else if eat_word(p, "export") { parse_export(p) } else { fail };
     let hi = p.get_lo_pos();
     expect(p, token::SEMI);
     ret @spanned(lo, hi, the_item);
@@ -2417,8 +2392,8 @@ fn is_view_item(p: &parser) -> bool {
     alt p.peek() {
       token::IDENT(sid, false) {
         let st = p.get_str(sid);
-        ret str::eq(st, ~"use") || str::eq(st, ~"import") ||
-                str::eq(st, ~"export");
+        ret str::eq(st, "use") || str::eq(st, "import") ||
+                str::eq(st, "export");
       }
       _ { ret false; }
     }
@@ -2436,21 +2411,19 @@ fn parse_native_view(p: &parser) -> [@ast::view_item] {
     ret items;
 }
 
-fn parse_crate_from_source_file(input: &istr, cfg: &ast::crate_cfg,
+fn parse_crate_from_source_file(input: &str, cfg: &ast::crate_cfg,
                                 sess: &parse_sess) -> @ast::crate {
     let p = new_parser_from_file(sess, cfg, input, 0u, 0u, SOURCE_FILE);
     ret parse_crate_mod(p, cfg);
 }
 
-fn parse_crate_from_source_str(name: &istr, source: &istr,
-                               cfg: &ast::crate_cfg,
+fn parse_crate_from_source_str(name: &str, source: &str, cfg: &ast::crate_cfg,
                                sess: &parse_sess) -> @ast::crate {
     let ftype = SOURCE_FILE;
     let filemap = codemap::new_filemap(name, 0u, 0u);
     sess.cm.files += [filemap];
     let itr = @interner::mk(str::hash, str::eq);
-    let rdr = lexer::new_reader(sess.cm, source,
-                                filemap, itr);
+    let rdr = lexer::new_reader(sess.cm, source, filemap, itr);
     let p = new_parser(sess, cfg, rdr, ftype);
     ret parse_crate_mod(p, cfg);
 }
@@ -2468,7 +2441,7 @@ fn parse_crate_mod(p: &parser, _cfg: &ast::crate_cfg) -> @ast::crate {
                   config: p.get_cfg()});
 }
 
-fn parse_str(p: &parser) -> istr {
+fn parse_str(p: &parser) -> str {
     alt p.peek() {
       token::LIT_STR(s) { p.bump(); ret p.get_str(s); }
       _ { fail; }
@@ -2489,8 +2462,8 @@ fn parse_crate_directive(p: &parser, first_outer_attr: &[ast::attribute]) ->
     let expect_mod = vec::len(outer_attrs) > 0u;
 
     let lo = p.get_lo_pos();
-    if expect_mod || is_word(p, ~"mod") {
-        expect_word(p, ~"mod");
+    if expect_mod || is_word(p, "mod") {
+        expect_word(p, "mod");
         let id = parse_ident(p);
         let file_opt =
             alt p.peek() {
@@ -2500,6 +2473,7 @@ fn parse_crate_directive(p: &parser, first_outer_attr: &[ast::attribute]) ->
         alt p.peek() {
 
 
+
           // mod x = "foo.rs";
           token::SEMI. {
             let hi = p.get_hi_pos();
@@ -2508,6 +2482,7 @@ fn parse_crate_directive(p: &parser, first_outer_attr: &[ast::attribute]) ->
           }
 
 
+
           // mod x = "foo_dir" { ...directives... }
           token::LBRACE. {
             p.bump();
@@ -2523,7 +2498,7 @@ fn parse_crate_directive(p: &parser, first_outer_attr: &[ast::attribute]) ->
           }
           t { unexpected(p, t); }
         }
-    } else if eat_word(p, ~"auth") {
+    } else if eat_word(p, "auth") {
         let n = parse_path(p);
         expect(p, token::EQ);
         let a = parse_auth(p);
@@ -2533,7 +2508,7 @@ fn parse_crate_directive(p: &parser, first_outer_attr: &[ast::attribute]) ->
     } else if is_view_item(p) {
         let vi = parse_view_item(p);
         ret spanned(lo, vi.span.hi, ast::cdir_view_item(vi));
-    } else { ret p.fatal(~"expected crate directive"); }
+    } else { ret p.fatal("expected crate directive"); }
 }
 
 fn parse_crate_directives(p: &parser, term: token::token,
@@ -2544,7 +2519,7 @@ fn parse_crate_directives(p: &parser, term: token::token,
     // seeing the terminator next, so if we do see it then fail the same way
     // parse_crate_directive would
     if vec::len(first_outer_attr) > 0u && p.peek() == term {
-        expect_word(p, ~"mod");
+        expect_word(p, "mod");
     }
 
     let cdirs: [@ast::crate_directive] = [];
@@ -2555,17 +2530,16 @@ fn parse_crate_directives(p: &parser, term: token::token,
     ret cdirs;
 }
 
-fn parse_crate_from_crate_file(input: &istr, cfg: &ast::crate_cfg,
+fn parse_crate_from_crate_file(input: &str, cfg: &ast::crate_cfg,
                                sess: &parse_sess) -> @ast::crate {
     let p = new_parser_from_file(sess, cfg, input, 0u, 0u, CRATE_FILE);
     let lo = p.get_lo_pos();
-    let prefix =
-        std::fs::dirname(p.get_filemap().name);
+    let prefix = std::fs::dirname(p.get_filemap().name);
     let leading_attrs = parse_inner_attrs_and_next(p);
     let crate_attrs = leading_attrs.inner;
     let first_cdir_attr = leading_attrs.next;
     let cdirs = parse_crate_directives(p, token::EOF, first_cdir_attr);
-    let deps: [istr] = [];
+    let deps: [str] = [];
     let cx =
         @{p: p,
           mode: eval::mode_parse,
@@ -2584,15 +2558,14 @@ fn parse_crate_from_crate_file(input: &istr, cfg: &ast::crate_cfg,
                   config: p.get_cfg()});
 }
 
-fn parse_crate_from_file(input: &istr, cfg: &ast::crate_cfg,
-                         sess: &parse_sess) -> @ast::crate {
-    if str::ends_with(input, ~".rc") {
+fn parse_crate_from_file(input: &str, cfg: &ast::crate_cfg, sess: &parse_sess)
+   -> @ast::crate {
+    if str::ends_with(input, ".rc") {
         parse_crate_from_crate_file(input, cfg, sess)
-    } else if str::ends_with(input, ~".rs") {
+    } else if str::ends_with(input, ".rs") {
         parse_crate_from_source_file(input, cfg, sess)
     } else {
-        codemap::emit_error(none, ~"unknown input file type: "
-                            + input,
+        codemap::emit_error(none, "unknown input file type: " + input,
                             sess.cm);
         fail
     }
diff --git a/src/comp/syntax/parse/token.rs b/src/comp/syntax/parse/token.rs
index 24d2a3b9a6f..153f5236c4b 100644
--- a/src/comp/syntax/parse/token.rs
+++ b/src/comp/syntax/parse/token.rs
@@ -84,62 +84,64 @@ tag token {
     EOF;
 }
 
-fn binop_to_str(o: binop) -> istr {
+fn binop_to_str(o: binop) -> str {
     alt o {
-      PLUS. { ret ~"+"; }
-      MINUS. { ret ~"-"; }
-      STAR. { ret ~"*"; }
-      SLASH. { ret ~"/"; }
-      PERCENT. { ret ~"%"; }
-      CARET. { ret ~"^"; }
-      AND. { ret ~"&"; }
-      OR. { ret ~"|"; }
-      LSL. { ret ~"<<"; }
-      LSR. { ret ~">>"; }
-      ASR. { ret ~">>>"; }
+      PLUS. { ret "+"; }
+      MINUS. { ret "-"; }
+      STAR. { ret "*"; }
+      SLASH. { ret "/"; }
+      PERCENT. { ret "%"; }
+      CARET. { ret "^"; }
+      AND. { ret "&"; }
+      OR. { ret "|"; }
+      LSL. { ret "<<"; }
+      LSR. { ret ">>"; }
+      ASR. { ret ">>>"; }
     }
 }
 
-fn to_str(r: lexer::reader, t: token) -> istr {
+fn to_str(r: lexer::reader, t: token) -> str {
     alt t {
-      EQ. { ret ~"="; }
-      LT. { ret ~"<"; }
-      LE. { ret ~"<="; }
-      EQEQ. { ret ~"=="; }
-      NE. { ret ~"!="; }
-      GE. { ret ~">="; }
-      GT. { ret ~">"; }
-      NOT. { ret ~"!"; }
-      TILDE. { ret ~"~"; }
-      OROR. { ret ~"||"; }
-      ANDAND. { ret ~"&&"; }
+      EQ. { ret "="; }
+      LT. { ret "<"; }
+      LE. { ret "<="; }
+      EQEQ. { ret "=="; }
+      NE. { ret "!="; }
+      GE. { ret ">="; }
+      GT. { ret ">"; }
+      NOT. { ret "!"; }
+      TILDE. { ret "~"; }
+      OROR. { ret "||"; }
+      ANDAND. { ret "&&"; }
       BINOP(op) { ret binop_to_str(op); }
-      BINOPEQ(op) { ret binop_to_str(op) + ~"="; }
+      BINOPEQ(op) { ret binop_to_str(op) + "="; }
+
 
 
       /* Structural symbols */
       AT. {
-        ret ~"@";
+        ret "@";
       }
-      DOT. { ret ~"."; }
-      ELLIPSIS. { ret ~"..."; }
-      COMMA. { ret ~","; }
-      SEMI. { ret ~";"; }
-      COLON. { ret ~":"; }
-      MOD_SEP. { ret ~"::"; }
-      QUES. { ret ~"?"; }
-      RARROW. { ret ~"->"; }
-      LARROW. { ret ~"<-"; }
-      DARROW. { ret ~"<->"; }
-      LPAREN. { ret ~"("; }
-      RPAREN. { ret ~")"; }
-      LBRACKET. { ret ~"["; }
-      RBRACKET. { ret ~"]"; }
-      LBRACE. { ret ~"{"; }
-      RBRACE. { ret ~"}"; }
-      POUND. { ret ~"#"; }
-      POUND_LBRACE. { ret ~"#{"; }
-      POUND_LT. { ret ~"#<"; }
+      DOT. { ret "."; }
+      ELLIPSIS. { ret "..."; }
+      COMMA. { ret ","; }
+      SEMI. { ret ";"; }
+      COLON. { ret ":"; }
+      MOD_SEP. { ret "::"; }
+      QUES. { ret "?"; }
+      RARROW. { ret "->"; }
+      LARROW. { ret "<-"; }
+      DARROW. { ret "<->"; }
+      LPAREN. { ret "("; }
+      RPAREN. { ret ")"; }
+      LBRACKET. { ret "["; }
+      RBRACKET. { ret "]"; }
+      LBRACE. { ret "{"; }
+      RBRACE. { ret "}"; }
+      POUND. { ret "#"; }
+      POUND_LBRACE. { ret "#{"; }
+      POUND_LT. { ret "#<"; }
+
 
 
       /* Literals */
@@ -148,39 +150,35 @@ fn to_str(r: lexer::reader, t: token) -> istr {
       }
       LIT_UINT(u) { ret uint::to_str(u, 10u); }
       LIT_MACH_INT(tm, i) {
-        ret int::to_str(i, 10u) + ~"_" + ty_mach_to_str(tm);
+        ret int::to_str(i, 10u) + "_" + ty_mach_to_str(tm);
       }
       LIT_MACH_FLOAT(tm, s) {
-        ret interner::get::<istr>(
-            *r.get_interner(), s) + ~"_" +
-            ty_mach_to_str(tm);
-      }
-      LIT_FLOAT(s) {
-        ret interner::get::<istr>(*r.get_interner(), s);
+        ret interner::get::<str>(*r.get_interner(), s) + "_" +
+                ty_mach_to_str(tm);
       }
+      LIT_FLOAT(s) { ret interner::get::<str>(*r.get_interner(), s); }
       LIT_STR(s) { // FIXME: escape.
-        ret ~"\"" +
-            interner::get::<istr>(*r.get_interner(), s)
-            + ~"\"";
+        ret "\"" + interner::get::<str>(*r.get_interner(), s) + "\"";
       }
       LIT_CHAR(c) {
         // FIXME: escape.
-        let tmp = ~"'";
+        let tmp = "'";
         str::push_char(tmp, c);
         str::push_byte(tmp, '\'' as u8);
         ret tmp;
       }
-      LIT_BOOL(b) { if b { ret ~"true"; } else { ret ~"false"; } }
+      LIT_BOOL(b) { if b { ret "true"; } else { ret "false"; } }
+
 
 
       /* Name components */
       IDENT(s, _) {
-        ret interner::get::<istr>(*r.get_interner(), s);
+        ret interner::get::<str>(*r.get_interner(), s);
       }
-      IDX(i) { ret ~"_" + int::to_str(i, 10u); }
-      UNDERSCORE. { ret ~"_"; }
-      BRACEQUOTE(_) { ret ~"<bracequote>"; }
-      EOF. { ret ~"<eof>"; }
+      IDX(i) { ret "_" + int::to_str(i, 10u); }
+      UNDERSCORE. { ret "_"; }
+      BRACEQUOTE(_) { ret "<bracequote>"; }
+      EOF. { ret "<eof>"; }
     }
 }