about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
authorMichael Sullivan <sully@msully.net>2012-07-13 22:57:48 -0700
committerMichael Sullivan <sully@msully.net>2012-07-14 01:03:43 -0700
commit92743dc2a6a14d042d4b278e4a4dde5ca198c886 (patch)
tree2626211c99906387257880f127f96fee66a0bb4e /src/libsyntax
parent5c5065e8bdd1a7b28810fea4b940577ff17c112c (diff)
downloadrust-92743dc2a6a14d042d4b278e4a4dde5ca198c886.tar.gz
rust-92743dc2a6a14d042d4b278e4a4dde5ca198c886.zip
Move the world over to using the new style string literals and types. Closes #2907.
Diffstat (limited to 'src/libsyntax')
-rw-r--r--src/libsyntax/ast.rs6
-rw-r--r--src/libsyntax/ast_map.rs10
-rw-r--r--src/libsyntax/ast_util.rs80
-rw-r--r--src/libsyntax/attr.rs55
-rw-r--r--src/libsyntax/codemap.rs28
-rw-r--r--src/libsyntax/diagnostic.rs98
-rw-r--r--src/libsyntax/ext/auto_serialize.rs62
-rw-r--r--src/libsyntax/ext/base.rs70
-rw-r--r--src/libsyntax/ext/build.rs4
-rw-r--r--src/libsyntax/ext/concat_idents.rs6
-rw-r--r--src/libsyntax/ext/env.rs6
-rw-r--r--src/libsyntax/ext/expand.rs22
-rw-r--r--src/libsyntax/ext/fmt.rs96
-rw-r--r--src/libsyntax/ext/ident_to_str.rs4
-rw-r--r--src/libsyntax/ext/log_syntax.rs4
-rw-r--r--src/libsyntax/ext/pipes/ast_builder.rs2
-rw-r--r--src/libsyntax/ext/pipes/parse_proto.rs4
-rw-r--r--src/libsyntax/ext/pipes/pipec.rs70
-rw-r--r--src/libsyntax/ext/qquote.rs85
-rw-r--r--src/libsyntax/ext/simplext.rs104
-rw-r--r--src/libsyntax/ext/source_util.rs24
-rw-r--r--src/libsyntax/ext/tt/earley_parser.rs40
-rw-r--r--src/libsyntax/ext/tt/macro_rules.rs18
-rw-r--r--src/libsyntax/ext/tt/transcribe.rs10
-rw-r--r--src/libsyntax/parse.rs46
-rw-r--r--src/libsyntax/parse/attr.rs2
-rw-r--r--src/libsyntax/parse/comments.rs80
-rw-r--r--src/libsyntax/parse/common.rs46
-rw-r--r--src/libsyntax/parse/eval.rs22
-rw-r--r--src/libsyntax/parse/lexer.rs56
-rw-r--r--src/libsyntax/parse/parser.rs294
-rw-r--r--src/libsyntax/parse/token.rs160
-rw-r--r--src/libsyntax/print/pp.rs32
-rw-r--r--src/libsyntax/print/pprust.rs510
-rw-r--r--src/libsyntax/visit.rs4
35 files changed, 1083 insertions, 1077 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 095038f02e3..30535b45529 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -32,7 +32,7 @@ fn deserialize_span<D>(_d: D) -> span {
 type spanned<T> = {node: T, span: span};
 
 #[auto_serialize]
-type ident = @str/~;
+type ident = @~str;
 
 // Functions may or may not have names.
 #[auto_serialize]
@@ -428,11 +428,11 @@ type lit = spanned<lit_>;
 
 #[auto_serialize]
 enum lit_ {
-    lit_str(@str/~),
+    lit_str(@~str),
     lit_int(i64, int_ty),
     lit_uint(u64, uint_ty),
     lit_int_unsuffixed(i64),
-    lit_float(@str/~, float_ty),
+    lit_float(@~str, float_ty),
     lit_nil,
     lit_bool(bool),
 }
diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs
index a5ae45d54ee..7f1899792d7 100644
--- a/src/libsyntax/ast_map.rs
+++ b/src/libsyntax/ast_map.rs
@@ -10,7 +10,7 @@ enum path_elt { path_mod(ident), path_name(ident) }
 type path = ~[path_elt];
 
 /* FIXMEs that say "bad" are as per #2543 */
-fn path_to_str_with_sep(p: path, sep: str) -> str {
+fn path_to_str_with_sep(p: path, sep: ~str) -> ~str {
     let strs = do vec::map(p) |e| {
         alt e {
           path_mod(s) { /* FIXME (#2543) */ copy *s }
@@ -20,7 +20,7 @@ fn path_to_str_with_sep(p: path, sep: str) -> str {
     str::connect(strs, sep)
 }
 
-fn path_ident_to_str(p: path, i: ident) -> str {
+fn path_ident_to_str(p: path, i: ident) -> ~str {
     if vec::is_empty(p) {
         /* FIXME (#2543) */ copy *i
     } else {
@@ -28,8 +28,8 @@ fn path_ident_to_str(p: path, i: ident) -> str {
     }
 }
 
-fn path_to_str(p: path) -> str {
-    path_to_str_with_sep(p, "::")
+fn path_to_str(p: path) -> ~str {
+    path_to_str_with_sep(p, ~"::")
 }
 
 enum ast_node {
@@ -267,7 +267,7 @@ fn map_expr(ex: @expr, cx: ctx, v: vt) {
     visit::visit_expr(ex, cx, v);
 }
 
-fn node_id_to_str(map: map, id: node_id) -> str {
+fn node_id_to_str(map: map, id: node_id) -> ~str {
     alt map.find(id) {
       none {
         #fmt["unknown node (id=%d)", id]
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index cfaa7103a58..556239769da 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -21,11 +21,11 @@ pure fn mk_sp(lo: uint, hi: uint) -> span {
 // make this a const, once the compiler supports it
 pure fn dummy_sp() -> span { ret mk_sp(0u, 0u); }
 
-pure fn path_name(p: @path) -> str { path_name_i(p.idents) }
+pure fn path_name(p: @path) -> ~str { path_name_i(p.idents) }
 
-pure fn path_name_i(idents: ~[ident]) -> str {
+pure fn path_name_i(idents: ~[ident]) -> ~str {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
-    str::connect(idents.map(|i|*i), "::")
+    str::connect(idents.map(|i|*i), ~"::")
 }
 
 pure fn path_to_ident(p: @path) -> ident { vec::last(p.idents) }
@@ -45,7 +45,7 @@ pure fn stmt_id(s: stmt) -> node_id {
 fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
     alt d { def_variant(enum_id, var_id) {
             ret {enm: enum_id, var: var_id}; }
-        _ { fail "non-variant in variant_def_ids"; } }
+        _ { fail ~"non-variant in variant_def_ids"; } }
 }
 
 pure fn def_id_of_def(d: def) -> def_id {
@@ -63,26 +63,26 @@ pure fn def_id_of_def(d: def) -> def_id {
     }
 }
 
-pure fn binop_to_str(op: binop) -> str {
+pure fn binop_to_str(op: binop) -> ~str {
     alt op {
-      add { ret "+"; }
-      subtract { ret "-"; }
-      mul { ret "*"; }
-      div { ret "/"; }
-      rem { ret "%"; }
-      and { ret "&&"; }
-      or { ret "||"; }
-      bitxor { ret "^"; }
-      bitand { ret "&"; }
-      bitor { ret "|"; }
-      shl { ret "<<"; }
-      shr { ret ">>"; }
-      eq { ret "=="; }
-      lt { ret "<"; }
-      le { ret "<="; }
-      ne { ret "!="; }
-      ge { ret ">="; }
-      gt { ret ">"; }
+      add { ret ~"+"; }
+      subtract { ret ~"-"; }
+      mul { ret ~"*"; }
+      div { ret ~"/"; }
+      rem { ret ~"%"; }
+      and { ret ~"&&"; }
+      or { ret ~"||"; }
+      bitxor { ret ~"^"; }
+      bitand { ret ~"&"; }
+      bitor { ret ~"|"; }
+      shl { ret ~"<<"; }
+      shr { ret ~">>"; }
+      eq { ret ~"=="; }
+      lt { ret ~"<"; }
+      le { ret ~"<="; }
+      ne { ret ~"!="; }
+      ge { ret ~">="; }
+      gt { ret ~">"; }
     }
 }
 
@@ -98,13 +98,13 @@ pure fn is_shift_binop(b: binop) -> bool {
     }
 }
 
-pure fn unop_to_str(op: unop) -> str {
+pure fn unop_to_str(op: unop) -> ~str {
     alt op {
-      box(mt) { if mt == m_mutbl { ret "@mut "; } ret "@"; }
-      uniq(mt) { if mt == m_mutbl { ret "~mut "; } ret "~"; }
-      deref { ret "*"; }
-      not { ret "!"; }
-      neg { ret "-"; }
+      box(mt) { if mt == m_mutbl { ret ~"@mut "; } ret ~"@"; }
+      uniq(mt) { if mt == m_mutbl { ret ~"~mut "; } ret ~"~"; }
+      deref { ret ~"*"; }
+      not { ret ~"!"; }
+      neg { ret ~"-"; }
     }
 }
 
@@ -112,11 +112,11 @@ pure fn is_path(e: @expr) -> bool {
     ret alt e.node { expr_path(_) { true } _ { false } };
 }
 
-pure fn int_ty_to_str(t: int_ty) -> str {
+pure fn int_ty_to_str(t: int_ty) -> ~str {
     alt t {
-      ty_char { "u8" } // ???
-      ty_i { "" } ty_i8 { "i8" } ty_i16 { "i16" }
-      ty_i32 { "i32" } ty_i64 { "i64" }
+      ty_char { ~"u8" } // ???
+      ty_i { ~"" } ty_i8 { ~"i8" } ty_i16 { ~"i16" }
+      ty_i32 { ~"i32" } ty_i64 { ~"i64" }
     }
 }
 
@@ -129,10 +129,10 @@ pure fn int_ty_max(t: int_ty) -> u64 {
     }
 }
 
-pure fn uint_ty_to_str(t: uint_ty) -> str {
+pure fn uint_ty_to_str(t: uint_ty) -> ~str {
     alt t {
-      ty_u { "u" } ty_u8 { "u8" } ty_u16 { "u16" }
-      ty_u32 { "u32" } ty_u64 { "u64" }
+      ty_u { ~"u" } ty_u8 { ~"u8" } ty_u16 { ~"u16" }
+      ty_u32 { ~"u32" } ty_u64 { ~"u64" }
     }
 }
 
@@ -145,8 +145,8 @@ pure fn uint_ty_max(t: uint_ty) -> u64 {
     }
 }
 
-pure fn float_ty_to_str(t: float_ty) -> str {
-    alt t { ty_f { "" } ty_f32 { "f32" } ty_f64 { "f64" } }
+pure fn float_ty_to_str(t: float_ty) -> ~str {
+    alt t { ty_f { ~"" } ty_f32 { ~"f32" } ty_f64 { ~"f64" } }
 }
 
 fn is_exported(i: ident, m: _mod) -> bool {
@@ -191,7 +191,7 @@ fn is_exported(i: ident, m: _mod) -> bool {
                             if id.node.name == i { ret true; }
                         }
                     } else {
-                        fail "export of path-qualified list";
+                        fail ~"export of path-qualified list";
                     }
                   }
 
@@ -381,7 +381,7 @@ fn dtor_dec() -> fn_decl {
     let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()};
     // dtor has one argument, of type ()
     {inputs: ~[{mode: ast::expl(ast::by_ref),
-               ty: nil_t, ident: @"_"/~, id: 0}],
+               ty: nil_t, ident: @~"_", id: 0}],
      output: nil_t, purity: impure_fn, cf: return_val, constraints: ~[]}
 }
 
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 114daa900aa..859bc70bfd6 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -49,7 +49,8 @@ export require_unique_names;
 
 /* Constructors */
 
-fn mk_name_value_item_str(+name: ast::ident, +value: str) -> @ast::meta_item {
+fn mk_name_value_item_str(+name: ast::ident, +value: ~str) ->
+    @ast::meta_item {
     let value_lit = dummy_spanned(ast::lit_str(@value));
     ret mk_name_value_item(name, value_lit);
 }
@@ -73,11 +74,11 @@ fn mk_attr(item: @ast::meta_item) -> ast::attribute {
                        is_sugared_doc: false});
 }
 
-fn mk_sugared_doc_attr(text: str, lo: uint, hi: uint) -> ast::attribute {
+fn mk_sugared_doc_attr(text: ~str, lo: uint, hi: uint) -> ast::attribute {
     let lit = spanned(lo, hi, ast::lit_str(@text));
     let attr = {
         style: doc_comment_style(text),
-        value: spanned(lo, hi, ast::meta_name_value(@"doc"/~, lit)),
+        value: spanned(lo, hi, ast::meta_name_value(@~"doc", lit)),
         is_sugared_doc: true
     };
     ret spanned(lo, hi, attr);
@@ -97,7 +98,7 @@ fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
 fn desugar_doc_attr(attr: ast::attribute) -> ast::attribute {
     if attr.node.is_sugared_doc {
         let comment = get_meta_item_value_str(@attr.node.value).get();
-        let meta = mk_name_value_item_str(@"doc"/~,
+        let meta = mk_name_value_item_str(@~"doc",
                                      strip_doc_comment_decoration(*comment));
         ret mk_attr(meta);
     } else {
@@ -124,7 +125,7 @@ fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
  * Gets the string value if the meta_item is a meta_name_value variant
  * containing a string, otherwise none
  */
-fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@str/~> {
+fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@~str> {
     alt meta.node {
       ast::meta_name_value(_, v) {
         alt v.node {
@@ -154,7 +155,7 @@ fn get_meta_item_list(meta: @ast::meta_item) -> option<~[@ast::meta_item]> {
  */
 fn get_name_value_str_pair(
     item: @ast::meta_item
-) -> option<(ast::ident, @str/~)> {
+) -> option<(ast::ident, @~str)> {
     alt attr::get_meta_item_value_str(item) {
       some(value) {
         let name = attr::get_meta_item_name(item);
@@ -168,7 +169,7 @@ fn get_name_value_str_pair(
 /* Searching */
 
 /// Search a list of attributes and return only those with a specific name
-fn find_attrs_by_name(attrs: ~[ast::attribute], +name: str) ->
+fn find_attrs_by_name(attrs: ~[ast::attribute], +name: ~str) ->
    ~[ast::attribute] {
     let filter = (
         fn@(a: ast::attribute) -> option<ast::attribute> {
@@ -181,7 +182,7 @@ fn find_attrs_by_name(attrs: ~[ast::attribute], +name: str) ->
 }
 
 /// Searcha list of meta items and return only those with a specific name
-fn find_meta_items_by_name(metas: ~[@ast::meta_item], +name: str) ->
+fn find_meta_items_by_name(metas: ~[@ast::meta_item], +name: ~str) ->
    ~[@ast::meta_item] {
     let filter = fn@(&&m: @ast::meta_item) -> option<@ast::meta_item> {
         if *get_meta_item_name(m) == name {
@@ -224,22 +225,22 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
             // FIXME (#607): Needs implementing
             // This involves probably sorting the list by name and
             // meta_item variant
-            fail "unimplemented meta_item variant"
+            fail ~"unimplemented meta_item variant"
           }
         }
 }
 
-fn contains_name(metas: ~[@ast::meta_item], +name: str) -> bool {
+fn contains_name(metas: ~[@ast::meta_item], +name: ~str) -> bool {
     let matches = find_meta_items_by_name(metas, name);
     ret vec::len(matches) > 0u;
 }
 
-fn attrs_contains_name(attrs: ~[ast::attribute], +name: str) -> bool {
+fn attrs_contains_name(attrs: ~[ast::attribute], +name: ~str) -> bool {
     vec::is_not_empty(find_attrs_by_name(attrs, name))
 }
 
-fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: str)
-    -> option<@str/~> {
+fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: ~str)
+    -> option<@~str> {
     let mattrs = find_attrs_by_name(attrs, name);
     if vec::len(mattrs) > 0u {
         ret get_meta_item_value_str(attr_meta(mattrs[0]));
@@ -249,7 +250,7 @@ fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: str)
 
 fn last_meta_item_by_name(
     items: ~[@ast::meta_item],
-    +name: str
+    +name: ~str
 ) -> option<@ast::meta_item> {
     let items = attr::find_meta_items_by_name(items, name);
     vec::last_opt(items)
@@ -257,8 +258,8 @@ fn last_meta_item_by_name(
 
 fn last_meta_item_value_str_by_name(
     items: ~[@ast::meta_item],
-    +name: str
-) -> option<@str/~> {
+    +name: ~str
+) -> option<@~str> {
     alt last_meta_item_by_name(items, name) {
       some(item) {
         alt attr::get_meta_item_value_str(item) {
@@ -272,7 +273,7 @@ fn last_meta_item_value_str_by_name(
 
 fn last_meta_item_list_by_name(
     items: ~[@ast::meta_item],
-    +name: str
+    +name: ~str
 ) -> option<~[@ast::meta_item]> {
     alt last_meta_item_by_name(items, name) {
       some(item) {
@@ -319,7 +320,7 @@ fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ast::ident) ->
 
 fn find_linkage_attrs(attrs: ~[ast::attribute]) -> ~[ast::attribute] {
     let mut found = ~[];
-    for find_attrs_by_name(attrs, "link").each |attr| {
+    for find_attrs_by_name(attrs, ~"link").each |attr| {
         alt attr.node.value.node {
           ast::meta_list(_, _) { vec::push(found, attr) }
           _ { #debug("ignoring link attribute that has incorrect type"); }
@@ -340,22 +341,22 @@ fn find_linkage_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
     }
 }
 
-fn foreign_abi(attrs: ~[ast::attribute]) -> either<str, ast::foreign_abi> {
-    ret alt attr::first_attr_value_str_by_name(attrs, "abi") {
+fn foreign_abi(attrs: ~[ast::attribute]) -> either<~str, ast::foreign_abi> {
+    ret alt attr::first_attr_value_str_by_name(attrs, ~"abi") {
       option::none {
         either::right(ast::foreign_abi_cdecl)
       }
-      option::some(@"rust-intrinsic"/~) {
+      option::some(@~"rust-intrinsic") {
         either::right(ast::foreign_abi_rust_intrinsic)
       }
-      option::some(@"cdecl"/~) {
+      option::some(@~"cdecl") {
         either::right(ast::foreign_abi_cdecl)
       }
-      option::some(@"stdcall"/~) {
+      option::some(@~"stdcall") {
         either::right(ast::foreign_abi_stdcall)
       }
       option::some(t) {
-        either::left("unsupported abi: " + *t)
+        either::left(~"unsupported abi: " + *t)
       }
     };
 }
@@ -371,9 +372,9 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
     // FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
     do vec::foldl(ia_none, attrs) |ia,attr| {
         alt attr.node.value.node {
-          ast::meta_word(@"inline"/~) { ia_hint }
-          ast::meta_list(@"inline"/~, items) {
-            if !vec::is_empty(find_meta_items_by_name(items, "always")) {
+          ast::meta_word(@~"inline") { ia_hint }
+          ast::meta_list(@~"inline", items) {
+            if !vec::is_empty(find_meta_items_by_name(items, ~"always")) {
                 ia_always
             } else {
                 ia_hint
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 867d2d7e45a..76625d3f918 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -28,7 +28,7 @@ export loc;
 export get_filemap;
 export new_codemap;
 
-type filename = str;
+type filename = ~str;
 
 type file_pos = {ch: uint, byte: uint};
 
@@ -41,11 +41,11 @@ type file_pos = {ch: uint, byte: uint};
 enum file_substr {
     fss_none,
     fss_internal(span),
-    fss_external({filename: str, line: uint, col: uint})
+    fss_external({filename: ~str, line: uint, col: uint})
 }
 
 type filemap =
-    @{name: filename, substr: file_substr, src: @str/~,
+    @{name: filename, substr: file_substr, src: @~str,
       start_pos: file_pos, mut lines: ~[file_pos]};
 
 type codemap = @{files: dvec<filemap>};
@@ -55,7 +55,7 @@ type loc = {file: filemap, line: uint, col: uint};
 fn new_codemap() -> codemap { @{files: dvec()} }
 
 fn new_filemap_w_substr(+filename: filename, +substr: file_substr,
-                        src: @str/~,
+                        src: @~str,
                         start_pos_ch: uint, start_pos_byte: uint)
    -> filemap {
     ret @{name: filename, substr: substr, src: src,
@@ -63,14 +63,14 @@ fn new_filemap_w_substr(+filename: filename, +substr: file_substr,
           mut lines: ~[{ch: start_pos_ch, byte: start_pos_byte}]};
 }
 
-fn new_filemap(+filename: filename, src: @str/~,
+fn new_filemap(+filename: filename, src: @~str,
                start_pos_ch: uint, start_pos_byte: uint)
     -> filemap {
     ret new_filemap_w_substr(filename, fss_none, src,
                              start_pos_ch, start_pos_byte);
 }
 
-fn mk_substr_filename(cm: codemap, sp: span) -> str
+fn mk_substr_filename(cm: codemap, sp: span) -> ~str
 {
     let pos = lookup_char_pos(cm, sp.lo);
     ret #fmt("<%s:%u:%u>", pos.file.name, pos.line, pos.col);
@@ -121,7 +121,7 @@ fn lookup_byte_pos(map: codemap, pos: uint) -> loc {
 }
 
 fn lookup_char_pos_adj(map: codemap, pos: uint)
-    -> {filename: str, line: uint, col: uint, file: option<filemap>}
+    -> {filename: ~str, line: uint, col: uint, file: option<filemap>}
 {
     let loc = lookup_char_pos(map, pos);
     alt (loc.file.substr) {
@@ -158,19 +158,19 @@ fn adjust_span(map: codemap, sp: span) -> span {
 
 enum expn_info_ {
     expanded_from({call_site: span,
-                   callie: {name: str, span: option<span>}})
+                   callie: {name: ~str, span: option<span>}})
 }
 type expn_info = option<@expn_info_>;
 type span = {lo: uint, hi: uint, expn_info: expn_info};
 
-fn span_to_str_no_adj(sp: span, cm: codemap) -> str {
+fn span_to_str_no_adj(sp: span, cm: codemap) -> ~str {
     let lo = lookup_char_pos(cm, sp.lo);
     let hi = lookup_char_pos(cm, sp.hi);
     ret #fmt("%s:%u:%u: %u:%u", lo.file.name,
              lo.line, lo.col, hi.line, hi.col)
 }
 
-fn span_to_str(sp: span, cm: codemap) -> str {
+fn span_to_str(sp: span, cm: codemap) -> ~str {
     let lo = lookup_char_pos_adj(cm, sp.lo);
     let hi = lookup_char_pos_adj(cm, sp.hi);
     ret #fmt("%s:%u:%u: %u:%u", lo.filename,
@@ -194,7 +194,7 @@ fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
     ret @{file: lo.file, lines: lines};
 }
 
-fn get_line(fm: filemap, line: int) -> str unsafe {
+fn get_line(fm: filemap, line: int) -> ~str unsafe {
     let begin: uint = fm.lines[line].byte - fm.start_pos.byte;
     let end = alt str::find_char_from(*fm.src, '\n', begin) {
       some(e) { e }
@@ -213,20 +213,20 @@ fn lookup_byte_offset(cm: codemap::codemap, chpos: uint)
     {fm: fm, pos: line_offset + col_offset}
 }
 
-fn span_to_snippet(sp: span, cm: codemap::codemap) -> str {
+fn span_to_snippet(sp: span, cm: codemap::codemap) -> ~str {
     let begin = lookup_byte_offset(cm, sp.lo);
     let end = lookup_byte_offset(cm, sp.hi);
     assert begin.fm == end.fm;
     ret str::slice(*begin.fm.src, begin.pos, end.pos);
 }
 
-fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> str
+fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> ~str
 {
     let fm = cm.files[fidx];
     ret str::slice(*fm.src, lo, hi)
 }
 
-fn get_filemap(cm: codemap, filename: str) -> filemap {
+fn get_filemap(cm: codemap, filename: ~str) -> filemap {
     for cm.files.each |fm| { if fm.name == filename { ret fm; } }
     //XXjdm the following triggers a mismatched type bug
     //      (or expected function, found _|_)
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 377d0864151..cda49de8dd0 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -10,30 +10,30 @@ export ice_msg;
 export expect;
 
 type emitter = fn@(cmsp: option<(codemap::codemap, span)>,
-                   msg: str, lvl: level);
+                   msg: ~str, lvl: level);
 
 
 iface span_handler {
-    fn span_fatal(sp: span, msg: str) -> !;
-    fn span_err(sp: span, msg: str);
-    fn span_warn(sp: span, msg: str);
-    fn span_note(sp: span, msg: str);
-    fn span_bug(sp: span, msg: str) -> !;
-    fn span_unimpl(sp: span, msg: str) -> !;
+    fn span_fatal(sp: span, msg: ~str) -> !;
+    fn span_err(sp: span, msg: ~str);
+    fn span_warn(sp: span, msg: ~str);
+    fn span_note(sp: span, msg: ~str);
+    fn span_bug(sp: span, msg: ~str) -> !;
+    fn span_unimpl(sp: span, msg: ~str) -> !;
     fn handler() -> handler;
 }
 
 iface handler {
-    fn fatal(msg: str) -> !;
-    fn err(msg: str);
+    fn fatal(msg: ~str) -> !;
+    fn err(msg: ~str);
     fn bump_err_count();
     fn has_errors() -> bool;
     fn abort_if_errors();
-    fn warn(msg: str);
-    fn note(msg: str);
-    fn bug(msg: str) -> !;
-    fn unimpl(msg: str) -> !;
-    fn emit(cmsp: option<(codemap::codemap, span)>, msg: str, lvl: level);
+    fn warn(msg: ~str);
+    fn note(msg: ~str);
+    fn bug(msg: ~str) -> !;
+    fn unimpl(msg: ~str) -> !;
+    fn emit(cmsp: option<(codemap::codemap, span)>, msg: ~str, lvl: level);
 }
 
 type handler_t = @{
@@ -47,25 +47,25 @@ type codemap_t = @{
 };
 
 impl codemap_span_handler of span_handler for codemap_t {
-    fn span_fatal(sp: span, msg: str) -> ! {
+    fn span_fatal(sp: span, msg: ~str) -> ! {
         self.handler.emit(some((self.cm, sp)), msg, fatal);
         fail;
     }
-    fn span_err(sp: span, msg: str) {
+    fn span_err(sp: span, msg: ~str) {
         self.handler.emit(some((self.cm, sp)), msg, error);
         self.handler.bump_err_count();
     }
-    fn span_warn(sp: span, msg: str) {
+    fn span_warn(sp: span, msg: ~str) {
         self.handler.emit(some((self.cm, sp)), msg, warning);
     }
-    fn span_note(sp: span, msg: str) {
+    fn span_note(sp: span, msg: ~str) {
         self.handler.emit(some((self.cm, sp)), msg, note);
     }
-    fn span_bug(sp: span, msg: str) -> ! {
+    fn span_bug(sp: span, msg: ~str) -> ! {
         self.span_fatal(sp, ice_msg(msg));
     }
-    fn span_unimpl(sp: span, msg: str) -> ! {
-        self.span_bug(sp, "unimplemented " + msg);
+    fn span_unimpl(sp: span, msg: ~str) -> ! {
+        self.span_bug(sp, ~"unimplemented " + msg);
     }
     fn handler() -> handler {
         self.handler
@@ -73,11 +73,11 @@ impl codemap_span_handler of span_handler for codemap_t {
 }
 
 impl codemap_handler of handler for handler_t {
-    fn fatal(msg: str) -> ! {
+    fn fatal(msg: ~str) -> ! {
         self.emit(none, msg, fatal);
         fail;
     }
-    fn err(msg: str) {
+    fn err(msg: ~str) {
         self.emit(none, msg, error);
         self.bump_err_count();
     }
@@ -89,28 +89,28 @@ impl codemap_handler of handler for handler_t {
         let s;
         alt self.err_count {
           0u { ret; }
-          1u { s = "aborting due to previous error"; }
+          1u { s = ~"aborting due to previous error"; }
           _  { s = #fmt["aborting due to %u previous errors",
                         self.err_count]; }
         }
         self.fatal(s);
     }
-    fn warn(msg: str) {
+    fn warn(msg: ~str) {
         self.emit(none, msg, warning);
     }
-    fn note(msg: str) {
+    fn note(msg: ~str) {
         self.emit(none, msg, note);
     }
-    fn bug(msg: str) -> ! {
+    fn bug(msg: ~str) -> ! {
         self.fatal(ice_msg(msg));
     }
-    fn unimpl(msg: str) -> ! { self.bug("unimplemented " + msg); }
-    fn emit(cmsp: option<(codemap::codemap, span)>, msg: str, lvl: level) {
+    fn unimpl(msg: ~str) -> ! { self.bug(~"unimplemented " + msg); }
+    fn emit(cmsp: option<(codemap::codemap, span)>, msg: ~str, lvl: level) {
         self.emit(cmsp, msg, lvl);
     }
 }
 
-fn ice_msg(msg: str) -> str {
+fn ice_msg(msg: ~str) -> ~str {
     #fmt["internal compiler error: %s", msg]
 }
 
@@ -124,7 +124,7 @@ fn mk_handler(emitter: option<emitter>) -> handler {
       some(e) { e }
       none {
         let f = fn@(cmsp: option<(codemap::codemap, span)>,
-            msg: str, t: level) {
+            msg: ~str, t: level) {
             emit(cmsp, msg, t);
         };
         f
@@ -144,12 +144,12 @@ enum level {
     note,
 }
 
-fn diagnosticstr(lvl: level) -> str {
+fn diagnosticstr(lvl: level) -> ~str {
     alt lvl {
-      fatal { "error" }
-      error { "error" }
-      warning { "warning" }
-      note { "note" }
+      fatal { ~"error" }
+      error { ~"error" }
+      warning { ~"warning" }
+      note { ~"note" }
     }
 }
 
@@ -162,7 +162,7 @@ fn diagnosticcolor(lvl: level) -> u8 {
     }
 }
 
-fn print_diagnostic(topic: str, lvl: level, msg: str) {
+fn print_diagnostic(topic: ~str, lvl: level, msg: ~str) {
     if str::is_not_empty(topic) {
         io::stderr().write_str(#fmt["%s ", topic]);
     }
@@ -177,7 +177,7 @@ fn print_diagnostic(topic: str, lvl: level, msg: str) {
 }
 
 fn emit(cmsp: option<(codemap::codemap, span)>,
-        msg: str, lvl: level) {
+        msg: ~str, lvl: level) {
     alt cmsp {
       some((cm, sp)) {
         let sp = codemap::adjust_span(cm,sp);
@@ -188,7 +188,7 @@ fn emit(cmsp: option<(codemap::codemap, span)>,
         print_macro_backtrace(cm, sp);
       }
       none {
-        print_diagnostic("", lvl, msg);
+        print_diagnostic(~"", lvl, msg);
       }
     }
 }
@@ -209,16 +209,16 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
     // Print the offending lines
     for display_lines.each |line| {
         io::stderr().write_str(#fmt["%s:%u ", fm.name, line + 1u]);
-        let s = codemap::get_line(fm, line as int) + "\n";
+        let s = codemap::get_line(fm, line as int) + ~"\n";
         io::stderr().write_str(s);
     }
     if elided {
         let last_line = display_lines[vec::len(display_lines) - 1u];
         let s = #fmt["%s:%u ", fm.name, last_line + 1u];
         let mut indent = str::len(s);
-        let mut out = "";
-        while indent > 0u { out += " "; indent -= 1u; }
-        out += "...\n";
+        let mut out = ~"";
+        while indent > 0u { out += ~" "; indent -= 1u; }
+        out += ~"...\n";
         io::stderr().write_str(out);
     }
 
@@ -234,34 +234,34 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
 
         // indent past |name:## | and the 0-offset column location
         let mut left = str::len(fm.name) + digits + lo.col + 3u;
-        let mut s = "";
+        let mut s = ~"";
         while left > 0u { str::push_char(s, ' '); left -= 1u; }
 
-        s += "^";
+        s += ~"^";
         let hi = codemap::lookup_char_pos(cm, sp.hi);
         if hi.col != lo.col {
             // the ^ already takes up one space
             let mut width = hi.col - lo.col - 1u;
             while width > 0u { str::push_char(s, '~'); width -= 1u; }
         }
-        io::stderr().write_str(s + "\n");
+        io::stderr().write_str(s + ~"\n");
     }
 }
 
 fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
     do option::iter (sp.expn_info) |ei| {
-        let ss = option::map_default(ei.callie.span, @""/~,
+        let ss = option::map_default(ei.callie.span, @~"",
                                      |span| @codemap::span_to_str(span, cm));
         print_diagnostic(*ss, note,
                          #fmt("in expansion of #%s", ei.callie.name));
         let ss = codemap::span_to_str(ei.call_site, cm);
-        print_diagnostic(ss, note, "expansion site");
+        print_diagnostic(ss, note, ~"expansion site");
         print_macro_backtrace(cm, ei.call_site);
     }
 }
 
 fn expect<T: copy>(diag: span_handler,
-                   opt: option<T>, msg: fn() -> str) -> T {
+                   opt: option<T>, msg: fn() -> ~str) -> T {
     alt opt {
        some(t) { t }
        none { diag.handler().bug(msg()); }
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs
index d3286fbc80f..ac2829643be 100644
--- a/src/libsyntax/ext/auto_serialize.rs
+++ b/src/libsyntax/ext/auto_serialize.rs
@@ -84,15 +84,15 @@ mod syntax {
     export parse;
 }
 
-type ser_tps_map = map::hashmap<str, fn@(@ast::expr) -> ~[@ast::stmt]>;
-type deser_tps_map = map::hashmap<str, fn@() -> @ast::expr>;
+type ser_tps_map = map::hashmap<~str, fn@(@ast::expr) -> ~[@ast::stmt]>;
+type deser_tps_map = map::hashmap<~str, fn@() -> @ast::expr>;
 
 fn expand(cx: ext_ctxt,
           span: span,
           _mitem: ast::meta_item,
           in_items: ~[@ast::item]) -> ~[@ast::item] {
     fn not_auto_serialize(a: ast::attribute) -> bool {
-        attr::get_attr_name(a) != @"auto_serialize"/~
+        attr::get_attr_name(a) != @~"auto_serialize"
     }
 
     fn filter_attrs(item: @ast::item) -> @ast::item {
@@ -114,7 +114,7 @@ fn expand(cx: ext_ctxt,
           }
 
           _ {
-            cx.span_err(span, "#[auto_serialize] can only be \
+            cx.span_err(span, ~"#[auto_serialize] can only be \
                                applied to type and enum \
                                definitions");
             ~[in_item]
@@ -125,11 +125,11 @@ fn expand(cx: ext_ctxt,
 
 impl helpers for ext_ctxt {
     fn helper_path(base_path: @ast::path,
-                   helper_name: str) -> @ast::path {
+                   helper_name: ~str) -> @ast::path {
         let head = vec::init(base_path.idents);
         let tail = vec::last(base_path.idents);
         self.path(base_path.span,
-                  vec::append(head, ~[@(helper_name + "_" + *tail)]))
+                  vec::append(head, ~[@(helper_name + ~"_" + *tail)]))
     }
 
     fn path(span: span, strs: ~[ast::ident]) -> @ast::path {
@@ -154,7 +154,7 @@ impl helpers for ext_ctxt {
         let args = do vec::map(input_tys) |ty| {
             {mode: ast::expl(ast::by_ref),
              ty: ty,
-             ident: @""/~,
+             ident: @~"",
              id: self.next_id()}
         };
 
@@ -219,7 +219,7 @@ impl helpers for ext_ctxt {
                 ast::expr_alt(v, arms, ast::alt_exhaustive)))
     }
 
-    fn lit_str(span: span, s: @str/~) -> @ast::expr {
+    fn lit_str(span: span, s: @~str) -> @ast::expr {
         self.expr(
             span,
             ast::expr_vstore(
@@ -297,7 +297,7 @@ fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path,
         cx.expr(
             path.span,
             ast::expr_path(
-                cx.helper_path(path, "serialize")));
+                cx.helper_path(path, ~"serialize")));
 
     let ty_args = do vec::map(path.types) |ty| {
         let sv_stmts = ser_ty(cx, tps, ty, cx.clone(s), #ast{ __v });
@@ -354,7 +354,7 @@ fn is_vec_or_str(ty: @ast::ty) -> bool {
       // This may be wrong if the user has shadowed (!) str
       ast::ty_path(@{span: _, global: _, idents: ids,
                              rp: none, types: _}, _)
-      if ids == ~[@"str"/~] { true }
+      if ids == ~[@~"str"] { true }
       _ { false }
     }
 }
@@ -392,7 +392,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
       }
 
       ast::ty_ptr(_) | ast::ty_rptr(_, _) {
-        cx.span_err(ty.span, "cannot serialize pointer types");
+        cx.span_err(ty.span, ~"cannot serialize pointer types");
         ~[]
       }
 
@@ -414,7 +414,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
       }
 
       ast::ty_fn(_, _) {
-        cx.span_err(ty.span, "cannot serialize function types");
+        cx.span_err(ty.span, ~"cannot serialize function types");
         ~[]
       }
 
@@ -471,12 +471,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
       }
 
       ast::ty_mac(_) {
-        cx.span_err(ty.span, "cannot serialize macro types");
+        cx.span_err(ty.span, ~"cannot serialize macro types");
         ~[]
       }
 
       ast::ty_infer {
-        cx.span_err(ty.span, "cannot serialize inferred types");
+        cx.span_err(ty.span, ~"cannot serialize inferred types");
         ~[]
       }
 
@@ -503,7 +503,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
       }
 
       ast::ty_vstore(_, _) {
-        cx.span_unimpl(ty.span, "serialization for vstore types");
+        cx.span_unimpl(ty.span, ~"serialization for vstore types");
       }
 
     }
@@ -525,7 +525,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
              ty: cx.ty_fn(span,
                           ~[cx.ty_path(span, ~[tp.ident], ~[])],
                           cx.ty_nil(span)),
-             ident: @("__s" + *tp.ident),
+             ident: @(~"__s" + *tp.ident),
              id: cx.next_id()});
 
     #debug["tp_inputs = %?", tp_inputs];
@@ -533,12 +533,12 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
 
     let ser_inputs: ~[ast::arg] =
         vec::append(~[{mode: ast::expl(ast::by_ref),
-                      ty: cx.ty_path(span, ~[@"__S"/~], ~[]),
-                      ident: @"__s"/~,
+                      ty: cx.ty_path(span, ~[@~"__S"], ~[]),
+                      ident: @~"__s",
                       id: cx.next_id()},
                      {mode: ast::expl(ast::by_ref),
                       ty: v_ty,
-                      ident: @"__v"/~,
+                      ident: @~"__v",
                       id: cx.next_id()}],
                     tp_inputs);
 
@@ -556,12 +556,12 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
 
     let ser_bnds = @~[
         ast::bound_trait(cx.ty_path(span,
-                                    ~[@"std"/~, @"serialization"/~,
-                                     @"serializer"/~],
+                                    ~[@~"std", @~"serialization",
+                                     @~"serializer"],
                                     ~[]))];
 
     let ser_tps: ~[ast::ty_param] =
-        vec::append(~[{ident: @"__S"/~,
+        vec::append(~[{ident: @~"__S",
                       id: cx.next_id(),
                       bounds: ser_bnds}],
                     vec::map(tps, |tp| cx.clone_ty_param(tp)));
@@ -573,7 +573,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
     let ser_blk = cx.blk(span,
                          f(cx, tps_map, #ast{ __s }, #ast{ __v }));
 
-    @{ident: @("serialize_" + *name),
+    @{ident: @(~"serialize_" + *name),
       attrs: ~[],
       id: cx.next_id(),
       node: ast::item_fn({inputs: ser_inputs,
@@ -598,7 +598,7 @@ fn deser_path(cx: ext_ctxt, tps: deser_tps_map, path: @ast::path,
         cx.expr(
             path.span,
             ast::expr_path(
-                cx.helper_path(path, "deserialize")));
+                cx.helper_path(path, ~"deserialize")));
 
     let ty_args = do vec::map(path.types) |ty| {
         let dv_expr = deser_ty(cx, tps, ty, cx.clone(d));
@@ -726,7 +726,7 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
       }
 
       ast::ty_vstore(_, _) {
-        cx.span_unimpl(ty.span, "deserialization for vstore types");
+        cx.span_unimpl(ty.span, ~"deserialization for vstore types");
       }
     }
 }
@@ -746,15 +746,15 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
              ty: cx.ty_fn(span,
                           ~[],
                           cx.ty_path(span, ~[tp.ident], ~[])),
-             ident: @("__d" + *tp.ident),
+             ident: @(~"__d" + *tp.ident),
              id: cx.next_id()});
 
     #debug["tp_inputs = %?", tp_inputs];
 
     let deser_inputs: ~[ast::arg] =
         vec::append(~[{mode: ast::expl(ast::by_ref),
-                      ty: cx.ty_path(span, ~[@"__D"/~], ~[]),
-                      ident: @"__d"/~,
+                      ty: cx.ty_path(span, ~[@~"__D"], ~[]),
+                      ident: @~"__d",
                       id: cx.next_id()}],
                     tp_inputs);
 
@@ -772,11 +772,11 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
     let deser_bnds = @~[
         ast::bound_trait(cx.ty_path(
             span,
-            ~[@"std"/~, @"serialization"/~, @"deserializer"/~],
+            ~[@~"std", @~"serialization", @~"deserializer"],
             ~[]))];
 
     let deser_tps: ~[ast::ty_param] =
-        vec::append(~[{ident: @"__D"/~,
+        vec::append(~[{ident: @~"__D",
                       id: cx.next_id(),
                       bounds: deser_bnds}],
                     vec::map(tps, |tp| {
@@ -788,7 +788,7 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
 
     let deser_blk = cx.expr_blk(f(cx, tps_map, #ast(expr){__d}));
 
-    @{ident: @("deserialize_" + *name),
+    @{ident: @(~"deserialize_" + *name),
       attrs: ~[],
       id: cx.next_id(),
       node: ast::item_fn({inputs: deser_inputs,
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 7b08b18596e..b7d17a9d652 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -42,47 +42,47 @@ enum syntax_extension {
 
 // A temporary hard-coded map of methods for expanding syntax extension
 // AST nodes into full ASTs
-fn syntax_expander_table() -> hashmap<str, syntax_extension> {
+fn syntax_expander_table() -> hashmap<~str, syntax_extension> {
     fn builtin(f: syntax_expander_) -> syntax_extension
         {normal({expander: f, span: none})}
     fn builtin_item_tt(f: syntax_expander_tt_item_) -> syntax_extension {
         item_tt({expander: f, span: none})
     }
     let syntax_expanders = str_hash::<syntax_extension>();
-    syntax_expanders.insert("macro",
+    syntax_expanders.insert(~"macro",
                             macro_defining(ext::simplext::add_new_extension));
-    syntax_expanders.insert("macro_rules",
+    syntax_expanders.insert(~"macro_rules",
                             builtin_item_tt(
                                 ext::tt::macro_rules::add_new_extension));
-    syntax_expanders.insert("fmt", builtin(ext::fmt::expand_syntax_ext));
-    syntax_expanders.insert("auto_serialize",
+    syntax_expanders.insert(~"fmt", builtin(ext::fmt::expand_syntax_ext));
+    syntax_expanders.insert(~"auto_serialize",
                             item_decorator(ext::auto_serialize::expand));
-    syntax_expanders.insert("env", builtin(ext::env::expand_syntax_ext));
-    syntax_expanders.insert("concat_idents",
+    syntax_expanders.insert(~"env", builtin(ext::env::expand_syntax_ext));
+    syntax_expanders.insert(~"concat_idents",
                             builtin(ext::concat_idents::expand_syntax_ext));
-    syntax_expanders.insert("ident_to_str",
+    syntax_expanders.insert(~"ident_to_str",
                             builtin(ext::ident_to_str::expand_syntax_ext));
-    syntax_expanders.insert("log_syntax",
+    syntax_expanders.insert(~"log_syntax",
                             builtin(ext::log_syntax::expand_syntax_ext));
-    syntax_expanders.insert("ast",
+    syntax_expanders.insert(~"ast",
                             builtin(ext::qquote::expand_ast));
-    syntax_expanders.insert("line",
+    syntax_expanders.insert(~"line",
                             builtin(ext::source_util::expand_line));
-    syntax_expanders.insert("col",
+    syntax_expanders.insert(~"col",
                             builtin(ext::source_util::expand_col));
-    syntax_expanders.insert("file",
+    syntax_expanders.insert(~"file",
                             builtin(ext::source_util::expand_file));
-    syntax_expanders.insert("stringify",
+    syntax_expanders.insert(~"stringify",
                             builtin(ext::source_util::expand_stringify));
-    syntax_expanders.insert("include",
+    syntax_expanders.insert(~"include",
                             builtin(ext::source_util::expand_include));
-    syntax_expanders.insert("include_str",
+    syntax_expanders.insert(~"include_str",
                             builtin(ext::source_util::expand_include_str));
-    syntax_expanders.insert("include_bin",
+    syntax_expanders.insert(~"include_bin",
                             builtin(ext::source_util::expand_include_bin));
-    syntax_expanders.insert("mod",
+    syntax_expanders.insert(~"mod",
                             builtin(ext::source_util::expand_mod));
-    syntax_expanders.insert("proto",
+    syntax_expanders.insert(~"proto",
                             builtin_item_tt(ext::pipes::expand_proto));
     ret syntax_expanders;
 }
@@ -98,11 +98,11 @@ iface ext_ctxt {
     fn mod_path() -> ~[ast::ident];
     fn bt_push(ei: codemap::expn_info_);
     fn bt_pop();
-    fn span_fatal(sp: span, msg: str) -> !;
-    fn span_err(sp: span, msg: str);
-    fn span_unimpl(sp: span, msg: str) -> !;
-    fn span_bug(sp: span, msg: str) -> !;
-    fn bug(msg: str) -> !;
+    fn span_fatal(sp: span, msg: ~str) -> !;
+    fn span_err(sp: span, msg: ~str);
+    fn span_unimpl(sp: span, msg: ~str) -> !;
+    fn span_bug(sp: span, msg: ~str) -> !;
+    fn bug(msg: ~str) -> !;
     fn next_id() -> ast::node_id;
 }
 
@@ -137,26 +137,26 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
               some(@expanded_from({call_site: {expn_info: prev, _}, _})) {
                 self.backtrace = prev
               }
-              _ { self.bug("tried to pop without a push"); }
+              _ { self.bug(~"tried to pop without a push"); }
             }
         }
-        fn span_fatal(sp: span, msg: str) -> ! {
+        fn span_fatal(sp: span, msg: ~str) -> ! {
             self.print_backtrace();
             self.parse_sess.span_diagnostic.span_fatal(sp, msg);
         }
-        fn span_err(sp: span, msg: str) {
+        fn span_err(sp: span, msg: ~str) {
             self.print_backtrace();
             self.parse_sess.span_diagnostic.span_err(sp, msg);
         }
-        fn span_unimpl(sp: span, msg: str) -> ! {
+        fn span_unimpl(sp: span, msg: ~str) -> ! {
             self.print_backtrace();
             self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
         }
-        fn span_bug(sp: span, msg: str) -> ! {
+        fn span_bug(sp: span, msg: ~str) -> ! {
             self.print_backtrace();
             self.parse_sess.span_diagnostic.span_bug(sp, msg);
         }
-        fn bug(msg: str) -> ! {
+        fn bug(msg: ~str) -> ! {
             self.print_backtrace();
             self.parse_sess.span_diagnostic.handler().bug(msg);
         }
@@ -173,7 +173,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
     ret imp as ext_ctxt
 }
 
-fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: str) -> str {
+fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ~str {
     alt expr.node {
       ast::expr_lit(l) {
         alt l.node {
@@ -185,7 +185,7 @@ fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: str) -> str {
     }
 }
 
-fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: str) -> ast::ident {
+fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ast::ident {
     alt expr.node {
       ast::expr_path(p) {
         if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
@@ -197,12 +197,12 @@ fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: str) -> ast::ident {
 }
 
 fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                       min: uint, name: str) -> ~[@ast::expr] {
+                       min: uint, name: ~str) -> ~[@ast::expr] {
     ret get_mac_args(cx, sp, arg, min, none, name);
 }
 
 fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
-                min: uint, max: option<uint>, name: str) -> ~[@ast::expr] {
+                min: uint, max: option<uint>, name: ~str) -> ~[@ast::expr] {
     alt arg {
       some(expr) {
         alt expr.node {
@@ -235,7 +235,7 @@ fn get_mac_body(cx: ext_ctxt, sp: span, args: ast::mac_body)
 {
     alt (args) {
       some(body) {body}
-      none {cx.span_fatal(sp, "missing macro body")}
+      none {cx.span_fatal(sp, ~"missing macro body")}
     }
 }
 
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index a2ed799819c..5eca1e8e17c 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -77,11 +77,11 @@ fn mk_fixed_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) ->
    @ast::expr {
     mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::vstore_fixed(none))
 }
-fn mk_base_str(cx: ext_ctxt, sp: span, s: str) -> @ast::expr {
+fn mk_base_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr {
     let lit = ast::lit_str(@s);
     ret mk_lit(cx, sp, lit);
 }
-fn mk_uniq_str(cx: ext_ctxt, sp: span, s: str) -> @ast::expr {
+fn mk_uniq_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr {
     mk_vstore_e(cx, sp, mk_base_str(cx, sp, s), ast::vstore_uniq)
 }
 
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index f5d13df75d6..b3545cc635d 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -2,10 +2,10 @@ import base::*;
 
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                      _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args_no_max(cx,sp,arg,1u,"concat_idents");
-    let mut res = "";
+    let args = get_mac_args_no_max(cx,sp,arg,1u,~"concat_idents");
+    let mut res = ~"";
     for args.each |e| {
-        res += *expr_to_ident(cx, e, "expected an ident");
+        res += *expr_to_ident(cx, e, ~"expected an ident");
     }
 
     ret @{id: cx.next_id(),
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 26f5bbc7715..e03fc2ce47b 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -10,14 +10,14 @@ export expand_syntax_ext;
 
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                      _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), "env");
+    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), ~"env");
 
     // FIXME (#2248): if this was more thorough it would manufacture an
     // option<str> rather than just an maybe-empty string.
 
-    let var = expr_to_str(cx, args[0], "#env requires a string");
+    let var = expr_to_str(cx, args[0], ~"#env requires a string");
     alt os::getenv(var) {
-      option::none { ret mk_uniq_str(cx, sp, ""); }
+      option::none { ret mk_uniq_str(cx, sp, ~""); }
       option::some(s) { ret mk_uniq_str(cx, sp, s); }
     }
 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 05c7e6f1c5a..2a41afc9743 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -10,7 +10,7 @@ import parse::{parser, parse_expr_from_source_str, new_parser_from_tt};
 
 import codemap::{span, expanded_from};
 
-fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
+fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
                e: expr_, s: span, fld: ast_fold,
                orig: fn@(expr_, span, ast_fold) -> (expr_, span))
     -> (expr_, span)
@@ -54,7 +54,7 @@ fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
                   }
                   some(item_tt(*)) {
                     cx.span_fatal(pth.span,
-                                  "cannot use item macros in this context");
+                                  ~"cannot use item macros in this context");
                   }
                 }
               }
@@ -91,14 +91,14 @@ fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
 
                 }
               }
-              _ { cx.span_bug(mac.span, "naked syntactic bit") }
+              _ { cx.span_bug(mac.span, ~"naked syntactic bit") }
             }
           }
           _ { orig(e, s, fld) }
         };
 }
 
-fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
+fn expand_mod_items(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
                     module: ast::_mod, fld: ast_fold,
                     orig: fn@(ast::_mod, ast_fold) -> ast::_mod)
     -> ast::_mod
@@ -133,7 +133,7 @@ fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
 }
 
 /* record module we enter for `#mod` */
-fn expand_item(exts: hashmap<str, syntax_extension>,
+fn expand_item(exts: hashmap<~str, syntax_extension>,
                cx: ext_ctxt, &&it: @ast::item, fld: ast_fold,
                orig: fn@(&&@ast::item, ast_fold) -> option<@ast::item>)
     -> option<@ast::item>
@@ -160,7 +160,7 @@ fn expand_item(exts: hashmap<str, syntax_extension>,
     }
 }
 
-fn expand_item_mac(exts: hashmap<str, syntax_extension>,
+fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
                    cx: ext_ctxt, &&it: @ast::item,
                    fld: ast_fold) -> option<@ast::item> {
     alt it.node {
@@ -179,7 +179,7 @@ fn expand_item_mac(exts: hashmap<str, syntax_extension>,
             let maybe_it = alt expanded {
               mr_item(it) { fld.fold_item(it) }
               mr_expr(e) { cx.span_fatal(pth.span,
-                                         "expr macro in item position: " +
+                                         ~"expr macro in item position: " +
                                          *extname) }
               mr_def(mdef) {
                 exts.insert(*mdef.ident, mdef.ext);
@@ -194,7 +194,7 @@ fn expand_item_mac(exts: hashmap<str, syntax_extension>,
         }
       }
       _ {
-        cx.span_bug(it.span, "invalid item macro invocation");
+        cx.span_bug(it.span, ~"invalid item macro invocation");
       }
     }
 }
@@ -209,9 +209,9 @@ fn new_span(cx: ext_ctxt, sp: span) -> span {
 // is substantially more mature, these should move from here, into a
 // compiled part of libcore at very least.
 
-fn core_macros() -> str {
+fn core_macros() -> ~str {
     ret
-"{
+~"{
     #macro([#error[f, ...], log(core::error, #fmt[f, ...])]);
     #macro([#warn[f, ...], log(core::warn, #fmt[f, ...])]);
     #macro([#info[f, ...], log(core::info, #fmt[f, ...])]);
@@ -231,7 +231,7 @@ fn expand_crate(parse_sess: parse::parse_sess,
           new_span: |a|new_span(cx, a)
           with *afp};
     let f = make_fold(f_pre);
-    let cm = parse_expr_from_source_str("<core-macros>",
+    let cm = parse_expr_from_source_str(~"<core-macros>",
                                         @core_macros(),
                                         cfg,
                                         parse_sess);
diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs
index 16357ec7e26..53b5db8b6a6 100644
--- a/src/libsyntax/ext/fmt.rs
+++ b/src/libsyntax/ext/fmt.rs
@@ -13,17 +13,17 @@ export expand_syntax_ext;
 
 fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                      _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args_no_max(cx, sp, arg, 1u, "fmt");
+    let args = get_mac_args_no_max(cx, sp, arg, 1u, ~"fmt");
     let fmt =
         expr_to_str(cx, args[0],
-                    "first argument to #fmt must be a string literal.");
+                    ~"first argument to #fmt must be a string literal.");
     let fmtspan = args[0].span;
     #debug("Format string:");
     log(debug, fmt);
-    fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: str) -> ! {
+    fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: ~str) -> ! {
         cx.span_fatal(sp, msg);
     }
-    let parse_fmt_err = fn@(s: str) -> ! {
+    let parse_fmt_err = fn@(s: ~str) -> ! {
         parse_fmt_err_(cx, fmtspan, s)
     };
     let pieces = parse_fmt_string(fmt, parse_fmt_err);
@@ -38,7 +38,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
                   pieces: ~[piece], args: ~[@ast::expr])
    -> @ast::expr {
     fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> ~[ast::ident] {
-        ret ~[@"extfmt"/~, @"rt"/~, ident];
+        ret ~[@~"extfmt", @~"rt", ident];
     }
     fn make_rt_path_expr(cx: ext_ctxt, sp: span,
                          ident: ast::ident) -> @ast::expr {
@@ -50,14 +50,14 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
 
     fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
         fn make_flags(cx: ext_ctxt, sp: span, flags: ~[flag]) -> @ast::expr {
-            let mut tmp_expr = make_rt_path_expr(cx, sp, @"flag_none"/~);
+            let mut tmp_expr = make_rt_path_expr(cx, sp, @~"flag_none");
             for flags.each |f| {
                 let fstr = alt f {
-                  flag_left_justify { "flag_left_justify" }
-                  flag_left_zero_pad { "flag_left_zero_pad" }
-                  flag_space_for_sign { "flag_space_for_sign" }
-                  flag_sign_always { "flag_sign_always" }
-                  flag_alternate { "flag_alternate" }
+                  flag_left_justify { ~"flag_left_justify" }
+                  flag_left_zero_pad { ~"flag_left_zero_pad" }
+                  flag_space_for_sign { ~"flag_space_for_sign" }
+                  flag_sign_always { ~"flag_sign_always" }
+                  flag_alternate { ~"flag_alternate" }
                 };
                 tmp_expr = mk_binary(cx, sp, ast::bitor, tmp_expr,
                                      make_rt_path_expr(cx, sp, @fstr));
@@ -67,15 +67,15 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
         fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr {
             alt cnt {
               count_implied {
-                ret make_rt_path_expr(cx, sp, @"count_implied"/~);
+                ret make_rt_path_expr(cx, sp, @~"count_implied");
               }
               count_is(c) {
                 let count_lit = mk_int(cx, sp, c);
-                let count_is_path = make_path_vec(cx, @"count_is"/~);
+                let count_is_path = make_path_vec(cx, @~"count_is");
                 let count_is_args = ~[count_lit];
                 ret mk_call(cx, sp, count_is_path, count_is_args);
               }
-              _ { cx.span_unimpl(sp, "unimplemented #fmt conversion"); }
+              _ { cx.span_unimpl(sp, ~"unimplemented #fmt conversion"); }
             }
         }
         fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr {
@@ -83,13 +83,13 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
             alt t {
               ty_hex(c) {
                 alt c {
-                  case_upper { rt_type = "ty_hex_upper"; }
-                  case_lower { rt_type = "ty_hex_lower"; }
+                  case_upper { rt_type = ~"ty_hex_upper"; }
+                  case_lower { rt_type = ~"ty_hex_lower"; }
                 }
               }
-              ty_bits { rt_type = "ty_bits"; }
-              ty_octal { rt_type = "ty_octal"; }
-              _ { rt_type = "ty_default"; }
+              ty_bits { rt_type = ~"ty_bits"; }
+              ty_octal { rt_type = ~"ty_octal"; }
+              _ { rt_type = ~"ty_default"; }
             }
             ret make_rt_path_expr(cx, sp, @rt_type);
         }
@@ -97,10 +97,10 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
                          width_expr: @ast::expr, precision_expr: @ast::expr,
                          ty_expr: @ast::expr) -> @ast::expr {
             ret mk_rec_e(cx, sp,
-                         ~[{ident: @"flags"/~, ex: flags_expr},
-                          {ident: @"width"/~, ex: width_expr},
-                          {ident: @"precision"/~, ex: precision_expr},
-                          {ident: @"ty"/~, ex: ty_expr}]);
+                         ~[{ident: @~"flags", ex: flags_expr},
+                          {ident: @~"width", ex: width_expr},
+                          {ident: @~"precision", ex: precision_expr},
+                          {ident: @~"ty", ex: ty_expr}]);
         }
         let rt_conv_flags = make_flags(cx, sp, cnv.flags);
         let rt_conv_width = make_count(cx, sp, cnv.width);
@@ -109,9 +109,9 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
         ret make_conv_rec(cx, sp, rt_conv_flags, rt_conv_width,
                           rt_conv_precision, rt_conv_ty);
     }
-    fn make_conv_call(cx: ext_ctxt, sp: span, conv_type: str, cnv: conv,
+    fn make_conv_call(cx: ext_ctxt, sp: span, conv_type: ~str, cnv: conv,
                       arg: @ast::expr) -> @ast::expr {
-        let fname = "conv_" + conv_type;
+        let fname = ~"conv_" + conv_type;
         let path = make_path_vec(cx, @fname);
         let cnv_expr = make_rt_conv_expr(cx, sp, cnv);
         let args = ~[cnv_expr, arg];
@@ -131,7 +131,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
               _ { ret false; }
             }
         }
-        let unsupported = "conversion not supported in #fmt string";
+        let unsupported = ~"conversion not supported in #fmt string";
         alt cnv.param {
           option::none { }
           _ { cx.span_unimpl(sp, unsupported); }
@@ -142,15 +142,15 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
               flag_sign_always {
                 if !is_signed_type(cnv) {
                     cx.span_fatal(sp,
-                                  "+ flag only valid in " +
-                                      "signed #fmt conversion");
+                                  ~"+ flag only valid in " +
+                                      ~"signed #fmt conversion");
                 }
               }
               flag_space_for_sign {
                 if !is_signed_type(cnv) {
                     cx.span_fatal(sp,
-                                  "space flag only valid in " +
-                                      "signed #fmt conversions");
+                                  ~"space flag only valid in " +
+                                      ~"signed #fmt conversions");
                 }
               }
               flag_left_zero_pad { }
@@ -168,27 +168,27 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
           _ { cx.span_unimpl(sp, unsupported); }
         }
         alt cnv.ty {
-          ty_str { ret make_conv_call(cx, arg.span, "str", cnv, arg); }
+          ty_str { ret make_conv_call(cx, arg.span, ~"str", cnv, arg); }
           ty_int(sign) {
             alt sign {
-              signed { ret make_conv_call(cx, arg.span, "int", cnv, arg); }
+              signed { ret make_conv_call(cx, arg.span, ~"int", cnv, arg); }
               unsigned {
-                ret make_conv_call(cx, arg.span, "uint", cnv, arg);
+                ret make_conv_call(cx, arg.span, ~"uint", cnv, arg);
               }
             }
           }
-          ty_bool { ret make_conv_call(cx, arg.span, "bool", cnv, arg); }
-          ty_char { ret make_conv_call(cx, arg.span, "char", cnv, arg); }
-          ty_hex(_) { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_bits { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_octal { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_float { ret make_conv_call(cx, arg.span, "float", cnv, arg); }
-          ty_poly { ret make_conv_call(cx, arg.span, "poly", cnv, arg); }
+          ty_bool { ret make_conv_call(cx, arg.span, ~"bool", cnv, arg); }
+          ty_char { ret make_conv_call(cx, arg.span, ~"char", cnv, arg); }
+          ty_hex(_) { ret make_conv_call(cx, arg.span, ~"uint", cnv, arg); }
+          ty_bits { ret make_conv_call(cx, arg.span, ~"uint", cnv, arg); }
+          ty_octal { ret make_conv_call(cx, arg.span, ~"uint", cnv, arg); }
+          ty_float { ret make_conv_call(cx, arg.span, ~"float", cnv, arg); }
+          ty_poly { ret make_conv_call(cx, arg.span, ~"poly", cnv, arg); }
         }
     }
     fn log_conv(c: conv) {
         alt c.param {
-          some(p) { log(debug, "param: " + int::to_str(p, 10u)); }
+          some(p) { log(debug, ~"param: " + int::to_str(p, 10u)); }
           _ { #debug("param: none"); }
         }
         for c.flags.each |f| {
@@ -202,20 +202,20 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
         }
         alt c.width {
           count_is(i) { log(debug,
-                                 "width: count is " + int::to_str(i, 10u)); }
+                                 ~"width: count is " + int::to_str(i, 10u)); }
           count_is_param(i) {
             log(debug,
-                     "width: count is param " + int::to_str(i, 10u));
+                     ~"width: count is param " + int::to_str(i, 10u));
           }
           count_is_next_param { #debug("width: count is next param"); }
           count_implied { #debug("width: count is implied"); }
         }
         alt c.precision {
           count_is(i) { log(debug,
-                                 "prec: count is " + int::to_str(i, 10u)); }
+                                 ~"prec: count is " + int::to_str(i, 10u)); }
           count_is_param(i) {
             log(debug,
-                     "prec: count is param " + int::to_str(i, 10u));
+                     ~"prec: count is param " + int::to_str(i, 10u));
           }
           count_is_next_param { #debug("prec: count is next param"); }
           count_implied { #debug("prec: count is implied"); }
@@ -255,8 +255,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
             n += 1u;
             if n >= nargs {
                 cx.span_fatal(sp,
-                              "not enough arguments to #fmt " +
-                                  "for the given format string");
+                              ~"not enough arguments to #fmt " +
+                                  ~"for the given format string");
             }
             #debug("Building conversion:");
             log_conv(conv);
@@ -275,7 +275,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
     }
 
     let arg_vec = mk_fixed_vec_e(cx, fmt_sp, piece_exprs);
-    ret mk_call(cx, fmt_sp, ~[@"str"/~, @"concat"/~], ~[arg_vec]);
+    ret mk_call(cx, fmt_sp, ~[@~"str", @~"concat"], ~[arg_vec]);
 }
 //
 // Local Variables:
diff --git a/src/libsyntax/ext/ident_to_str.rs b/src/libsyntax/ext/ident_to_str.rs
index 233089b9874..54f97912f3d 100644
--- a/src/libsyntax/ext/ident_to_str.rs
+++ b/src/libsyntax/ext/ident_to_str.rs
@@ -4,8 +4,8 @@ import option;
 
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                      _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),"ident_to_str");
+    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),~"ident_to_str");
 
     ret mk_uniq_str(cx, sp, *expr_to_ident(cx, args[0u],
-                                           "expected an ident"));
+                                           ~"expected an ident"));
 }
diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs
index 06941fc5d38..70d83b164c8 100644
--- a/src/libsyntax/ext/log_syntax.rs
+++ b/src/libsyntax/ext/log_syntax.rs
@@ -3,11 +3,11 @@ import io::writer_util;
 
 fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                      _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args_no_max(cx,sp,arg,0u,"log_syntax");
+    let args = get_mac_args_no_max(cx,sp,arg,0u,~"log_syntax");
     cx.print_backtrace();
     io::stdout().write_line(
         str::connect(vec::map(args,
-                              |&&ex| print::pprust::expr_to_str(ex)), ", ")
+                              |&&ex| print::pprust::expr_to_str(ex)), ~", ")
     );
 
     //trivial expression
diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs
index fda557f15b6..cb0abe9e8e5 100644
--- a/src/libsyntax/ext/pipes/ast_builder.rs
+++ b/src/libsyntax/ext/pipes/ast_builder.rs
@@ -7,7 +7,7 @@ import ast::{ident, node_id};
 import codemap::span;
 import ext::base::mk_ctxt;
 
-fn ident(s: str) -> ast::ident {
+fn ident(s: ~str) -> ast::ident {
     @(copy s)
 }
 
diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs
index 4a3c9e4afec..cf29c409e6b 100644
--- a/src/libsyntax/ext/pipes/parse_proto.rs
+++ b/src/libsyntax/ext/pipes/parse_proto.rs
@@ -28,8 +28,8 @@ impl proto_parser for parser {
         };
         self.bump();
         let dir = alt dir {
-          @"send"/~ { send }
-          @"recv"/~ { recv }
+          @~"send" { send }
+          @~"recv" { recv }
           _ { fail }
         };
 
diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs
index e55900968e8..9406458d1ac 100644
--- a/src/libsyntax/ext/pipes/pipec.rs
+++ b/src/libsyntax/ext/pipes/pipec.rs
@@ -23,10 +23,10 @@ enum direction {
 }
 
 impl of to_str for direction {
-    fn to_str() -> str {
+    fn to_str() -> ~str {
         alt self {
-          send { "send" }
-          recv { "recv" }
+          send { ~"send" }
+          recv { ~"recv" }
         }
     }
 }
@@ -68,35 +68,35 @@ impl methods for message {
           message(id, tys, this, next, next_tys) {
             let next = this.proto.get_state(next);
             assert next_tys.len() == next.ty_params.len();
-            let arg_names = tys.mapi(|i, _ty| @("x_" + i.to_str()));
+            let arg_names = tys.mapi(|i, _ty| @(~"x_" + i.to_str()));
 
             let args_ast = (arg_names, tys).map(
                 |n, t| cx.arg_mode(n, t, ast::by_copy)
             );
 
             let args_ast = vec::append(
-                ~[cx.arg_mode(@"pipe"/~,
+                ~[cx.arg_mode(@~"pipe",
                               cx.ty_path(path(this.data_name())
                                         .add_tys(cx.ty_vars(this.ty_params))),
                               ast::by_copy)],
                 args_ast);
 
             let pat = alt (this.dir, next.dir) {
-              (send, send) { "(c, s)" }
-              (send, recv) { "(s, c)" }
-              (recv, send) { "(s, c)" }
-              (recv, recv) { "(c, s)" }
+              (send, send) { ~"(c, s)" }
+              (send, recv) { ~"(s, c)" }
+              (recv, send) { ~"(s, c)" }
+              (recv, recv) { ~"(c, s)" }
             };
 
             let mut body = #fmt("{ let %s = pipes::entangle();\n", pat);
             body += #fmt("let message = %s::%s(%s);\n",
                          *this.proto.name,
                          *self.name(),
-                         str::connect(vec::append_one(arg_names, @"s"/~)
+                         str::connect(vec::append_one(arg_names, @~"s")
                                       .map(|x| *x),
-                                      ", "));
+                                      ~", "));
             body += #fmt("pipes::send(pipe, message);\n");
-            body += "c }";
+            body += ~"c }";
 
             let body = cx.parse_expr(body);
 
@@ -127,7 +127,7 @@ impl methods for state {
         self.messages.push(message(name, data, self, next, next_tys));
     }
 
-    fn filename() -> str {
+    fn filename() -> ~str {
         (*self).proto.filename()
     }
 
@@ -158,8 +158,8 @@ impl methods for state {
             let next_name = next.data_name();
 
             let dir = alt this.dir {
-              send { @"server"/~ }
-              recv { @"client"/~ }
+              send { @~"server" }
+              recv { @~"client" }
             };
 
             let v = cx.variant(name,
@@ -190,7 +190,7 @@ impl methods for state {
                   cx.item_ty_poly(
                       self.data_name(),
                       cx.ty_path(
-                          (@"pipes"/~ + @(dir.to_str() + "_packet"/~))
+                          (@~"pipes" + @(dir.to_str() + ~"_packet"))
                           .add_ty(cx.ty_path(
                               (self.proto.name + self.data_name())
                               .add_tys(cx.ty_vars(self.ty_params))))),
@@ -236,17 +236,17 @@ impl methods for protocol {
         state
     }
 
-    fn filename() -> str {
-        "proto://" + *self.name
+    fn filename() -> ~str {
+        ~"proto://" + *self.name
     }
 
     fn gen_init(cx: ext_ctxt) -> @ast::item {
         let start_state = self.states[0];
 
         let body = alt start_state.dir {
-          send { cx.parse_expr("pipes::entangle()") }
+          send { cx.parse_expr(~"pipes::entangle()") }
           recv {
-            cx.parse_expr("{ \
+            cx.parse_expr(~"{ \
                            let (s, c) = pipes::entangle(); \
                            (c, s) \
                            }")
@@ -281,10 +281,10 @@ impl methods for protocol {
         }
 
         vec::push(items,
-                  cx.item_mod(@"client"/~,
+                  cx.item_mod(@~"client",
                               client_states));
         vec::push(items,
-                  cx.item_mod(@"server"/~,
+                  cx.item_mod(@~"server",
                               server_states));
 
         cx.item_mod(self.name, items)
@@ -293,49 +293,49 @@ impl methods for protocol {
 
 iface to_source {
     // Takes a thing and generates a string containing rust code for it.
-    fn to_source() -> str;
+    fn to_source() -> ~str;
 }
 
 impl of to_source for @ast::item {
-    fn to_source() -> str {
+    fn to_source() -> ~str {
         item_to_str(self)
     }
 }
 
 impl of to_source for ~[@ast::item] {
-    fn to_source() -> str {
-        str::connect(self.map(|i| i.to_source()), "\n\n")
+    fn to_source() -> ~str {
+        str::connect(self.map(|i| i.to_source()), ~"\n\n")
     }
 }
 
 impl of to_source for @ast::ty {
-    fn to_source() -> str {
+    fn to_source() -> ~str {
         ty_to_str(self)
     }
 }
 
 impl of to_source for ~[@ast::ty] {
-    fn to_source() -> str {
-        str::connect(self.map(|i| i.to_source()), ", ")
+    fn to_source() -> ~str {
+        str::connect(self.map(|i| i.to_source()), ~", ")
     }
 }
 
 impl of to_source for ~[ast::ty_param] {
-    fn to_source() -> str {
+    fn to_source() -> ~str {
         pprust::typarams_to_str(self)
     }
 }
 
 impl of to_source for @ast::expr {
-    fn to_source() -> str {
+    fn to_source() -> ~str {
         pprust::expr_to_str(self)
     }
 }
 
 impl parse_utils for ext_ctxt {
-    fn parse_item(s: str) -> @ast::item {
+    fn parse_item(s: ~str) -> @ast::item {
         let res = parse::parse_item_from_source_str(
-            "***protocol expansion***",
+            ~"***protocol expansion***",
             @(copy s),
             self.cfg(),
             ~[],
@@ -350,9 +350,9 @@ impl parse_utils for ext_ctxt {
         }
     }
 
-    fn parse_expr(s: str) -> @ast::expr {
+    fn parse_expr(s: ~str) -> @ast::expr {
         parse::parse_expr_from_source_str(
-            "***protocol expansion***",
+            ~"***protocol expansion***",
             @(copy s),
             self.cfg(),
             self.parse_sess())
diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs
index 77e67f6da20..4fd896e65bb 100644
--- a/src/libsyntax/ext/qquote.rs
+++ b/src/libsyntax/ext/qquote.rs
@@ -16,7 +16,7 @@ import codemap::span;
 type aq_ctxt = @{lo: uint,
                  gather: dvec<{lo: uint, hi: uint,
                                e: @ast::expr,
-                               constr: str}>};
+                               constr: ~str}>};
 enum fragment {
     from_expr(@ast::expr),
     from_ty(@ast::ty)
@@ -27,7 +27,7 @@ iface qq_helper {
     fn visit(aq_ctxt, vt<aq_ctxt>);
     fn extract_mac() -> option<ast::mac_>;
     fn mk_parse_fn(ext_ctxt,span) -> @ast::expr;
-    fn get_fold_fn() -> str;
+    fn get_fold_fn() -> ~str;
 }
 
 impl of qq_helper for @ast::crate {
@@ -36,9 +36,9 @@ impl of qq_helper for @ast::crate {
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
         mk_path(cx, sp,
-                ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_crate"/~])
+                ~[@~"syntax", @~"ext", @~"qquote", @~"parse_crate"])
     }
-    fn get_fold_fn() -> str {"fold_crate"}
+    fn get_fold_fn() -> ~str {~"fold_crate"}
 }
 impl of qq_helper for @ast::expr {
     fn span() -> span {self.span}
@@ -51,9 +51,9 @@ impl of qq_helper for @ast::expr {
     }
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
         mk_path(cx, sp,
-                ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_expr"/~])
+                ~[@~"syntax", @~"ext", @~"qquote", @~"parse_expr"])
     }
-    fn get_fold_fn() -> str {"fold_expr"}
+    fn get_fold_fn() -> ~str {~"fold_expr"}
 }
 impl of qq_helper for @ast::ty {
     fn span() -> span {self.span}
@@ -66,9 +66,9 @@ impl of qq_helper for @ast::ty {
     }
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
         mk_path(cx, sp,
-                ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_ty"/~])
+                ~[@~"syntax", @~"ext", @~"qquote", @~"parse_ty"])
     }
-    fn get_fold_fn() -> str {"fold_ty"}
+    fn get_fold_fn() -> ~str {~"fold_ty"}
 }
 impl of qq_helper for @ast::item {
     fn span() -> span {self.span}
@@ -76,9 +76,9 @@ impl of qq_helper for @ast::item {
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
         mk_path(cx, sp,
-                ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_item"/~])
+                ~[@~"syntax", @~"ext", @~"qquote", @~"parse_item"])
     }
-    fn get_fold_fn() -> str {"fold_item"}
+    fn get_fold_fn() -> ~str {~"fold_item"}
 }
 impl of qq_helper for @ast::stmt {
     fn span() -> span {self.span}
@@ -86,24 +86,24 @@ impl of qq_helper for @ast::stmt {
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
         mk_path(cx, sp,
-                ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_stmt"/~])
+                ~[@~"syntax", @~"ext", @~"qquote", @~"parse_stmt"])
     }
-    fn get_fold_fn() -> str {"fold_stmt"}
+    fn get_fold_fn() -> ~str {~"fold_stmt"}
 }
 impl of qq_helper for @ast::pat {
     fn span() -> span {self.span}
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
     fn extract_mac() -> option<ast::mac_> {fail}
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
-        mk_path(cx, sp, ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"parse_pat"/~])
+        mk_path(cx, sp, ~[@~"syntax", @~"ext", @~"qquote", @~"parse_pat"])
     }
-    fn get_fold_fn() -> str {"fold_pat"}
+    fn get_fold_fn() -> ~str {~"fold_pat"}
 }
 
 fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt
 {
-    let v = @{visit_expr: |node, &&cx, v| visit_aq(node, "from_expr", cx, v),
-              visit_ty: |node, &&cx, v| visit_aq(node, "from_ty", cx, v)
+    let v = @{visit_expr: |node, &&cx, v| visit_aq(node, ~"from_expr", cx, v),
+              visit_ty: |node, &&cx, v| visit_aq(node, ~"from_ty", cx, v)
               with *default_visitor()};
     let cx = @{lo:lo, gather: dvec()};
     node.visit(cx, mk_vt(v));
@@ -115,7 +115,7 @@ fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt
     ret cx;
 }
 
-fn visit_aq<T:qq_helper>(node: T, constr: str, &&cx: aq_ctxt, v: vt<aq_ctxt>)
+fn visit_aq<T:qq_helper>(node: T, constr: ~str, &&cx: aq_ctxt, v: vt<aq_ctxt>)
 {
     alt (node.extract_mac()) {
       some(mac_aq(sp, e)) {
@@ -134,35 +134,35 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
               arg: ast::mac_arg, body: ast::mac_body)
     -> @ast::expr
 {
-    let mut what = "expr";
+    let mut what = ~"expr";
     do option::iter(arg) |arg| {
         let args: ~[@ast::expr] =
             alt arg.node {
               ast::expr_vec(elts, _) { elts }
               _ {
                 ecx.span_fatal
-                    (_sp, "#ast requires arguments of the form `~[...]`.")
+                    (_sp, ~"#ast requires arguments of the form `~[...]`.")
               }
             };
         if vec::len::<@ast::expr>(args) != 1u {
-            ecx.span_fatal(_sp, "#ast requires exactly one arg");
+            ecx.span_fatal(_sp, ~"#ast requires exactly one arg");
         }
         alt (args[0].node) {
           ast::expr_path(@{idents: id, _}) if vec::len(id) == 1u
               {what = *id[0]}
-          _ {ecx.span_fatal(args[0].span, "expected an identifier");}
+          _ {ecx.span_fatal(args[0].span, ~"expected an identifier");}
         }
     }
     let body = get_mac_body(ecx,_sp,body);
 
     ret alt what {
-      "crate" {finish(ecx, body, parse_crate)}
-      "expr" {finish(ecx, body, parse_expr)}
-      "ty" {finish(ecx, body, parse_ty)}
-      "item" {finish(ecx, body, parse_item)}
-      "stmt" {finish(ecx, body, parse_stmt)}
-      "pat" {finish(ecx, body, parse_pat)}
-      _ {ecx.span_fatal(_sp, "unsupported ast type")}
+      ~"crate" {finish(ecx, body, parse_crate)}
+      ~"expr" {finish(ecx, body, parse_expr)}
+      ~"ty" {finish(ecx, body, parse_ty)}
+      ~"item" {finish(ecx, body, parse_item)}
+      ~"stmt" {finish(ecx, body, parse_stmt)}
+      ~"pat" {finish(ecx, body, parse_pat)}
+      _ {ecx.span_fatal(_sp, ~"unsupported ast type")}
     };
 }
 
@@ -175,7 +175,7 @@ fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() }
 fn parse_item(p: parser) -> @ast::item {
     alt p.parse_item(~[], ast::public) {
       some(item) { item }
-      none       { fail "parse_item: parsing an item failed"; }
+      none       { fail ~"parse_item: parsing an item failed"; }
     }
 }
 
@@ -203,7 +203,7 @@ fn finish<T: qq_helper>
         // ^^ check that the spans are non-overlapping
     }
 
-    let mut str2 = "";
+    let mut str2 = ~"";
     enum state {active, skip(uint), blank};
     let mut state = active;
     let mut i = 0u, j = 0u;
@@ -233,19 +233,19 @@ fn finish<T: qq_helper>
     let cx = ecx;
 
     let cfg_call = || mk_call_(
-        cx, sp, mk_access(cx, sp, ~[@"ext_cx"/~], @"cfg"/~), ~[]);
+        cx, sp, mk_access(cx, sp, ~[@~"ext_cx"], @~"cfg"), ~[]);
 
     let parse_sess_call = || mk_call_(
-        cx, sp, mk_access(cx, sp, ~[@"ext_cx"/~], @"parse_sess"/~), ~[]);
+        cx, sp, mk_access(cx, sp, ~[@~"ext_cx"], @~"parse_sess"), ~[]);
 
     let pcall = mk_call(cx,sp,
-                       ~[@"syntax"/~, @"parse"/~, @"parser"/~,
-                        @"parse_from_source_str"/~],
+                       ~[@~"syntax", @~"parse", @~"parser",
+                        @~"parse_from_source_str"],
                        ~[node.mk_parse_fn(cx,sp),
                         mk_uniq_str(cx,sp, fname),
                         mk_call(cx,sp,
-                                ~[@"syntax"/~,@"ext"/~,
-                                 @"qquote"/~, @"mk_file_substr"/~],
+                                ~[@~"syntax",@~"ext",
+                                 @~"qquote", @~"mk_file_substr"],
                                 ~[mk_uniq_str(cx,sp, loc.file.name),
                                  mk_uint(cx,sp, loc.line),
                                  mk_uint(cx,sp, loc.col)]),
@@ -257,15 +257,15 @@ fn finish<T: qq_helper>
     let mut rcall = pcall;
     if (g_len > 0u) {
         rcall = mk_call(cx,sp,
-                        ~[@"syntax"/~, @"ext"/~, @"qquote"/~, @"replace"/~],
+                        ~[@~"syntax", @~"ext", @~"qquote", @~"replace"],
                         ~[pcall,
                           mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec(|g| {
                              mk_call(cx,sp,
-                                     ~[@"syntax"/~, @"ext"/~,
-                                      @"qquote"/~, @g.constr],
+                                     ~[@~"syntax", @~"ext",
+                                      @~"qquote", @g.constr],
                                      ~[g.e])})),
                          mk_path(cx,sp,
-                                 ~[@"syntax"/~, @"ext"/~, @"qquote"/~,
+                                 ~[@~"syntax", @~"ext", @~"qquote",
                                   @node.get_fold_fn()])]);
     }
     ret rcall;
@@ -326,10 +326,11 @@ fn print_expr(expr: @ast::expr) {
     let pp = pprust::rust_printer(stdout);
     pprust::print_expr(pp, expr);
     pp::eof(pp.s);
-    stdout.write_str("\n");
+    stdout.write_str(~"\n");
 }
 
-fn mk_file_substr(fname: str, line: uint, col: uint) -> codemap::file_substr {
+fn mk_file_substr(fname: ~str, line: uint, col: uint) ->
+    codemap::file_substr {
     codemap::fss_external({filename: fname, line: line, col: col})
 }
 
diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs
index 6ccbabd748e..333c510b7cd 100644
--- a/src/libsyntax/ext/simplext.rs
+++ b/src/libsyntax/ext/simplext.rs
@@ -35,29 +35,29 @@ enum matchable {
 }
 
 /* for when given an incompatible bit of AST */
-fn match_error(cx: ext_ctxt, m: matchable, expected: str) -> ! {
+fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! {
     alt m {
       match_expr(x) {
         cx.span_fatal(x.span,
-                      "this argument is an expr, expected " + expected);
+                      ~"this argument is an expr, expected " + expected);
       }
       match_path(x) {
         cx.span_fatal(x.span,
-                      "this argument is a path, expected " + expected);
+                      ~"this argument is a path, expected " + expected);
       }
       match_ident(x) {
         cx.span_fatal(x.span,
-                      "this argument is an ident, expected " + expected);
+                      ~"this argument is an ident, expected " + expected);
       }
       match_ty(x) {
         cx.span_fatal(x.span,
-                      "this argument is a type, expected " + expected);
+                      ~"this argument is a type, expected " + expected);
       }
       match_block(x) {
         cx.span_fatal(x.span,
-                      "this argument is a block, expected " + expected);
+                      ~"this argument is a block, expected " + expected);
       }
-      match_exact { cx.bug("what is a match_exact doing in a bindings?"); }
+      match_exact { cx.bug(~"what is a match_exact doing in a bindings?"); }
     }
 }
 
@@ -80,7 +80,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) ->
             alt m.node {
               ast::mac_ellipsis {
                 if res != none {
-                    cx.span_fatal(m.span, "only one ellipsis allowed");
+                    cx.span_fatal(m.span, ~"only one ellipsis allowed");
                 }
                 res =
                     some({pre: vec::slice(elts, 0u, idx - 1u),
@@ -234,8 +234,8 @@ fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
         ret alt follow(m, idx_path) {
               seq(_, sp) {
                 cx.span_fatal(sp,
-                              "syntax matched under ... but not " +
-                                  "used that way.")
+                              ~"syntax matched under ... but not " +
+                                  ~"used that way.")
               }
               leaf(m) { ret some(m) }
             }
@@ -302,8 +302,8 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
             alt repeat {
               none {
                 cx.span_fatal(repeat_me.span,
-                              "'...' surrounds an expression without any" +
-                                  " repeating syntax variables");
+                              ~"'...' surrounds an expression without any" +
+                                  ~" repeating syntax variables");
               }
               some({rep_count: rc, _}) {
                 /* Whew, we now know how how many times to repeat */
@@ -331,7 +331,7 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
                     &&i: ident, _fld: ast_fold) -> ident {
     ret alt follow_for_trans(cx, b.find(i), idx_path) {
           some(match_ident(a_id)) { a_id.node }
-          some(m) { match_error(cx, m, "an identifier") }
+          some(m) { match_error(cx, m, ~"an identifier") }
           none { i }
         }
 }
@@ -347,7 +347,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
          rp: none, types: ~[]}
       }
       some(match_path(a_pth)) { *a_pth }
-      some(m) { match_error(cx, m, "a path") }
+      some(m) { match_error(cx, m, ~"a path") }
       none { p }
     }
 }
@@ -374,7 +374,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
               }
               some(match_path(a_pth)) { (expr_path(a_pth), s) }
               some(match_expr(a_exp)) { (a_exp.node, a_exp.span) }
-              some(m) { match_error(cx, m, "an expression") }
+              some(m) { match_error(cx, m, ~"an expression") }
               none { orig(e, s, fld) }
             }
           }
@@ -393,7 +393,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
               some(id) {
                 alt follow_for_trans(cx, b.find(id), idx_path) {
                   some(match_ty(ty)) { (ty.node, ty.span) }
-                  some(m) { match_error(cx, m, "a type") }
+                  some(m) { match_error(cx, m, ~"a type") }
                   none { orig(t, s, fld) }
                 }
               }
@@ -424,7 +424,7 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
 
               // possibly allow promotion of ident/path/expr to blocks?
               some(m) {
-                match_error(cx, m, "a block")
+                match_error(cx, m, ~"a block")
               }
               none { orig(blk, s, fld) }
             }
@@ -455,12 +455,12 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
 
                 if vec::len(post) > 0u {
                     cx.span_unimpl(e.span,
-                                   "matching after `...` not yet supported");
+                                   ~"matching after `...` not yet supported");
                 }
               }
               {pre: pre, rep: none, post: post} {
                 if post != ~[] {
-                    cx.bug("elts_to_ell provided an invalid result");
+                    cx.bug(~"elts_to_ell provided an invalid result");
                 }
                 p_t_s_r_length(cx, vec::len(pre), false, s, b);
                 p_t_s_r_actual_vector(cx, pre, false, s, b);
@@ -478,7 +478,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
                       match_expr(e) {
                         if e == pat { some(leaf(match_exact)) } else { none }
                       }
-                      _ { cx.bug("broken traversal in p_t_s_r") }
+                      _ { cx.bug(~"broken traversal in p_t_s_r") }
                     }
             }
             b.literal_ast_matchers.push(|x| select(cx, x, e));
@@ -486,7 +486,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
         }
       }
       _ {
-          cx.bug("undocumented invariant in p_t_s_rec");
+          cx.bug(~"undocumented invariant in p_t_s_rec");
       }
     }
 }
@@ -517,11 +517,11 @@ fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
         fn select(cx: ext_ctxt, m: matchable) -> match_result {
             ret alt m {
                   match_expr(e) { some(leaf(specialize_match(m))) }
-                  _ { cx.bug("broken traversal in p_t_s_r") }
+                  _ { cx.bug(~"broken traversal in p_t_s_r") }
                 }
         }
         if b.real_binders.contains_key(p_id) {
-            cx.span_fatal(p.span, "duplicate binding identifier");
+            cx.span_fatal(p.span, ~"duplicate binding identifier");
         }
         b.real_binders.insert(p_id, compose_sels(s, |x| select(cx, x)));
       }
@@ -546,16 +546,16 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
               match_expr(e) {
                 alt e.node { expr_mac(mac) { fn_m(mac) } _ { none } }
               }
-              _ { cx.bug("broken traversal in p_t_s_r") }
+              _ { cx.bug(~"broken traversal in p_t_s_r") }
             }
     }
-    fn no_des(cx: ext_ctxt, sp: span, syn: str) -> ! {
-        cx.span_fatal(sp, "destructuring " + syn + " is not yet supported");
+    fn no_des(cx: ext_ctxt, sp: span, syn: ~str) -> ! {
+        cx.span_fatal(sp, ~"destructuring " + syn + ~" is not yet supported");
     }
     alt mac.node {
-      ast::mac_ellipsis { cx.span_fatal(mac.span, "misused `...`"); }
-      ast::mac_invoc(_, _, _) { no_des(cx, mac.span, "macro calls"); }
-      ast::mac_invoc_tt(_, _) { no_des(cx, mac.span, "macro calls"); }
+      ast::mac_ellipsis { cx.span_fatal(mac.span, ~"misused `...`"); }
+      ast::mac_invoc(_, _, _) { no_des(cx, mac.span, ~"macro calls"); }
+      ast::mac_invoc_tt(_, _) { no_des(cx, mac.span, ~"macro calls"); }
       ast::mac_embed_type(ty) {
         alt ty.node {
           ast::ty_path(pth, _) {
@@ -571,10 +571,10 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
                 let final_step = |x| select_pt_1(cx, x, select_pt_2);
                 b.real_binders.insert(id, compose_sels(s, final_step));
               }
-              none { no_des(cx, pth.span, "under `#<>`"); }
+              none { no_des(cx, pth.span, ~"under `#<>`"); }
             }
           }
-          _ { no_des(cx, ty.span, "under `#<>`"); }
+          _ { no_des(cx, ty.span, ~"under `#<>`"); }
         }
       }
       ast::mac_embed_block(blk) {
@@ -591,11 +591,11 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
             let final_step = |x| select_pt_1(cx, x, select_pt_2);
             b.real_binders.insert(id, compose_sels(s, final_step));
           }
-          none { no_des(cx, blk.span, "under `#{}`"); }
+          none { no_des(cx, blk.span, ~"under `#{}`"); }
         }
       }
-      ast::mac_aq(_,_) { no_des(cx, mac.span, "antiquotes"); }
-      ast::mac_var(_) { no_des(cx, mac.span, "antiquote variables"); }
+      ast::mac_aq(_,_) { no_des(cx, mac.span, ~"antiquotes"); }
+      ast::mac_var(_) { no_des(cx, mac.span, ~"antiquote variables"); }
     }
 }
 
@@ -621,7 +621,7 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
                   _ { none }
                 }
               }
-              _ { cx.bug("broken traversal in p_t_s_r") }
+              _ { cx.bug(~"broken traversal in p_t_s_r") }
             }
     }
     p_t_s_rec(cx, match_expr(repeat_me),
@@ -666,7 +666,7 @@ fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool,
                       _ { none }
                     }
                   }
-                  _ { cx.bug("broken traversal in p_t_s_r") }
+                  _ { cx.bug(~"broken traversal in p_t_s_r") }
                 }
         }
         p_t_s_rec(cx, match_expr(elts[idx]),
@@ -677,17 +677,17 @@ fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool,
 
 fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                      _body: ast::mac_body) -> base::macro_def {
-    let args = get_mac_args_no_max(cx, sp, arg, 0u, "macro");
+    let args = get_mac_args_no_max(cx, sp, arg, 0u, ~"macro");
 
-    let mut macro_name: option<@str/~> = none;
+    let mut macro_name: option<@~str> = none;
     let mut clauses: ~[@clause] = ~[];
     for args.each |arg| {
         alt arg.node {
           expr_vec(elts, mutbl) {
             if vec::len(elts) != 2u {
                 cx.span_fatal((*arg).span,
-                              "extension clause must consist of ~[" +
-                                  "macro invocation, expansion body]");
+                              ~"extension clause must consist of ~[" +
+                                  ~"macro invocation, expansion body]");
             }
 
 
@@ -702,21 +702,21 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                           some(other_id) {
                             if id != other_id {
                                 cx.span_fatal(pth.span,
-                                              "macro name must be " +
-                                                  "consistent");
+                                              ~"macro name must be " +
+                                                  ~"consistent");
                             }
                           }
                         }
                       }
                       none {
                         cx.span_fatal(pth.span,
-                                      "macro name must not be a path");
+                                      ~"macro name must not be a path");
                       }
                     }
                     let arg = alt invoc_arg {
                       some(arg) { arg }
                       none { cx.span_fatal(mac.span,
-                                           "macro must have arguments")}
+                                           ~"macro must have arguments")}
                     };
                     vec::push(clauses,
                               @{params: pattern_to_selectors(cx, arg),
@@ -726,21 +726,21 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                     // the macro arg situation)
                   }
                   _ {
-                      cx.span_bug(mac.span, "undocumented invariant in \
+                      cx.span_bug(mac.span, ~"undocumented invariant in \
                          add_extension");
                   }
                 }
               }
               _ {
                 cx.span_fatal(elts[0u].span,
-                              "extension clause must" +
-                                  " start with a macro invocation.");
+                              ~"extension clause must" +
+                                  ~" start with a macro invocation.");
               }
             }
           }
           _ {
             cx.span_fatal((*arg).span,
-                          "extension must be ~[clause, " + " ...]");
+                          ~"extension must be ~[clause, " + ~" ...]");
           }
         }
     }
@@ -751,8 +751,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
              alt macro_name {
                some(id) { id }
                none {
-                 cx.span_fatal(sp, "macro definition must have " +
-                               "at least one clause")
+                 cx.span_fatal(sp, ~"macro definition must have " +
+                               ~"at least one clause")
                }
              },
          ext: normal({expander: ext, span: some(option::get(arg).span)})};
@@ -762,7 +762,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                          clauses: ~[@clause]) -> @expr {
         let arg = alt arg {
           some(arg) { arg }
-          none { cx.span_fatal(sp, "macro must have arguments")}
+          none { cx.span_fatal(sp, ~"macro must have arguments")}
         };
         for clauses.each |c| {
             alt use_selectors_to_bind(c.params, arg) {
@@ -770,7 +770,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
               none { again; }
             }
         }
-        cx.span_fatal(sp, "no clauses match macro invocation");
+        cx.span_fatal(sp, ~"no clauses match macro invocation");
     }
 }
 
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 3ba1a7b159e..ff7fedde34e 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -16,7 +16,7 @@ export expand_include_bin;
 /* #line(): expands to the current line number */
 fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                _body: ast::mac_body) -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "line");
+    get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"line");
     let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo);
     ret mk_uint(cx, sp, loc.line);
 }
@@ -24,7 +24,7 @@ fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
 /* #col(): expands to the current column number */
 fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
               _body: ast::mac_body) -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "col");
+    get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"col");
     let loc = codemap::lookup_char_pos(cx.codemap(), sp.lo);
     ret mk_uint(cx, sp, loc.col);
 }
@@ -34,7 +34,7 @@ fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
  * out if we wanted. */
 fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                _body: ast::mac_body) -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "file");
+    get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"file");
     let { file: @{ name: filename, _ }, _ } =
         codemap::lookup_char_pos(cx.codemap(), sp.lo);
     ret mk_uniq_str(cx, sp, filename);
@@ -42,21 +42,21 @@ fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
 
 fn expand_stringify(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                     _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), "stringify");
+    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), ~"stringify");
     ret mk_uniq_str(cx, sp, pprust::expr_to_str(args[0]));
 }
 
 fn expand_mod(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body)
     -> @ast::expr {
-    get_mac_args(cx, sp, arg, 0u, option::some(0u), "file");
+    get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"file");
     ret mk_uniq_str(cx, sp,
-                    str::connect(cx.mod_path().map(|x|*x), "::"));
+                    str::connect(cx.mod_path().map(|x|*x), ~"::"));
 }
 
 fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                   _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), "include");
-    let file = expr_to_str(cx, args[0], "#include_str requires a string");
+    let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), ~"include");
+    let file = expr_to_str(cx, args[0], ~"#include_str requires a string");
     let p = parse::new_parser_from_file(cx.parse_sess(), cx.cfg(),
                                         res_rel_file(cx, sp, file),
                                         parse::parser::SOURCE_FILE);
@@ -65,9 +65,9 @@ fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
 
 fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                       _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),"include_str");
+    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),~"include_str");
 
-    let file = expr_to_str(cx, args[0], "#include_str requires a string");
+    let file = expr_to_str(cx, args[0], ~"#include_str requires a string");
 
     let res = io::read_whole_file_str(res_rel_file(cx, sp, file));
     alt res {
@@ -82,9 +82,9 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
 
 fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                       _body: ast::mac_body) -> @ast::expr {
-    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),"include_bin");
+    let args = get_mac_args(cx,sp,arg,1u,option::some(1u),~"include_bin");
 
-    let file = expr_to_str(cx, args[0], "#include_bin requires a string");
+    let file = expr_to_str(cx, args[0], ~"#include_bin requires a string");
 
     alt io::read_whole_file(res_rel_file(cx, sp, file)) {
       result::ok(src) {
diff --git a/src/libsyntax/ext/tt/earley_parser.rs b/src/libsyntax/ext/tt/earley_parser.rs
index f830330e182..3f604aafb3d 100644
--- a/src/libsyntax/ext/tt/earley_parser.rs
+++ b/src/libsyntax/ext/tt/earley_parser.rs
@@ -83,7 +83,7 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@arb_depth])
           }
           {node: mtc_bb(bind_name, _, idx), span: sp} {
             if ret_val.contains_key(bind_name) {
-                p_s.span_diagnostic.span_fatal(sp, "Duplicated bind name: "
+                p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "
                                                + *bind_name)
             }
             ret_val.insert(bind_name, res[idx]);
@@ -97,7 +97,7 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@arb_depth])
 
 enum parse_result {
     success(hashmap<ident, @arb_depth>),
-    failure(codemap::span, str)
+    failure(codemap::span, ~str)
 }
 
 fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
@@ -207,9 +207,9 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
                     nameize(sess, ms,
                             vec::map(eof_eis[0u].matches, |dv| dv.pop())));
             } else if eof_eis.len() > 1u {
-                ret failure(sp, "Ambiguity: multiple successful parses");
+                ret failure(sp, ~"Ambiguity: multiple successful parses");
             } else {
-                ret failure(sp, "Unexpected end of macro invocation");
+                ret failure(sp, ~"Unexpected end of macro invocation");
             }
         } else {
             if (bb_eis.len() > 0u && next_eis.len() > 0u)
@@ -217,13 +217,13 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
                 let nts = str::connect(vec::map(bb_eis, |ei| {
                     alt ei.elts[ei.idx].node
                         { mtc_bb(_,name,_) { *name } _ { fail; } }
-                }), " or ");
+                }), ~" or ");
                 ret failure(sp, #fmt[
                     "Local ambiguity: multiple parsing options: \
                      built-in NTs %s or %u other options.",
                     nts, next_eis.len()]);
             } else if (bb_eis.len() == 0u && next_eis.len() == 0u) {
-                ret failure(sp, "No rules expected the token "
+                ret failure(sp, ~"No rules expected the token "
                             + to_str(*rdr.interner(), tok));
             } else if (next_eis.len() > 0u) {
                 /* Now process the next token */
@@ -259,32 +259,32 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
     }
 }
 
-fn parse_nt(p: parser, name: str) -> whole_nt {
+fn parse_nt(p: parser, name: ~str) -> whole_nt {
     alt name {
-      "item" { alt p.parse_item(~[], ast::public) {
+      ~"item" { alt p.parse_item(~[], ast::public) {
         some(i) { token::w_item(i) }
-        none { p.fatal("expected an item keyword") }
+        none { p.fatal(~"expected an item keyword") }
       }}
-      "block" { token::w_block(p.parse_block()) }
-      "stmt" { token::w_stmt(p.parse_stmt(~[])) }
-      "pat" { token::w_pat(p.parse_pat()) }
-      "expr" { token::w_expr(p.parse_expr()) }
-      "ty" { token::w_ty(p.parse_ty(false /* no need to disambiguate*/)) }
+      ~"block" { token::w_block(p.parse_block()) }
+      ~"stmt" { token::w_stmt(p.parse_stmt(~[])) }
+      ~"pat" { token::w_pat(p.parse_pat()) }
+      ~"expr" { token::w_expr(p.parse_expr()) }
+      ~"ty" { token::w_ty(p.parse_ty(false /* no need to disambiguate*/)) }
       // this could be handled like a token, since it is one
-      "ident" { alt copy p.token {
+      ~"ident" { alt copy p.token {
           token::IDENT(sn,b) { p.bump(); token::w_ident(sn,b) }
-          _ { p.fatal("expected ident, found "
+          _ { p.fatal(~"expected ident, found "
                       + token::to_str(*p.reader.interner(), copy p.token)) }
       } }
-      "path" { token::w_path(p.parse_path_with_tps(false)) }
-      "tt" {
+      ~"path" { token::w_path(p.parse_path_with_tps(false)) }
+      ~"tt" {
         p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
         let res = token::w_tt(@p.parse_token_tree());
         p.quote_depth -= 1u;
         res
       }
-      "mtcs" { token::w_mtcs(p.parse_matchers()) }
-      _ { p.fatal("Unsupported builtin nonterminal parser: " + name)}
+      ~"mtcs" { token::w_mtcs(p.parse_matchers()) }
+      _ { p.fatal(~"Unsupported builtin nonterminal parser: " + name)}
     }
 }
 
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index b454dcfc0be..7bed3107f53 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -18,9 +18,9 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
 
     let argument_gram = ~[
         ms(mtc_rep(~[
-            ms(mtc_bb(@"lhs"/~,@"mtcs"/~, 0u)),
+            ms(mtc_bb(@~"lhs",@~"mtcs", 0u)),
             ms(mtc_tok(FAT_ARROW)),
-            ms(mtc_bb(@"rhs"/~,@"tt"/~, 1u)),
+            ms(mtc_bb(@~"rhs",@~"tt", 1u)),
         ], some(SEMI), false))];
 
     let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
@@ -31,20 +31,20 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
       failure(sp, msg) { cx.span_fatal(sp, msg); }
     };
 
-    let lhses = alt arguments.get(@"lhs"/~) {
+    let lhses = alt arguments.get(@~"lhs") {
       @seq(s, sp) { s }
-      _ { cx.span_bug(sp, "wrong-structured lhs") }
+      _ { cx.span_bug(sp, ~"wrong-structured lhs") }
     };
-    let rhses = alt arguments.get(@"rhs"/~) {
+    let rhses = alt arguments.get(@~"rhs") {
       @seq(s, sp) { s }
-      _ { cx.span_bug(sp, "wrong-structured rhs") }
+      _ { cx.span_bug(sp, ~"wrong-structured rhs") }
     };
 
     fn generic_extension(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree],
                          lhses: ~[@arb_depth], rhses: ~[@arb_depth])
     -> mac_result {
         let mut best_fail_spot = {lo: 0u, hi: 0u, expn_info: none};
-        let mut best_fail_msg = "internal error: ran no matchers";
+        let mut best_fail_msg = ~"internal error: ran no matchers";
 
         let s_d = cx.parse_sess().span_diagnostic;
         let itr = cx.parse_sess().interner;
@@ -57,7 +57,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                   success(m) {
                     let rhs = alt rhses[i] {
                       @leaf(w_tt(@tt)) { tt }
-                      _ { cx.span_bug(sp, "bad thing in rhs") }
+                      _ { cx.span_bug(sp, ~"bad thing in rhs") }
                     };
                     let trncbr = new_tt_reader(s_d, itr, some(m), ~[rhs]);
                     let p = parser(cx.parse_sess(), cx.cfg(),
@@ -71,7 +71,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                   }
                 }
               }
-              _ { cx.bug("non-matcher found in parsed lhses"); }
+              _ { cx.bug(~"non-matcher found in parsed lhses"); }
             }
         }
         cx.span_fatal(best_fail_spot, best_fail_msg);
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index df0c39bb266..a9bc124b605 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -25,7 +25,7 @@ type tt_frame = @{
 
 type tt_reader = @{
     sp_diag: span_handler,
-    interner: @interner<@str/~>,
+    interner: @interner<@~str>,
     mut cur: tt_frame,
     /* for MBE-style macro transcription */
     interpolations: std::map::hashmap<ident, @arb_depth>,
@@ -39,7 +39,7 @@ type tt_reader = @{
 /** This can do Macro-By-Example transcription. On the other hand, if
  *  `src` contains no `tt_dotdotdot`s and `tt_interpolate`s, `interp` can (and
  *  should) be none. */
-fn new_tt_reader(sp_diag: span_handler, itr: @interner<@str/~>,
+fn new_tt_reader(sp_diag: span_handler, itr: @interner<@~str>,
                  interp: option<std::map::hashmap<ident,@arb_depth>>,
                  src: ~[ast::token_tree])
     -> tt_reader {
@@ -93,7 +93,7 @@ fn lookup_cur_ad(r: tt_reader, name: ident) -> @arb_depth {
     lookup_cur_ad_by_ad(r, r.interpolations.get(name))
 }
 enum lis {
-    lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(str)
+    lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
 }
 
 fn lockstep_iter_size(&&t: token_tree, &&r: tt_reader) -> lis {
@@ -183,7 +183,7 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
               lis_unconstrained {
                 r.sp_diag.span_fatal(
                     sp, /* blame macro writer */
-                    "attempted to repeat an expression containing no syntax \
+                    ~"attempted to repeat an expression containing no syntax \
                      variables matched as repeating at this depth");
               }
               lis_contradiction(msg) { /* FIXME #2887 blame macro invoker
@@ -200,7 +200,7 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
                     if !zerok {
                         r.sp_diag.span_fatal(sp, /* FIXME #2887 blame invoker
                                                   */
-                                             "this must repeat at least \
+                                             ~"this must repeat at least \
                                               once");
                     }
                     /* we need to pop before we proceed, so recur */
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
index 7ccc9c5a438..9d56754e5d4 100644
--- a/src/libsyntax/parse.rs
+++ b/src/libsyntax/parse.rs
@@ -25,7 +25,7 @@ type parse_sess = @{
     cm: codemap::codemap,
     mut next_id: node_id,
     span_diagnostic: span_handler,
-    interner: @interner::interner<@str/~>,
+    interner: @interner::interner<@~str>,
     // these two must be kept up to date
     mut chpos: uint,
     mut byte_pos: uint
@@ -36,7 +36,7 @@ fn new_parse_sess(demitter: option<emitter>) -> parse_sess {
     ret @{cm: cm,
           mut next_id: 1,
           span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
-          interner: @interner::mk::<@str/~>(|x| str::hash(*x),
+          interner: @interner::mk::<@~str>(|x| str::hash(*x),
                                           |x,y| str::eq(*x, *y)),
           mut chpos: 0u, mut byte_pos: 0u};
 }
@@ -46,24 +46,24 @@ fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::codemap)
     ret @{cm: cm,
           mut next_id: 1,
           span_diagnostic: sh,
-          interner: @interner::mk::<@str/~>(|x| str::hash(*x),
+          interner: @interner::mk::<@~str>(|x| str::hash(*x),
                                           |x,y| str::eq(*x, *y)),
           mut chpos: 0u, mut byte_pos: 0u};
 }
 
-fn parse_crate_from_file(input: str, cfg: ast::crate_cfg, sess: parse_sess) ->
-   @ast::crate {
-    if str::ends_with(input, ".rc") {
+fn parse_crate_from_file(input: ~str, cfg: ast::crate_cfg,
+                         sess: parse_sess) -> @ast::crate {
+    if str::ends_with(input, ~".rc") {
         parse_crate_from_crate_file(input, cfg, sess)
-    } else if str::ends_with(input, ".rs") {
+    } else if str::ends_with(input, ~".rs") {
         parse_crate_from_source_file(input, cfg, sess)
     } else {
-        sess.span_diagnostic.handler().fatal("unknown input file type: " +
+        sess.span_diagnostic.handler().fatal(~"unknown input file type: " +
                                              input)
     }
 }
 
-fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
+fn parse_crate_from_crate_file(input: ~str, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
     let (p, rdr) = new_parser_etc_from_file(sess, cfg, input,
                                             parser::CRATE_FILE);
@@ -87,7 +87,7 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
                            config: /* FIXME (#2543) */ copy p.cfg});
 }
 
-fn parse_crate_from_source_file(input: str, cfg: ast::crate_cfg,
+fn parse_crate_from_source_file(input: ~str, cfg: ast::crate_cfg,
                                 sess: parse_sess) -> @ast::crate {
     let (p, rdr) = new_parser_etc_from_file(sess, cfg, input,
                                             parser::SOURCE_FILE);
@@ -97,7 +97,7 @@ fn parse_crate_from_source_file(input: str, cfg: ast::crate_cfg,
     ret r;
 }
 
-fn parse_crate_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
+fn parse_crate_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
     let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
                                                   codemap::fss_none, source);
@@ -107,7 +107,7 @@ fn parse_crate_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
     ret r;
 }
 
-fn parse_expr_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
+fn parse_expr_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
                               sess: parse_sess) -> @ast::expr {
     let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
                                                   codemap::fss_none, source);
@@ -117,7 +117,7 @@ fn parse_expr_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
     ret r;
 }
 
-fn parse_item_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
+fn parse_item_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg,
                               +attrs: ~[ast::attribute],
                               vis: ast::visibility,
                               sess: parse_sess) -> option<@ast::item> {
@@ -130,8 +130,8 @@ fn parse_item_from_source_str(name: str, source: @str/~, cfg: ast::crate_cfg,
 }
 
 fn parse_from_source_str<T>(f: fn (p: parser) -> T,
-                            name: str, ss: codemap::file_substr,
-                            source: @str/~, cfg: ast::crate_cfg,
+                            name: ~str, ss: codemap::file_substr,
+                            source: @~str, cfg: ast::crate_cfg,
                             sess: parse_sess)
     -> T
 {
@@ -139,7 +139,7 @@ fn parse_from_source_str<T>(f: fn (p: parser) -> T,
                                                   source);
     let r = f(p);
     if !p.reader.is_eof() {
-        p.reader.fatal("expected end-of-string");
+        p.reader.fatal(~"expected end-of-string");
     }
     sess.chpos = rdr.chpos;
     sess.byte_pos = sess.byte_pos + rdr.pos;
@@ -155,8 +155,8 @@ fn next_node_id(sess: parse_sess) -> node_id {
 }
 
 fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
-                                  +name: str, +ss: codemap::file_substr,
-                                  source: @str/~) -> (parser, string_reader) {
+                                  +name: ~str, +ss: codemap::file_substr,
+                                  source: @~str) -> (parser, string_reader) {
     let ftype = parser::SOURCE_FILE;
     let filemap = codemap::new_filemap_w_substr
         (name, ss, source, sess.chpos, sess.byte_pos);
@@ -167,15 +167,15 @@ fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
 }
 
 fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
-                              +name: str, +ss: codemap::file_substr,
-                              source: @str/~) -> parser {
+                              +name: ~str, +ss: codemap::file_substr,
+                              source: @~str) -> parser {
     let (p, _) = new_parser_etc_from_source_str(sess, cfg, name, ss, source);
     ret p;
 }
 
 
-fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
-                            ftype: parser::file_type) ->
+fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg,
+                            +path: ~str, ftype: parser::file_type) ->
    (parser, string_reader) {
     let res = io::read_whole_file_str(path);
     alt res {
@@ -190,7 +190,7 @@ fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
     ret (parser(sess, cfg, srdr as reader, ftype), srdr);
 }
 
-fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
+fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: ~str,
                         ftype: parser::file_type) -> parser {
     let (p, _) = new_parser_etc_from_file(sess, cfg, path, ftype);
     ret p;
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index b10a05d8ca4..995feff0b70 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -56,7 +56,7 @@ impl parser_attr for parser {
                 let attr = ::attr::mk_sugared_doc_attr(
                         *self.get_str(s), self.span.lo, self.span.hi);
                 if attr.node.style != ast::attr_outer {
-                  self.fatal("expected outer comment");
+                  self.fatal(~"expected outer comment");
                 }
                 attrs += ~[attr];
                 self.bump();
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index aa3e808f63e..737307bb648 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -18,28 +18,28 @@ enum cmnt_style {
     blank_line, // Just a manual blank line "\n\n", for layout
 }
 
-type cmnt = {style: cmnt_style, lines: ~[str], pos: uint};
+type cmnt = {style: cmnt_style, lines: ~[~str], pos: uint};
 
-fn is_doc_comment(s: str) -> bool {
-    s.starts_with("///") ||
-    s.starts_with("//!") ||
-    s.starts_with("/**") ||
-    s.starts_with("/*!")
+fn is_doc_comment(s: ~str) -> bool {
+    s.starts_with(~"///") ||
+    s.starts_with(~"//!") ||
+    s.starts_with(~"/**") ||
+    s.starts_with(~"/*!")
 }
 
-fn doc_comment_style(comment: str) -> ast::attr_style {
+fn doc_comment_style(comment: ~str) -> ast::attr_style {
     assert is_doc_comment(comment);
-    if comment.starts_with("//!") || comment.starts_with("/*!") {
+    if comment.starts_with(~"//!") || comment.starts_with(~"/*!") {
         ast::attr_inner
     } else {
         ast::attr_outer
     }
 }
 
-fn strip_doc_comment_decoration(comment: str) -> str {
+fn strip_doc_comment_decoration(comment: ~str) -> ~str {
 
     /// remove whitespace-only lines from the start/end of lines
-    fn vertical_trim(lines: ~[str]) -> ~[str] {
+    fn vertical_trim(lines: ~[~str]) -> ~[~str] {
         let mut i = 0u, j = lines.len();
         while i < j && lines[i].trim().is_empty() {
             i += 1u;
@@ -51,7 +51,7 @@ fn strip_doc_comment_decoration(comment: str) -> str {
     }
 
     // drop leftmost columns that contain only values in chars
-    fn block_trim(lines: ~[str], chars: str, max: option<uint>) -> ~[str] {
+    fn block_trim(lines: ~[~str], chars: ~str, max: option<uint>) -> ~[~str] {
 
         let mut i = max.get_default(uint::max_value);
         for lines.each |line| {
@@ -72,31 +72,31 @@ fn strip_doc_comment_decoration(comment: str) -> str {
         ret do lines.map |line| {
             let chars = str::chars(line);
             if i > chars.len() {
-                ""
+                ~""
             } else {
                 str::from_chars(chars.slice(i, chars.len()))
             }
         };
     }
 
-    if comment.starts_with("//") {
+    if comment.starts_with(~"//") {
         ret comment.slice(3u, comment.len()).trim();
     }
 
-    if comment.starts_with("/*") {
+    if comment.starts_with(~"/*") {
         let lines = str::lines_any(comment.slice(3u, comment.len() - 2u));
         let lines = vertical_trim(lines);
-        let lines = block_trim(lines, "\t ", none);
-        let lines = block_trim(lines, "*", some(1u));
-        let lines = block_trim(lines, "\t ", none);
-        ret str::connect(lines, "\n");
+        let lines = block_trim(lines, ~"\t ", none);
+        let lines = block_trim(lines, ~"*", some(1u));
+        let lines = block_trim(lines, ~"\t ", none);
+        ret str::connect(lines, ~"\n");
     }
 
-    fail "not a doc-comment: " + comment;
+    fail ~"not a doc-comment: " + comment;
 }
 
-fn read_to_eol(rdr: string_reader) -> str {
-    let mut val = "";
+fn read_to_eol(rdr: string_reader) -> ~str {
+    let mut val = ~"";
     while rdr.curr != '\n' && !is_eof(rdr) {
         str::push_char(val, rdr.curr);
         bump(rdr);
@@ -105,7 +105,7 @@ fn read_to_eol(rdr: string_reader) -> str {
     ret val;
 }
 
-fn read_one_line_comment(rdr: string_reader) -> str {
+fn read_one_line_comment(rdr: string_reader) -> ~str {
     let val = read_to_eol(rdr);
     assert ((val[0] == '/' as u8 && val[1] == '/' as u8) ||
             (val[0] == '#' as u8 && val[1] == '!' as u8));
@@ -120,7 +120,7 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
 
 fn push_blank_line_comment(rdr: string_reader, &comments: ~[cmnt]) {
     #debug(">>> blank-line comment");
-    let v: ~[str] = ~[];
+    let v: ~[~str] = ~[];
     vec::push(comments, {style: blank_line, lines: v, pos: rdr.chpos});
 }
 
@@ -151,7 +151,7 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
                                                         &comments: ~[cmnt]) {
     #debug(">>> line comments");
     let p = rdr.chpos;
-    let mut lines: ~[str] = ~[];
+    let mut lines: ~[~str] = ~[];
     while rdr.curr == '/' && nextch(rdr) == '/' {
         let line = read_one_line_comment(rdr);
         log(debug, line);
@@ -171,22 +171,22 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
     }
 }
 
-fn all_whitespace(s: str, begin: uint, end: uint) -> bool {
+fn all_whitespace(s: ~str, begin: uint, end: uint) -> bool {
     let mut i: uint = begin;
     while i != end { if !is_whitespace(s[i] as char) { ret false; } i += 1u; }
     ret true;
 }
 
-fn trim_whitespace_prefix_and_push_line(&lines: ~[str],
-                                        s: str, col: uint) unsafe {
+fn trim_whitespace_prefix_and_push_line(&lines: ~[~str],
+                                        s: ~str, col: uint) unsafe {
     let mut s1;
     let len = str::len(s);
     if all_whitespace(s, 0u, uint::min(len, col)) {
         if col < len {
             s1 = str::slice(s, col, len);
-        } else { s1 = ""; }
+        } else { s1 = ~""; }
     } else { s1 = s; }
-    log(debug, "pushing line: " + s1);
+    log(debug, ~"pushing line: " + s1);
     vec::push(lines, s1);
 }
 
@@ -194,7 +194,7 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
                                                         &comments: ~[cmnt]) {
     #debug(">>> block comment");
     let p = rdr.chpos;
-    let mut lines: ~[str] = ~[];
+    let mut lines: ~[~str] = ~[];
     let mut col: uint = rdr.col;
     bump(rdr);
     bump(rdr);
@@ -211,27 +211,27 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
         ret;
     }
 
-    let mut curr_line = "/*";
+    let mut curr_line = ~"/*";
     let mut level: int = 1;
     while level > 0 {
         #debug("=== block comment level %d", level);
-        if is_eof(rdr) {(rdr as reader).fatal("unterminated block comment");}
+        if is_eof(rdr) {(rdr as reader).fatal(~"unterminated block comment");}
         if rdr.curr == '\n' {
             trim_whitespace_prefix_and_push_line(lines, curr_line, col);
-            curr_line = "";
+            curr_line = ~"";
             bump(rdr);
         } else {
             str::push_char(curr_line, rdr.curr);
             if rdr.curr == '/' && nextch(rdr) == '*' {
                 bump(rdr);
                 bump(rdr);
-                curr_line += "*";
+                curr_line += ~"*";
                 level += 1;
             } else {
                 if rdr.curr == '*' && nextch(rdr) == '/' {
                     bump(rdr);
                     bump(rdr);
-                    curr_line += "/";
+                    curr_line += ~"/";
                     level -= 1;
                 } else { bump(rdr); }
             }
@@ -268,14 +268,14 @@ fn consume_comment(rdr: string_reader, code_to_the_left: bool,
     #debug("<<< consume comment");
 }
 
-type lit = {lit: str, pos: uint};
+type lit = {lit: ~str, pos: uint};
 
 fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
-                                path: str,
+                                path: ~str,
                                 srdr: io::reader) ->
    {cmnts: ~[cmnt], lits: ~[lit]} {
     let src = @str::from_bytes(srdr.read_whole_stream());
-    let itr = @interner::mk::<@str/~>(
+    let itr = @interner::mk::<@~str>(
         |x| str::hash(*x),
         |x,y| str::eq(*x, *y)
     );
@@ -308,9 +308,9 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
         if token::is_lit(tok) {
             let s = get_str_from(rdr, bstart);
             vec::push(literals, {lit: s, pos: sp.lo});
-            log(debug, "tok lit: " + s);
+            log(debug, ~"tok lit: " + s);
         } else {
-            log(debug, "tok: " + token::to_str(*rdr.interner, tok));
+            log(debug, ~"tok: " + token::to_str(*rdr.interner, tok));
         }
         first_read = false;
     }
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 9e5f2ac2c9b..79ac3aeaf8b 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -18,7 +18,7 @@ fn seq_sep_none() -> seq_sep {
     ret {sep: option::none, trailing_sep_allowed: false};
 }
 
-fn token_to_str(reader: reader, ++token: token::token) -> str {
+fn token_to_str(reader: reader, ++token: token::token) -> ~str {
     token::to_str(*reader.interner(), token)
 }
 
@@ -28,23 +28,23 @@ impl parser_common for parser {
     fn unexpected_last(t: token::token) -> ! {
         self.span_fatal(
             copy self.last_span,
-            "unexpected token: `" + token_to_str(self.reader, t) + "`");
+            ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`");
     }
 
     fn unexpected() -> ! {
-        self.fatal("unexpected token: `"
-                   + token_to_str(self.reader, self.token) + "`");
+        self.fatal(~"unexpected token: `"
+                   + token_to_str(self.reader, self.token) + ~"`");
     }
 
     fn expect(t: token::token) {
         if self.token == t {
             self.bump();
         } else {
-            let mut s: str = "expected `";
+            let mut s: ~str = ~"expected `";
             s += token_to_str(self.reader, t);
-            s += "` but found `";
+            s += ~"` but found `";
             s += token_to_str(self.reader, self.token);
-            self.fatal(s + "`");
+            self.fatal(s + ~"`");
         }
     }
 
@@ -52,10 +52,10 @@ impl parser_common for parser {
         alt copy self.token {
           token::IDENT(i, _) { self.bump(); ret self.get_str(i); }
           token::ACTUALLY(token::w_ident(*)) { self.bug(
-              "ident interpolation not converted to real token"); }
-          _ { self.fatal("expected ident, found `"
+              ~"ident interpolation not converted to real token"); }
+          _ { self.fatal(~"expected ident, found `"
                          + token_to_str(self.reader, self.token)
-                         + "`"); }
+                         + ~"`"); }
         }
     }
 
@@ -76,13 +76,13 @@ impl parser_common for parser {
     }
 
     // A sanity check that the word we are asking for is a known keyword
-    fn require_keyword(word: str) {
+    fn require_keyword(word: ~str) {
         if !self.keywords.contains_key(word) {
             self.bug(#fmt("unknown keyword: %s", word));
         }
     }
 
-    fn token_is_keyword(word: str, ++tok: token::token) -> bool {
+    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool {
         self.require_keyword(word);
         alt tok {
           token::IDENT(sid, false) { str::eq(word, *self.get_str(sid)) }
@@ -90,7 +90,7 @@ impl parser_common for parser {
         }
     }
 
-    fn is_keyword(word: str) -> bool {
+    fn is_keyword(word: ~str) -> bool {
         self.token_is_keyword(word, self.token)
     }
 
@@ -103,7 +103,7 @@ impl parser_common for parser {
         }
     }
 
-    fn eat_keyword(word: str) -> bool {
+    fn eat_keyword(word: ~str) -> bool {
         self.require_keyword(word);
 
         // FIXME (#13042): this gratuitous use of @ is to
@@ -119,16 +119,16 @@ impl parser_common for parser {
         }
     }
 
-    fn expect_keyword(word: str) {
+    fn expect_keyword(word: ~str) {
         self.require_keyword(word);
         if !self.eat_keyword(word) {
-            self.fatal("expected `" + word + "`, found `" +
+            self.fatal(~"expected `" + word + ~"`, found `" +
                        token_to_str(self.reader, self.token) +
-                       "`");
+                       ~"`");
         }
     }
 
-    fn is_restricted_keyword(word: str) -> bool {
+    fn is_restricted_keyword(word: ~str) -> bool {
         self.restricted_keywords.contains_key(word)
     }
 
@@ -142,9 +142,9 @@ impl parser_common for parser {
         }
     }
 
-    fn check_restricted_keywords_(w: str) {
+    fn check_restricted_keywords_(w: ~str) {
         if self.is_restricted_keyword(w) {
-            self.fatal("found `" + w + "` in restricted position");
+            self.fatal(~"found `" + w + ~"` in restricted position");
         }
     }
 
@@ -154,11 +154,11 @@ impl parser_common for parser {
         } else if self.token == token::BINOP(token::SHR) {
             self.swap(token::GT, self.span.lo + 1u, self.span.hi);
         } else {
-            let mut s: str = "expected `";
+            let mut s: ~str = ~"expected `";
             s += token_to_str(self.reader, token::GT);
-            s += "`, found `";
+            s += ~"`, found `";
             s += token_to_str(self.reader, self.token);
-            s += "`";
+            s += ~"`";
             self.fatal(s);
         }
     }
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index 54a2abf20c9..125dc809079 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -9,7 +9,7 @@ type ctx =
 
 fn eval_crate_directives(cx: ctx,
                          cdirs: ~[@ast::crate_directive],
-                         prefix: str,
+                         prefix: ~str,
                          &view_items: ~[@ast::view_item],
                          &items: ~[@ast::item]) {
     for cdirs.each |sub_cdir| {
@@ -18,11 +18,11 @@ fn eval_crate_directives(cx: ctx,
 }
 
 fn eval_crate_directives_to_mod(cx: ctx, cdirs: ~[@ast::crate_directive],
-                                prefix: str, suffix: option<str>)
+                                prefix: ~str, suffix: option<~str>)
     -> (ast::_mod, ~[ast::attribute]) {
     #debug("eval crate prefix: %s", prefix);
     #debug("eval crate suffix: %s",
-           option::get_default(suffix, "none"));
+           option::get_default(suffix, ~"none"));
     let (cview_items, citems, cattrs)
         = parse_companion_mod(cx, prefix, suffix);
     let mut view_items: ~[@ast::view_item] = ~[];
@@ -43,17 +43,17 @@ companion mod is a .rs file with the same name as the directory.
 We build the path to the companion mod by combining the prefix and the
 optional suffix then adding the .rs extension.
 */
-fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
+fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
     -> (~[@ast::view_item], ~[@ast::item], ~[ast::attribute]) {
 
-    fn companion_file(+prefix: str, suffix: option<str>) -> str {
+    fn companion_file(+prefix: ~str, suffix: option<~str>) -> ~str {
         ret alt suffix {
           option::some(s) { path::connect(prefix, s) }
           option::none { prefix }
-        } + ".rs";
+        } + ~".rs";
     }
 
-    fn file_exists(path: str) -> bool {
+    fn file_exists(path: ~str) -> bool {
         // Crude, but there's no lib function for this and I'm not
         // up to writing it just now
         alt io::file_reader(path) {
@@ -78,8 +78,8 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
     }
 }
 
-fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @str/~ {
-    alt ::attr::first_attr_value_str_by_name(attrs, "path") {
+fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @~str {
+    alt ::attr::first_attr_value_str_by_name(attrs, ~"path") {
       some(d) {
         ret d;
       }
@@ -87,12 +87,12 @@ fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @str/~ {
     }
 }
 
-fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
+fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
                         &view_items: ~[@ast::view_item],
                         &items: ~[@ast::item]) {
     alt cdir.node {
       ast::cdir_src_mod(id, attrs) {
-        let file_path = cdir_path_opt(@(*id + ".rs"), attrs);
+        let file_path = cdir_path_opt(@(*id + ~".rs"), attrs);
         let full_path =
             if path::path_is_absolute(*file_path) {
                 *file_path
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index d7f9fc12840..7afdc301b02 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -12,22 +12,22 @@ export string_reader_as_reader, tt_reader_as_reader;
 iface reader {
     fn is_eof() -> bool;
     fn next_token() -> {tok: token::token, sp: span};
-    fn fatal(str) -> !;
+    fn fatal(~str) -> !;
     fn span_diag() -> span_handler;
-    fn interner() -> @interner<@str/~>;
+    fn interner() -> @interner<@~str>;
     fn peek() -> {tok: token::token, sp: span};
     fn dup() -> reader;
 }
 
 type string_reader = @{
     span_diagnostic: span_handler,
-    src: @str/~,
+    src: @~str,
     mut col: uint,
     mut pos: uint,
     mut curr: char,
     mut chpos: uint,
     filemap: codemap::filemap,
-    interner: @interner<@str/~>,
+    interner: @interner<@~str>,
     /* cached: */
     mut peek_tok: token::token,
     mut peek_span: span
@@ -35,7 +35,7 @@ type string_reader = @{
 
 fn new_string_reader(span_diagnostic: span_handler,
                      filemap: codemap::filemap,
-                     itr: @interner<@str/~>) -> string_reader {
+                     itr: @interner<@~str>) -> string_reader {
     let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
     string_advance_token(r); /* fill in peek_* */
     ret r;
@@ -44,7 +44,7 @@ fn new_string_reader(span_diagnostic: span_handler,
 /* For comments.rs, which hackily pokes into 'pos' and 'curr' */
 fn new_low_level_string_reader(span_diagnostic: span_handler,
                                filemap: codemap::filemap,
-                               itr: @interner<@str/~>)
+                               itr: @interner<@~str>)
     -> string_reader {
     let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
               mut col: 0u, mut pos: 0u, mut curr: -1 as char,
@@ -75,11 +75,11 @@ impl string_reader_as_reader of reader for string_reader {
         string_advance_token(self);
         ret ret_val;
     }
-    fn fatal(m: str) -> ! {
+    fn fatal(m: ~str) -> ! {
         self.span_diagnostic.span_fatal(copy self.peek_span, m)
     }
     fn span_diag() -> span_handler { self.span_diagnostic }
-    fn interner() -> @interner<@str/~> { self.interner }
+    fn interner() -> @interner<@~str> { self.interner }
     fn peek() -> {tok: token::token, sp: span} {
         {tok: self.peek_tok, sp: self.peek_span}
     }
@@ -97,11 +97,11 @@ impl tt_reader_as_reader of reader for tt_reader {
         }
         tt_next_token(self)
     }
-    fn fatal(m: str) -> ! {
+    fn fatal(m: ~str) -> ! {
         self.sp_diag.span_fatal(copy self.cur_span, m);
     }
     fn span_diag() -> span_handler { self.sp_diag }
-    fn interner() -> @interner<@str/~> { self.interner }
+    fn interner() -> @interner<@~str> { self.interner }
     fn peek() -> {tok: token::token, sp: span} {
         { tok: self.cur_tok, sp: self.cur_span }
     }
@@ -125,7 +125,7 @@ fn string_advance_token(&&r: string_reader) {
 
 }
 
-fn get_str_from(rdr: string_reader, start: uint) -> str unsafe {
+fn get_str_from(rdr: string_reader, start: uint) -> ~str unsafe {
     // I'm pretty skeptical about this subtraction. What if there's a
     // multi-byte character before the mark?
     ret str::slice(*rdr.src, start - 1u, rdr.pos - 1u);
@@ -211,7 +211,7 @@ fn consume_any_line_comment(rdr: string_reader)
             // line comments starting with "///" or "//!" are doc-comments
             if rdr.curr == '/' || rdr.curr == '!' {
                 let start_chpos = rdr.chpos - 2u;
-                let mut acc = "//";
+                let mut acc = ~"//";
                 while rdr.curr != '\n' && !is_eof(rdr) {
                     str::push_char(acc, rdr.curr);
                     bump(rdr);
@@ -250,15 +250,15 @@ fn consume_block_comment(rdr: string_reader)
     // block comments starting with "/**" or "/*!" are doc-comments
     if rdr.curr == '*' || rdr.curr == '!' {
         let start_chpos = rdr.chpos - 2u;
-        let mut acc = "/*";
+        let mut acc = ~"/*";
         while !(rdr.curr == '*' && nextch(rdr) == '/') && !is_eof(rdr) {
             str::push_char(acc, rdr.curr);
             bump(rdr);
         }
         if is_eof(rdr) {
-            rdr.fatal("unterminated block doc-comment");
+            rdr.fatal(~"unterminated block doc-comment");
         } else {
-            acc += "*/";
+            acc += ~"*/";
             bump(rdr);
             bump(rdr);
             ret some({
@@ -270,7 +270,7 @@ fn consume_block_comment(rdr: string_reader)
 
     let mut level: int = 1;
     while level > 0 {
-        if is_eof(rdr) { rdr.fatal("unterminated block comment"); }
+        if is_eof(rdr) { rdr.fatal(~"unterminated block comment"); }
         if rdr.curr == '/' && nextch(rdr) == '*' {
             bump(rdr);
             bump(rdr);
@@ -288,9 +288,9 @@ fn consume_block_comment(rdr: string_reader)
     ret consume_whitespace_and_comments(rdr);
 }
 
-fn scan_exponent(rdr: string_reader) -> option<str> {
+fn scan_exponent(rdr: string_reader) -> option<~str> {
     let mut c = rdr.curr;
-    let mut rslt = "";
+    let mut rslt = ~"";
     if c == 'e' || c == 'E' {
         str::push_char(rslt, c);
         bump(rdr);
@@ -302,12 +302,12 @@ fn scan_exponent(rdr: string_reader) -> option<str> {
         let exponent = scan_digits(rdr, 10u);
         if str::len(exponent) > 0u {
             ret some(rslt + exponent);
-        } else { rdr.fatal("scan_exponent: bad fp literal"); }
-    } else { ret none::<str>; }
+        } else { rdr.fatal(~"scan_exponent: bad fp literal"); }
+    } else { ret none::<~str>; }
 }
 
-fn scan_digits(rdr: string_reader, radix: uint) -> str {
-    let mut rslt = "";
+fn scan_digits(rdr: string_reader, radix: uint) -> ~str {
+    let mut rslt = ~"";
     loop {
         let c = rdr.curr;
         if c == '_' { bump(rdr); again; }
@@ -366,7 +366,7 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
                       else { either::right(ast::ty_u64) };
         }
         if str::len(num_str) == 0u {
-            rdr.fatal("no valid digits found for number");
+            rdr.fatal(~"no valid digits found for number");
         }
         let parsed = option::get(u64::from_str_radix(num_str, base as u64));
         alt tp {
@@ -379,7 +379,7 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
         is_float = true;
         bump(rdr);
         let dec_part = scan_digits(rdr, 10u);
-        num_str += "." + dec_part;
+        num_str += ~"." + dec_part;
     }
     alt scan_exponent(rdr) {
       some(s) {
@@ -414,7 +414,7 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
                              ast::ty_f);
     } else {
         if str::len(num_str) == 0u {
-            rdr.fatal("no valid digits found for number");
+            rdr.fatal(~"no valid digits found for number");
         }
         let parsed = option::get(u64::from_str_radix(num_str, base as u64));
 
@@ -440,7 +440,7 @@ fn scan_numeric_escape(rdr: string_reader, n_hex_digits: uint) -> char {
 }
 
 fn next_token_inner(rdr: string_reader) -> token::token {
-    let mut accum_str = "";
+    let mut accum_str = ~"";
     let mut c = rdr.curr;
     if (c >= 'a' && c <= 'z')
         || (c >= 'A' && c <= 'Z')
@@ -455,7 +455,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             bump(rdr);
             c = rdr.curr;
         }
-        if str::eq(accum_str, "_") { ret token::UNDERSCORE; }
+        if str::eq(accum_str, ~"_") { ret token::UNDERSCORE; }
         let is_mod_name = c == ':' && nextch(rdr) == ':';
 
         // FIXME: perform NFKC normalization here. (Issue #2253)
@@ -578,7 +578,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             }
         }
         if rdr.curr != '\'' {
-            rdr.fatal("unterminated character constant");
+            rdr.fatal(~"unterminated character constant");
         }
         bump(rdr); // advance curr past token
         ret token::LIT_INT(c2 as i64, ast::ty_char);
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 784044a2df0..1c34894eb98 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -160,8 +160,8 @@ class parser {
     let mut restriction: restriction;
     let mut quote_depth: uint; // not (yet) related to the quasiquoter
     let reader: reader;
-    let keywords: hashmap<str, ()>;
-    let restricted_keywords: hashmap<str, ()>;
+    let keywords: hashmap<~str, ()>;
+    let restricted_keywords: hashmap<~str, ()>;
 
     new(sess: parse_sess, cfg: ast::crate_cfg, +rdr: reader, ftype: file_type)
     {
@@ -220,29 +220,29 @@ class parser {
         }
         ret copy self.buffer[(self.buffer_start + dist - 1) & 3].tok;
     }
-    fn fatal(m: str) -> ! {
+    fn fatal(m: ~str) -> ! {
         self.sess.span_diagnostic.span_fatal(copy self.span, m)
     }
-    fn span_fatal(sp: span, m: str) -> ! {
+    fn span_fatal(sp: span, m: ~str) -> ! {
         self.sess.span_diagnostic.span_fatal(sp, m)
     }
-    fn bug(m: str) -> ! {
+    fn bug(m: ~str) -> ! {
         self.sess.span_diagnostic.span_bug(copy self.span, m)
     }
-    fn warn(m: str) {
+    fn warn(m: ~str) {
         self.sess.span_diagnostic.span_warn(copy self.span, m)
     }
-    fn get_str(i: token::str_num) -> @str/~ {
+    fn get_str(i: token::str_num) -> @~str {
         interner::get(*self.reader.interner(), i)
     }
     fn get_id() -> node_id { next_node_id(self.sess) }
 
     fn parse_ty_fn(purity: ast::purity) -> ty_ {
-        let proto = if self.eat_keyword("extern") {
-            self.expect_keyword("fn");
+        let proto = if self.eat_keyword(~"extern") {
+            self.expect_keyword(~"fn");
             ast::proto_bare
         } else {
-            self.expect_keyword("fn");
+            self.expect_keyword(~"fn");
             self.parse_fn_ty_proto()
         };
         ty_fn(proto, self.parse_ty_fn_decl(purity))
@@ -259,7 +259,7 @@ class parser {
                 let name = self.parse_value_ident();
                 p.bump();
                 name
-            } else { @""/~ };
+            } else { @~"" };
 
             {mode: mode, ty: p.parse_ty(false), ident: name,
              id: p.get_id()}
@@ -317,8 +317,8 @@ class parser {
                            vis: vis})
               }
 
-              _ { p.fatal("expected `;` or `}` but found `" +
-                          token_to_str(p.reader, p.token) + "`");
+              _ { p.fatal(~"expected `;` or `}` but found `" +
+                          token_to_str(p.reader, p.token) + ~"`");
                 }
             }
         }
@@ -345,7 +345,7 @@ class parser {
     fn ident_index(args: ~[arg], i: ident) -> uint {
         let mut j = 0u;
         for args.each |a| { if a.ident == i { ret j; } j += 1u; }
-        self.fatal("unbound variable `" + *i + "` in constraint arg");
+        self.fatal(~"unbound variable `" + *i + ~"` in constraint arg");
     }
 
     fn parse_type_constr_arg() -> @ty_constr_arg {
@@ -431,7 +431,7 @@ class parser {
         }
     }
 
-    fn region_from_name(s: option<@str/~>) -> @region {
+    fn region_from_name(s: option<@~str>) -> @region {
         let r = alt s {
           some (string) { re_named(string) }
           none { re_anon }
@@ -538,19 +538,19 @@ class parser {
             let region = self.parse_region_with_sep();
             let mt = self.parse_mt();
             ty_rptr(region, mt)
-        } else if self.eat_keyword("pure") {
+        } else if self.eat_keyword(~"pure") {
             self.parse_ty_fn(ast::pure_fn)
-        } else if self.eat_keyword("unsafe") {
+        } else if self.eat_keyword(~"unsafe") {
             self.parse_ty_fn(ast::unsafe_fn)
-        } else if self.is_keyword("fn") {
+        } else if self.is_keyword(~"fn") {
             self.parse_ty_fn(ast::impure_fn)
-        } else if self.eat_keyword("extern") {
-            self.expect_keyword("fn");
+        } else if self.eat_keyword(~"extern") {
+            self.expect_keyword(~"fn");
             ty_fn(proto_bare, self.parse_ty_fn_decl(ast::impure_fn))
         } else if self.token == token::MOD_SEP || is_ident(self.token) {
             let path = self.parse_path_with_tps(colons_before_params);
             ty_path(path, self.get_id())
-        } else { self.fatal("expected type"); };
+        } else { self.fatal(~"expected type"); };
 
         let sp = mk_sp(lo, self.last_span.hi);
         ret @{id: self.get_id(),
@@ -588,9 +588,9 @@ class parser {
             @{id: p.get_id(), is_move: is_move, name: ident, span: sp}
         }
 
-        if self.eat_keyword("move") {
+        if self.eat_keyword(~"move") {
             either::right(parse_capture_item(self, true))
-        } else if self.eat_keyword("copy") {
+        } else if self.eat_keyword(~"copy") {
             either::right(parse_capture_item(self, false))
         } else {
             parse_arg_fn(self)
@@ -642,7 +642,7 @@ class parser {
                 some(mac_aq(mk_sp(lo,hi), e))
               }
               _ {
-                self.fatal("expected `(` or unsuffixed integer literal");
+                self.fatal(~"expected `(` or unsuffixed integer literal");
               }
             }
           }
@@ -692,9 +692,9 @@ class parser {
 
     fn parse_lit() -> lit {
         let lo = self.span.lo;
-        let lit = if self.eat_keyword("true") {
+        let lit = if self.eat_keyword(~"true") {
             lit_bool(true)
-        } else if self.eat_keyword("false") {
+        } else if self.eat_keyword(~"false") {
             lit_bool(false)
         } else {
             let tok = self.token;
@@ -753,7 +753,7 @@ class parser {
             // Hack: avoid parsing vstores like /@ and /~.  This is painful
             // because the notation for region bounds and the notation for
             // vstores is... um... the same.  I guess that's my fault.  This
-            // is still not ideal as for str/& we end up parsing more than we
+            // is still not ideal as for &str we end up parsing more than we
             // ought to and have to sort it out later.
             if self.token == token::BINOP(token::SLASH)
                 && self.look_ahead(1u) == token::BINOP(token::AND) {
@@ -781,9 +781,9 @@ class parser {
     }
 
     fn parse_mutability() -> mutability {
-        if self.eat_keyword("mut") {
+        if self.eat_keyword(~"mut") {
             m_mutbl
-        } else if self.eat_keyword("const") {
+        } else if self.eat_keyword(~"const") {
             m_const
         } else {
             m_imm
@@ -866,7 +866,7 @@ class parser {
             ret self.mk_pexpr(lo, hi, expr_tup(es));
         } else if self.token == token::LBRACE {
             self.bump();
-            if self.is_keyword("mut") ||
+            if self.is_keyword(~"mut") ||
                 is_plain_ident(self.token)
                 && self.look_ahead(1u) == token::COLON {
                 let mut fields = ~[self.parse_field(token::COLON)];
@@ -874,11 +874,11 @@ class parser {
                 while self.token != token::RBRACE {
                     // optional comma before "with"
                     if self.token == token::COMMA
-                        && self.token_is_keyword("with",
+                        && self.token_is_keyword(~"with",
                                                  self.look_ahead(1u)) {
                         self.bump();
                     }
-                    if self.eat_keyword("with") {
+                    if self.eat_keyword(~"with") {
                         base = some(self.parse_expr()); break;
                     }
                     self.expect(token::COMMA);
@@ -897,36 +897,38 @@ class parser {
             }
         } else if token::is_bar(self.token) {
             ret pexpr(self.parse_lambda_expr());
-        } else if self.eat_keyword("new") {
+        } else if self.eat_keyword(~"new") {
             self.expect(token::LPAREN);
             let r = self.parse_expr();
             self.expect(token::RPAREN);
             let v = self.parse_expr();
             ret self.mk_pexpr(lo, self.span.hi,
                               expr_new(r, self.get_id(), v));
-        } else if self.eat_keyword("if") {
+        } else if self.eat_keyword(~"if") {
             ret pexpr(self.parse_if_expr());
-        } else if self.eat_keyword("for") {
-            ret pexpr(self.parse_sugary_call_expr("for", expr_loop_body));
-        } else if self.eat_keyword("do") {
-            ret pexpr(self.parse_sugary_call_expr("do", expr_do_body));
-        } else if self.eat_keyword("while") {
+        } else if self.eat_keyword(~"for") {
+            ret pexpr(self.parse_sugary_call_expr(~"for", expr_loop_body));
+        } else if self.eat_keyword(~"do") {
+            ret pexpr(self.parse_sugary_call_expr(~"do", expr_do_body));
+        } else if self.eat_keyword(~"while") {
             ret pexpr(self.parse_while_expr());
-        } else if self.eat_keyword("loop") {
+        } else if self.eat_keyword(~"loop") {
             ret pexpr(self.parse_loop_expr());
-        } else if self.eat_keyword("alt") {
+        } else if self.eat_keyword(~"alt") {
             ret pexpr(self.parse_alt_expr());
-        } else if self.eat_keyword("fn") {
+        } else if self.eat_keyword(~"fn") {
             let proto = self.parse_fn_ty_proto();
             alt proto {
-              proto_bare { self.fatal("fn expr are deprecated, use fn@"); }
-              proto_any { self.fatal("fn* cannot be used in an expression"); }
+              proto_bare { self.fatal(~"fn expr are deprecated, use fn@"); }
+              proto_any {
+                self.fatal(~"fn* cannot be used in an expression");
+              }
               _ { /* fallthrough */ }
             }
             ret pexpr(self.parse_fn_expr(proto));
-        } else if self.eat_keyword("unchecked") {
+        } else if self.eat_keyword(~"unchecked") {
             ret pexpr(self.parse_block_expr(lo, unchecked_blk));
-        } else if self.eat_keyword("unsafe") {
+        } else if self.eat_keyword(~"unsafe") {
             ret pexpr(self.parse_block_expr(lo, unsafe_blk));
         } else if self.token == token::LBRACKET {
             self.bump();
@@ -958,13 +960,13 @@ class parser {
             let ex_ext = self.parse_syntax_ext();
             hi = ex_ext.span.hi;
             ex = ex_ext.node;
-        } else if self.eat_keyword("fail") {
+        } else if self.eat_keyword(~"fail") {
             if can_begin_expr(self.token) {
                 let e = self.parse_expr();
                 hi = e.span.hi;
                 ex = expr_fail(some(e));
             } else { ex = expr_fail(none); }
-        } else if self.eat_keyword("log") {
+        } else if self.eat_keyword(~"log") {
             self.expect(token::LPAREN);
             let lvl = self.parse_expr();
             self.expect(token::COMMA);
@@ -972,18 +974,18 @@ class parser {
             ex = expr_log(2, lvl, e);
             hi = self.span.hi;
             self.expect(token::RPAREN);
-        } else if self.eat_keyword("assert") {
+        } else if self.eat_keyword(~"assert") {
             let e = self.parse_expr();
             ex = expr_assert(e);
             hi = e.span.hi;
-        } else if self.eat_keyword("check") {
+        } else if self.eat_keyword(~"check") {
             /* Should be a predicate (pure boolean function) applied to
             arguments that are all either slot variables or literals.
             but the typechecker enforces that. */
             let e = self.parse_expr();
             hi = e.span.hi;
             ex = expr_check(checked_expr, e);
-        } else if self.eat_keyword("claim") {
+        } else if self.eat_keyword(~"claim") {
             /* Same rules as check, except that if check-claims
             is enabled (a command-line flag), then the parser turns
             claims into check */
@@ -991,25 +993,25 @@ class parser {
             let e = self.parse_expr();
             hi = e.span.hi;
             ex = expr_check(claimed_expr, e);
-        } else if self.eat_keyword("ret") {
+        } else if self.eat_keyword(~"ret") {
             if can_begin_expr(self.token) {
                 let e = self.parse_expr();
                 hi = e.span.hi;
                 ex = expr_ret(some(e));
             } else { ex = expr_ret(none); }
-        } else if self.eat_keyword("break") {
+        } else if self.eat_keyword(~"break") {
             ex = expr_break;
             hi = self.span.hi;
-        } else if self.eat_keyword("again") {
+        } else if self.eat_keyword(~"again") {
             ex = expr_again;
             hi = self.span.hi;
-        } else if self.eat_keyword("copy") {
+        } else if self.eat_keyword(~"copy") {
             let e = self.parse_expr();
             ex = expr_copy(e);
             hi = e.span.hi;
         } else if self.token == token::MOD_SEP ||
-            is_ident(self.token) && !self.is_keyword("true") &&
-            !self.is_keyword("false") {
+            is_ident(self.token) && !self.is_keyword(~"true") &&
+            !self.is_keyword(~"false") {
             let pth = self.parse_path_with_tps(true);
 
             /* `!`, as an operator, is prefix, so we know this isn't that */
@@ -1065,7 +1067,7 @@ class parser {
     fn parse_syntax_ext_naked(lo: uint) -> @expr {
         alt self.token {
           token::IDENT(_, _) {}
-          _ { self.fatal("expected a syntax expander name"); }
+          _ { self.fatal(~"expected a syntax expander name"); }
         }
         let pth = self.parse_path_without_tps();
         //temporary for a backwards-compatible cycle:
@@ -1093,7 +1095,7 @@ class parser {
                 alt (self.token) {
                   token::LBRACE {depth += 1u;}
                   token::RBRACE {depth -= 1u;}
-                  token::EOF {self.fatal("unexpected EOF in macro body");}
+                  token::EOF {self.fatal(~"unexpected EOF in macro body");}
                   _ {}
                 }
                 self.bump();
@@ -1181,7 +1183,7 @@ class parser {
                 self.bump();
                 ret (some(sep), zerok);
             } else {
-                self.fatal("expected `*` or `+`");
+                self.fatal(~"expected `*` or `+`");
             }
         }
     }
@@ -1201,11 +1203,11 @@ class parser {
             alt p.token {
               token::RPAREN | token::RBRACE | token::RBRACKET
               if !delim_ok {
-                p.fatal("incorrect close delimiter: `"
-                           + token_to_str(p.reader, p.token) + "`");
+                p.fatal(~"incorrect close delimiter: `"
+                           + token_to_str(p.reader, p.token) + ~"`");
               }
               token::EOF {
-                p.fatal("file ended in the middle of a macro invocation");
+                p.fatal(~"file ended in the middle of a macro invocation");
               }
               /* we ought to allow different depths of unquotation */
               token::DOLLAR if p.quote_depth > 0u {
@@ -1280,7 +1282,7 @@ class parser {
                 let ms = self.parse_matcher_subseq(name_idx, token::LPAREN,
                                                    token::RPAREN);
                 if ms.len() == 0u {
-                    self.fatal("repetition body must be nonempty");
+                    self.fatal(~"repetition body must be nonempty");
                 }
                 let (sep, zerok) = self.parse_sep_and_zerok();
                 mtc_rep(ms, sep, zerok)
@@ -1411,7 +1413,7 @@ class parser {
           }
           _ {}
         }
-        if as_prec > min_prec && self.eat_keyword("as") {
+        if as_prec > min_prec && self.eat_keyword(~"as") {
             let rhs = self.parse_ty(true);
             let _as =
                 self.mk_pexpr(lhs.span.lo, rhs.span.hi, expr_cast(lhs, rhs));
@@ -1474,7 +1476,7 @@ class parser {
         let thn = self.parse_block();
         let mut els: option<@expr> = none;
         let mut hi = thn.span.hi;
-        if self.eat_keyword("else") {
+        if self.eat_keyword(~"else") {
             let elexpr = self.parse_else_expr();
             els = some(elexpr);
             hi = elexpr.span.hi;
@@ -1483,7 +1485,7 @@ class parser {
     }
 
     fn parse_if_expr() -> @expr {
-        if self.eat_keyword("check") {
+        if self.eat_keyword(~"check") {
             let q = self.parse_if_expr_1();
             ret self.mk_expr(q.lo, q.hi,
                              expr_if_check(q.cond, q.then, q.els));
@@ -1560,7 +1562,7 @@ class parser {
     }
 
     fn parse_else_expr() -> @expr {
-        if self.eat_keyword("if") {
+        if self.eat_keyword(~"if") {
             ret self.parse_if_expr();
         } else {
             let blk = self.parse_block();
@@ -1568,7 +1570,7 @@ class parser {
         }
     }
 
-    fn parse_sugary_call_expr(keyword: str,
+    fn parse_sugary_call_expr(keyword: ~str,
                               ctor: fn(+@expr) -> expr_) -> @expr {
         let lo = self.last_span;
         // Parse the callee `foo` in
@@ -1625,7 +1627,7 @@ class parser {
 
     fn parse_alt_expr() -> @expr {
         let lo = self.last_span.lo;
-        let mode = if self.eat_keyword("check") { alt_check }
+        let mode = if self.eat_keyword(~"check") { alt_check }
         else { alt_exhaustive };
         let discriminant = self.parse_expr();
         self.expect(token::LBRACE);
@@ -1633,7 +1635,7 @@ class parser {
         while self.token != token::RBRACE {
             let pats = self.parse_pats();
             let mut guard = none;
-            if self.eat_keyword("if") { guard = some(self.parse_expr()); }
+            if self.eat_keyword(~"if") { guard = some(self.parse_expr()); }
             if self.token == token::FAT_ARROW { self.bump(); }
             let blk = self.parse_block();
             vec::push(arms, {pats: pats, guard: guard, body: blk});
@@ -1736,9 +1738,9 @@ class parser {
                 if self.token == token::UNDERSCORE {
                     self.bump();
                     if self.token != token::RBRACE {
-                        self.fatal("expected `}`, found `" +
+                        self.fatal(~"expected `}`, found `" +
                                    token_to_str(self.reader, self.token) +
-                                   "`");
+                                   ~"`");
                     }
                     etc = true;
                     break;
@@ -1789,10 +1791,10 @@ class parser {
             }
           }
           tok {
-            if !is_ident(tok) || self.is_keyword("true")
-                || self.is_keyword("false") {
+            if !is_ident(tok) || self.is_keyword(~"true")
+                || self.is_keyword(~"false") {
                 let val = self.parse_expr_res(RESTRICT_NO_BAR_OP);
-                if self.eat_keyword("to") {
+                if self.eat_keyword(~"to") {
                     let end = self.parse_expr_res(RESTRICT_NO_BAR_OP);
                     hi = end.span.hi;
                     pat = pat_range(val, end);
@@ -1866,7 +1868,7 @@ class parser {
     }
 
     fn parse_let() -> @decl {
-        let is_mutbl = self.eat_keyword("mut");
+        let is_mutbl = self.eat_keyword(~"mut");
         let lo = self.span.lo;
         let mut locals = ~[self.parse_local(is_mutbl, true)];
         while self.eat(token::COMMA) {
@@ -1879,11 +1881,11 @@ class parser {
     fn parse_instance_var(pr: visibility) -> @class_member {
         let mut is_mutbl = class_immutable;
         let lo = self.span.lo;
-        if self.eat_keyword("mut") {
+        if self.eat_keyword(~"mut") {
             is_mutbl = class_mutable;
         }
         if !is_plain_ident(self.token) {
-            self.fatal("expected ident");
+            self.fatal(~"expected ident");
         }
         let name = self.parse_ident();
         self.expect(token::COLON);
@@ -1896,14 +1898,14 @@ class parser {
         fn check_expected_item(p: parser, current_attrs: ~[attribute]) {
             // If we have attributes then we should have an item
             if vec::is_not_empty(current_attrs) {
-                p.fatal("expected item");
+                p.fatal(~"expected item");
             }
         }
 
         let lo = self.span.lo;
-        if self.is_keyword("let") {
+        if self.is_keyword(~"let") {
             check_expected_item(self, first_item_attrs);
-            self.expect_keyword("let");
+            self.expect_keyword(~"let");
             let decl = self.parse_let();
             ret @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id()));
         } else {
@@ -1936,7 +1938,7 @@ class parser {
     }
 
     fn expr_is_complete(e: pexpr) -> bool {
-        log(debug, ("expr_is_complete", self.restriction,
+        log(debug, (~"expr_is_complete", self.restriction,
                     print::pprust::expr_to_str(*e),
                     classify::expr_requires_semi_to_be_stmt(*e)));
         ret self.restriction == RESTRICT_STMT_EXPR &&
@@ -1962,12 +1964,12 @@ class parser {
         }
 
         let lo = self.span.lo;
-        if self.eat_keyword("unchecked") {
+        if self.eat_keyword(~"unchecked") {
             self.expect(token::LBRACE);
             let {inner, next} = maybe_parse_inner_attrs_and_next(self,
                                                                  parse_attrs);
             ret (inner, self.parse_block_tail_(lo, unchecked_blk, next));
-        } else if self.eat_keyword("unsafe") {
+        } else if self.eat_keyword(~"unsafe") {
             self.expect(token::LBRACE);
             let {inner, next} = maybe_parse_inner_attrs_and_next(self,
                                                                  parse_attrs);
@@ -2004,7 +2006,7 @@ class parser {
         let mut initial_attrs = attrs_remaining;
 
         if self.token == token::RBRACE && !vec::is_empty(initial_attrs) {
-            self.fatal("expected item");
+            self.fatal(~"expected item");
         }
 
         while self.token != token::RBRACE {
@@ -2028,9 +2030,9 @@ class parser {
                       }
                       t {
                         if classify::stmt_ends_with_semi(*stmt) {
-                            self.fatal("expected `;` or `}` after expression \
-                                        but found `"
-                                       + token_to_str(self.reader, t) + "`");
+                            self.fatal(~"expected `;` or `}` after \
+                                         expression but found `"
+                                       + token_to_str(self.reader, t) + ~"`");
                         }
                         vec::push(stmts, stmt);
                       }
@@ -2060,9 +2062,9 @@ class parser {
         let ident = self.parse_ident();
         if self.eat(token::COLON) {
             while self.token != token::COMMA && self.token != token::GT {
-                if self.eat_keyword("send") { push(bounds, bound_send); }
-                else if self.eat_keyword("copy") { push(bounds, bound_copy) }
-                else if self.eat_keyword("const") {
+                if self.eat_keyword(~"send") { push(bounds, bound_send); }
+                else if self.eat_keyword(~"copy") { push(bounds, bound_copy) }
+                else if self.eat_keyword(~"const") {
                     push(bounds, bound_const)
                 }
                 else { push(bounds, bound_trait(self.parse_ty(false))); }
@@ -2156,16 +2158,16 @@ class parser {
     fn parse_method_name() -> ident {
         alt copy self.token {
           token::BINOP(op) { self.bump(); @token::binop_to_str(op) }
-          token::NOT { self.bump(); @"!"/~ }
+          token::NOT { self.bump(); @~"!" }
           token::LBRACKET {
             self.bump();
             self.expect(token::RBRACKET);
-            @"[]"/~
+            @~"[]"
           }
           _ {
             let id = self.parse_value_ident();
-            if id == @"unary"/~ && self.eat(token::BINOP(token::MINUS)) {
-                @"unary-"/~
+            if id == @~"unary" && self.eat(token::BINOP(token::MINUS)) {
+                @~"unary-"
             }
             else { id }
           }
@@ -2208,7 +2210,7 @@ class parser {
                 self.parse_region_param();
                 (none, self.parse_ty_params())
             }
-            else if self.is_keyword("of") {
+            else if self.is_keyword(~"of") {
                 (none, ~[])
             } else {
                 let id = self.parse_ident();
@@ -2216,7 +2218,7 @@ class parser {
                 (some(id), self.parse_ty_params())
             }
         };
-        let ifce = if self.eat_keyword("of") {
+        let ifce = if self.eat_keyword(~"of") {
             let path = self.parse_path_with_tps(false);
             if option::is_none(ident) {
                 ident = some(vec::last(path.idents));
@@ -2225,9 +2227,9 @@ class parser {
         } else { none };
         let ident = alt ident {
           some(name) { name }
-          none { self.expect_keyword("of"); fail; }
+          none { self.expect_keyword(~"of"); fail; }
         };
-        self.expect_keyword("for");
+        self.expect_keyword(~"for");
         let ty = self.parse_ty(false);
         let mut meths = ~[];
         self.expect(token::LBRACE);
@@ -2310,14 +2312,14 @@ class parser {
           Is it strange for the parser to check this?
           */
           none {
-            self.fatal("class with no constructor");
+            self.fatal(~"class with no constructor");
           }
         }
     }
 
     fn parse_single_class_item(vis: visibility)
         -> @class_member {
-        if self.eat_keyword("let") {
+        if self.eat_keyword(~"let") {
             let a_var = self.parse_instance_var(vis);
             self.expect(token::SEMI);
             ret a_var;
@@ -2348,15 +2350,15 @@ class parser {
 
     fn parse_class_item(class_name_with_tps: @path)
         -> class_contents {
-        if self.eat_keyword("new") {
+        if self.eat_keyword(~"new") {
             // result type is always the type of the class
             ret self.parse_ctor(ty_path(class_name_with_tps,
                                         self.get_id()));
         }
-        else if self.eat_keyword("drop") {
+        else if self.eat_keyword(~"drop") {
             ret self.parse_dtor();
         }
-        else if self.eat_keyword("priv") {
+        else if self.eat_keyword(~"priv") {
             self.expect(token::LBRACE);
             let mut results = ~[];
             while self.token != token::RBRACE {
@@ -2372,8 +2374,8 @@ class parser {
     }
 
     fn parse_visibility(def: visibility) -> visibility {
-        if self.eat_keyword("pub") { public }
-        else if self.eat_keyword("priv") { private }
+        if self.eat_keyword(~"pub") { public }
+        else if self.eat_keyword(~"priv") { private }
         else { def }
     }
 
@@ -2395,8 +2397,8 @@ class parser {
             alt self.parse_item(attrs, vis) {
               some(i) { vec::push(items, i); }
               _ {
-                self.fatal("expected item but found `" +
-                           token_to_str(self.reader, self.token) + "`");
+                self.fatal(~"expected item but found `" +
+                           token_to_str(self.reader, self.token) + ~"`");
               }
             }
             #debug["parse_mod_items: attrs=%?", attrs];
@@ -2404,7 +2406,7 @@ class parser {
 
         if first && attrs_remaining.len() > 0u {
             // We parsed attributes for the first item but didn't find it
-            self.fatal("expected item");
+            self.fatal(~"expected item");
         }
 
         ret {view_items: view_items, items: items};
@@ -2444,12 +2446,12 @@ class parser {
     }
 
     fn parse_fn_purity() -> purity {
-        if self.eat_keyword("fn") { impure_fn }
-        else if self.eat_keyword("pure") {
-            self.expect_keyword("fn");
+        if self.eat_keyword(~"fn") { impure_fn }
+        else if self.eat_keyword(~"pure") {
+            self.expect_keyword(~"fn");
             pure_fn
-        } else if self.eat_keyword("unsafe") {
-            self.expect_keyword("fn");
+        } else if self.eat_keyword(~"unsafe") {
+            self.expect_keyword(~"fn");
             unsafe_fn
         }
         else { self.unexpected(); }
@@ -2478,7 +2480,7 @@ class parser {
     }
 
     fn parse_item_foreign_mod() -> item_info {
-        self.expect_keyword("mod");
+        self.expect_keyword(~"mod");
         let id = self.parse_ident();
         self.expect(token::LBRACE);
         let more_attrs = self.parse_inner_attrs_and_next();
@@ -2563,7 +2565,7 @@ class parser {
         }
         self.expect(token::RBRACE);
         if (have_disr && !all_nullary) {
-            self.fatal("discriminator values can only be used with a c-like \
+            self.fatal(~"discriminator values can only be used with a c-like \
                         enum");
         }
         (id, item_enum(variants, ty_params), none)
@@ -2603,39 +2605,39 @@ class parser {
     fn parse_item(+attrs: ~[attribute], vis: visibility)
         -> option<@item> {
         let lo = self.span.lo;
-        let (ident, item_, extra_attrs) = if self.eat_keyword("const") {
+        let (ident, item_, extra_attrs) = if self.eat_keyword(~"const") {
             self.parse_item_const()
-        } else if self.is_keyword("fn") &&
+        } else if self.is_keyword(~"fn") &&
             !self.fn_expr_lookahead(self.look_ahead(1u)) {
             self.bump();
             self.parse_item_fn(impure_fn)
-        } else if self.eat_keyword("pure") {
-            self.expect_keyword("fn");
+        } else if self.eat_keyword(~"pure") {
+            self.expect_keyword(~"fn");
             self.parse_item_fn(pure_fn)
-        } else if self.is_keyword("unsafe")
+        } else if self.is_keyword(~"unsafe")
             && self.look_ahead(1u) != token::LBRACE {
             self.bump();
-            self.expect_keyword("fn");
+            self.expect_keyword(~"fn");
             self.parse_item_fn(unsafe_fn)
-        } else if self.eat_keyword("extern") {
-            if self.eat_keyword("fn") {
+        } else if self.eat_keyword(~"extern") {
+            if self.eat_keyword(~"fn") {
                 self.parse_item_fn(extern_fn)
             } else {
                 self.parse_item_foreign_mod()
             }
-        } else if self.eat_keyword("mod") {
+        } else if self.eat_keyword(~"mod") {
             self.parse_item_mod()
-        } else if self.eat_keyword("type") {
+        } else if self.eat_keyword(~"type") {
             self.parse_item_type()
-        } else if self.eat_keyword("enum") {
+        } else if self.eat_keyword(~"enum") {
             self.parse_item_enum(vis)
-        } else if self.eat_keyword("iface") {
+        } else if self.eat_keyword(~"iface") {
             self.parse_item_trait()
-        } else if self.eat_keyword("trait") {
+        } else if self.eat_keyword(~"trait") {
             self.parse_item_trait()
-        } else if self.eat_keyword("impl") {
+        } else if self.eat_keyword(~"impl") {
             self.parse_item_impl()
-        } else if self.eat_keyword("class") {
+        } else if self.eat_keyword(~"class") {
             self.parse_item_class()
         } else if !self.is_any_keyword(copy self.token)
             && self.look_ahead(1) == token::NOT
@@ -2747,21 +2749,21 @@ class parser {
     }
 
     fn is_view_item() -> bool {
-        let tok = if !self.is_keyword("pub") && !self.is_keyword("priv") {
+        let tok = if !self.is_keyword(~"pub") && !self.is_keyword(~"priv") {
             self.token
         } else { self.look_ahead(1u) };
-        self.token_is_keyword("use", tok)
-            || self.token_is_keyword("import", tok)
-            || self.token_is_keyword("export", tok)
+        self.token_is_keyword(~"use", tok)
+            || self.token_is_keyword(~"import", tok)
+            || self.token_is_keyword(~"export", tok)
     }
 
     fn parse_view_item(+attrs: ~[attribute]) -> @view_item {
         let lo = self.span.lo, vis = self.parse_visibility(private);
-        let node = if self.eat_keyword("use") {
+        let node = if self.eat_keyword(~"use") {
             self.parse_use()
-        } else if self.eat_keyword("import") {
+        } else if self.eat_keyword(~"import") {
             view_item_import(self.parse_view_paths())
-        } else if self.eat_keyword("export") {
+        } else if self.eat_keyword(~"export") {
             view_item_export(self.parse_view_paths())
         } else { fail; };
         self.expect(token::SEMI);
@@ -2775,7 +2777,7 @@ class parser {
         let mut attrs = vec::append(first_item_attrs,
                                     self.parse_outer_attributes());
         let mut items = ~[];
-        while if only_imports { self.is_keyword("import") }
+        while if only_imports { self.is_keyword(~"import") }
         else { self.is_view_item() } {
             vec::push(items, self.parse_view_item(attrs));
             attrs = self.parse_outer_attributes();
@@ -2796,11 +2798,11 @@ class parser {
                       config: self.cfg});
     }
 
-    fn parse_str() -> @str/~ {
+    fn parse_str() -> @~str {
         alt copy self.token {
           token::LIT_STR(s) { self.bump(); self.get_str(s) }
           _ {
-            self.fatal("expected string literal")
+            self.fatal(~"expected string literal")
           }
         }
     }
@@ -2821,8 +2823,8 @@ class parser {
         let expect_mod = vec::len(outer_attrs) > 0u;
 
         let lo = self.span.lo;
-        if expect_mod || self.is_keyword("mod") {
-            self.expect_keyword("mod");
+        if expect_mod || self.is_keyword(~"mod") {
+            self.expect_keyword(~"mod");
             let id = self.parse_ident();
             alt self.token {
               // mod x = "foo.rs";
@@ -2849,7 +2851,7 @@ class parser {
         } else if self.is_view_item() {
             let vi = self.parse_view_item(outer_attrs);
             ret spanned(lo, vi.span.hi, cdir_view_item(vi));
-        } else { ret self.fatal("expected crate directive"); }
+        } else { ret self.fatal(~"expected crate directive"); }
     }
 
     fn parse_crate_directives(term: token::token,
@@ -2860,7 +2862,7 @@ class parser {
         // accept seeing the terminator next, so if we do see it then fail the
         // same way parse_crate_directive would
         if vec::len(first_outer_attr) > 0u && self.token == term {
-            self.expect_keyword("mod");
+            self.expect_keyword(~"mod");
         }
 
         let mut cdirs: ~[@crate_directive] = ~[];
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 2c7b14cfe11..7db5af23266 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -100,61 +100,61 @@ enum whole_nt {
     w_mtcs(~[ast::matcher])
 }
 
-fn binop_to_str(o: binop) -> str {
+fn binop_to_str(o: binop) -> ~str {
     alt o {
-      PLUS { "+" }
-      MINUS { "-" }
-      STAR { "*" }
-      SLASH { "/" }
-      PERCENT { "%" }
-      CARET { "^" }
-      AND { "&" }
-      OR { "|" }
-      SHL { "<<" }
-      SHR { ">>" }
+      PLUS { ~"+" }
+      MINUS { ~"-" }
+      STAR { ~"*" }
+      SLASH { ~"/" }
+      PERCENT { ~"%" }
+      CARET { ~"^" }
+      AND { ~"&" }
+      OR { ~"|" }
+      SHL { ~"<<" }
+      SHR { ~">>" }
     }
 }
 
-fn to_str(in: interner<@str/~>, t: token) -> str {
+fn to_str(in: interner<@~str>, t: token) -> ~str {
     alt t {
-      EQ { "=" }
-      LT { "<" }
-      LE { "<=" }
-      EQEQ { "==" }
-      NE { "!=" }
-      GE { ">=" }
-      GT { ">" }
-      NOT { "!" }
-      TILDE { "~" }
-      OROR { "||" }
-      ANDAND { "&&" }
+      EQ { ~"=" }
+      LT { ~"<" }
+      LE { ~"<=" }
+      EQEQ { ~"==" }
+      NE { ~"!=" }
+      GE { ~">=" }
+      GT { ~">" }
+      NOT { ~"!" }
+      TILDE { ~"~" }
+      OROR { ~"||" }
+      ANDAND { ~"&&" }
       BINOP(op) { binop_to_str(op) }
-      BINOPEQ(op) { binop_to_str(op) + "=" }
+      BINOPEQ(op) { binop_to_str(op) + ~"=" }
 
       /* Structural symbols */
-      AT { "@" }
-      DOT { "." }
-      ELLIPSIS { "..." }
-      COMMA { "," }
-      SEMI { ";" }
-      COLON { ":" }
-      MOD_SEP { "::" }
-      RARROW { "->" }
-      LARROW { "<-" }
-      DARROW { "<->" }
-      FAT_ARROW { "=>" }
-      LPAREN { "(" }
-      RPAREN { ")" }
-      LBRACKET { "[" }
-      RBRACKET { "]" }
-      LBRACE { "{" }
-      RBRACE { "}" }
-      POUND { "#" }
-      DOLLAR { "$" }
+      AT { ~"@" }
+      DOT { ~"." }
+      ELLIPSIS { ~"..." }
+      COMMA { ~"," }
+      SEMI { ~";" }
+      COLON { ~":" }
+      MOD_SEP { ~"::" }
+      RARROW { ~"->" }
+      LARROW { ~"<-" }
+      DARROW { ~"<->" }
+      FAT_ARROW { ~"=>" }
+      LPAREN { ~"(" }
+      RPAREN { ~")" }
+      LBRACKET { ~"[" }
+      RBRACKET { ~"]" }
+      LBRACE { ~"{" }
+      RBRACE { ~"}" }
+      POUND { ~"#" }
+      DOLLAR { ~"$" }
 
       /* Literals */
       LIT_INT(c, ast::ty_char) {
-        "'" + char::escape_default(c as char) + "'"
+        ~"'" + char::escape_default(c as char) + ~"'"
       }
       LIT_INT(i, t) {
         int::to_str(i as int, 10u) + ast_util::int_ty_to_str(t)
@@ -170,28 +170,28 @@ fn to_str(in: interner<@str/~>, t: token) -> str {
             ast_util::float_ty_to_str(t)
       }
       LIT_STR(s) {
-        "\""
+        ~"\""
             + str::escape_default(*interner::get(in, s))
-            + "\""
+            + ~"\""
       }
 
       /* Name components */
       IDENT(s, _) {
         *interner::get(in, s)
       }
-      UNDERSCORE { "_" }
+      UNDERSCORE { ~"_" }
 
       /* Other */
       DOC_COMMENT(s) { *interner::get(in, s) }
-      EOF { "<eof>" }
+      EOF { ~"<eof>" }
       ACTUALLY(w_nt) {
-        "an interpolated " +
+        ~"an interpolated " +
             alt w_nt {
-              w_item(*) { "item" } w_block(*) { "block" }
-              w_stmt(*) { "statement" } w_pat(*) { "pattern" }
-              w_expr(*) { "expression" } w_ty(*) { "type" }
-              w_ident(*) { "identifier" } w_path(*) { "path" }
-              w_tt(*) { "tt" } w_mtcs(*) { "matcher sequence" }
+              w_item(*) { ~"item" } w_block(*) { ~"block" }
+              w_stmt(*) { ~"statement" } w_pat(*) { ~"pattern" }
+              w_expr(*) { ~"expression" } w_ty(*) { ~"type" }
+              w_ident(*) { ~"identifier" } w_path(*) { ~"path" }
+              w_tt(*) { ~"tt" } w_mtcs(*) { ~"matcher sequence" }
             }
       }
     }
@@ -256,7 +256,7 @@ pure fn is_bar(t: token) -> bool {
  * the grammar is unambiguous. Restricted keywords may not appear
  * in positions that might otherwise contain _value identifiers_.
  */
-fn keyword_table() -> hashmap<str, ()> {
+fn keyword_table() -> hashmap<~str, ()> {
     let keywords = str_hash();
     for contextual_keyword_table().each_key |word| {
         keywords.insert(word, ());
@@ -268,18 +268,18 @@ fn keyword_table() -> hashmap<str, ()> {
 }
 
 /// Keywords that may be used as identifiers
-fn contextual_keyword_table() -> hashmap<str, ()> {
+fn contextual_keyword_table() -> hashmap<~str, ()> {
     let words = str_hash();
     let keys = ~[
-        "as",
-        "else",
-        "move",
-        "of",
-        "priv", "pub",
-        "self", "send", "static",
-        "to",
-        "use",
-        "with"
+        ~"as",
+        ~"else",
+        ~"move",
+        ~"of",
+        ~"priv", ~"pub",
+        ~"self", ~"send", ~"static",
+        ~"to",
+        ~"use",
+        ~"with"
     ];
     for keys.each |word| {
         words.insert(word, ());
@@ -301,23 +301,23 @@ fn contextual_keyword_table() -> hashmap<str, ()> {
  * * `true` or `false` as identifiers would always be shadowed by
  *   the boolean constants
  */
-fn restricted_keyword_table() -> hashmap<str, ()> {
+fn restricted_keyword_table() -> hashmap<~str, ()> {
     let words = str_hash();
     let keys = ~[
-        "alt", "again", "assert",
-        "break",
-        "check", "claim", "class", "const", "copy",
-        "do", "drop",
-        "else", "enum", "export", "extern",
-        "fail", "false", "fn", "for",
-        "if", "iface", "impl", "import",
-        "let", "log", "loop",
-        "mod", "mut",
-        "new",
-        "pure", "ret",
-        "true", "trait", "type",
-        "unchecked", "unsafe",
-        "while"
+        ~"alt", ~"again", ~"assert",
+        ~"break",
+        ~"check", ~"claim", ~"class", ~"const", ~"copy",
+        ~"do", ~"drop",
+        ~"else", ~"enum", ~"export", ~"extern",
+        ~"fail", ~"false", ~"fn", ~"for",
+        ~"if", ~"iface", ~"impl", ~"import",
+        ~"let", ~"log", ~"loop",
+        ~"mod", ~"mut",
+        ~"new",
+        ~"pure", ~"ret",
+        ~"true", ~"trait", ~"type",
+        ~"unchecked", ~"unsafe",
+        ~"while"
     ];
     for keys.each |word| {
         words.insert(word, ());
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 09754d27e67..1a318d53635 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -59,33 +59,33 @@ type break_t = {offset: int, blank_space: int};
 
 type begin_t = {offset: int, breaks: breaks};
 
-enum token { STRING(@str/~, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
+enum token { STRING(@~str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
 
-fn tok_str(++t: token) -> str {
+fn tok_str(++t: token) -> ~str {
     alt t {
       STRING(s, len) { ret #fmt["STR(%s,%d)", *s, len]; }
-      BREAK(_) { ret "BREAK"; }
-      BEGIN(_) { ret "BEGIN"; }
-      END { ret "END"; }
-      EOF { ret "EOF"; }
+      BREAK(_) { ret ~"BREAK"; }
+      BEGIN(_) { ret ~"BEGIN"; }
+      END { ret ~"END"; }
+      EOF { ret ~"EOF"; }
     }
 }
 
 fn buf_str(toks: ~[mut token], szs: ~[mut int], left: uint, right: uint,
-           lim: uint) -> str {
+           lim: uint) -> ~str {
     let n = vec::len(toks);
     assert (n == vec::len(szs));
     let mut i = left;
     let mut L = lim;
-    let mut s = "[";
+    let mut s = ~"[";
     while i != right && L != 0u {
         L -= 1u;
-        if i != left { s += ", "; }
+        if i != left { s += ~", "; }
         s += #fmt["%d=%s", szs[i], tok_str(toks[i])];
         i += 1u;
         i %= n;
     }
-    s += "]";
+    s += ~"]";
     ret s;
 }
 
@@ -389,7 +389,7 @@ impl printer for printer {
     }
     fn print_newline(amount: int) {
         #debug("NEWLINE %d", amount);
-        self.out.write_str("\n");
+        self.out.write_str(~"\n");
         self.pending_indentation = 0;
         self.indent(amount);
     }
@@ -405,9 +405,9 @@ impl printer for printer {
             {offset: 0, pbreak: broken(inconsistent)}
         }
     }
-    fn write_str(s: str) {
+    fn write_str(s: ~str) {
         while self.pending_indentation > 0 {
-            self.out.write_str(" ");
+            self.out.write_str(~" ");
             self.pending_indentation -= 1;
         }
         self.out.write_str(s);
@@ -492,15 +492,15 @@ fn end(p: printer) { p.pretty_print(END); }
 
 fn eof(p: printer) { p.pretty_print(EOF); }
 
-fn word(p: printer, wrd: str) {
+fn word(p: printer, wrd: ~str) {
     p.pretty_print(STRING(@wrd, str::len(wrd) as int));
 }
 
-fn huge_word(p: printer, wrd: str) {
+fn huge_word(p: printer, wrd: ~str) {
     p.pretty_print(STRING(@wrd, size_infinity));
 }
 
-fn zero_word(p: printer, wrd: str) { p.pretty_print(STRING(@wrd, 0)); }
+fn zero_word(p: printer, wrd: ~str) { p.pretty_print(STRING(@wrd, 0)); }
 
 fn spaces(p: printer, n: uint) { break_offset(p, n, 0); }
 
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index c0cbf78c5cf..29da51ec083 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -64,7 +64,7 @@ const default_columns: uint = 78u;
 // it can scan the input text for comments and literals to
 // copy forward.
 fn print_crate(cm: codemap, span_diagnostic: diagnostic::span_handler,
-               crate: @ast::crate, filename: str, in: io::reader,
+               crate: @ast::crate, filename: ~str, in: io::reader,
                out: io::writer, ann: pp_ann, is_expanded: bool) {
     let r = comments::gather_comments_and_literals(span_diagnostic,
                                                    filename, in);
@@ -89,28 +89,28 @@ fn print_crate_(s: ps, &&crate: @ast::crate) {
     eof(s.s);
 }
 
-fn ty_to_str(ty: @ast::ty) -> str { ret to_str(ty, print_type); }
+fn ty_to_str(ty: @ast::ty) -> ~str { ret to_str(ty, print_type); }
 
-fn pat_to_str(pat: @ast::pat) -> str { ret to_str(pat, print_pat); }
+fn pat_to_str(pat: @ast::pat) -> ~str { ret to_str(pat, print_pat); }
 
-fn expr_to_str(e: @ast::expr) -> str { ret to_str(e, print_expr); }
+fn expr_to_str(e: @ast::expr) -> ~str { ret to_str(e, print_expr); }
 
-fn stmt_to_str(s: ast::stmt) -> str { ret to_str(s, print_stmt); }
+fn stmt_to_str(s: ast::stmt) -> ~str { ret to_str(s, print_stmt); }
 
-fn item_to_str(i: @ast::item) -> str { ret to_str(i, print_item); }
+fn item_to_str(i: @ast::item) -> ~str { ret to_str(i, print_item); }
 
-fn attr_to_str(i: ast::attribute) -> str { ret to_str(i, print_attribute); }
+fn attr_to_str(i: ast::attribute) -> ~str { ret to_str(i, print_attribute); }
 
-fn typarams_to_str(tps: ~[ast::ty_param]) -> str {
+fn typarams_to_str(tps: ~[ast::ty_param]) -> ~str {
     ret to_str(tps, print_type_params)
 }
 
-fn path_to_str(&&p: @ast::path) -> str {
+fn path_to_str(&&p: @ast::path) -> ~str {
     ret to_str(p, |a,b| print_path(a, b, false));
 }
 
 fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
-              params: ~[ast::ty_param]) -> str {
+              params: ~[ast::ty_param]) -> ~str {
     let buffer = io::mem_buffer();
     let s = rust_printer(io::mem_buffer_writer(buffer));
     print_fn(s, decl, name, params);
@@ -134,7 +134,7 @@ fn test_fun_to_str() {
     assert fun_to_str(decl, "a", ~[]) == "fn a()";
 }
 
-fn block_to_str(blk: ast::blk) -> str {
+fn block_to_str(blk: ast::blk) -> ~str {
     let buffer = io::mem_buffer();
     let s = rust_printer(io::mem_buffer_writer(buffer));
     // containing cbox, will be closed by print-block at }
@@ -146,15 +146,15 @@ fn block_to_str(blk: ast::blk) -> str {
     io::mem_buffer_str(buffer)
 }
 
-fn meta_item_to_str(mi: ast::meta_item) -> str {
+fn meta_item_to_str(mi: ast::meta_item) -> ~str {
     ret to_str(@mi, print_meta_item);
 }
 
-fn attribute_to_str(attr: ast::attribute) -> str {
+fn attribute_to_str(attr: ast::attribute) -> ~str {
     ret to_str(attr, print_attribute);
 }
 
-fn variant_to_str(var: ast::variant) -> str {
+fn variant_to_str(var: ast::variant) -> ~str {
     ret to_str(var, print_variant);
 }
 
@@ -182,17 +182,17 @@ fn box(s: ps, u: uint, b: pp::breaks) {
     pp::box(s.s, u, b);
 }
 
-fn nbsp(s: ps) { word(s.s, " "); }
+fn nbsp(s: ps) { word(s.s, ~" "); }
 
-fn word_nbsp(s: ps, w: str) { word(s.s, w); nbsp(s); }
+fn word_nbsp(s: ps, w: ~str) { word(s.s, w); nbsp(s); }
 
-fn word_space(s: ps, w: str) { word(s.s, w); space(s.s); }
+fn word_space(s: ps, w: ~str) { word(s.s, w); space(s.s); }
 
-fn popen(s: ps) { word(s.s, "("); }
+fn popen(s: ps) { word(s.s, ~"("); }
 
-fn pclose(s: ps) { word(s.s, ")"); }
+fn pclose(s: ps) { word(s.s, ~")"); }
 
-fn head(s: ps, w: str) {
+fn head(s: ps, w: ~str) {
     // outer-box is consistent
     cbox(s, indent_unit);
     // head-box is inconsistent
@@ -202,14 +202,14 @@ fn head(s: ps, w: str) {
 }
 
 fn bopen(s: ps) {
-    word(s.s, "{");
+    word(s.s, ~"{");
     end(s); // close the head-box
 }
 
 fn bclose_(s: ps, span: codemap::span, indented: uint) {
     maybe_print_comment(s, span.hi);
     break_offset_if_not_bol(s, 1u, -(indented as int));
-    word(s.s, "}");
+    word(s.s, ~"}");
     end(s); // close the outer-box
 }
 fn bclose(s: ps, span: codemap::span) { bclose_(s, span, indent_unit); }
@@ -250,19 +250,19 @@ fn break_offset_if_not_bol(s: ps, n: uint, off: int) {
 
 // Synthesizes a comment that was not textually present in the original source
 // file.
-fn synth_comment(s: ps, text: str) {
-    word(s.s, "/*");
+fn synth_comment(s: ps, text: ~str) {
+    word(s.s, ~"/*");
     space(s.s);
     word(s.s, text);
     space(s.s);
-    word(s.s, "*/");
+    word(s.s, ~"*/");
 }
 
 fn commasep<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN)) {
     box(s, 0u, b);
     let mut first = true;
     for elts.each |elt| {
-        if first { first = false; } else { word_space(s, ","); }
+        if first { first = false; } else { word_space(s, ~","); }
         op(s, elt);
     }
     end(s);
@@ -279,7 +279,7 @@ fn commasep_cmnt<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN),
         op(s, elt);
         i += 1u;
         if i < len {
-            word(s.s, ",");
+            word(s.s, ~",");
             maybe_print_trailing_comment(s, get_span(elt),
                                          some(get_span(elts[i]).hi));
             space_if_not_bol(s);
@@ -312,9 +312,9 @@ fn print_foreign_mod(s: ps, nmod: ast::foreign_mod,
 
 fn print_region(s: ps, region: @ast::region) {
     alt region.node {
-      ast::re_anon { word_space(s, "&"); }
+      ast::re_anon { word_space(s, ~"&"); }
       ast::re_named(name) {
-        word(s.s, "&");
+        word(s.s, ~"&");
         word(s.s, *name);
       }
     }
@@ -328,41 +328,41 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
     maybe_print_comment(s, ty.span.lo);
     ibox(s, 0u);
     alt ty.node {
-      ast::ty_nil { word(s.s, "()"); }
-      ast::ty_bot { word(s.s, "!"); }
-      ast::ty_box(mt) { word(s.s, "@"); print_mt(s, mt); }
-      ast::ty_uniq(mt) { word(s.s, "~"); print_mt(s, mt); }
+      ast::ty_nil { word(s.s, ~"()"); }
+      ast::ty_bot { word(s.s, ~"!"); }
+      ast::ty_box(mt) { word(s.s, ~"@"); print_mt(s, mt); }
+      ast::ty_uniq(mt) { word(s.s, ~"~"); print_mt(s, mt); }
       ast::ty_vec(mt) {
-        word(s.s, "[");
+        word(s.s, ~"[");
         alt mt.mutbl {
-          ast::m_mutbl { word_space(s, "mut"); }
-          ast::m_const { word_space(s, "const"); }
+          ast::m_mutbl { word_space(s, ~"mut"); }
+          ast::m_const { word_space(s, ~"const"); }
           ast::m_imm { }
         }
         print_type(s, mt.ty);
-        word(s.s, "]");
+        word(s.s, ~"]");
       }
-      ast::ty_ptr(mt) { word(s.s, "*"); print_mt(s, mt); }
+      ast::ty_ptr(mt) { word(s.s, ~"*"); print_mt(s, mt); }
       ast::ty_rptr(region, mt) {
         alt region.node {
-          ast::re_anon { word(s.s, "&"); }
-          _ { print_region(s, region); word(s.s, "."); }
+          ast::re_anon { word(s.s, ~"&"); }
+          _ { print_region(s, region); word(s.s, ~"."); }
         }
         print_mt(s, mt);
       }
       ast::ty_rec(fields) {
-        word(s.s, "{");
+        word(s.s, ~"{");
         fn print_field(s: ps, f: ast::ty_field) {
             cbox(s, indent_unit);
             print_mutability(s, f.node.mt.mutbl);
             word(s.s, *f.node.ident);
-            word_space(s, ":");
+            word_space(s, ~":");
             print_type(s, f.node.mt.ty);
             end(s);
         }
         fn get_span(f: ast::ty_field) -> codemap::span { ret f.span; }
         commasep_cmnt(s, consistent, fields, print_field, get_span);
-        word(s.s, ",}");
+        word(s.s, ~",}");
       }
       ast::ty_tup(elts) {
         popen(s);
@@ -382,7 +382,7 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
         alt v {
           ast::vstore_fixed(_) {
             print_type(s, t);
-            word(s.s, "/");
+            word(s.s, ~"/");
             print_vstore(s, v);
           }
           _ {
@@ -392,10 +392,10 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
         }
       }
       ast::ty_mac(_) {
-          fail "print_type doesn't know how to print a ty_mac";
+          fail ~"print_type doesn't know how to print a ty_mac";
       }
       ast::ty_infer {
-          fail "print_type shouldn't see a ty_infer";
+          fail ~"print_type shouldn't see a ty_infer";
       }
 
     }
@@ -410,7 +410,7 @@ fn print_foreign_item(s: ps, item: @ast::foreign_item) {
       ast::foreign_item_fn(decl, typarams) {
         print_fn(s, decl, item.ident, typarams);
         end(s); // end head-ibox
-        word(s.s, ";");
+        word(s.s, ~";");
         end(s); // end the outer fn box
       }
     }
@@ -424,33 +424,33 @@ fn print_item(s: ps, &&item: @ast::item) {
     s.ann.pre(ann_node);
     alt item.node {
       ast::item_const(ty, expr) {
-        head(s, "const");
-        word_space(s, *item.ident + ":");
+        head(s, ~"const");
+        word_space(s, *item.ident + ~":");
         print_type(s, ty);
         space(s.s);
         end(s); // end the head-ibox
 
-        word_space(s, "=");
+        word_space(s, ~"=");
         print_expr(s, expr);
-        word(s.s, ";");
+        word(s.s, ~";");
         end(s); // end the outer cbox
 
       }
       ast::item_fn(decl, typarams, body) {
         print_fn(s, decl, item.ident, typarams);
-        word(s.s, " ");
+        word(s.s, ~" ");
         print_block_with_attrs(s, body, item.attrs);
       }
       ast::item_mod(_mod) {
-        head(s, "mod");
+        head(s, ~"mod");
         word_nbsp(s, *item.ident);
         bopen(s);
         print_mod(s, _mod, item.attrs);
         bclose(s, item.span);
       }
       ast::item_foreign_mod(nmod) {
-        head(s, "extern");
-        word_nbsp(s, "mod");
+        head(s, ~"extern");
+        word_nbsp(s, ~"mod");
         word_nbsp(s, *item.ident);
         bopen(s);
         print_foreign_mod(s, nmod, item.attrs);
@@ -459,15 +459,15 @@ fn print_item(s: ps, &&item: @ast::item) {
       ast::item_ty(ty, params) {
         ibox(s, indent_unit);
         ibox(s, 0u);
-        word_nbsp(s, "type");
+        word_nbsp(s, ~"type");
         word(s.s, *item.ident);
         print_type_params(s, params);
         end(s); // end the inner ibox
 
         space(s.s);
-        word_space(s, "=");
+        word_space(s, ~"=");
         print_type(s, ty);
-        word(s.s, ";");
+        word(s.s, ~";");
         end(s); // end the outer ibox
       }
       ast::item_enum(variants, params) {
@@ -477,15 +477,15 @@ fn print_item(s: ps, &&item: @ast::item) {
                 vec::len(variants[0].node.args) == 1u;
         if newtype {
             ibox(s, indent_unit);
-            word_space(s, "enum");
-        } else { head(s, "enum"); }
+            word_space(s, ~"enum");
+        } else { head(s, ~"enum"); }
         word(s.s, *item.ident);
         print_type_params(s, params);
         space(s.s);
         if newtype {
-            word_space(s, "=");
+            word_space(s, ~"=");
             print_type(s, variants[0].node.args[0].ty);
-            word(s.s, ";");
+            word(s.s, ~";");
             end(s);
         } else {
             bopen(s);
@@ -495,7 +495,7 @@ fn print_item(s: ps, &&item: @ast::item) {
                 print_outer_attributes(s, v.node.attrs);
                 ibox(s, indent_unit);
                 print_variant(s, v);
-                word(s.s, ",");
+                word(s.s, ~",");
                 end(s);
                 maybe_print_trailing_comment(s, v.span, none::<uint>);
             }
@@ -503,25 +503,25 @@ fn print_item(s: ps, &&item: @ast::item) {
         }
       }
       ast::item_class(tps, traits, items, ctor, m_dtor) {
-          head(s, "class");
+          head(s, ~"class");
           word_nbsp(s, *item.ident);
           print_type_params(s, tps);
           if vec::len(traits) != 0u {
-              word_space(s, ":");
+              word_space(s, ~":");
               commasep(s, inconsistent, traits, |s, p|
                   print_path(s, p.path, false));
           }
           bopen(s);
           hardbreak_if_not_bol(s);
           maybe_print_comment(s, ctor.span.lo);
-          head(s, "new");
+          head(s, ~"new");
           print_fn_args_and_ret(s, ctor.node.dec, ~[]);
           space(s.s);
           print_block(s, ctor.node.body);
           do option::iter(m_dtor) |dtor| {
             hardbreak_if_not_bol(s);
             maybe_print_comment(s, dtor.span.lo);
-            head(s, "drop");
+            head(s, ~"drop");
             print_block(s, dtor.node.body);
           }
           for items.each |ci| {
@@ -537,7 +537,7 @@ fn print_item(s: ps, &&item: @ast::item) {
              let pr = ast_util::class_member_visibility(ci);
              alt pr {
                 ast::private {
-                    head(s, "priv");
+                    head(s, ~"priv");
                     bopen(s);
                     hardbreak_if_not_bol(s);
                 }
@@ -545,15 +545,15 @@ fn print_item(s: ps, &&item: @ast::item) {
              }
              alt ci.node {
                 ast::instance_var(nm, t, mt, _,_) {
-                    word_nbsp(s, "let");
+                    word_nbsp(s, ~"let");
                     alt mt {
-                      ast::class_mutable { word_nbsp(s, "mut"); }
+                      ast::class_mutable { word_nbsp(s, ~"mut"); }
                       _ {}
                     }
                     word(s.s, *nm);
-                    word_nbsp(s, ":");
+                    word_nbsp(s, ~":");
                     print_type(s, t);
-                    word(s.s, ";");
+                    word(s.s, ~";");
                 }
                 ast::class_method(m) {
                     print_method(s, m);
@@ -567,16 +567,16 @@ fn print_item(s: ps, &&item: @ast::item) {
           bclose(s, item.span);
        }
       ast::item_impl(tps, ifce, ty, methods) {
-        head(s, "impl");
+        head(s, ~"impl");
         word(s.s, *item.ident);
         print_type_params(s, tps);
         space(s.s);
         option::iter(ifce, |p| {
-            word_nbsp(s, "of");
+            word_nbsp(s, ~"of");
             print_path(s, p.path, false);
             space(s.s);
             });
-        word_nbsp(s, "for");
+        word_nbsp(s, ~"for");
         print_type(s, ty);
         space(s.s);
         bopen(s);
@@ -586,10 +586,10 @@ fn print_item(s: ps, &&item: @ast::item) {
         bclose(s, item.span);
       }
       ast::item_trait(tps, methods) {
-        head(s, "iface");
+        head(s, ~"iface");
         word(s.s, *item.ident);
         print_type_params(s, tps);
-        word(s.s, " ");
+        word(s.s, ~" ");
         bopen(s);
         for methods.each |meth| { print_trait_method(s, meth); }
         bclose(s, item.span);
@@ -602,7 +602,7 @@ fn print_item(s: ps, &&item: @ast::item) {
         bclose(s, item.span);
       }
       ast::item_mac(_) {
-        fail "invalid item-position syntax bit"
+        fail ~"invalid item-position syntax bit"
       }
     }
     s.ann.post(ann_node);
@@ -614,7 +614,7 @@ fn print_item(s: ps, &&item: @ast::item) {
 /// A prettier option would involve scraping the macro grammar for formatting
 /// advice. But that would be hard.
 fn print_tt(_s: ps, _tt: ast::token_tree) {
-    fail "token trees cannot be pretty-printed"
+    fail ~"token trees cannot be pretty-printed"
 }
 
 fn print_variant(s: ps, v: ast::variant) {
@@ -630,7 +630,7 @@ fn print_variant(s: ps, v: ast::variant) {
     alt v.node.disr_expr {
       some(d) {
         space(s.s);
-        word_space(s, "=");
+        word_space(s, ~"=");
         print_expr(s, d);
       }
       _ {}
@@ -642,7 +642,7 @@ fn print_ty_method(s: ps, m: ast::ty_method) {
     maybe_print_comment(s, m.span.lo);
     print_outer_attributes(s, m.attrs);
     print_ty_fn(s, none, m.decl, some(m.ident), some(m.tps));
-    word(s.s, ";");
+    word(s.s, ~";");
 }
 
 fn print_trait_method(s: ps, m: ast::trait_method) {
@@ -657,7 +657,7 @@ fn print_method(s: ps, meth: @ast::method) {
     maybe_print_comment(s, meth.span.lo);
     print_outer_attributes(s, meth.attrs);
     print_fn(s, meth.decl, meth.ident, meth.tps);
-    word(s.s, " ");
+    word(s.s, ~" ");
     print_block_with_attrs(s, meth.body, meth.attrs);
 }
 
@@ -679,7 +679,7 @@ fn print_inner_attributes(s: ps, attrs: ~[ast::attribute]) {
           ast::attr_inner {
             print_attribute(s, attr);
             if !attr.node.is_sugared_doc {
-                word(s.s, ";");
+                word(s.s, ~";");
             }
             count += 1;
           }
@@ -697,9 +697,9 @@ fn print_attribute(s: ps, attr: ast::attribute) {
         let comment = attr::get_meta_item_value_str(meta).get();
         word(s.s, *comment);
     } else {
-        word(s.s, "#[");
+        word(s.s, ~"#[");
         print_meta_item(s, @attr.node.value);
-        word(s.s, "]");
+        word(s.s, ~"]");
     }
 }
 
@@ -717,10 +717,10 @@ fn print_stmt(s: ps, st: ast::stmt) {
       ast::stmt_semi(expr, _) {
         space_if_not_bol(s);
         print_expr(s, expr);
-        word(s.s, ";");
+        word(s.s, ~";");
       }
     }
-    if parse::classify::stmt_ends_with_semi(st) { word(s.s, ";"); }
+    if parse::classify::stmt_ends_with_semi(st) { word(s.s, ~";"); }
     maybe_print_trailing_comment(s, st.span, none::<uint>);
 }
 
@@ -743,15 +743,15 @@ fn print_possibly_embedded_block(s: ps, blk: ast::blk, embedded: embed_type,
 fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
                                   indented: uint, attrs: ~[ast::attribute]) {
     alt blk.node.rules {
-      ast::unchecked_blk { word(s.s, "unchecked"); }
-      ast::unsafe_blk { word(s.s, "unsafe"); }
+      ast::unchecked_blk { word(s.s, ~"unchecked"); }
+      ast::unsafe_blk { word(s.s, ~"unsafe"); }
       ast::default_blk { }
     }
     maybe_print_comment(s, blk.span.lo);
     let ann_node = node_block(s, blk);
     s.ann.pre(ann_node);
     alt embedded {
-      block_macro { word(s.s, "#{"); end(s); }
+      block_macro { word(s.s, ~"#{"); end(s); }
       block_block_fn { end(s); }
       block_normal { bopen(s); }
     }
@@ -788,8 +788,8 @@ fn print_maybe_parens_discrim(s: ps, e: @ast::expr) {
 
 fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
             elseopt: option<@ast::expr>, chk: bool) {
-    head(s, "if");
-    if chk { word_nbsp(s, "check"); }
+    head(s, ~"if");
+    if chk { word_nbsp(s, ~"check"); }
     print_maybe_parens_discrim(s, test);
     space(s.s);
     print_block(s, blk);
@@ -801,7 +801,7 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
               ast::expr_if(i, t, e) {
                 cbox(s, indent_unit - 1u);
                 ibox(s, 0u);
-                word(s.s, " else if ");
+                word(s.s, ~" else if ");
                 print_maybe_parens_discrim(s, i);
                 space(s.s);
                 print_block(s, t);
@@ -811,12 +811,12 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
               ast::expr_block(b) {
                 cbox(s, indent_unit - 1u);
                 ibox(s, 0u);
-                word(s.s, " else ");
+                word(s.s, ~" else ");
                 print_block(s, b);
               }
               // BLEAH, constraints would be great here
               _ {
-                  fail "print_if saw if with weird alternative";
+                  fail ~"print_if saw if with weird alternative";
               }
             }
           }
@@ -829,31 +829,31 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
 fn print_mac(s: ps, m: ast::mac) {
     alt m.node {
       ast::mac_invoc(path, arg, body) {
-        word(s.s, "#");
+        word(s.s, ~"#");
         print_path(s, path, false);
         alt arg {
           some(@{node: ast::expr_vec(_, _), _}) { }
-          _ { word(s.s, " "); }
+          _ { word(s.s, ~" "); }
         }
         option::iter(arg, |a| print_expr(s, a));
         // FIXME: extension 'body' (#2339)
       }
       ast::mac_invoc_tt(path, tts) {
         print_path(s, path, false);
-        word(s.s, "!");
+        word(s.s, ~"!");
         bopen(s);
         for tts.each() |tt| { print_tt(s, tt); }
         bclose(s, m.span);
       }
       ast::mac_embed_type(ty) {
-        word(s.s, "#<");
+        word(s.s, ~"#<");
         print_type(s, ty);
-        word(s.s, ">");
+        word(s.s, ~">");
       }
       ast::mac_embed_block(blk) {
         print_possibly_embedded_block(s, blk, block_normal, indent_unit);
       }
-      ast::mac_ellipsis { word(s.s, "..."); }
+      ast::mac_ellipsis { word(s.s, ~"..."); }
       ast::mac_var(v) { word(s.s, #fmt("$%u", v)); }
       _ { /* fixme */ }
     }
@@ -862,16 +862,16 @@ fn print_mac(s: ps, m: ast::mac) {
 fn print_vstore(s: ps, t: ast::vstore) {
     alt t {
       ast::vstore_fixed(some(i)) { word(s.s, #fmt("%u", i)); }
-      ast::vstore_fixed(none) { word(s.s, "_"); }
-      ast::vstore_uniq { word(s.s, "~"); }
-      ast::vstore_box { word(s.s, "@"); }
+      ast::vstore_fixed(none) { word(s.s, ~"_"); }
+      ast::vstore_uniq { word(s.s, ~"~"); }
+      ast::vstore_box { word(s.s, ~"@"); }
       ast::vstore_slice(r) {
           alt r.node {
-            ast::re_anon { word(s.s, "&"); }
+            ast::re_anon { word(s.s, ~"&"); }
             ast::re_named(name) {
-                word(s.s, "&");
+                word(s.s, ~"&");
                 word(s.s, *name);
-                word(s.s, ".");
+                word(s.s, ~".");
             }
           }
       }
@@ -888,7 +888,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         alt v {
           ast::vstore_fixed(_) {
             print_expr(s, e);
-            word(s.s, "/");
+            word(s.s, ~"/");
             print_vstore(s, v);
           }
           _ {
@@ -899,38 +899,38 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
       }
       ast::expr_vec(exprs, mutbl) {
         ibox(s, indent_unit);
-        word(s.s, "[");
+        word(s.s, ~"[");
         if mutbl == ast::m_mutbl {
-            word(s.s, "mut");
+            word(s.s, ~"mut");
             if vec::len(exprs) > 0u { nbsp(s); }
         }
         commasep_exprs(s, inconsistent, exprs);
-        word(s.s, "]");
+        word(s.s, ~"]");
         end(s);
       }
       ast::expr_rec(fields, wth) {
         fn print_field(s: ps, field: ast::field) {
             ibox(s, indent_unit);
-            if field.node.mutbl == ast::m_mutbl { word_nbsp(s, "mut"); }
+            if field.node.mutbl == ast::m_mutbl { word_nbsp(s, ~"mut"); }
             word(s.s, *field.node.ident);
-            word_space(s, ":");
+            word_space(s, ~":");
             print_expr(s, field.node.expr);
             end(s);
         }
         fn get_span(field: ast::field) -> codemap::span { ret field.span; }
-        word(s.s, "{");
+        word(s.s, ~"{");
         commasep_cmnt(s, consistent, fields, print_field, get_span);
         alt wth {
           some(expr) {
             if vec::len(fields) > 0u { space(s.s); }
             ibox(s, indent_unit);
-            word_space(s, "with");
+            word_space(s, ~"with");
             print_expr(s, expr);
             end(s);
           }
-          _ { word(s.s, ","); }
+          _ { word(s.s, ~","); }
         }
-        word(s.s, "}");
+        word(s.s, ~"}");
       }
       ast::expr_tup(exprs) {
         popen(s);
@@ -942,8 +942,8 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         let blk = if has_block {
             let blk_arg = vec::pop(base_args);
             alt blk_arg.node {
-              ast::expr_loop_body(_) { word_nbsp(s, "for"); }
-              ast::expr_do_body(_) { word_nbsp(s, "do"); }
+              ast::expr_loop_body(_) { word_nbsp(s, ~"for"); }
+              ast::expr_do_body(_) { word_nbsp(s, ~"do"); }
               _ {}
             }
             some(blk_arg)
@@ -971,7 +971,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         print_op_maybe_parens(s, expr, parse::prec::unop_prec);
       }
       ast::expr_addr_of(m, expr) {
-        word(s.s, "&");
+        word(s.s, ~"&");
         print_mutability(s, m);
         print_expr(s, expr);
       }
@@ -979,7 +979,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
       ast::expr_cast(expr, ty) {
         print_op_maybe_parens(s, expr, parse::prec::as_prec);
         space(s.s);
-        word_space(s, "as");
+        word_space(s, ~"as");
         print_type_ex(s, ty, true);
       }
       ast::expr_if(test, blk, elseopt) {
@@ -989,21 +989,21 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         print_if(s, test, blk, elseopt, true);
       }
       ast::expr_while(test, blk) {
-        head(s, "while");
+        head(s, ~"while");
         print_maybe_parens_discrim(s, test);
         space(s.s);
         print_block(s, blk);
       }
       ast::expr_loop(blk) {
-        head(s, "loop");
+        head(s, ~"loop");
         space(s.s);
         print_block(s, blk);
       }
       ast::expr_alt(expr, arms, mode) {
         cbox(s, alt_indent_unit);
         ibox(s, 4u);
-        word_nbsp(s, "alt");
-        if mode == ast::alt_check { word_nbsp(s, "check"); }
+        word_nbsp(s, ~"alt");
+        if mode == ast::alt_check { word_nbsp(s, ~"check"); }
         print_maybe_parens_discrim(s, expr);
         space(s.s);
         bopen(s);
@@ -1015,12 +1015,12 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
             for arm.pats.each |p| {
                 if first {
                     first = false;
-                } else { space(s.s); word_space(s, "|"); }
+                } else { space(s.s); word_space(s, ~"|"); }
                 print_pat(s, p);
             }
             space(s.s);
             alt arm.guard {
-              some(e) { word_space(s, "if"); print_expr(s, e); space(s.s); }
+              some(e) { word_space(s, ~"if"); print_expr(s, e); space(s.s); }
               none { }
             }
             print_possibly_embedded_block(s, arm.body, block_normal,
@@ -1060,30 +1060,30 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         ibox(s, 0u);
         print_block(s, blk);
       }
-      ast::expr_copy(e) { word_space(s, "copy"); print_expr(s, e); }
+      ast::expr_copy(e) { word_space(s, ~"copy"); print_expr(s, e); }
       ast::expr_move(lhs, rhs) {
         print_expr(s, lhs);
         space(s.s);
-        word_space(s, "<-");
+        word_space(s, ~"<-");
         print_expr(s, rhs);
       }
       ast::expr_assign(lhs, rhs) {
         print_expr(s, lhs);
         space(s.s);
-        word_space(s, "=");
+        word_space(s, ~"=");
         print_expr(s, rhs);
       }
       ast::expr_swap(lhs, rhs) {
         print_expr(s, lhs);
         space(s.s);
-        word_space(s, "<->");
+        word_space(s, ~"<->");
         print_expr(s, rhs);
       }
       ast::expr_assign_op(op, lhs, rhs) {
         print_expr(s, lhs);
         space(s.s);
         word(s.s, ast_util::binop_to_str(op));
-        word_space(s, "=");
+        word_space(s, ~"=");
         print_expr(s, rhs);
       }
       ast::expr_field(expr, id, tys) {
@@ -1093,46 +1093,46 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         } else {
             print_expr_parens_if_not_bot(s, expr);
         }
-        word(s.s, ".");
+        word(s.s, ~".");
         word(s.s, *id);
         if vec::len(tys) > 0u {
-            word(s.s, "::<");
+            word(s.s, ~"::<");
             commasep(s, inconsistent, tys, print_type);
-            word(s.s, ">");
+            word(s.s, ~">");
         }
       }
       ast::expr_index(expr, index) {
         print_expr_parens_if_not_bot(s, expr);
-        word(s.s, "[");
+        word(s.s, ~"[");
         print_expr(s, index);
-        word(s.s, "]");
+        word(s.s, ~"]");
       }
       ast::expr_path(path) { print_path(s, path, true); }
       ast::expr_fail(maybe_fail_val) {
-        word(s.s, "fail");
+        word(s.s, ~"fail");
         alt maybe_fail_val {
-          some(expr) { word(s.s, " "); print_expr(s, expr); }
+          some(expr) { word(s.s, ~" "); print_expr(s, expr); }
           _ { }
         }
       }
-      ast::expr_break { word(s.s, "break"); }
-      ast::expr_again { word(s.s, "again"); }
+      ast::expr_break { word(s.s, ~"break"); }
+      ast::expr_again { word(s.s, ~"again"); }
       ast::expr_ret(result) {
-        word(s.s, "ret");
+        word(s.s, ~"ret");
         alt result {
-          some(expr) { word(s.s, " "); print_expr(s, expr); }
+          some(expr) { word(s.s, ~" "); print_expr(s, expr); }
           _ { }
         }
       }
       ast::expr_log(lvl, lexp, expr) {
         alt check lvl {
-          1 { word_nbsp(s, "log"); print_expr(s, expr); }
-          0 { word_nbsp(s, "log_err"); print_expr(s, expr); }
+          1 { word_nbsp(s, ~"log"); print_expr(s, expr); }
+          0 { word_nbsp(s, ~"log_err"); print_expr(s, expr); }
           2 {
-            word_nbsp(s, "log");
+            word_nbsp(s, ~"log");
             popen(s);
             print_expr(s, lexp);
-            word(s.s, ",");
+            word(s.s, ~",");
             space_if_not_bol(s);
             print_expr(s, expr);
             pclose(s);
@@ -1141,19 +1141,19 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
       }
       ast::expr_check(m, expr) {
         alt m {
-          ast::claimed_expr { word_nbsp(s, "claim"); }
-          ast::checked_expr { word_nbsp(s, "check"); }
+          ast::claimed_expr { word_nbsp(s, ~"claim"); }
+          ast::checked_expr { word_nbsp(s, ~"check"); }
         }
         popen(s);
         print_expr(s, expr);
         pclose(s);
       }
       ast::expr_assert(expr) {
-        word_nbsp(s, "assert");
+        word_nbsp(s, ~"assert");
         print_expr(s, expr);
       }
       ast::expr_new(p, _, v) {
-        word_nbsp(s, "new");
+        word_nbsp(s, ~"new");
         popen(s);
         print_expr(s, p);
         pclose(s);
@@ -1186,7 +1186,7 @@ fn print_local_decl(s: ps, loc: @ast::local) {
     print_pat(s, loc.node.pat);
     alt loc.node.ty.node {
       ast::ty_infer { }
-      _ { word_space(s, ":"); print_type(s, loc.node.ty); }
+      _ { word_space(s, ~":"); print_type(s, loc.node.ty); }
     }
 }
 
@@ -1196,12 +1196,12 @@ fn print_decl(s: ps, decl: @ast::decl) {
       ast::decl_local(locs) {
         space_if_not_bol(s);
         ibox(s, indent_unit);
-        word_nbsp(s, "let");
+        word_nbsp(s, ~"let");
 
         // if any are mut, all are mut
         if vec::any(locs, |l| l.node.is_mutbl) {
             assert vec::all(locs, |l| l.node.is_mutbl);
-            word_nbsp(s, "mut");
+            word_nbsp(s, ~"mut");
         }
 
         fn print_local(s: ps, &&loc: @ast::local) {
@@ -1212,8 +1212,8 @@ fn print_decl(s: ps, decl: @ast::decl) {
               some(init) {
                 nbsp(s);
                 alt init.op {
-                  ast::init_assign { word_space(s, "="); }
-                  ast::init_move { word_space(s, "<-"); }
+                  ast::init_assign { word_space(s, ~"="); }
+                  ast::init_move { word_space(s, ~"<-"); }
                 }
                 print_expr(s, init.expr);
               }
@@ -1232,33 +1232,33 @@ fn print_ident(s: ps, ident: ast::ident) { word(s.s, *ident); }
 fn print_for_decl(s: ps, loc: @ast::local, coll: @ast::expr) {
     print_local_decl(s, loc);
     space(s.s);
-    word_space(s, "in");
+    word_space(s, ~"in");
     print_expr(s, coll);
 }
 
 fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
     maybe_print_comment(s, path.span.lo);
-    if path.global { word(s.s, "::"); }
+    if path.global { word(s.s, ~"::"); }
     let mut first = true;
     for path.idents.each |id| {
-        if first { first = false; } else { word(s.s, "::"); }
+        if first { first = false; } else { word(s.s, ~"::"); }
         word(s.s, *id);
     }
     if path.rp.is_some() || !path.types.is_empty() {
-        if colons_before_params { word(s.s, "::"); }
+        if colons_before_params { word(s.s, ~"::"); }
 
         alt path.rp {
           none { /* ok */ }
           some(r) {
-            word(s.s, "/");
+            word(s.s, ~"/");
             print_region(s, r);
           }
         }
 
         if !path.types.is_empty() {
-            word(s.s, "<");
+            word(s.s, ~"<");
             commasep(s, inconsistent, path.types, print_type);
-            word(s.s, ">");
+            word(s.s, ~">");
         }
     }
 }
@@ -1270,18 +1270,18 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
     /* Pat isn't normalized, but the beauty of it
      is that it doesn't matter */
     alt pat.node {
-      ast::pat_wild { word(s.s, "_"); }
+      ast::pat_wild { word(s.s, ~"_"); }
       ast::pat_ident(path, sub) {
         print_path(s, path, true);
         alt sub {
-          some(p) { word(s.s, "@"); print_pat(s, p); }
+          some(p) { word(s.s, ~"@"); print_pat(s, p); }
           none {}
         }
       }
       ast::pat_enum(path, args_) {
         print_path(s, path, true);
         alt args_ {
-          none { word(s.s, "(*)"); }
+          none { word(s.s, ~"(*)"); }
           some(args) {
             if vec::len(args) > 0u {
               popen(s);
@@ -1292,34 +1292,34 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
         }
       }
       ast::pat_rec(fields, etc) {
-        word(s.s, "{");
+        word(s.s, ~"{");
         fn print_field(s: ps, f: ast::field_pat) {
             cbox(s, indent_unit);
             word(s.s, *f.ident);
-            word_space(s, ":");
+            word_space(s, ~":");
             print_pat(s, f.pat);
             end(s);
         }
         fn get_span(f: ast::field_pat) -> codemap::span { ret f.pat.span; }
         commasep_cmnt(s, consistent, fields, print_field, get_span);
         if etc {
-            if vec::len(fields) != 0u { word_space(s, ","); }
-            word(s.s, "_");
+            if vec::len(fields) != 0u { word_space(s, ~","); }
+            word(s.s, ~"_");
         }
-        word(s.s, "}");
+        word(s.s, ~"}");
       }
       ast::pat_tup(elts) {
         popen(s);
         commasep(s, inconsistent, elts, print_pat);
         pclose(s);
       }
-      ast::pat_box(inner) { word(s.s, "@"); print_pat(s, inner); }
-      ast::pat_uniq(inner) { word(s.s, "~"); print_pat(s, inner); }
+      ast::pat_box(inner) { word(s.s, ~"@"); print_pat(s, inner); }
+      ast::pat_uniq(inner) { word(s.s, ~"~"); print_pat(s, inner); }
       ast::pat_lit(e) { print_expr(s, e); }
       ast::pat_range(begin, end) {
         print_expr(s, begin);
         space(s.s);
-        word_space(s, "to");
+        word_space(s, ~"to");
         print_expr(s, end);
       }
     }
@@ -1329,8 +1329,8 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
 fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident,
             typarams: ~[ast::ty_param]) {
     alt decl.purity {
-      ast::impure_fn { head(s, "fn") }
-      _ { head(s, purity_to_str(decl.purity) + " fn") }
+      ast::impure_fn { head(s, ~"fn") }
+      _ { head(s, purity_to_str(decl.purity) + ~" fn") }
     }
     word(s.s, *name);
     print_type_params(s, typarams);
@@ -1343,9 +1343,9 @@ fn print_fn_args(s: ps, decl: ast::fn_decl,
     if cap_items.is_not_empty() {
         let mut first = decl.inputs.is_empty();
         for cap_items.each |cap_item| {
-            if first { first = false; } else { word_space(s, ","); }
-            if cap_item.is_move { word_nbsp(s, "move") }
-            else { word_nbsp(s, "copy") }
+            if first { first = false; } else { word_space(s, ~","); }
+            if cap_item.is_move { word_nbsp(s, ~"move") }
+            else { word_nbsp(s, ~"copy") }
             word(s.s, *cap_item.name);
         }
     }
@@ -1363,49 +1363,49 @@ fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
     maybe_print_comment(s, decl.output.span.lo);
     if decl.output.node != ast::ty_nil {
         space_if_not_bol(s);
-        word_space(s, "->");
+        word_space(s, ~"->");
         print_type(s, decl.output);
     }
 }
 
 fn print_fn_block_args(s: ps, decl: ast::fn_decl,
                        cap_items: ~[ast::capture_item]) {
-    word(s.s, "|");
+    word(s.s, ~"|");
     print_fn_args(s, decl, cap_items);
-    word(s.s, "|");
+    word(s.s, ~"|");
     if decl.output.node != ast::ty_infer {
         space_if_not_bol(s);
-        word_space(s, "->");
+        word_space(s, ~"->");
         print_type(s, decl.output);
     }
     maybe_print_comment(s, decl.output.span.lo);
 }
 
-fn mode_to_str(m: ast::mode) -> str {
+fn mode_to_str(m: ast::mode) -> ~str {
     alt m {
-      ast::expl(ast::by_mutbl_ref) { "&" }
-      ast::expl(ast::by_move) { "-" }
-      ast::expl(ast::by_ref) { "&&" }
-      ast::expl(ast::by_val) { "++" }
-      ast::expl(ast::by_copy) { "+" }
-      ast::infer(_) { "" }
+      ast::expl(ast::by_mutbl_ref) { ~"&" }
+      ast::expl(ast::by_move) { ~"-" }
+      ast::expl(ast::by_ref) { ~"&&" }
+      ast::expl(ast::by_val) { ~"++" }
+      ast::expl(ast::by_copy) { ~"+" }
+      ast::infer(_) { ~"" }
     }
 }
 
 fn print_arg_mode(s: ps, m: ast::mode) {
     let ms = mode_to_str(m);
-    if ms != "" { word(s.s, ms); }
+    if ms != ~"" { word(s.s, ms); }
 }
 
 fn print_bounds(s: ps, bounds: @~[ast::ty_param_bound]) {
     if vec::len(*bounds) > 0u {
-        word(s.s, ":");
+        word(s.s, ~":");
         for vec::each(*bounds) |bound| {
             nbsp(s);
             alt bound {
-              ast::bound_copy { word(s.s, "copy"); }
-              ast::bound_send { word(s.s, "send"); }
-              ast::bound_const { word(s.s, "const"); }
+              ast::bound_copy { word(s.s, ~"copy"); }
+              ast::bound_send { word(s.s, ~"send"); }
+              ast::bound_const { word(s.s, ~"const"); }
               ast::bound_trait(t) { print_type(s, t); }
             }
         }
@@ -1414,13 +1414,13 @@ fn print_bounds(s: ps, bounds: @~[ast::ty_param_bound]) {
 
 fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
     if vec::len(params) > 0u {
-        word(s.s, "<");
+        word(s.s, ~"<");
         fn printParam(s: ps, param: ast::ty_param) {
             word(s.s, *param.ident);
             print_bounds(s, param.bounds);
         }
         commasep(s, inconsistent, params, printParam);
-        word(s.s, ">");
+        word(s.s, ~">");
     }
 }
 
@@ -1430,7 +1430,7 @@ fn print_meta_item(s: ps, &&item: @ast::meta_item) {
       ast::meta_word(name) { word(s.s, *name); }
       ast::meta_name_value(name, value) {
         word_space(s, *name);
-        word_space(s, "=");
+        word_space(s, ~"=");
         print_literal(s, @value);
       }
       ast::meta_list(name, items) {
@@ -1448,23 +1448,23 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
       ast::view_path_simple(ident, path, _) {
         if path.idents[vec::len(path.idents)-1u] != ident {
             word_space(s, *ident);
-            word_space(s, "=");
+            word_space(s, ~"=");
         }
         print_path(s, path, false);
       }
 
       ast::view_path_glob(path, _) {
         print_path(s, path, false);
-        word(s.s, "::*");
+        word(s.s, ~"::*");
       }
 
       ast::view_path_list(path, idents, _) {
         print_path(s, path, false);
-        word(s.s, "::{");
+        word(s.s, ~"::{");
         do commasep(s, inconsistent, idents) |s, w| {
             word(s.s, *w.node.name)
         }
-        word(s.s, "}");
+        word(s.s, ~"}");
       }
     }
 }
@@ -1479,7 +1479,7 @@ fn print_view_item(s: ps, item: @ast::view_item) {
     print_outer_attributes(s, item.attrs);
     alt item.node {
       ast::view_item_use(id, mta, _) {
-        head(s, "use");
+        head(s, ~"use");
         word(s.s, *id);
         if vec::len(mta) > 0u {
             popen(s);
@@ -1489,16 +1489,16 @@ fn print_view_item(s: ps, item: @ast::view_item) {
       }
 
       ast::view_item_import(vps) {
-        head(s, "import");
+        head(s, ~"import");
         print_view_paths(s, vps);
       }
 
       ast::view_item_export(vps) {
-        head(s, "export");
+        head(s, ~"export");
         print_view_paths(s, vps);
       }
     }
-    word(s.s, ";");
+    word(s.s, ~";");
     end(s); // end inner head-block
     end(s); // end outer head-block
 }
@@ -1512,8 +1512,8 @@ fn print_op_maybe_parens(s: ps, expr: @ast::expr, outer_prec: uint) {
 
 fn print_mutability(s: ps, mutbl: ast::mutability) {
     alt mutbl {
-      ast::m_mutbl { word_nbsp(s, "mut"); }
-      ast::m_const { word_nbsp(s, "const"); }
+      ast::m_mutbl { word_nbsp(s, ~"mut"); }
+      ast::m_const { word_nbsp(s, ~"const"); }
       ast::m_imm {/* nothing */ }
     }
 }
@@ -1532,7 +1532,7 @@ fn print_arg(s: ps, input: ast::arg) {
       }
       _ {
         if str::len(*input.ident) > 0u {
-            word_space(s, *input.ident + ":");
+            word_space(s, *input.ident + ~":");
         }
         print_type(s, input.ty);
       }
@@ -1545,7 +1545,7 @@ fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
                tps: option<~[ast::ty_param]>) {
     ibox(s, indent_unit);
     word(s.s, opt_proto_to_str(opt_proto));
-    alt id { some(id) { word(s.s, " "); word(s.s, *id); } _ { } }
+    alt id { some(id) { word(s.s, ~" "); word(s.s, *id); } _ { } }
     alt tps { some(tps) { print_type_params(s, tps); } _ { } }
     zerobreak(s.s);
     popen(s);
@@ -1555,8 +1555,8 @@ fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
     if decl.output.node != ast::ty_nil {
         space_if_not_bol(s);
         ibox(s, indent_unit);
-        word_space(s, "->");
-        if decl.cf == ast::noreturn { word_nbsp(s, "!"); }
+        word_space(s, ~"->");
+        if decl.cf == ast::noreturn { word_nbsp(s, ~"!"); }
         else { print_type(s, decl.output); }
         end(s);
     }
@@ -1609,12 +1609,12 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
     alt lit.node {
       ast::lit_str(st) { print_string(s, *st); }
       ast::lit_int(ch, ast::ty_char) {
-        word(s.s, "'" + char::escape_default(ch as char) + "'");
+        word(s.s, ~"'" + char::escape_default(ch as char) + ~"'");
       }
       ast::lit_int(i, t) {
         if i < 0_i64 {
             word(s.s,
-                 "-" + u64::to_str(-i as u64, 10u)
+                 ~"-" + u64::to_str(-i as u64, 10u)
                  + ast_util::int_ty_to_str(t));
         } else {
             word(s.s,
@@ -1629,7 +1629,7 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
       }
       ast::lit_int_unsuffixed(i) {
         if i < 0_i64 {
-            word(s.s, "-" + u64::to_str(-i as u64, 10u));
+            word(s.s, ~"-" + u64::to_str(-i as u64, 10u));
         } else {
             word(s.s, u64::to_str(i as u64, 10u));
         }
@@ -1637,14 +1637,14 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
       ast::lit_float(f, t) {
         word(s.s, *f + ast_util::float_ty_to_str(t));
       }
-      ast::lit_nil { word(s.s, "()"); }
+      ast::lit_nil { word(s.s, ~"()"); }
       ast::lit_bool(val) {
-        if val { word(s.s, "true"); } else { word(s.s, "false"); }
+        if val { word(s.s, ~"true"); } else { word(s.s, ~"false"); }
       }
     }
 }
 
-fn lit_to_str(l: @ast::lit) -> str { ret to_str(l, print_literal); }
+fn lit_to_str(l: @ast::lit) -> ~str { ret to_str(l, print_literal); }
 
 fn next_lit(s: ps, pos: uint) -> option<comments::lit> {
     alt s.literals {
@@ -1693,7 +1693,7 @@ fn print_comment(s: ps, cmnt: comments::cmnt) {
         }
       }
       comments::trailing {
-        word(s.s, " ");
+        word(s.s, ~" ");
         if vec::len(cmnt.lines) == 1u {
             word(s.s, cmnt.lines[0]);
             hardbreak(s.s);
@@ -1710,7 +1710,7 @@ fn print_comment(s: ps, cmnt: comments::cmnt) {
         // We need to do at least one, possibly two hardbreaks.
         let is_semi =
             alt s.s.last_token() {
-              pp::STRING(s, _) { *s == ";" }
+              pp::STRING(s, _) { *s == ~";" }
               _ { false }
             };
         if is_semi || is_begin(s) || is_end(s) { hardbreak(s.s); }
@@ -1719,13 +1719,13 @@ fn print_comment(s: ps, cmnt: comments::cmnt) {
     }
 }
 
-fn print_string(s: ps, st: str) {
-    word(s.s, "\"");
+fn print_string(s: ps, st: ~str) {
+    word(s.s, ~"\"");
     word(s.s, str::escape_default(st));
-    word(s.s, "\"");
+    word(s.s, ~"\"");
 }
 
-fn to_str<T>(t: T, f: fn@(ps, T)) -> str {
+fn to_str<T>(t: T, f: fn@(ps, T)) -> ~str {
     let buffer = io::mem_buffer();
     let s = rust_printer(io::mem_buffer_writer(buffer));
     f(s, t);
@@ -1744,23 +1744,23 @@ fn next_comment(s: ps) -> option<comments::cmnt> {
     }
 }
 
-fn constr_args_to_str<T>(f: fn@(T) -> str,
+fn constr_args_to_str<T>(f: fn@(T) -> ~str,
                          args: ~[@ast::sp_constr_arg<T>]) ->
-   str {
+   ~str {
     let mut comma = false;
-    let mut s = "(";
+    let mut s = ~"(";
     for args.each |a| {
-        if comma { s += ", "; } else { comma = true; }
+        if comma { s += ~", "; } else { comma = true; }
         s += constr_arg_to_str::<T>(f, a.node);
     }
-    s += ")";
+    s += ~")";
     ret s;
 }
 
-fn constr_arg_to_str<T>(f: fn@(T) -> str, c: ast::constr_arg_general_<T>) ->
-   str {
+fn constr_arg_to_str<T>(f: fn@(T) -> ~str, c: ast::constr_arg_general_<T>) ->
+   ~str {
     alt c {
-      ast::carg_base { ret "*"; }
+      ast::carg_base { ret ~"*"; }
       ast::carg_ident(i) { ret f(i); }
       ast::carg_lit(l) { ret lit_to_str(l); }
     }
@@ -1769,53 +1769,55 @@ fn constr_arg_to_str<T>(f: fn@(T) -> str, c: ast::constr_arg_general_<T>) ->
 // needed b/c constr_args_to_str needs
 // something that takes an alias
 // (argh)
-fn uint_to_str(&&i: uint) -> str { ret uint::str(i); }
+fn uint_to_str(&&i: uint) -> ~str { ret uint::str(i); }
 
-fn ast_ty_fn_constr_to_str(&&c: @ast::constr) -> str {
+fn ast_ty_fn_constr_to_str(&&c: @ast::constr) -> ~str {
     ret path_to_str(c.node.path) +
             constr_args_to_str(uint_to_str, c.node.args);
 }
 
-fn ast_fn_constr_to_str(decl: ast::fn_decl, &&c: @ast::constr) -> str {
+fn ast_fn_constr_to_str(decl: ast::fn_decl, &&c: @ast::constr) -> ~str {
     let arg_to_str = |a| fn_arg_idx_to_str(decl, a);
     ret path_to_str(c.node.path) +
             constr_args_to_str(arg_to_str, c.node.args);
 }
 
-fn ty_constr_to_str(&&c: @ast::ty_constr) -> str {
-    fn ty_constr_path_to_str(&&p: @ast::path) -> str { "*." + path_to_str(p) }
+fn ty_constr_to_str(&&c: @ast::ty_constr) -> ~str {
+    fn ty_constr_path_to_str(&&p: @ast::path) -> ~str {
+        ~"*." + path_to_str(p)
+    }
 
     ret path_to_str(c.node.path) +
             constr_args_to_str::<@ast::path>(ty_constr_path_to_str,
                                              c.node.args);
 }
 
-fn constrs_str<T>(constrs: ~[T], elt: fn(T) -> str) -> str {
-    let mut s = "", colon = true;
+fn constrs_str<T>(constrs: ~[T], elt: fn(T) -> ~str) -> ~str {
+    let mut s = ~"", colon = true;
     for constrs.each |c| {
-        if colon { s += " : "; colon = false; } else { s += ", "; }
+        if colon { s += ~" : "; colon = false; } else { s += ~", "; }
         s += elt(c);
     }
     ret s;
 }
 
-fn fn_arg_idx_to_str(decl: ast::fn_decl, &&idx: uint) -> str {
+fn fn_arg_idx_to_str(decl: ast::fn_decl, &&idx: uint) -> ~str {
     *decl.inputs[idx].ident
 }
 
-fn opt_proto_to_str(opt_p: option<ast::proto>) -> str {
+fn opt_proto_to_str(opt_p: option<ast::proto>) -> ~str {
     alt opt_p {
-      none { "fn" }
+      none { ~"fn" }
       some(p) { proto_to_str(p) }
     }
 }
 
-fn purity_to_str(p: ast::purity) -> str {
+fn purity_to_str(p: ast::purity) -> ~str {
     alt p {
-      ast::impure_fn {"impure"}
-      ast::unsafe_fn {"unsafe"}
-      ast::pure_fn {"pure"}
-      ast::extern_fn {"extern"}
+      ast::impure_fn {~"impure"}
+      ast::unsafe_fn {~"unsafe"}
+      ast::pure_fn {~"pure"}
+      ast::extern_fn {~"extern"}
     }
 }
 
@@ -1826,13 +1828,13 @@ fn print_purity(s: ps, p: ast::purity) {
     }
 }
 
-fn proto_to_str(p: ast::proto) -> str {
+fn proto_to_str(p: ast::proto) -> ~str {
     ret alt p {
-      ast::proto_bare { "extern fn" }
-      ast::proto_any { "fn" }
-      ast::proto_block { "fn&" }
-      ast::proto_uniq { "fn~" }
-      ast::proto_box { "fn@" }
+      ast::proto_bare { ~"extern fn" }
+      ast::proto_any { ~"fn" }
+      ast::proto_block { ~"fn&" }
+      ast::proto_uniq { ~"fn~" }
+      ast::proto_box { ~"fn@" }
     };
 }
 
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 3387b716ec6..6d3c2ba74bc 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -28,8 +28,8 @@ fn name_of_fn(fk: fn_kind) -> ident {
     alt fk {
       fk_item_fn(name, _) | fk_method(name, _, _)
           | fk_ctor(name, _, _, _) { /* FIXME (#2543) */ copy name }
-      fk_anon(*) | fk_fn_block(*) { @"anon"/~ }
-      fk_dtor(*)                  { @"drop"/~ }
+      fk_anon(*) | fk_fn_block(*) { @~"anon" }
+      fk_dtor(*)                  { @~"drop" }
     }
 }