about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                      |  41
-rw-r--r--  src/libsyntax/ast_map.rs                  |  26
-rw-r--r--  src/libsyntax/ast_util.rs                 |   5
-rw-r--r--  src/libsyntax/attr.rs                     | 116
-rw-r--r--  src/libsyntax/codemap.rs                  |  20
-rw-r--r--  src/libsyntax/ext/asm.rs                  |  21
-rw-r--r--  src/libsyntax/ext/base.rs                 |  27
-rw-r--r--  src/libsyntax/ext/build.rs                |  48
-rw-r--r--  src/libsyntax/ext/bytes.rs                |   4
-rw-r--r--  src/libsyntax/ext/cfg.rs                  |   7
-rw-r--r--  src/libsyntax/ext/concat.rs               |  10
-rw-r--r--  src/libsyntax/ext/concat_idents.rs        |   5
-rw-r--r--  src/libsyntax/ext/deriving/decodable.rs   |  40
-rw-r--r--  src/libsyntax/ext/deriving/encodable.rs   |  41
-rw-r--r--  src/libsyntax/ext/deriving/generic.rs     |  19
-rw-r--r--  src/libsyntax/ext/deriving/mod.rs         |   8
-rw-r--r--  src/libsyntax/ext/deriving/primitive.rs   |  11
-rw-r--r--  src/libsyntax/ext/deriving/to_str.rs      |  49
-rw-r--r--  src/libsyntax/ext/env.rs                  |  13
-rw-r--r--  src/libsyntax/ext/expand.rs               | 130
-rw-r--r--  src/libsyntax/ext/format.rs               | 135
-rw-r--r--  src/libsyntax/ext/quote.rs                | 134
-rw-r--r--  src/libsyntax/ext/source_util.rs          |  28
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs      |  17
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs       |  12
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs        |  14
-rw-r--r--  src/libsyntax/fold.rs                     |  37
-rw-r--r--  src/libsyntax/parse/attr.rs               |   6
-rw-r--r--  src/libsyntax/parse/comments.rs           |   5
-rw-r--r--  src/libsyntax/parse/lexer.rs              |  41
-rw-r--r--  src/libsyntax/parse/mod.rs                | 120
-rw-r--r--  src/libsyntax/parse/obsolete.rs           |  14
-rw-r--r--  src/libsyntax/parse/parser.rs             | 113
-rw-r--r--  src/libsyntax/parse/token.rs              | 170
-rw-r--r--  src/libsyntax/print/pp.rs                 |  39
-rw-r--r--  src/libsyntax/print/pprust.rs             |  64
-rw-r--r--  src/libsyntax/util/interner.rs            | 126
-rw-r--r--  src/libsyntax/util/parser_testing.rs      |  26
38 files changed, 1015 insertions, 727 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 1513946e401..228329cbda1 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -14,11 +14,13 @@ use codemap::{Span, Spanned, DUMMY_SP};
 use abi::AbiSet;
 use ast_util;
 use opt_vec::OptVec;
-use parse::token::{interner_get, str_to_ident, special_idents};
+use parse::token::{InternedString, special_idents, str_to_ident};
+use parse::token;
 
 use std::cell::RefCell;
 use std::hashmap::HashMap;
 use std::option::Option;
+use std::rc::Rc;
 use std::to_str::ToStr;
 use extra::serialize::{Encodable, Decodable, Encoder, Decoder};
 
@@ -125,7 +127,8 @@ pub type Mrk = u32;
 
 impl<S:Encoder> Encodable<S> for Ident {
     fn encode(&self, s: &mut S) {
-        s.emit_str(interner_get(self.name));
+        let string = token::get_ident(self.name);
+        s.emit_str(string.get());
     }
 }
 
@@ -295,9 +298,9 @@ pub type MetaItem = Spanned<MetaItem_>;
 
 #[deriving(Clone, Encodable, Decodable, IterBytes)]
 pub enum MetaItem_ {
-    MetaWord(@str),
-    MetaList(@str, ~[@MetaItem]),
-    MetaNameValue(@str, Lit),
+    MetaWord(InternedString),
+    MetaList(InternedString, ~[@MetaItem]),
+    MetaNameValue(InternedString, Lit),
 }
 
 // can't be derived because the MetaList requires an unordered comparison
@@ -402,19 +405,9 @@ impl ToStr for Sigil {
     }
 }
 
-#[deriving(Eq, Encodable, Decodable, IterBytes)]
-pub enum Vstore {
-    // FIXME (#3469): Change uint to @expr (actually only constant exprs)
-    VstoreFixed(Option<uint>),     // [1,2,3,4]
-    VstoreUniq,                    // ~[1,2,3,4]
-    VstoreBox,                     // @[1,2,3,4]
-    VstoreSlice(Option<Lifetime>)  // &'foo? [1,2,3,4]
-}
-
 #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
 pub enum ExprVstore {
     ExprVstoreUniq,                 // ~[1,2,3,4]
-    ExprVstoreBox,                  // @[1,2,3,4]
     ExprVstoreSlice,                // &[1,2,3,4]
     ExprVstoreMutSlice,             // &mut [1,2,3,4]
 }
@@ -721,14 +714,14 @@ pub type Lit = Spanned<Lit_>;
 
 #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
 pub enum Lit_ {
-    LitStr(@str, StrStyle),
-    LitBinary(@[u8]),
+    LitStr(InternedString, StrStyle),
+    LitBinary(Rc<~[u8]>),
     LitChar(u32),
     LitInt(i64, IntTy),
     LitUint(u64, UintTy),
     LitIntUnsuffixed(i64),
-    LitFloat(@str, FloatTy),
-    LitFloatUnsuffixed(@str),
+    LitFloat(InternedString, FloatTy),
+    LitFloatUnsuffixed(InternedString),
     LitNil,
     LitBool(bool),
 }
@@ -897,11 +890,11 @@ pub enum AsmDialect {
 
 #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
 pub struct InlineAsm {
-    asm: @str,
+    asm: InternedString,
     asm_str_style: StrStyle,
-    clobbers: @str,
-    inputs: ~[(@str, @Expr)],
-    outputs: ~[(@str, @Expr)],
+    clobbers: InternedString,
+    inputs: ~[(InternedString, @Expr)],
+    outputs: ~[(InternedString, @Expr)],
     volatile: bool,
     alignstack: bool,
     dialect: AsmDialect
@@ -1074,7 +1067,7 @@ pub enum ViewItem_ {
     // optional @str: if present, this is a location (containing
     // arbitrary characters) from which to fetch the crate sources
     // For example, extern mod whatever = "github.com/mozilla/rust"
-    ViewItemExternMod(Ident, Option<(@str, StrStyle)>, NodeId),
+    ViewItemExternMod(Ident, Option<(InternedString,StrStyle)>, NodeId),
     ViewItemUse(~[@ViewPath]),
 }
 
diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs
index bb66d620d29..89209ab2104 100644
--- a/src/libsyntax/ast_map.rs
+++ b/src/libsyntax/ast_map.rs
@@ -62,9 +62,10 @@ pub fn path_to_str_with_sep(p: &[PathElem], sep: &str, itr: @IdentInterner)
 
 pub fn path_ident_to_str(p: &Path, i: Ident, itr: @IdentInterner) -> ~str {
     if p.is_empty() {
-        itr.get(i.name).to_owned()
+        itr.get(i.name).into_owned()
     } else {
-        format!("{}::{}", path_to_str(*p, itr), itr.get(i.name))
+        let string = itr.get(i.name);
+        format!("{}::{}", path_to_str(*p, itr), string.as_slice())
     }
 }
 
@@ -75,7 +76,7 @@ pub fn path_to_str(p: &[PathElem], itr: @IdentInterner) -> ~str {
 pub fn path_elem_to_str(pe: PathElem, itr: @IdentInterner) -> ~str {
     match pe {
         PathMod(s) | PathName(s) | PathPrettyName(s, _) => {
-            itr.get(s.name).to_owned()
+            itr.get(s.name).into_owned()
         }
     }
 }
@@ -105,7 +106,11 @@ fn pretty_ty(ty: &Ty, itr: @IdentInterner, out: &mut ~str) {
         // need custom handling.
         TyNil => { out.push_str("$NIL$"); return }
         TyPath(ref path, _, _) => {
-            out.push_str(itr.get(path.segments.last().unwrap().identifier.name));
+            out.push_str(itr.get(path.segments
+                                     .last()
+                                     .unwrap()
+                                     .identifier
+                                     .name).as_slice());
             return
         }
         TyTup(ref tys) => {
@@ -138,7 +143,8 @@ pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> PathElem {
     match *trait_ref {
         None => pretty = ~"",
         Some(ref trait_ref) => {
-            pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name).to_owned();
+            pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name)
+                        .into_owned();
             pretty.push_char('$');
         }
     };
@@ -489,17 +495,21 @@ pub fn node_id_to_str(map: Map, id: NodeId, itr: @IdentInterner) -> ~str {
              path_ident_to_str(path, item.ident, itr), abi, id)
       }
       Some(NodeMethod(m, _, path)) => {
+        let name = itr.get(m.ident.name);
         format!("method {} in {} (id={})",
-             itr.get(m.ident.name), path_to_str(*path, itr), id)
+             name.as_slice(), path_to_str(*path, itr), id)
       }
       Some(NodeTraitMethod(ref tm, _, path)) => {
         let m = ast_util::trait_method_to_ty_method(&**tm);
+        let name = itr.get(m.ident.name);
         format!("method {} in {} (id={})",
-             itr.get(m.ident.name), path_to_str(*path, itr), id)
+             name.as_slice(), path_to_str(*path, itr), id)
       }
       Some(NodeVariant(ref variant, _, path)) => {
+        let name = itr.get(variant.node.name.name);
         format!("variant {} in {} (id={})",
-             itr.get(variant.node.name.name), path_to_str(*path, itr), id)
+             name.as_slice(),
+             path_to_str(*path, itr), id)
       }
       Some(NodeExpr(expr)) => {
         format!("expr {} (id={})", pprust::expr_to_str(expr, itr), id)
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index 405de5c5542..afedb62105b 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -25,7 +25,10 @@ use std::num;
 
 pub fn path_name_i(idents: &[Ident]) -> ~str {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
-    idents.map(|i| token::interner_get(i.name)).connect("::")
+    idents.map(|i| {
+        let string = token::get_ident(i.name);
+        string.get().to_str()
+    }).connect("::")
 }
 
 // totally scary function: ignores all but the last element, should have
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index c44861bd7d7..78e9d3bd46f 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -16,24 +16,26 @@ use codemap::{Span, Spanned, spanned, dummy_spanned};
 use codemap::BytePos;
 use diagnostic::SpanHandler;
 use parse::comments::{doc_comment_style, strip_doc_comment_decoration};
+use parse::token::InternedString;
+use parse::token;
 use crateid::CrateId;
 
 use std::hashmap::HashSet;
 
 pub trait AttrMetaMethods {
-    // This could be changed to `fn check_name(&self, name: @str) ->
+    // This could be changed to `fn check_name(&self, name: InternedString) ->
     // bool` which would facilitate a side table recording which
     // attributes/meta items are used/unused.
 
     /// Retrieve the name of the meta item, e.g. foo in #[foo],
     /// #[foo="bar"] and #[foo(bar)]
-    fn name(&self) -> @str;
+    fn name(&self) -> InternedString;
 
     /**
      * Gets the string value if self is a MetaNameValue variant
      * containing a string, otherwise None.
      */
-    fn value_str(&self) -> Option<@str>;
+    fn value_str(&self) -> Option<InternedString>;
     /// Gets a list of inner meta items from a list MetaItem type.
     fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]>;
 
@@ -41,32 +43,36 @@ pub trait AttrMetaMethods {
      * If the meta item is a name-value type with a string value then returns
      * a tuple containing the name and string value, otherwise `None`
      */
-    fn name_str_pair(&self) -> Option<(@str, @str)>;
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)>;
 }
 
 impl AttrMetaMethods for Attribute {
-    fn name(&self) -> @str { self.meta().name() }
-    fn value_str(&self) -> Option<@str> { self.meta().value_str() }
+    fn name(&self) -> InternedString { self.meta().name() }
+    fn value_str(&self) -> Option<InternedString> {
+        self.meta().value_str()
+    }
     fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> {
         self.node.value.meta_item_list()
     }
-    fn name_str_pair(&self) -> Option<(@str, @str)> { self.meta().name_str_pair() }
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
+        self.meta().name_str_pair()
+    }
 }
 
 impl AttrMetaMethods for MetaItem {
-    fn name(&self) -> @str {
+    fn name(&self) -> InternedString {
         match self.node {
-            MetaWord(n) => n,
-            MetaNameValue(n, _) => n,
-            MetaList(n, _) => n
+            MetaWord(ref n) => (*n).clone(),
+            MetaNameValue(ref n, _) => (*n).clone(),
+            MetaList(ref n, _) => (*n).clone(),
         }
     }
 
-    fn value_str(&self) -> Option<@str> {
+    fn value_str(&self) -> Option<InternedString> {
         match self.node {
             MetaNameValue(_, ref v) => {
                 match v.node {
-                    ast::LitStr(s, _) => Some(s),
+                    ast::LitStr(ref s, _) => Some((*s).clone()),
                     _ => None,
                 }
             },
@@ -81,19 +87,21 @@ impl AttrMetaMethods for MetaItem {
         }
     }
 
-    fn name_str_pair(&self) -> Option<(@str, @str)> {
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
         self.value_str().map(|s| (self.name(), s))
     }
 }
 
 // Annoying, but required to get test_cfg to work
 impl AttrMetaMethods for @MetaItem {
-    fn name(&self) -> @str { (**self).name() }
-    fn value_str(&self) -> Option<@str> { (**self).value_str() }
+    fn name(&self) -> InternedString { (**self).name() }
+    fn value_str(&self) -> Option<InternedString> { (**self).value_str() }
     fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> {
         (**self).meta_item_list()
     }
-    fn name_str_pair(&self) -> Option<(@str, @str)> { (**self).name_str_pair() }
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
+        (**self).name_str_pair()
+    }
 }
 
 
@@ -114,8 +122,10 @@ impl AttributeMethods for Attribute {
     fn desugar_doc(&self) -> Attribute {
         if self.node.is_sugared_doc {
             let comment = self.value_str().unwrap();
-            let meta = mk_name_value_item_str(@"doc",
-                                              strip_doc_comment_decoration(comment).to_managed());
+            let meta = mk_name_value_item_str(
+                InternedString::new("doc"),
+                token::intern_and_get_ident(strip_doc_comment_decoration(
+                        comment.get())));
             mk_attr(meta)
         } else {
             *self
@@ -125,20 +135,22 @@ impl AttributeMethods for Attribute {
 
 /* Constructors */
 
-pub fn mk_name_value_item_str(name: @str, value: @str) -> @MetaItem {
+pub fn mk_name_value_item_str(name: InternedString, value: InternedString)
+                              -> @MetaItem {
     let value_lit = dummy_spanned(ast::LitStr(value, ast::CookedStr));
     mk_name_value_item(name, value_lit)
 }
 
-pub fn mk_name_value_item(name: @str, value: ast::Lit) -> @MetaItem {
+pub fn mk_name_value_item(name: InternedString, value: ast::Lit)
+                          -> @MetaItem {
     @dummy_spanned(MetaNameValue(name, value))
 }
 
-pub fn mk_list_item(name: @str, items: ~[@MetaItem]) -> @MetaItem {
+pub fn mk_list_item(name: InternedString, items: ~[@MetaItem]) -> @MetaItem {
     @dummy_spanned(MetaList(name, items))
 }
 
-pub fn mk_word_item(name: @str) -> @MetaItem {
+pub fn mk_word_item(name: InternedString) -> @MetaItem {
     @dummy_spanned(MetaWord(name))
 }
 
@@ -150,12 +162,14 @@ pub fn mk_attr(item: @MetaItem) -> Attribute {
     })
 }
 
-pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
-    let style = doc_comment_style(text);
+pub fn mk_sugared_doc_attr(text: InternedString, lo: BytePos, hi: BytePos)
+                           -> Attribute {
+    let style = doc_comment_style(text.get());
     let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr));
     let attr = Attribute_ {
         style: style,
-        value: @spanned(lo, hi, MetaNameValue(@"doc", lit)),
+        value: @spanned(lo, hi, MetaNameValue(InternedString::new("doc"),
+                                              lit)),
         is_sugared_doc: true
     };
     spanned(lo, hi, attr)
@@ -178,20 +192,22 @@ pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
     debug!("attr::contains_name (name={})", name);
     metas.iter().any(|item| {
         debug!("  testing: {}", item.name());
-        name == item.name()
+        item.name().equiv(&name)
     })
 }
 
 pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
-                                 -> Option<@str> {
+                                 -> Option<InternedString> {
     attrs.iter()
-        .find(|at| name == at.name())
+        .find(|at| at.name().equiv(&name))
         .and_then(|at| at.value_str())
 }
 
 pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str)
-                                     -> Option<@str> {
-    items.rev_iter().find(|mi| name == mi.name()).and_then(|i| i.value_str())
+                                     -> Option<InternedString> {
+    items.rev_iter()
+         .find(|mi| mi.name().equiv(&name))
+         .and_then(|i| i.value_str())
 }
 
 /* Higher-level applications */
@@ -201,16 +217,16 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
     // human-readable strings.
     let mut v = items.iter()
         .map(|&mi| (mi.name(), mi))
-        .collect::<~[(@str, @MetaItem)]>();
+        .collect::<~[(InternedString, @MetaItem)]>();
 
-    v.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
+    v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b));
 
     // There doesn't seem to be a more optimal way to do this
     v.move_iter().map(|(_, m)| {
         match m.node {
-            MetaList(n, ref mis) => {
+            MetaList(ref n, ref mis) => {
                 @Spanned {
-                    node: MetaList(n, sort_meta_items(*mis)),
+                    node: MetaList((*n).clone(), sort_meta_items(*mis)),
                     .. /*bad*/ (*m).clone()
                 }
             }
@@ -225,7 +241,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
  */
 pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] {
     let mut result = ~[];
-    for attr in attrs.iter().filter(|at| "link" == at.name()) {
+    for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) {
         match attr.meta().node {
             MetaList(_, ref items) => result.push_all(*items),
             _ => ()
@@ -237,7 +253,7 @@ pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] {
 pub fn find_crateid(attrs: &[Attribute]) -> Option<CrateId> {
     match first_attr_value_str_by_name(attrs, "crate_id") {
         None => None,
-        Some(id) => from_str::<CrateId>(id),
+        Some(id) => from_str::<CrateId>(id.get()),
     }
 }
 
@@ -254,8 +270,8 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
     // FIXME (#2809)---validate the usage of #[inline] and #[inline]
     attrs.iter().fold(InlineNone, |ia,attr| {
         match attr.node.value.node {
-          MetaWord(n) if "inline" == n => InlineHint,
-          MetaList(n, ref items) if "inline" == n => {
+          MetaWord(ref n) if n.equiv(&("inline")) => InlineHint,
+          MetaList(ref n, ref items) if n.equiv(&("inline")) => {
             if contains_name(*items, "always") {
                 InlineAlways
             } else if contains_name(*items, "never") {
@@ -284,7 +300,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
     // this doesn't work.
     let some_cfg_matches = metas.any(|mi| {
         debug!("testing name: {}", mi.name());
-        if "cfg" == mi.name() { // it is a #[cfg()] attribute
+        if mi.name().equiv(&("cfg")) { // it is a #[cfg()] attribute
             debug!("is cfg");
             no_cfgs = false;
              // only #[cfg(...)] ones are understood.
@@ -294,7 +310,8 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
                     cfg_meta.iter().all(|cfg_mi| {
                         debug!("cfg({}[...])", cfg_mi.name());
                         match cfg_mi.node {
-                            ast::MetaList(s, ref not_cfgs) if "not" == s => {
+                            ast::MetaList(ref s, ref not_cfgs)
+                            if s.equiv(&("not")) => {
                                 debug!("not!");
                                 // inside #[cfg(not(...))], so these need to all
                                 // not match.
@@ -320,7 +337,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
 /// Represents the #[deprecated="foo"] (etc) attributes.
 pub struct Stability {
     level: StabilityLevel,
-    text: Option<@str>
+    text: Option<InternedString>
 }
 
 /// The available stability levels.
@@ -335,9 +352,10 @@ pub enum StabilityLevel {
 }
 
 /// Find the first stability attribute. `None` if none exists.
-pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It) -> Option<Stability> {
+pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It)
+                      -> Option<Stability> {
     for m in metas {
-        let level = match m.name().as_slice() {
+        let level = match m.name().get() {
             "deprecated" => Deprecated,
             "experimental" => Experimental,
             "unstable" => Unstable,
@@ -360,7 +378,7 @@ pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) {
     for meta in metas.iter() {
         let name = meta.name();
 
-        if !set.insert(name) {
+        if !set.insert(name.clone()) {
             diagnostic.span_fatal(meta.span,
                                   format!("duplicate meta item `{}`", name));
         }
@@ -384,14 +402,14 @@ pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprA
     -> ReprAttr {
     let mut acc = acc;
     match attr.node {
-        ast::MetaList(s, ref items) if "repr" == s => {
+        ast::MetaList(ref s, ref items) if s.equiv(&("repr")) => {
             for item in items.iter() {
                 match item.node {
-                    ast::MetaWord(word) => {
-                        let hint = match word.as_slice() {
+                    ast::MetaWord(ref word) => {
+                        let hint = match word.get() {
                             // Can't use "extern" because it's not a lexical identifier.
                             "C" => ReprExtern,
-                            _ => match int_type_of_word(word) {
+                            _ => match int_type_of_word(word.get()) {
                                 Some(ity) => ReprInt(item.span, ity),
                                 None => {
                                     // Not a word we recognize
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index d4a412bbe9f..2ada3ac16ea 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -160,7 +160,7 @@ pub struct LocWithOpt {
 pub struct FileMapAndLine {fm: @FileMap, line: uint}
 pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
 
-#[deriving(IterBytes)]
+#[deriving(Clone, IterBytes)]
 pub enum MacroFormat {
     // e.g. #[deriving(...)] <item>
     MacroAttribute,
@@ -168,9 +168,9 @@ pub enum MacroFormat {
     MacroBang
 }
 
-#[deriving(IterBytes)]
+#[deriving(Clone, IterBytes)]
 pub struct NameAndSpan {
-    name: @str,
+    name: ~str,
     // the format with which the macro was invoked.
     format: MacroFormat,
     span: Option<Span>
@@ -183,7 +183,7 @@ pub struct ExpnInfo {
     callee: NameAndSpan
 }
 
-pub type FileName = @str;
+pub type FileName = ~str;
 
 pub struct FileLines
 {
@@ -206,7 +206,7 @@ pub struct FileMap {
     /// e.g. `<anon>`
     name: FileName,
     /// The complete source code
-    src: @str,
+    src: ~str,
     /// The start position of this source in the CodeMap
     start_pos: BytePos,
     /// Locations of lines beginnings in the source code
@@ -267,7 +267,7 @@ impl CodeMap {
         }
     }
 
-    pub fn new_filemap(&self, filename: FileName, src: @str) -> @FileMap {
+    pub fn new_filemap(&self, filename: FileName, src: ~str) -> @FileMap {
         let mut files = self.files.borrow_mut();
         let start_pos = match files.get().last() {
             None => 0,
@@ -301,7 +301,7 @@ impl CodeMap {
     pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
         let loc = self.lookup_char_pos(pos);
         LocWithOpt {
-            filename: loc.file.name,
+            filename: loc.file.name.to_str(),
             line: loc.line,
             col: loc.col,
             file: Some(loc.file)
@@ -324,7 +324,7 @@ impl CodeMap {
 
     pub fn span_to_filename(&self, sp: Span) -> FileName {
         let lo = self.lookup_char_pos(sp.lo);
-        lo.file.name
+        lo.file.name.to_str()
     }
 
     pub fn span_to_lines(&self, sp: Span) -> @FileLines {
@@ -468,7 +468,7 @@ mod test {
     #[test]
     fn t1 () {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
+        let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
         fm.next_line(BytePos(0));
         assert_eq!(&fm.get_line(0),&~"first line.");
         // TESTING BROKEN BEHAVIOR:
@@ -480,7 +480,7 @@ mod test {
     #[should_fail]
     fn t2 () {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
+        let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
         // TESTING *REALLY* BROKEN BEHAVIOR:
         fm.next_line(BytePos(0));
         fm.next_line(BytePos(10));
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs
index 021f0d29d9e..1a3ebf3ce5d 100644
--- a/src/libsyntax/ext/asm.rs
+++ b/src/libsyntax/ext/asm.rs
@@ -17,6 +17,7 @@ use codemap::Span;
 use ext::base;
 use ext::base::*;
 use parse;
+use parse::token::InternedString;
 use parse::token;
 
 enum State {
@@ -43,7 +44,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                                            cx.cfg(),
                                            tts.to_owned());
 
-    let mut asm = @"";
+    let mut asm = InternedString::new("");
     let mut asm_str_style = None;
     let mut outputs = ~[];
     let mut inputs = ~[];
@@ -79,10 +80,10 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
                     let (constraint, _str_style) = p.parse_str();
 
-                    if constraint.starts_with("+") {
+                    if constraint.get().starts_with("+") {
                         cx.span_unimpl(p.last_span,
                                        "'+' (read+write) output operand constraint modifier");
-                    } else if !constraint.starts_with("=") {
+                    } else if !constraint.get().starts_with("=") {
                         cx.span_err(p.last_span, "output operand constraint lacks '='");
                     }
 
@@ -104,9 +105,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
                     let (constraint, _str_style) = p.parse_str();
 
-                    if constraint.starts_with("=") {
+                    if constraint.get().starts_with("=") {
                         cx.span_err(p.last_span, "input operand constraint contains '='");
-                    } else if constraint.starts_with("+") {
+                    } else if constraint.get().starts_with("+") {
                         cx.span_err(p.last_span, "input operand constraint contains '+'");
                     }
 
@@ -137,11 +138,11 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             Options => {
                 let (option, _str_style) = p.parse_str();
 
-                if "volatile" == option {
+                if option.equiv(&("volatile")) {
                     volatile = true;
-                } else if "alignstack" == option {
+                } else if option.equiv(&("alignstack")) {
                     alignstack = true;
-                } else if "intel" == option {
+                } else if option.equiv(&("intel")) {
                     dialect = ast::AsmIntel;
                 }
 
@@ -191,9 +192,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     MRExpr(@ast::Expr {
         id: ast::DUMMY_NODE_ID,
         node: ast::ExprInlineAsm(ast::InlineAsm {
-            asm: asm,
+            asm: token::intern_and_get_ident(asm.get()),
             asm_str_style: asm_str_style.unwrap(),
-            clobbers: cons.to_managed(),
+            clobbers: token::intern_and_get_ident(cons),
             inputs: inputs,
             outputs: outputs,
             volatile: volatile,
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index adf1eabf9d9..08098b71ce4 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -16,7 +16,7 @@ use ext;
 use ext::expand;
 use parse;
 use parse::token;
-use parse::token::{ident_to_str, intern, str_to_ident};
+use parse::token::{InternedString, intern, str_to_ident};
 use util::small_vector::SmallVector;
 
 use std::hashmap::HashMap;
@@ -31,7 +31,7 @@ use std::unstable::dynamic_lib::DynamicLibrary;
 // ast::MacInvocTT.
 
 pub struct MacroDef {
-    name: @str,
+    name: ~str,
     ext: SyntaxExtension
 }
 
@@ -335,7 +335,8 @@ impl<'a> ExtCtxt<'a> {
                     Some(@ExpnInfo {
                         call_site: Span {lo: cs.lo, hi: cs.hi,
                                          expn_info: self.backtrace},
-                        callee: *callee});
+                        callee: (*callee).clone()
+                    });
             }
         }
     }
@@ -396,9 +397,6 @@ impl<'a> ExtCtxt<'a> {
     pub fn set_trace_macros(&mut self, x: bool) {
         self.trace_mac = x
     }
-    pub fn str_of(&self, id: ast::Ident) -> @str {
-        ident_to_str(&id)
-    }
     pub fn ident_of(&self, st: &str) -> ast::Ident {
         str_to_ident(st)
     }
@@ -407,11 +405,11 @@ impl<'a> ExtCtxt<'a> {
 /// Extract a string literal from `expr`, emitting `err_msg` if `expr`
 /// is not a string literal. This does not stop compilation on error,
 /// merely emits a non-fatal error and returns None.
-pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr,
-                   err_msg: &str) -> Option<(@str, ast::StrStyle)> {
+pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, err_msg: &str)
+                   -> Option<(InternedString, ast::StrStyle)> {
     match expr.node {
         ast::ExprLit(l) => match l.node {
-            ast::LitStr(s, style) => return Some((s, style)),
+            ast::LitStr(ref s, style) => return Some(((*s).clone(), style)),
             _ => cx.span_err(l.span, err_msg)
         },
         _ => cx.span_err(expr.span, err_msg)
@@ -424,7 +422,9 @@ pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr,
 /// compilation should call
 /// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
 /// done as rarely as possible).
-pub fn check_zero_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree],
+pub fn check_zero_tts(cx: &ExtCtxt,
+                      sp: Span,
+                      tts: &[ast::TokenTree],
                       name: &str) {
     if tts.len() != 0 {
         cx.span_err(sp, format!("{} takes no arguments", name));
@@ -437,13 +437,16 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
                                sp: Span,
                                tts: &[ast::TokenTree],
                                name: &str)
-                               -> Option<@str> {
+                               -> Option<~str> {
     if tts.len() != 1 {
         cx.span_err(sp, format!("{} takes 1 argument.", name));
     } else {
         match tts[0] {
             ast::TTTok(_, token::LIT_STR(ident))
-                | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => return Some(cx.str_of(ident)),
+            | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
+                let interned_str = token::get_ident(ident.name);
+                return Some(interned_str.get().to_str())
+            }
             _ => cx.span_err(sp, format!("{} requires a string.", name)),
         }
     }
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 9ad4f4f7fac..c5ee1948466 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -19,6 +19,7 @@ use fold::Folder;
 use opt_vec;
 use opt_vec::OptVec;
 use parse::token::special_idents;
+use parse::token;
 
 pub struct Field {
     ident: ast::Ident,
@@ -134,13 +135,13 @@ pub trait AstBuilder {
     fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr;
     fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr;
     fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr;
-    fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr;
-    fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr;
+    fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr;
+    fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr;
 
     fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr;
     fn expr_none(&self, sp: Span) -> @ast::Expr;
 
-    fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr;
+    fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr;
     fn expr_unreachable(&self, span: Span) -> @ast::Expr;
 
     fn pat(&self, span: Span, pat: ast::Pat_) -> @ast::Pat;
@@ -228,9 +229,17 @@ pub trait AstBuilder {
 
     fn attribute(&self, sp: Span, mi: @ast::MetaItem) -> ast::Attribute;
 
-    fn meta_word(&self, sp: Span, w: @str) -> @ast::MetaItem;
-    fn meta_list(&self, sp: Span, name: @str, mis: ~[@ast::MetaItem]) -> @ast::MetaItem;
-    fn meta_name_value(&self, sp: Span, name: @str, value: ast::Lit_) -> @ast::MetaItem;
+    fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem;
+    fn meta_list(&self,
+                 sp: Span,
+                 name: InternedString,
+                 mis: ~[@ast::MetaItem])
+                 -> @ast::MetaItem;
+    fn meta_name_value(&self,
+                       sp: Span,
+                       name: InternedString,
+                       value: ast::Lit_)
+                       -> @ast::MetaItem;
 
     fn view_use(&self, sp: Span,
                 vis: ast::Visibility, vp: ~[@ast::ViewPath]) -> ast::ViewItem;
@@ -581,10 +590,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr {
         self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
     }
-    fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr {
+    fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr {
         self.expr_lit(sp, ast::LitStr(s, ast::CookedStr))
     }
-    fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr {
+    fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr {
         self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq)
     }
 
@@ -612,7 +621,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.expr_path(none)
     }
 
-    fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr {
+    fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr {
         let loc = self.codemap().lookup_char_pos(span.lo);
         self.expr_call_global(
             span,
@@ -623,13 +632,16 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
             ],
             ~[
                 self.expr_str(span, msg),
-                self.expr_str(span, loc.file.name),
+                self.expr_str(span,
+                              token::intern_and_get_ident(loc.file.name)),
                 self.expr_uint(span, loc.line),
             ])
     }
 
     fn expr_unreachable(&self, span: Span) -> @ast::Expr {
-        self.expr_fail(span, @"internal error: entered unreachable code")
+        self.expr_fail(span,
+                       InternedString::new(
+                           "internal error: entered unreachable code"))
     }
 
 
@@ -866,13 +878,21 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         })
     }
 
-    fn meta_word(&self, sp: Span, w: @str) -> @ast::MetaItem {
+    fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem {
         @respan(sp, ast::MetaWord(w))
     }
-    fn meta_list(&self, sp: Span, name: @str, mis: ~[@ast::MetaItem]) -> @ast::MetaItem {
+    fn meta_list(&self,
+                 sp: Span,
+                 name: InternedString,
+                 mis: ~[@ast::MetaItem])
+                 -> @ast::MetaItem {
         @respan(sp, ast::MetaList(name, mis))
     }
-    fn meta_name_value(&self, sp: Span, name: @str, value: ast::Lit_) -> @ast::MetaItem {
+    fn meta_name_value(&self,
+                       sp: Span,
+                       name: InternedString,
+                       value: ast::Lit_)
+                       -> @ast::MetaItem {
         @respan(sp, ast::MetaNameValue(name, respan(sp, value)))
     }
 
diff --git a/src/libsyntax/ext/bytes.rs b/src/libsyntax/ext/bytes.rs
index 0c9a23be558..6852a0cec33 100644
--- a/src/libsyntax/ext/bytes.rs
+++ b/src/libsyntax/ext/bytes.rs
@@ -31,8 +31,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ->
             // expression is a literal
             ast::ExprLit(lit) => match lit.node {
                 // string literal, push each byte to vector expression
-                ast::LitStr(s, _) => {
-                    for byte in s.bytes() {
+                ast::LitStr(ref s, _) => {
+                    for byte in s.get().bytes() {
                         bytes.push(cx.expr_u8(expr.span, byte));
                     }
                 }
diff --git a/src/libsyntax/ext/cfg.rs b/src/libsyntax/ext/cfg.rs
index 9af295c0b11..295c456c9d0 100644
--- a/src/libsyntax/ext/cfg.rs
+++ b/src/libsyntax/ext/cfg.rs
@@ -21,9 +21,10 @@ use ext::base;
 use ext::build::AstBuilder;
 use attr;
 use attr::*;
-use parse;
-use parse::token;
 use parse::attr::ParserAttr;
+use parse::token::InternedString;
+use parse::token;
+use parse;
 
 pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult {
     let mut p = parse::new_parser_from_tts(cx.parse_sess(),
@@ -39,7 +40,7 @@ pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::M
     }
 
     // test_cfg searches for meta items looking like `cfg(foo, ...)`
-    let in_cfg = &[cx.meta_list(sp, @"cfg", cfgs)];
+    let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)];
 
     let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x));
     let e = cx.expr_bool(sp, matches_cfg);
diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs
index 2a68674af95..c13f9bf92af 100644
--- a/src/libsyntax/ext/concat.rs
+++ b/src/libsyntax/ext/concat.rs
@@ -14,6 +14,7 @@ use ast;
 use codemap;
 use ext::base;
 use ext::build::AstBuilder;
+use parse::token;
 
 pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                          sp: codemap::Span,
@@ -28,9 +29,10 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
         match e.node {
             ast::ExprLit(lit) => {
                 match lit.node {
-                    ast::LitStr(s, _) | ast::LitFloat(s, _)
-                    | ast::LitFloatUnsuffixed(s) => {
-                        accumulator.push_str(s);
+                    ast::LitStr(ref s, _) |
+                    ast::LitFloat(ref s, _) |
+                    ast::LitFloatUnsuffixed(ref s) => {
+                        accumulator.push_str(s.get());
                     }
                     ast::LitChar(c) => {
                         accumulator.push_char(char::from_u32(c).unwrap());
@@ -55,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
             }
         }
     }
-    return base::MRExpr(cx.expr_str(sp, accumulator.to_managed()));
+    base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(accumulator)))
 }
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index 9dcb5b4cb4c..e0d53add648 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -30,7 +30,10 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             }
         } else {
             match *e {
-                ast::TTTok(_, token::IDENT(ident,_)) => res_str.push_str(cx.str_of(ident)),
+                ast::TTTok(_, token::IDENT(ident,_)) => {
+                    let interned_str = token::get_ident(ident.name);
+                    res_str.push_str(interned_str.get())
+                }
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
                     return MacResult::dummy_expr();
diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs
index a9268d85c91..019a4dfe7cc 100644
--- a/src/libsyntax/ext/deriving/decodable.rs
+++ b/src/libsyntax/ext/deriving/decodable.rs
@@ -18,6 +18,8 @@ use codemap::Span;
 use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
+use parse::token::InternedString;
+use parse::token;
 
 pub fn expand_deriving_decodable(cx: &ExtCtxt,
                                  span: Span,
@@ -82,10 +84,15 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span,
                                       cx.expr_uint(span, field),
                                       lambdadecode])
             });
-            cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"),
-                                ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)),
-                                  cx.expr_uint(trait_span, nfields),
-                                  cx.lambda_expr_1(trait_span, result, blkarg)])
+            cx.expr_method_call(trait_span,
+                                decoder,
+                                cx.ident_of("read_struct"),
+                                ~[
+                cx.expr_str(trait_span,
+                            token::get_ident(substr.type_ident.name)),
+                cx.expr_uint(trait_span, nfields),
+                cx.lambda_expr_1(trait_span, result, blkarg)
+            ])
         }
         StaticEnum(_, ref fields) => {
             let variant = cx.ident_of("i");
@@ -95,7 +102,8 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span,
             let rvariant_arg = cx.ident_of("read_enum_variant_arg");
 
             for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() {
-                variants.push(cx.expr_str(v_span, cx.str_of(name)));
+                variants.push(cx.expr_str(v_span,
+                                          token::get_ident(name.name)));
 
                 let decoded = decode_static_fields(cx,
                                                    v_span,
@@ -120,9 +128,14 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span,
             let result = cx.expr_method_call(trait_span, blkdecoder,
                                              cx.ident_of("read_enum_variant"),
                                              ~[variant_vec, lambda]);
-            cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"),
-                                ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)),
-                                  cx.lambda_expr_1(trait_span, result, blkarg)])
+            cx.expr_method_call(trait_span,
+                                decoder,
+                                cx.ident_of("read_enum"),
+                                ~[
+                cx.expr_str(trait_span,
+                            token::get_ident(substr.type_ident.name)),
+                cx.lambda_expr_1(trait_span, result, blkarg)
+            ])
         }
         _ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)")
     };
@@ -135,7 +148,7 @@ fn decode_static_fields(cx: &ExtCtxt,
                         trait_span: Span,
                         outer_pat_ident: Ident,
                         fields: &StaticFields,
-                        getarg: |Span, @str, uint| -> @Expr)
+                        getarg: |Span, InternedString, uint| -> @Expr)
                         -> @Expr {
     match *fields {
         Unnamed(ref fields) => {
@@ -143,7 +156,10 @@ fn decode_static_fields(cx: &ExtCtxt,
                 cx.expr_ident(trait_span, outer_pat_ident)
             } else {
                 let fields = fields.iter().enumerate().map(|(i, &span)| {
-                    getarg(span, format!("_field{}", i).to_managed(), i)
+                    getarg(span,
+                           token::intern_and_get_ident(format!("_field{}",
+                                                               i)),
+                           i)
                 }).collect();
 
                 cx.expr_call_ident(trait_span, outer_pat_ident, fields)
@@ -152,7 +168,9 @@ fn decode_static_fields(cx: &ExtCtxt,
         Named(ref fields) => {
             // use the field's span to get nicer error messages.
             let fields = fields.iter().enumerate().map(|(i, &(name, span))| {
-                cx.field_imm(span, name, getarg(span, cx.str_of(name), i))
+                cx.field_imm(span,
+                             name,
+                             getarg(span, token::get_ident(name.name), i))
             }).collect();
             cx.expr_struct_ident(trait_span, outer_pat_ident, fields)
         }
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index 9a8861f2e70..c50c9f18389 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -80,6 +80,7 @@ use codemap::Span;
 use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
+use parse::token;
 
 pub fn expand_deriving_encodable(cx: &ExtCtxt,
                                  span: Span,
@@ -125,10 +126,17 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
         Struct(ref fields) => {
             let emit_struct_field = cx.ident_of("emit_struct_field");
             let mut stmts = ~[];
-            for (i, &FieldInfo { name, self_, span, .. }) in fields.iter().enumerate() {
+            for (i, &FieldInfo {
+                    name,
+                    self_,
+                    span,
+                    ..
+                }) in fields.iter().enumerate() {
                 let name = match name {
-                    Some(id) => cx.str_of(id),
-                    None => format!("_field{}", i).to_managed()
+                    Some(id) => token::get_ident(id.name),
+                    None => {
+                        token::intern_and_get_ident(format!("_field{}", i))
+                    }
                 };
                 let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]);
                 let lambda = cx.lambda_expr_1(span, enc, blkarg);
@@ -141,10 +149,15 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
             }
 
             let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
-            cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_struct"),
-                                ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)),
-                                  cx.expr_uint(trait_span, fields.len()),
-                                  blk])
+            cx.expr_method_call(trait_span,
+                                encoder,
+                                cx.ident_of("emit_struct"),
+                                ~[
+                cx.expr_str(trait_span,
+                            token::get_ident(substr.type_ident.name)),
+                cx.expr_uint(trait_span, fields.len()),
+                blk
+            ])
         }
 
         EnumMatching(idx, variant, ref fields) => {
@@ -167,7 +180,8 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
             }
 
             let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
-            let name = cx.expr_str(trait_span, cx.str_of(variant.node.name));
+            let name = cx.expr_str(trait_span,
+                                   token::get_ident(variant.node.name.name));
             let call = cx.expr_method_call(trait_span, blkencoder,
                                            cx.ident_of("emit_enum_variant"),
                                            ~[name,
@@ -175,11 +189,14 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
                                              cx.expr_uint(trait_span, fields.len()),
                                              blk]);
             let blk = cx.lambda_expr_1(trait_span, call, blkarg);
-            let ret = cx.expr_method_call(trait_span, encoder,
+            let ret = cx.expr_method_call(trait_span,
+                                          encoder,
                                           cx.ident_of("emit_enum"),
-                                          ~[cx.expr_str(trait_span,
-                                            cx.str_of(substr.type_ident)),
-                                            blk]);
+                                          ~[
+                cx.expr_str(trait_span,
+                            token::get_ident(substr.type_ident.name)),
+                blk
+            ]);
             cx.expr_block(cx.block(trait_span, ~[me], Some(ret)))
         }
 
diff --git a/src/libsyntax/ext/deriving/generic.rs b/src/libsyntax/ext/deriving/generic.rs
index 6449d0aab5e..8eaff592765 100644
--- a/src/libsyntax/ext/deriving/generic.rs
+++ b/src/libsyntax/ext/deriving/generic.rs
@@ -184,6 +184,8 @@ use ext::build::AstBuilder;
 use codemap;
 use codemap::Span;
 use opt_vec;
+use parse::token::InternedString;
+use parse::token;
 
 use std::vec;
 
@@ -396,8 +398,10 @@ impl<'a> TraitDef<'a> {
         let doc_attr = cx.attribute(
             self.span,
             cx.meta_name_value(self.span,
-                               @"doc",
-                               ast::LitStr(@"Automatically derived.", ast::CookedStr)));
+                               InternedString::new("doc"),
+                               ast::LitStr(token::intern_and_get_ident(
+                                       "Automatically derived."),
+                                       ast::CookedStr)));
         cx.item(
             self.span,
             ::parse::token::special_idents::clownshoes_extensions,
@@ -567,7 +571,14 @@ impl<'a> MethodDef<'a> {
         let body_block = trait_.cx.block_expr(body);
 
         let attrs = if self.inline {
-            ~[trait_.cx.attribute(trait_.span, trait_.cx.meta_word(trait_.span, @"inline"))]
+            ~[
+                trait_.cx
+                      .attribute(trait_.span,
+                                 trait_.cx
+                                       .meta_word(trait_.span,
+                                                  InternedString::new(
+                                                      "inline")))
+            ]
         } else {
             ~[]
         };
@@ -933,7 +944,7 @@ impl<'a> TraitDef<'a> {
         to_set.expn_info = Some(@codemap::ExpnInfo {
             call_site: to_set,
             callee: codemap::NameAndSpan {
-                name: format!("deriving({})", trait_name).to_managed(),
+                name: format!("deriving({})", trait_name),
                 format: codemap::MacroAttribute,
                 span: Some(self.span)
             }
diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs
index 652f5ebe6c7..9c487146639 100644
--- a/src/libsyntax/ext/deriving/mod.rs
+++ b/src/libsyntax/ext/deriving/mod.rs
@@ -75,12 +75,12 @@ pub fn expand_meta_deriving(cx: &ExtCtxt,
         MetaList(_, ref titems) => {
             titems.rev_iter().fold(in_items, |in_items, &titem| {
                 match titem.node {
-                    MetaNameValue(tname, _) |
-                    MetaList(tname, _) |
-                    MetaWord(tname) => {
+                    MetaNameValue(ref tname, _) |
+                    MetaList(ref tname, _) |
+                    MetaWord(ref tname) => {
                         macro_rules! expand(($func:path) => ($func(cx, titem.span,
                                                                    titem, in_items)));
-                        match tname.as_slice() {
+                        match tname.get() {
                             "Clone" => expand!(clone::expand_deriving_clone),
                             "DeepClone" => expand!(clone::expand_deriving_deep_clone),
 
diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs
index a4e606f53c0..e2f72e87085 100644
--- a/src/libsyntax/ext/deriving/primitive.rs
+++ b/src/libsyntax/ext/deriving/primitive.rs
@@ -14,6 +14,7 @@ use codemap::Span;
 use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
+use parse::token::InternedString;
 
 pub fn expand_deriving_from_primitive(cx: &ExtCtxt,
                                       span: Span,
@@ -73,13 +74,13 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) ->
     match *substr.fields {
         StaticStruct(..) => {
             cx.span_err(trait_span, "`FromPrimitive` cannot be derived for structs");
-            return cx.expr_fail(trait_span, @"");
+            return cx.expr_fail(trait_span, InternedString::new(""));
         }
         StaticEnum(enum_def, _) => {
             if enum_def.variants.is_empty() {
                 cx.span_err(trait_span,
                             "`FromPrimitive` cannot be derived for enums with no variants");
-                return cx.expr_fail(trait_span, @"");
+                return cx.expr_fail(trait_span, InternedString::new(""));
             }
 
             let mut arms = ~[];
@@ -91,7 +92,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) ->
                             cx.span_err(trait_span,
                                         "`FromPrimitive` cannot be derived for \
                                         enum variants with arguments");
-                            return cx.expr_fail(trait_span, @"");
+                            return cx.expr_fail(trait_span,
+                                                InternedString::new(""));
                         }
                         let span = variant.span;
 
@@ -117,7 +119,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) ->
                         cx.span_err(trait_span,
                                     "`FromPrimitive` cannot be derived for enums \
                                     with struct variants");
-                        return cx.expr_fail(trait_span, @"");
+                        return cx.expr_fail(trait_span,
+                                            InternedString::new(""));
                     }
                 }
             }
diff --git a/src/libsyntax/ext/deriving/to_str.rs b/src/libsyntax/ext/deriving/to_str.rs
index 81453a5a10b..6101d647ca5 100644
--- a/src/libsyntax/ext/deriving/to_str.rs
+++ b/src/libsyntax/ext/deriving/to_str.rs
@@ -14,6 +14,8 @@ use codemap::Span;
 use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
+use parse::token::InternedString;
+use parse::token;
 
 pub fn expand_deriving_to_str(cx: &ExtCtxt,
                               span: Span,
@@ -47,18 +49,22 @@ pub fn expand_deriving_to_str(cx: &ExtCtxt,
 // doesn't invoke the to_str() method on each field. Hence we mirror
 // the logic of the repr_to_str() method, but with tweaks to call to_str()
 // on sub-fields.
-fn to_str_substructure(cx: &ExtCtxt, span: Span,
-                       substr: &Substructure) -> @Expr {
+fn to_str_substructure(cx: &ExtCtxt, span: Span, substr: &Substructure)
+                       -> @Expr {
     let to_str = cx.ident_of("to_str");
 
-    let doit = |start: &str, end: @str, name: ast::Ident,
+    let doit = |start: &str,
+                end: InternedString,
+                name: ast::Ident,
                 fields: &[FieldInfo]| {
         if fields.len() == 0 {
-            cx.expr_str_uniq(span, cx.str_of(name))
+            cx.expr_str_uniq(span, token::get_ident(name.name))
         } else {
             let buf = cx.ident_of("buf");
-            let start = cx.str_of(name) + start;
-            let init = cx.expr_str_uniq(span, start.to_managed());
+            let interned_str = token::get_ident(name.name);
+            let start =
+                token::intern_and_get_ident(interned_str.get() + start);
+            let init = cx.expr_str_uniq(span, start);
             let mut stmts = ~[cx.stmt_let(span, true, buf, init)];
             let push_str = cx.ident_of("push_str");
 
@@ -70,38 +76,53 @@ fn to_str_substructure(cx: &ExtCtxt, span: Span,
 
             for (i, &FieldInfo {name, span, self_, .. }) in fields.iter().enumerate() {
                 if i > 0 {
-                    push(cx.expr_str(span, @", "));
+                    push(cx.expr_str(span, InternedString::new(", ")));
                 }
                 match name {
                     None => {}
                     Some(id) => {
-                        let name = cx.str_of(id) + ": ";
-                        push(cx.expr_str(span, name.to_managed()));
+                        let interned_id = token::get_ident(id.name);
+                        let name = interned_id.get() + ": ";
+                        push(cx.expr_str(span,
+                                         token::intern_and_get_ident(name)));
                     }
                 }
                 push(cx.expr_method_call(span, self_, to_str, ~[]));
             }
             push(cx.expr_str(span, end));
 
-            cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span, buf))))
+            cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span,
+                                                                   buf))))
         }
     };
 
     return match *substr.fields {
         Struct(ref fields) => {
             if fields.len() == 0 || fields[0].name.is_none() {
-                doit("(", @")", substr.type_ident, *fields)
+                doit("(",
+                     InternedString::new(")"),
+                     substr.type_ident,
+                     *fields)
             } else {
-                doit("{", @"}", substr.type_ident, *fields)
+                doit("{",
+                     InternedString::new("}"),
+                     substr.type_ident,
+                     *fields)
             }
         }
 
         EnumMatching(_, variant, ref fields) => {
             match variant.node.kind {
                 ast::TupleVariantKind(..) =>
-                    doit("(", @")", variant.node.name, *fields),
+                    doit("(",
+                         InternedString::new(")"),
+                         variant.node.name,
+                         *fields),
                 ast::StructVariantKind(..) =>
-                    doit("{", @"}", variant.node.name, *fields),
+                    doit("{",
+                         InternedString::new("}"),
+                         variant.node.name,
+                         *fields),
             }
         }
 
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index a9b40ea7ec6..c23a1ce1e28 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -19,6 +19,7 @@ use codemap::Span;
 use ext::base::*;
 use ext::base;
 use ext::build::AstBuilder;
+use parse::token;
 
 use std::os;
 
@@ -52,7 +53,11 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         Some((v, _style)) => v
     };
     let msg = match exprs.len() {
-        1 => format!("environment variable `{}` not defined", var).to_managed(),
+        1 => {
+            token::intern_and_get_ident(format!("environment variable `{}` \
+                                                 not defined",
+                                                var))
+        }
         2 => {
             match expr_to_str(cx, exprs[1], "expected string literal") {
                 None => return MacResult::dummy_expr(),
@@ -65,12 +70,12 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         }
     };
 
-    let e = match os::getenv(var) {
+    let e = match os::getenv(var.get()) {
         None => {
-            cx.span_err(sp, msg);
+            cx.span_err(sp, msg.get());
             cx.expr_uint(sp, 0)
         }
-        Some(s) => cx.expr_str(sp, s.to_managed())
+        Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s))
     };
     MRExpr(e)
 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 1ffff03a80f..d8d98b27793 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -22,8 +22,8 @@ use codemap::{Span, Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
 use ext::base::*;
 use fold::*;
 use parse;
+use parse::token::{fresh_mark, fresh_name, intern};
 use parse::token;
-use parse::token::{fresh_mark, fresh_name, ident_to_str, intern};
 use visit;
 use visit::Visitor;
 use util::small_vector::SmallVector;
@@ -54,13 +54,14 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
                         return e;
                     }
                     let extname = &pth.segments[0].identifier;
-                    let extnamestr = ident_to_str(extname);
+                    let extnamestr = token::get_ident(extname.name);
                     // leaving explicit deref here to highlight unbox op:
                     let marked_after = match fld.extsbox.find(&extname.name) {
                         None => {
                             fld.cx.span_err(
                                 pth.span,
-                                format!("macro undefined: '{}'", extnamestr));
+                                format!("macro undefined: '{}'",
+                                        extnamestr.get()));
 
                             // let compilation continue
                             return e;
@@ -69,7 +70,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
                             fld.cx.bt_push(ExpnInfo {
                                 call_site: e.span,
                                 callee: NameAndSpan {
-                                    name: extnamestr,
+                                    name: extnamestr.get().to_str(),
                                     format: MacroBang,
                                     span: exp_span,
                                 },
@@ -94,7 +95,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
                                         pth.span,
                                         format!(
                                             "non-expr macro in expr pos: {}",
-                                            extnamestr
+                                            extnamestr.get()
                                         )
                                     );
                                     return e;
@@ -107,7 +108,8 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
                         _ => {
                             fld.cx.span_err(
                                 pth.span,
-                                format!("'{}' is not a tt-style macro", extnamestr)
+                                format!("'{}' is not a tt-style macro",
+                                        extnamestr.get())
                             );
                             return e;
                         }
@@ -221,12 +223,12 @@ pub fn expand_mod_items(module_: &ast::Mod, fld: &mut MacroExpander) -> ast::Mod
         item.attrs.rev_iter().fold(~[*item], |items, attr| {
             let mname = attr.name();
 
-            match fld.extsbox.find(&intern(mname)) {
+            match fld.extsbox.find(&intern(mname.get())) {
               Some(&ItemDecorator(dec_fn)) => {
                   fld.cx.bt_push(ExpnInfo {
                       call_site: attr.span,
                       callee: NameAndSpan {
-                          name: mname,
+                          name: mname.get().to_str(),
                           format: MacroAttribute,
                           span: None
                       }
@@ -295,28 +297,31 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
     };
 
     let extname = &pth.segments[0].identifier;
-    let extnamestr = ident_to_str(extname);
+    let extnamestr = token::get_ident(extname.name);
     let fm = fresh_mark();
     let expanded = match fld.extsbox.find(&extname.name) {
         None => {
             fld.cx.span_err(pth.span,
-                            format!("macro undefined: '{}!'", extnamestr));
+                            format!("macro undefined: '{}!'",
+                                    extnamestr.get()));
             // let compilation continue
             return SmallVector::zero();
         }
 
         Some(&NormalTT(ref expander, span)) => {
             if it.ident.name != parse::token::special_idents::invalid.name {
+                let string = token::get_ident(it.ident.name);
                 fld.cx.span_err(pth.span,
                                 format!("macro {}! expects no ident argument, \
-                                        given '{}'", extnamestr,
-                                        ident_to_str(&it.ident)));
+                                        given '{}'",
+                                        extnamestr.get(),
+                                        string.get()));
                 return SmallVector::zero();
             }
             fld.cx.bt_push(ExpnInfo {
                 call_site: it.span,
                 callee: NameAndSpan {
-                    name: extnamestr,
+                    name: extnamestr.get().to_str(),
                     format: MacroBang,
                     span: span
                 }
@@ -328,13 +333,14 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
         Some(&IdentTT(ref expander, span)) => {
             if it.ident.name == parse::token::special_idents::invalid.name {
                 fld.cx.span_err(pth.span,
-                                format!("macro {}! expects an ident argument", extnamestr));
+                                format!("macro {}! expects an ident argument",
+                                        extnamestr.get()));
                 return SmallVector::zero();
             }
             fld.cx.bt_push(ExpnInfo {
                 call_site: it.span,
                 callee: NameAndSpan {
-                    name: extnamestr,
+                    name: extnamestr.get().to_str(),
                     format: MacroBang,
                     span: span
                 }
@@ -344,7 +350,9 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
             expander.expand(fld.cx, it.span, it.ident, marked_tts)
         }
         _ => {
-            fld.cx.span_err(it.span, format!("{}! is not legal in item position", extnamestr));
+            fld.cx.span_err(it.span,
+                            format!("{}! is not legal in item position",
+                                    extnamestr.get()));
             return SmallVector::zero();
         }
     };
@@ -356,7 +364,9 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
                 .collect()
         }
         MRExpr(_) => {
-            fld.cx.span_err(pth.span, format!("expr macro in item position: {}", extnamestr));
+            fld.cx.span_err(pth.span,
+                            format!("expr macro in item position: {}",
+                                    extnamestr.get()));
             return SmallVector::zero();
         }
         MRAny(any_macro) => {
@@ -385,7 +395,7 @@ pub fn expand_view_item(vi: &ast::ViewItem,
                         fld: &mut MacroExpander)
                         -> ast::ViewItem {
     let should_load = vi.attrs.iter().any(|attr| {
-        "phase" == attr.name() &&
+        attr.name().get() == "phase" &&
             attr.meta_item_list().map_or(false, |phases| {
                 attr::contains_name(phases, "syntax")
             })
@@ -402,15 +412,18 @@ fn load_extern_macros(crate: &ast::ViewItem, fld: &mut MacroExpander) {
     let MacroCrate { lib, cnum } = fld.cx.loader.load_crate(crate);
 
     let crate_name = match crate.node {
-        ast::ViewItemExternMod(ref name, _, _) => token::ident_to_str(name),
+        ast::ViewItemExternMod(ref name, _, _) => {
+            let string = token::get_ident(name.name);
+            string.get().to_str()
+        },
         _ => unreachable!(),
     };
-    let name = format!("<{} macros>", crate_name).to_managed();
+    let name = format!("<{} macros>", crate_name);
 
     let exported_macros = fld.cx.loader.get_exported_macros(cnum);
     for source in exported_macros.iter() {
-        let item = parse::parse_item_from_source_str(name,
-                                                     source.to_managed(),
+        let item = parse::parse_item_from_source_str(name.clone(),
+                                                     (*source).clone(),
                                                      fld.cx.cfg(),
                                                      fld.cx.parse_sess())
                 .expect("expected a serialized item");
@@ -475,10 +488,11 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
         return SmallVector::zero();
     }
     let extname = &pth.segments[0].identifier;
-    let extnamestr = ident_to_str(extname);
+    let extnamestr = token::get_ident(extname.name);
     let marked_after = match fld.extsbox.find(&extname.name) {
         None => {
-            fld.cx.span_err(pth.span, format!("macro undefined: '{}'", extnamestr));
+            fld.cx.span_err(pth.span, format!("macro undefined: '{}'",
+                                              extnamestr.get()));
             return SmallVector::zero();
         }
 
@@ -486,7 +500,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
             fld.cx.bt_push(ExpnInfo {
                 call_site: s.span,
                 callee: NameAndSpan {
-                    name: extnamestr,
+                    name: extnamestr.get().to_str(),
                     format: MacroBang,
                     span: exp_span,
                 }
@@ -511,7 +525,8 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
                 MRAny(any_macro) => any_macro.make_stmt(),
                 _ => {
                     fld.cx.span_err(pth.span,
-                                    format!("non-stmt macro in stmt pos: {}", extnamestr));
+                                    format!("non-stmt macro in stmt pos: {}",
+                                            extnamestr.get()));
                     return SmallVector::zero();
                 }
             };
@@ -520,7 +535,8 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
         }
 
         _ => {
-            fld.cx.span_err(pth.span, format!("'{}' is not a tt-style macro", extnamestr));
+            fld.cx.span_err(pth.span, format!("'{}' is not a tt-style macro",
+                                              extnamestr.get()));
             return SmallVector::zero();
         }
     };
@@ -945,7 +961,7 @@ mod test {
     use fold::*;
     use ext::base::{CrateLoader, MacroCrate};
     use parse;
-    use parse::token::{fresh_mark, gensym, intern, ident_to_str};
+    use parse::token::{fresh_mark, gensym, intern};
     use parse::token;
     use util::parser_testing::{string_to_crate, string_to_crate_and_sess};
     use util::parser_testing::{string_to_pat, string_to_tts, strs_to_idents};
@@ -1009,11 +1025,11 @@ mod test {
     // make sure that macros can leave scope
     #[should_fail]
     #[test] fn macros_cant_escape_fns_test () {
-        let src = @"fn bogus() {macro_rules! z (() => (3+4))}\
+        let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\
                     fn inty() -> int { z!() }";
         let sess = parse::new_parse_sess(None);
         let crate_ast = parse::parse_crate_from_source_str(
-            @"<test>",
+            ~"<test>",
             src,
             ~[],sess);
         // should fail:
@@ -1024,11 +1040,11 @@ mod test {
     // make sure that macros can leave scope for modules
     #[should_fail]
     #[test] fn macros_cant_escape_mods_test () {
-        let src = @"mod foo {macro_rules! z (() => (3+4))}\
+        let src = ~"mod foo {macro_rules! z (() => (3+4))}\
                     fn inty() -> int { z!() }";
         let sess = parse::new_parse_sess(None);
         let crate_ast = parse::parse_crate_from_source_str(
-            @"<test>",
+            ~"<test>",
             src,
             ~[],sess);
         // should fail:
@@ -1038,22 +1054,22 @@ mod test {
 
     // macro_escape modules shouldn't cause macros to leave scope
     #[test] fn macros_can_escape_flattened_mods_test () {
-        let src = @"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
+        let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
                     fn inty() -> int { z!() }";
         let sess = parse::new_parse_sess(None);
         let crate_ast = parse::parse_crate_from_source_str(
-            @"<test>",
+            ~"<test>",
             src,
             ~[], sess);
         // should fail:
         let mut loader = ErrLoader;
-        expand_crate(sess,&mut loader,~[],crate_ast);
+        expand_crate(sess, &mut loader, ~[], crate_ast);
     }
 
     #[test] fn test_contains_flatten (){
-        let attr1 = make_dummy_attr (@"foo");
-        let attr2 = make_dummy_attr (@"bar");
-        let escape_attr = make_dummy_attr (@"macro_escape");
+        let attr1 = make_dummy_attr ("foo");
+        let attr2 = make_dummy_attr ("bar");
+        let escape_attr = make_dummy_attr ("macro_escape");
         let attrs1 = ~[attr1, escape_attr, attr2];
         assert_eq!(contains_macro_escape (attrs1),true);
         let attrs2 = ~[attr1,attr2];
@@ -1061,13 +1077,13 @@ mod test {
     }
 
     // make a MetaWord outer attribute with the given name
-    fn make_dummy_attr(s: @str) -> ast::Attribute {
+    fn make_dummy_attr(s: &str) -> ast::Attribute {
         Spanned {
             span:codemap::DUMMY_SP,
             node: Attribute_ {
                 style: AttrOuter,
                 value: @Spanned {
-                    node: MetaWord(s),
+                    node: MetaWord(token::intern_and_get_ident(s)),
                     span: codemap::DUMMY_SP,
                 },
                 is_sugared_doc: false,
@@ -1077,7 +1093,7 @@ mod test {
 
     #[test]
     fn renaming () {
-        let item_ast = string_to_crate(@"fn f() -> int { a }");
+        let item_ast = string_to_crate(~"fn f() -> int { a }");
         let a_name = intern("a");
         let a2_name = gensym("a2");
         let mut renamer = new_rename_folder(ast::Ident{name:a_name,ctxt:EMPTY_CTXT},
@@ -1116,7 +1132,7 @@ mod test {
     //    pprust::print_crate_(&mut s, crate);
     //}
 
-    fn expand_crate_str(crate_str: @str) -> ast::Crate {
+    fn expand_crate_str(crate_str: ~str) -> ast::Crate {
         let (crate_ast,ps) = string_to_crate_and_sess(crate_str);
         // the cfg argument actually does matter, here...
         let mut loader = ErrLoader;
@@ -1134,7 +1150,7 @@ mod test {
     //}
 
     #[test] fn macro_tokens_should_match(){
-        expand_crate_str(@"macro_rules! m((a)=>(13)) fn main(){m!(a);}");
+        expand_crate_str(~"macro_rules! m((a)=>(13)) fn main(){m!(a);}");
     }
 
     // renaming tests expand a crate and then check that the bindings match
@@ -1208,9 +1224,9 @@ mod test {
     fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
         let invalid_name = token::special_idents::invalid.name;
         let (teststr, bound_connections, bound_ident_check) = match *t {
-            (ref str,ref conns, bic) => (str.to_managed(), conns.clone(), bic)
+            (ref str,ref conns, bic) => (str.to_owned(), conns.clone(), bic)
         };
-        let cr = expand_crate_str(teststr.to_managed());
+        let cr = expand_crate_str(teststr.to_owned());
         // find the bindings:
         let mut name_finder = new_name_finder(~[]);
         visit::walk_crate(&mut name_finder,&cr,());
@@ -1260,9 +1276,12 @@ mod test {
                         println!("uh oh, matches but shouldn't:");
                         println!("varref: {:?}",varref);
                         // good lord, you can't make a path with 0 segments, can you?
+                        let string = token::get_ident(varref.segments[0]
+                                                            .identifier
+                                                            .name);
                         println!("varref's first segment's uint: {}, and string: \"{}\"",
                                  varref.segments[0].identifier.name,
-                                 ident_to_str(&varref.segments[0].identifier));
+                                 string.get());
                         println!("binding: {:?}", bindings[binding_idx]);
                         ast_util::display_sctable(get_sctable());
                     }
@@ -1273,7 +1292,7 @@ mod test {
     }
 
     #[test] fn fmt_in_macro_used_inside_module_macro() {
-        let crate_str = @"macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
+        let crate_str = ~"macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
 macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
 foo_module!()
 ";
@@ -1284,7 +1303,10 @@ foo_module!()
         let bindings = name_finder.ident_accumulator;
 
         let cxbinds : ~[&ast::Ident] =
-            bindings.iter().filter(|b|{@"xx" == (ident_to_str(*b))}).collect();
+            bindings.iter().filter(|b| {
+                let string = token::get_ident(b.name);
+                "xx" == string.get()
+            }).collect();
         let cxbind = match cxbinds {
             [b] => b,
             _ => fail!("expected just one binding for ext_cx")
@@ -1296,9 +1318,13 @@ foo_module!()
         let varrefs = path_finder.path_accumulator;
 
         // the xx binding should bind all of the xx varrefs:
-        for (idx,v) in varrefs.iter().filter(|p|{ p.segments.len() == 1
-                                          && (@"xx" == (ident_to_str(&p.segments[0].identifier)))
-                                     }).enumerate() {
+        for (idx,v) in varrefs.iter().filter(|p|{
+            p.segments.len() == 1
+            && {
+                let string = token::get_ident(p.segments[0].identifier.name);
+                "xx" == string.get()
+            }
+        }).enumerate() {
             if (mtwt_resolve(v.segments[0].identifier) != resolved_binding) {
                 println!("uh oh, xx binding didn't match xx varref:");
                 println!("this is xx varref \\# {:?}",idx);
@@ -1323,7 +1349,7 @@ foo_module!()
 
     #[test]
     fn pat_idents(){
-        let pat = string_to_pat(@"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
+        let pat = string_to_pat(~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
         let mut pat_idents = new_name_finder(~[]);
         pat_idents.visit_pat(pat, ());
         assert_eq!(pat_idents.ident_accumulator,
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index bbf6f7fff7f..ba1d5efdd49 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -14,23 +14,24 @@ use codemap::{Span, respan};
 use ext::base::*;
 use ext::base;
 use ext::build::AstBuilder;
-use rsparse = parse;
-use parse::token;
 use opt_vec;
+use parse::token::InternedString;
+use parse::token;
+use rsparse = parse;
 use std::fmt::parse;
 use std::hashmap::{HashMap, HashSet};
 use std::vec;
 
 #[deriving(Eq)]
 enum ArgumentType {
-    Known(@str),
+    Known(~str),
     Unsigned,
     String,
 }
 
 enum Position {
     Exact(uint),
-    Named(@str),
+    Named(~str),
 }
 
 struct Context<'a> {
@@ -42,12 +43,12 @@ struct Context<'a> {
     args: ~[@ast::Expr],
     arg_types: ~[Option<ArgumentType>],
     // Parsed named expressions and the types that we've found for them so far
-    names: HashMap<@str, @ast::Expr>,
-    name_types: HashMap<@str, ArgumentType>,
+    names: HashMap<~str, @ast::Expr>,
+    name_types: HashMap<~str, ArgumentType>,
 
     // Collection of the compiled `rt::Piece` structures
     pieces: ~[@ast::Expr],
-    name_positions: HashMap<@str, uint>,
+    name_positions: HashMap<~str, uint>,
     method_statics: ~[@ast::Item],
 
     // Updated as arguments are consumed or methods are entered
@@ -104,10 +105,11 @@ impl<'a> Context<'a> {
                         return (extra, None);
                     }
                 };
-                let name = self.ecx.str_of(ident);
+                let interned_name = token::get_ident(ident.name);
+                let name = interned_name.get();
                 p.expect(&token::EQ);
                 let e = p.parse_expr();
-                match self.names.find(&name) {
+                match self.names.find_equiv(&name) {
                     None => {}
                     Some(prev) => {
                         self.ecx.span_err(e.span, format!("duplicate argument \
@@ -117,7 +119,7 @@ impl<'a> Context<'a> {
                         continue
                     }
                 }
-                self.names.insert(name, e);
+                self.names.insert(name.to_str(), e);
             } else {
                 self.args.push(p.parse_expr());
                 self.arg_types.push(None);
@@ -156,13 +158,13 @@ impl<'a> Context<'a> {
                         Exact(i)
                     }
                     parse::ArgumentIs(i) => Exact(i),
-                    parse::ArgumentNamed(s) => Named(s.to_managed()),
+                    parse::ArgumentNamed(s) => Named(s.to_str()),
                 };
 
                 // and finally the method being applied
                 match arg.method {
                     None => {
-                        let ty = Known(arg.format.ty.to_managed());
+                        let ty = Known(arg.format.ty.to_str());
                         self.verify_arg_type(pos, ty);
                     }
                     Some(ref method) => { self.verify_method(pos, *method); }
@@ -184,7 +186,7 @@ impl<'a> Context<'a> {
                 self.verify_arg_type(Exact(i), Unsigned);
             }
             parse::CountIsName(s) => {
-                self.verify_arg_type(Named(s.to_managed()), Unsigned);
+                self.verify_arg_type(Named(s.to_str()), Unsigned);
             }
             parse::CountIsNextParam => {
                 if self.check_positional_ok() {
@@ -259,7 +261,13 @@ impl<'a> Context<'a> {
                     self.ecx.span_err(self.fmtsp, msg);
                     return;
                 }
-                self.verify_same(self.args[arg].span, ty, self.arg_types[arg]);
+                {
+                    let arg_type = match self.arg_types[arg] {
+                        None => None,
+                        Some(ref x) => Some(x)
+                    };
+                    self.verify_same(self.args[arg].span, &ty, arg_type);
+                }
                 if self.arg_types[arg].is_none() {
                     self.arg_types[arg] = Some(ty);
                 }
@@ -274,10 +282,9 @@ impl<'a> Context<'a> {
                         return;
                     }
                 };
-                self.verify_same(span, ty,
-                                 self.name_types.find(&name).map(|&x| x));
+                self.verify_same(span, &ty, self.name_types.find(&name));
                 if !self.name_types.contains_key(&name) {
-                    self.name_types.insert(name, ty);
+                    self.name_types.insert(name.clone(), ty);
                 }
                 // Assign this named argument a slot in the arguments array if
                 // it hasn't already been assigned a slot.
@@ -297,30 +304,36 @@ impl<'a> Context<'a> {
     ///
     /// Obviously `Some(Some(x)) != Some(Some(y))`, but we consider it true
     /// that: `Some(None) == Some(Some(x))`
-    fn verify_same(&self, sp: Span, ty: ArgumentType,
-                   before: Option<ArgumentType>) {
+    fn verify_same(&self,
+                   sp: Span,
+                   ty: &ArgumentType,
+                   before: Option<&ArgumentType>) {
         let cur = match before {
             None => return,
             Some(t) => t,
         };
-        if ty == cur { return }
+        if *ty == *cur {
+            return
+        }
         match (cur, ty) {
-            (Known(cur), Known(ty)) => {
+            (&Known(ref cur), &Known(ref ty)) => {
                 self.ecx.span_err(sp,
                                   format!("argument redeclared with type `{}` when \
-                                           it was previously `{}`", ty, cur));
+                                           it was previously `{}`",
+                                          *ty,
+                                          *cur));
             }
-            (Known(cur), _) => {
+            (&Known(ref cur), _) => {
                 self.ecx.span_err(sp,
                                   format!("argument used to format with `{}` was \
                                            attempted to not be used for formatting",
-                                           cur));
+                                           *cur));
             }
-            (_, Known(ty)) => {
+            (_, &Known(ref ty)) => {
                 self.ecx.span_err(sp,
                                   format!("argument previously used as a format \
                                            argument attempted to be used as `{}`",
-                                           ty));
+                                           *ty));
             }
             (_, _) => {
                 self.ecx.span_err(sp, "argument declared with multiple formats");
@@ -333,13 +346,18 @@ impl<'a> Context<'a> {
     fn static_attrs(&self) -> ~[ast::Attribute] {
         // Flag statics as `address_insignificant` so LLVM can merge duplicate
         // globals as much as possible (which we're generating a whole lot of).
-        let unnamed = self.ecx.meta_word(self.fmtsp, @"address_insignificant");
+        let unnamed = self.ecx
+                          .meta_word(self.fmtsp,
+                                     InternedString::new(
+                                         "address_insignificant"));
         let unnamed = self.ecx.attribute(self.fmtsp, unnamed);
 
         // Do not warn format string as dead code
-        let dead_code = self.ecx.meta_word(self.fmtsp, @"dead_code");
+        let dead_code = self.ecx.meta_word(self.fmtsp,
+                                           InternedString::new("dead_code"));
         let allow_dead_code = self.ecx.meta_list(self.fmtsp,
-                                                 @"allow", ~[dead_code]);
+                                                 InternedString::new("allow"),
+                                                 ~[dead_code]);
         let allow_dead_code = self.ecx.attribute(self.fmtsp, allow_dead_code);
         return ~[unnamed, allow_dead_code];
     }
@@ -391,9 +409,8 @@ impl<'a> Context<'a> {
                     self.ecx.expr_path(path)
                 }
                 parse::CountIsName(n) => {
-                    let n = n.to_managed();
-                    let i = match self.name_positions.find_copy(&n) {
-                        Some(i) => i,
+                    let i = match self.name_positions.find_equiv(&n) {
+                        Some(&i) => i,
                         None => 0, // error already emitted elsewhere
                     };
                     let i = i + self.args.len();
@@ -410,7 +427,7 @@ impl<'a> Context<'a> {
                         let result = arm.result.iter().map(|p| {
                             self.trans_piece(p)
                         }).collect();
-                        let s = arm.selector.to_managed();
+                        let s = token::intern_and_get_ident(arm.selector);
                         let selector = self.ecx.expr_str(sp, s);
                         self.ecx.expr_struct(sp, p, ~[
                             self.ecx.field_imm(sp,
@@ -486,8 +503,12 @@ impl<'a> Context<'a> {
 
         match *piece {
             parse::String(s) => {
-                self.ecx.expr_call_global(sp, rtpath("String"),
-                                          ~[self.ecx.expr_str(sp, s.to_managed())])
+                let s = token::intern_and_get_ident(s);
+                self.ecx.expr_call_global(sp,
+                                          rtpath("String"),
+                                          ~[
+                    self.ecx.expr_str(sp, s)
+                ])
             }
             parse::CurrentArgument => {
                 let nil = self.ecx.expr_lit(sp, ast::LitNil);
@@ -509,9 +530,8 @@ impl<'a> Context<'a> {
                     // Named arguments are converted to positional arguments at
                     // the end of the list of arguments
                     parse::ArgumentNamed(n) => {
-                        let n = n.to_managed();
-                        let i = match self.name_positions.find_copy(&n) {
-                            Some(i) => i,
+                        let i = match self.name_positions.find_equiv(&n) {
+                            Some(&i) => i,
                             None => 0, // error already emitted elsewhere
                         };
                         let i = i + self.args.len();
@@ -623,14 +643,17 @@ impl<'a> Context<'a> {
             locals.push(self.format_arg(e.span, Exact(i),
                                         self.ecx.expr_ident(e.span, name)));
         }
-        for (&name, &e) in self.names.iter() {
-            if !self.name_types.contains_key(&name) { continue }
+        for (name, &e) in self.names.iter() {
+            if !self.name_types.contains_key(name) {
+                continue
+            }
 
-            let lname = self.ecx.ident_of(format!("__arg{}", name));
+            let lname = self.ecx.ident_of(format!("__arg{}", *name));
             let e = self.ecx.expr_addr_of(e.span, e);
             lets.push(self.ecx.stmt_let(e.span, false, lname, e));
-            names[*self.name_positions.get(&name)] =
-                Some(self.format_arg(e.span, Named(name),
+            names[*self.name_positions.get(name)] =
+                Some(self.format_arg(e.span,
+                                     Named((*name).clone()),
                                      self.ecx.expr_ident(e.span, lname)));
         }
 
@@ -672,16 +695,16 @@ impl<'a> Context<'a> {
                                            Some(result)))
     }
 
-    fn format_arg(&self, sp: Span, argno: Position,
-                  arg: @ast::Expr) -> @ast::Expr {
+    fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr)
+                  -> @ast::Expr {
         let ty = match argno {
-            Exact(i) => self.arg_types[i].unwrap(),
-            Named(s) => *self.name_types.get(&s)
+            Exact(ref i) => self.arg_types[*i].get_ref(),
+            Named(ref s) => self.name_types.get(s)
         };
 
-        let fmt_trait = match ty {
-            Known(tyname) => {
-                match tyname.as_slice() {
+        let fmt_trait = match *ty {
+            Known(ref tyname) => {
+                match (*tyname).as_slice() {
                     ""  => "Default",
                     "?" => "Poly",
                     "b" => "Bool",
@@ -698,8 +721,9 @@ impl<'a> Context<'a> {
                     "x" => "LowerHex",
                     "X" => "UpperHex",
                     _ => {
-                        self.ecx.span_err(sp, format!("unknown format trait \
-                                                       `{}`", tyname));
+                        self.ecx.span_err(sp,
+                                          format!("unknown format trait `{}`",
+                                                  *tyname));
                         "Dummy"
                     }
                 }
@@ -757,8 +781,9 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span,
     // Be sure to recursively expand macros just in case the format string uses
     // a macro to build the format expression.
     let expr = cx.ecx.expand_expr(efmt);
-    let fmt = match expr_to_str(cx.ecx, expr,
-                                     "format argument must be a string literal.") {
+    let fmt = match expr_to_str(cx.ecx,
+                                expr,
+                                "format argument must be a string literal.") {
         Some((fmt, _)) => fmt,
         None => return MacResult::dummy_expr()
     };
@@ -770,7 +795,7 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span,
             cx.ecx.span_err(efmt.span, m);
         }
     }).inside(|| {
-        for piece in parse::Parser::new(fmt) {
+        for piece in parse::Parser::new(fmt.get()) {
             if !err {
                 cx.verify_piece(&piece);
                 let piece = cx.trans_piece(&piece);
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 6faed270875..bd1ac616f52 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -31,6 +31,7 @@ use parse;
 pub mod rt {
     use ast;
     use ext::base::ExtCtxt;
+    use parse::token;
     use parse;
     use print::pprust;
 
@@ -65,132 +66,134 @@ pub mod rt {
 
     pub trait ToSource {
         // Takes a thing and generates a string containing rust code for it.
-        fn to_source(&self) -> @str;
+        fn to_source(&self) -> ~str;
     }
 
     impl ToSource for ast::Ident {
-        fn to_source(&self) -> @str {
-            ident_to_str(self)
+        fn to_source(&self) -> ~str {
+            let this = get_ident(self.name);
+            this.get().to_owned()
         }
     }
 
     impl ToSource for @ast::Item {
-        fn to_source(&self) -> @str {
-            pprust::item_to_str(*self, get_ident_interner()).to_managed()
+        fn to_source(&self) -> ~str {
+            pprust::item_to_str(*self, get_ident_interner())
         }
     }
 
     impl<'a> ToSource for &'a [@ast::Item] {
-        fn to_source(&self) -> @str {
-            self.map(|i| i.to_source()).connect("\n\n").to_managed()
+        fn to_source(&self) -> ~str {
+            self.map(|i| i.to_source()).connect("\n\n")
         }
     }
 
     impl ToSource for ast::Ty {
-        fn to_source(&self) -> @str {
-            pprust::ty_to_str(self, get_ident_interner()).to_managed()
+        fn to_source(&self) -> ~str {
+            pprust::ty_to_str(self, get_ident_interner())
         }
     }
 
     impl<'a> ToSource for &'a [ast::Ty] {
-        fn to_source(&self) -> @str {
-            self.map(|i| i.to_source()).connect(", ").to_managed()
+        fn to_source(&self) -> ~str {
+            self.map(|i| i.to_source()).connect(", ")
         }
     }
 
     impl ToSource for Generics {
-        fn to_source(&self) -> @str {
-            pprust::generics_to_str(self, get_ident_interner()).to_managed()
+        fn to_source(&self) -> ~str {
+            pprust::generics_to_str(self, get_ident_interner())
         }
     }
 
     impl ToSource for @ast::Expr {
-        fn to_source(&self) -> @str {
-            pprust::expr_to_str(*self, get_ident_interner()).to_managed()
+        fn to_source(&self) -> ~str {
+            pprust::expr_to_str(*self, get_ident_interner())
         }
     }
 
     impl ToSource for ast::Block {
-        fn to_source(&self) -> @str {
-            pprust::block_to_str(self, get_ident_interner()).to_managed()
+        fn to_source(&self) -> ~str {
+            pprust::block_to_str(self, get_ident_interner())
         }
     }
 
     impl<'a> ToSource for &'a str {
-        fn to_source(&self) -> @str {
-            let lit = dummy_spanned(ast::LitStr(self.to_managed(), ast::CookedStr));
-            pprust::lit_to_str(&lit).to_managed()
+        fn to_source(&self) -> ~str {
+            let lit = dummy_spanned(ast::LitStr(
+                    token::intern_and_get_ident(*self), ast::CookedStr));
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for int {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for i8 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for i16 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
 
     impl ToSource for i32 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for i64 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for uint {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for u8 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for u16 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for u32 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
     impl ToSource for u64 {
-        fn to_source(&self) -> @str {
+        fn to_source(&self) -> ~str {
             let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64));
-            pprust::lit_to_str(&lit).to_managed()
+            pprust::lit_to_str(&lit)
         }
     }
 
@@ -237,52 +240,49 @@ pub mod rt {
     impl_to_tokens!(u64)
 
     pub trait ExtParseUtils {
-        fn parse_item(&self, s: @str) -> @ast::Item;
-        fn parse_expr(&self, s: @str) -> @ast::Expr;
-        fn parse_stmt(&self, s: @str) -> @ast::Stmt;
-        fn parse_tts(&self, s: @str) -> ~[ast::TokenTree];
+        fn parse_item(&self, s: ~str) -> @ast::Item;
+        fn parse_expr(&self, s: ~str) -> @ast::Expr;
+        fn parse_stmt(&self, s: ~str) -> @ast::Stmt;
+        fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree];
     }
 
     impl<'a> ExtParseUtils for ExtCtxt<'a> {
 
-        fn parse_item(&self, s: @str) -> @ast::Item {
+        fn parse_item(&self, s: ~str) -> @ast::Item {
             let res = parse::parse_item_from_source_str(
-                @"<quote expansion>",
+                "<quote expansion>".to_str(),
                 s,
                 self.cfg(),
                 self.parse_sess());
             match res {
                 Some(ast) => ast,
                 None => {
-                    error!("Parse error with ```\n{}\n```", s);
+                    error!("Parse error");
                     fail!()
                 }
             }
         }
 
-        fn parse_stmt(&self, s: @str) -> @ast::Stmt {
-            parse::parse_stmt_from_source_str(
-                @"<quote expansion>",
-                s,
-                self.cfg(),
-                ~[],
-                self.parse_sess())
+        fn parse_stmt(&self, s: ~str) -> @ast::Stmt {
+            parse::parse_stmt_from_source_str("<quote expansion>".to_str(),
+                                              s,
+                                              self.cfg(),
+                                              ~[],
+                                              self.parse_sess())
         }
 
-        fn parse_expr(&self, s: @str) -> @ast::Expr {
-            parse::parse_expr_from_source_str(
-                @"<quote expansion>",
-                s,
-                self.cfg(),
-                self.parse_sess())
+        fn parse_expr(&self, s: ~str) -> @ast::Expr {
+            parse::parse_expr_from_source_str("<quote expansion>".to_str(),
+                                              s,
+                                              self.cfg(),
+                                              self.parse_sess())
         }
 
-        fn parse_tts(&self, s: @str) -> ~[ast::TokenTree] {
-            parse::parse_tts_from_source_str(
-                @"<quote expansion>",
-                s,
-                self.cfg(),
-                self.parse_sess())
+        fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree] {
+            parse::parse_tts_from_source_str("<quote expansion>".to_str(),
+                                             s,
+                                             self.cfg(),
+                                             self.parse_sess())
         }
     }
 
@@ -349,7 +349,7 @@ fn id_ext(str: &str) -> ast::Ident {
 
 // Lift an ident to the expr that evaluates to that ident.
 fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr {
-    let e_str = cx.expr_str(sp, cx.str_of(ident));
+    let e_str = cx.expr_str(sp, token::get_ident(ident.name));
     cx.expr_method_call(sp,
                         cx.expr_ident(sp, id_ext("ext_cx")),
                         id_ext("ident_of"),
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index a9f94da7a98..f3f947ec00d 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -16,11 +16,13 @@ use ext::base::*;
 use ext::base;
 use ext::build::AstBuilder;
 use parse;
-use parse::token::{get_ident_interner};
+use parse::token::get_ident_interner;
+use parse::token;
 use print::pprust;
 
 use std::io;
 use std::io::File;
+use std::rc::Rc;
 use std::str;
 
 // These macros all relate to the file system; they either return
@@ -57,21 +59,26 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
     let topmost = topmost_expn_info(cx.backtrace().unwrap());
     let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
-    let filename = loc.file.name;
+    let filename = token::intern_and_get_ident(loc.file.name);
     base::MRExpr(cx.expr_str(topmost.call_site, filename))
 }
 
 pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     -> base::MacResult {
     let s = pprust::tts_to_str(tts, get_ident_interner());
-    base::MRExpr(cx.expr_str(sp, s.to_managed()))
+    base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(s)))
 }
 
 pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     -> base::MacResult {
     base::check_zero_tts(cx, sp, tts, "module_path!");
-    base::MRExpr(cx.expr_str(sp,
-                             cx.mod_path().map(|x| cx.str_of(*x)).connect("::").to_managed()))
+    let string = cx.mod_path()
+                   .map(|x| {
+                        let interned_str = token::get_ident(x.name);
+                        interned_str.get().to_str()
+                    })
+                   .connect("::");
+    base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string)))
 }
 
 // include! : parse the given file as an expr
@@ -113,11 +120,11 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         Some(src) => {
             // Add this input file to the code map to make it available as
             // dependency information
-            let src = src.to_managed();
-            let filename = file.display().to_str().to_managed();
+            let filename = file.display().to_str();
+            let interned = token::intern_and_get_ident(src);
             cx.parse_sess.cm.new_filemap(filename, src);
 
-            base::MRExpr(cx.expr_str(sp, src))
+            base::MRExpr(cx.expr_str(sp, interned))
         }
         None => {
             cx.span_err(sp, format!("{} wasn't a utf-8 file", file.display()));
@@ -129,8 +136,6 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         -> base::MacResult
 {
-    use std::at_vec;
-
     let file = match get_single_str_from_tts(cx, sp, tts, "include_bin!") {
         Some(f) => f,
         None => return MacResult::dummy_expr()
@@ -142,8 +147,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             return MacResult::dummy_expr();
         }
         Ok(bytes) => {
-            let bytes = at_vec::to_managed_move(bytes);
-            base::MRExpr(cx.expr_lit(sp, ast::LitBinary(bytes)))
+            base::MRExpr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))
         }
     }
 }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index d5a30a7cf11..6d1b8dd2358 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -18,7 +18,7 @@ use parse::lexer::*; //resolve bug?
 use parse::ParseSess;
 use parse::attr::ParserAttr;
 use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
-use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner, ident_to_str};
+use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner};
 use parse::token;
 
 use std::hashmap::HashMap;
@@ -183,8 +183,9 @@ pub fn nameize(p_s: @ParseSess, ms: &[Matcher], res: &[@NamedMatch])
                 node: MatchNonterminal(ref bind_name, _, idx), span: sp
           } => {
             if ret_val.contains_key(bind_name) {
-                p_s.span_diagnostic.span_fatal(sp,
-                                               "Duplicated bind name: "+ ident_to_str(bind_name))
+                let string = token::get_ident(bind_name.name);
+                p_s.span_diagnostic
+                   .span_fatal(sp, "Duplicated bind name: " + string.get())
             }
             ret_val.insert(*bind_name, res[idx]);
           }
@@ -364,8 +365,11 @@ pub fn parse(sess: @ParseSess,
                 let nts = bb_eis.map(|ei| {
                     match ei.elts[ei.idx].node {
                       MatchNonterminal(ref bind,ref name,_) => {
-                        format!("{} ('{}')", ident_to_str(name),
-                             ident_to_str(bind))
+                        let bind_string = token::get_ident(bind.name);
+                        let name_string = token::get_ident(name.name);
+                        format!("{} ('{}')",
+                                name_string.get(),
+                                bind_string.get())
                       }
                       _ => fail!()
                     } }).connect(" or ");
@@ -388,8 +392,9 @@ pub fn parse(sess: @ParseSess,
                 let mut ei = bb_eis.pop().unwrap();
                 match ei.elts[ei.idx].node {
                   MatchNonterminal(_, ref name, idx) => {
+                    let name_string = token::get_ident(name.name);
                     ei.matches[idx].push(@MatchedNonterminal(
-                        parse_nt(&mut rust_parser, ident_to_str(name))));
+                        parse_nt(&mut rust_parser, name_string.get())));
                     ei.idx += 1u;
                   }
                   _ => fail!()
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index aabd9c694f7..c179e9959e0 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -21,8 +21,9 @@ use ext::tt::macro_parser::{parse, parse_or_else};
 use parse::lexer::{new_tt_reader, Reader};
 use parse::parser::Parser;
 use parse::attr::ParserAttr;
-use parse::token::{get_ident_interner, special_idents, gensym_ident, ident_to_str};
+use parse::token::{get_ident_interner, special_idents, gensym_ident};
 use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
+use parse::token;
 use print;
 use std::cell::RefCell;
 use util::small_vector::SmallVector;
@@ -112,10 +113,11 @@ fn generic_extension(cx: &ExtCtxt,
                      rhses: &[@NamedMatch])
                      -> MacResult {
     if cx.trace_macros() {
+        let interned_name = token::get_ident(name.name);
         println!("{}! \\{ {} \\}",
-                  cx.str_of(name),
-                  print::pprust::tt_to_str(&TTDelim(@arg.to_owned()),
-                                           get_ident_interner()));
+                 interned_name.get(),
+                 print::pprust::tt_to_str(&TTDelim(@arg.to_owned()),
+                                          get_ident_interner()));
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -229,7 +231,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt,
     };
 
     return MRDef(MacroDef {
-        name: ident_to_str(&name),
+        name: token::get_ident(name.name).get().to_str(),
         ext: NormalTT(exp, Some(sp))
     });
 }
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 87a2f374c90..d2fa24b1cfe 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -14,7 +14,7 @@ use codemap::{Span, DUMMY_SP};
 use diagnostic::SpanHandler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use parse::token::{EOF, INTERPOLATED, IDENT, Token, NtIdent};
-use parse::token::{ident_to_str};
+use parse::token;
 use parse::lexer::TokenAndSpan;
 
 use std::cell::{Cell, RefCell};
@@ -122,9 +122,10 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> @NamedMatch {
     match matched_opt {
         Some(s) => lookup_cur_matched_by_matched(r, s),
         None => {
+            let name_string = token::get_ident(name.name);
             r.sp_diag.span_fatal(r.cur_span.get(),
                                  format!("unknown macro variable `{}`",
-                                         ident_to_str(&name)));
+                                         name_string.get()));
         }
     }
 }
@@ -145,11 +146,11 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
             LisContradiction(_) => rhs.clone(),
             LisConstraint(r_len, _) if l_len == r_len => lhs.clone(),
             LisConstraint(r_len, ref r_id) => {
-                let l_n = ident_to_str(l_id);
-                let r_n = ident_to_str(r_id);
+                let l_n = token::get_ident(l_id.name);
+                let r_n = token::get_ident(r_id.name);
                 LisContradiction(format!("Inconsistent lockstep iteration: \
                                           '{}' has {} items, but '{}' has {}",
-                                          l_n, l_len, r_n, r_len))
+                                          l_n.get(), l_len, r_n.get(), r_len))
             }
         }
     }
@@ -313,10 +314,11 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
                 return ret_val;
               }
               MatchedSeq(..) => {
+                let string = token::get_ident(ident.name);
                 r.sp_diag.span_fatal(
                     r.cur_span.get(), /* blame the macro writer */
                     format!("variable '{}' is still repeating at this depth",
-                         ident_to_str(&ident)));
+                            string.get()));
               }
             }
           }
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 8dac13f1e31..8f5bbc2cdad 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -321,15 +321,14 @@ fn fold_meta_item_<T: Folder>(mi: @MetaItem, fld: &mut T) -> @MetaItem {
     @Spanned {
         node:
             match mi.node {
-                MetaWord(id) => MetaWord(id),
-                MetaList(id, ref mis) => {
+                MetaWord(ref id) => MetaWord((*id).clone()),
+                MetaList(ref id, ref mis) => {
                     let fold_meta_item = |x| fold_meta_item_(x, fld);
-                    MetaList(
-                        id,
-                        mis.map(|e| fold_meta_item(*e))
-                    )
+                    MetaList((*id).clone(), mis.map(|e| fold_meta_item(*e)))
+                }
+                MetaNameValue(ref id, ref s) => {
+                    MetaNameValue((*id).clone(), (*s).clone())
                 }
-                MetaNameValue(id, s) => MetaNameValue(id, s)
             },
         span: fld.new_span(mi.span) }
 }
@@ -498,12 +497,10 @@ fn fold_variant_arg_<T: Folder>(va: &VariantArg, folder: &mut T) -> VariantArg {
 pub fn noop_fold_view_item<T: Folder>(vi: &ViewItem, folder: &mut T)
                                        -> ViewItem{
     let inner_view_item = match vi.node {
-        ViewItemExternMod(ref ident,
-                             string,
-                             node_id) => {
+        ViewItemExternMod(ref ident, ref string, node_id) => {
             ViewItemExternMod(ident.clone(),
-                                 string,
-                                 folder.new_id(node_id))
+                              (*string).clone(),
+                              folder.new_id(node_id))
         }
         ViewItemUse(ref view_paths) => {
             ViewItemUse(folder.fold_view_paths(*view_paths))
@@ -815,8 +812,12 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
         }
         ExprInlineAsm(ref a) => {
             ExprInlineAsm(InlineAsm {
-                inputs: a.inputs.map(|&(c, input)| (c, folder.fold_expr(input))),
-                outputs: a.outputs.map(|&(c, out)| (c, folder.fold_expr(out))),
+                inputs: a.inputs.map(|&(ref c, input)| {
+                    ((*c).clone(), folder.fold_expr(input))
+                }),
+                outputs: a.outputs.map(|&(ref c, out)| {
+                    ((*c).clone(), folder.fold_expr(out))
+                }),
                 .. (*a).clone()
             })
         }
@@ -898,7 +899,8 @@ mod test {
     // make sure idents get transformed everywhere
     #[test] fn ident_transformation () {
         let mut zz_fold = ToZzIdentFolder;
-        let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
+        let ast = string_to_crate(
+            ~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
         assert_pred!(matches_codepattern,
                      "matches_codepattern",
                      pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,
@@ -909,8 +911,9 @@ mod test {
     // even inside macro defs....
     #[test] fn ident_transformation_in_defs () {
         let mut zz_fold = ToZzIdentFolder;
-        let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+
-=> (g $(d $d $e)+))} ");
+        let ast = string_to_crate(
+            ~"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
+              (g $(d $d $e)+))} ");
         assert_pred!(matches_codepattern,
                      "matches_codepattern",
                      pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index e7630a66855..c9bea78d02d 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -45,7 +45,7 @@ impl ParserAttr for Parser {
               }
               token::DOC_COMMENT(s) => {
                 let attr = ::attr::mk_sugared_doc_attr(
-                    self.id_to_str(s),
+                    self.id_to_interned_str(s),
                     self.span.lo,
                     self.span.hi
                 );
@@ -133,7 +133,7 @@ impl ParserAttr for Parser {
                 }
                 token::DOC_COMMENT(s) => {
                     self.bump();
-                    ::attr::mk_sugared_doc_attr(self.id_to_str(s),
+                    ::attr::mk_sugared_doc_attr(self.id_to_interned_str(s),
                                                 self.span.lo,
                                                 self.span.hi)
                 }
@@ -157,7 +157,7 @@ impl ParserAttr for Parser {
     fn parse_meta_item(&mut self) -> @ast::MetaItem {
         let lo = self.span.lo;
         let ident = self.parse_ident();
-        let name = self.id_to_str(ident);
+        let name = self.id_to_interned_str(ident);
         match self.token {
             token::EQ => {
                 self.bump();
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index aa5e4e01ae0..7165e7b404f 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -54,7 +54,6 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
 }
 
 pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
-
     /// remove whitespace-only lines from the start/end of lines
     fn vertical_trim(lines: ~[~str]) -> ~[~str] {
         let mut i = 0u;
@@ -348,10 +347,10 @@ pub struct Literal {
 // probably not a good thing.
 pub fn gather_comments_and_literals(span_diagnostic:
                                         @diagnostic::SpanHandler,
-                                    path: @str,
+                                    path: ~str,
                                     srdr: &mut io::Reader)
                                  -> (~[Comment], ~[Literal]) {
-    let src = str::from_utf8_owned(srdr.read_to_end()).unwrap().to_managed();
+    let src = str::from_utf8_owned(srdr.read_to_end()).unwrap();
     let cm = CodeMap::new();
     let filemap = cm.new_filemap(path, src);
     let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 2521bb515f7..8c55990289a 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -42,7 +42,6 @@ pub struct TokenAndSpan {
 
 pub struct StringReader {
     span_diagnostic: @SpanHandler,
-    src: @str,
     // The absolute offset within the codemap of the next character to read
     pos: Cell<BytePos>,
     // The absolute offset within the codemap of the last character read(curr)
@@ -73,7 +72,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler,
     let initial_char = '\n';
     let r = @StringReader {
         span_diagnostic: span_diagnostic,
-        src: filemap.src,
         pos: Cell::new(filemap.start_pos),
         last_pos: Cell::new(filemap.start_pos),
         col: Cell::new(CharPos(0)),
@@ -93,7 +91,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler,
 fn dup_string_reader(r: @StringReader) -> @StringReader {
     @StringReader {
         span_diagnostic: r.span_diagnostic,
-        src: r.src,
         pos: Cell::new(r.pos.get()),
         last_pos: Cell::new(r.last_pos.get()),
         col: Cell::new(r.col.get()),
@@ -188,7 +185,7 @@ fn fatal_span_verbose(rdr: @StringReader,
                    -> ! {
     let mut m = m;
     m.push_str(": ");
-    let s = rdr.src.slice(
+    let s = rdr.filemap.src.slice(
                   byte_offset(rdr, from_pos).to_uint(),
                   byte_offset(rdr, to_pos).to_uint());
     m.push_str(s);
@@ -239,7 +236,7 @@ fn with_str_from_to<T>(
                     end: BytePos,
                     f: |s: &str| -> T)
                     -> T {
-    f(rdr.src.slice(
+    f(rdr.filemap.src.slice(
             byte_offset(rdr, start).to_uint(),
             byte_offset(rdr, end).to_uint()))
 }
@@ -249,12 +246,12 @@ fn with_str_from_to<T>(
 pub fn bump(rdr: &StringReader) {
     rdr.last_pos.set(rdr.pos.get());
     let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint();
-    if current_byte_offset < (rdr.src).len() {
+    if current_byte_offset < (rdr.filemap.src).len() {
         assert!(rdr.curr.get() != unsafe {
             transmute(-1u32)
         }); // FIXME: #8971: unsound
         let last_char = rdr.curr.get();
-        let next = rdr.src.char_range_at(current_byte_offset);
+        let next = rdr.filemap.src.char_range_at(current_byte_offset);
         let byte_offset_diff = next.next - current_byte_offset;
         rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff));
         rdr.curr.set(next.ch);
@@ -277,8 +274,8 @@ pub fn is_eof(rdr: @StringReader) -> bool {
 }
 pub fn nextch(rdr: @StringReader) -> char {
     let offset = byte_offset(rdr, rdr.pos.get()).to_uint();
-    if offset < (rdr.src).len() {
-        return rdr.src.char_at(offset);
+    if offset < (rdr.filemap.src).len() {
+        return rdr.filemap.src.char_at(offset);
     } else { return unsafe { transmute(-1u32) }; } // FIXME: #8971: unsound
 }
 
@@ -975,9 +972,9 @@ mod test {
     }
 
     // open a string reader for the given string
-    fn setup(teststr: @str) -> Env {
+    fn setup(teststr: ~str) -> Env {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(@"zebra.rs", teststr);
+        let fm = cm.new_filemap(~"zebra.rs", teststr);
         let span_handler =
             diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
         Env {
@@ -987,7 +984,7 @@ mod test {
 
     #[test] fn t1 () {
         let Env {string_reader} =
-            setup(@"/* my source file */ \
+            setup(~"/* my source file */ \
                     fn main() { println!(\"zebra\"); }\n");
         let id = str_to_ident("fn");
         let tok1 = string_reader.next_token();
@@ -1023,14 +1020,14 @@ mod test {
     }
 
     #[test] fn doublecolonparsing () {
-        let env = setup (@"a b");
+        let env = setup (~"a b");
         check_tokenization (env,
                            ~[mk_ident("a",false),
                              mk_ident("b",false)]);
     }
 
     #[test] fn dcparsing_2 () {
-        let env = setup (@"a::b");
+        let env = setup (~"a::b");
         check_tokenization (env,
                            ~[mk_ident("a",true),
                              token::MOD_SEP,
@@ -1038,7 +1035,7 @@ mod test {
     }
 
     #[test] fn dcparsing_3 () {
-        let env = setup (@"a ::b");
+        let env = setup (~"a ::b");
         check_tokenization (env,
                            ~[mk_ident("a",false),
                              token::MOD_SEP,
@@ -1046,7 +1043,7 @@ mod test {
     }
 
     #[test] fn dcparsing_4 () {
-        let env = setup (@"a:: b");
+        let env = setup (~"a:: b");
         check_tokenization (env,
                            ~[mk_ident("a",true),
                              token::MOD_SEP,
@@ -1054,28 +1051,28 @@ mod test {
     }
 
     #[test] fn character_a() {
-        let env = setup(@"'a'");
+        let env = setup(~"'a'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok,token::LIT_CHAR('a' as u32));
     }
 
     #[test] fn character_space() {
-        let env = setup(@"' '");
+        let env = setup(~"' '");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_CHAR(' ' as u32));
     }
 
     #[test] fn character_escaped() {
-        let env = setup(@"'\\n'");
+        let env = setup(~"'\\n'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_CHAR('\n' as u32));
     }
 
     #[test] fn lifetime_name() {
-        let env = setup(@"'abc");
+        let env = setup(~"'abc");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         let id = token::str_to_ident("abc");
@@ -1083,7 +1080,7 @@ mod test {
     }
 
     #[test] fn raw_string() {
-        let env = setup(@"r###\"\"#a\\b\x00c\"\"###");
+        let env = setup(~"r###\"\"#a\\b\x00c\"\"###");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         let id = token::str_to_ident("\"#a\\b\x00c\"");
@@ -1097,7 +1094,7 @@ mod test {
     }
 
     #[test] fn nested_block_comments() {
-        let env = setup(@"/* /* */ */'a'");
+        let env = setup(~"/* /* */ */'a'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok,token::LIT_CHAR('a' as u32));
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index e026a11cafe..cec9f7c2d9f 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -89,12 +89,11 @@ pub fn parse_crate_attrs_from_file(
     return inner;
 }
 
-pub fn parse_crate_from_source_str(
-    name: @str,
-    source: @str,
-    cfg: ast::CrateConfig,
-    sess: @ParseSess
-) -> ast::Crate {
+pub fn parse_crate_from_source_str(name: ~str,
+                                   source: ~str,
+                                   cfg: ast::CrateConfig,
+                                   sess: @ParseSess)
+                                   -> ast::Crate {
     let mut p = new_parser_from_source_str(sess,
                                            /*bad*/ cfg.clone(),
                                            name,
@@ -102,12 +101,11 @@ pub fn parse_crate_from_source_str(
     maybe_aborted(p.parse_crate_mod(),p)
 }
 
-pub fn parse_crate_attrs_from_source_str(
-    name: @str,
-    source: @str,
-    cfg: ast::CrateConfig,
-    sess: @ParseSess
-) -> ~[ast::Attribute] {
+pub fn parse_crate_attrs_from_source_str(name: ~str,
+                                         source: ~str,
+                                         cfg: ast::CrateConfig,
+                                         sess: @ParseSess)
+                                         -> ~[ast::Attribute] {
     let mut p = new_parser_from_source_str(sess,
                                            /*bad*/ cfg.clone(),
                                            name,
@@ -116,44 +114,40 @@ pub fn parse_crate_attrs_from_source_str(
     return inner;
 }
 
-pub fn parse_expr_from_source_str(
-    name: @str,
-    source: @str,
-    cfg: ast::CrateConfig,
-    sess: @ParseSess
-) -> @ast::Expr {
+pub fn parse_expr_from_source_str(name: ~str,
+                                  source: ~str,
+                                  cfg: ast::CrateConfig,
+                                  sess: @ParseSess)
+                                  -> @ast::Expr {
     let mut p = new_parser_from_source_str(sess, cfg, name, source);
     maybe_aborted(p.parse_expr(), p)
 }
 
-pub fn parse_item_from_source_str(
-    name: @str,
-    source: @str,
-    cfg: ast::CrateConfig,
-    sess: @ParseSess
-) -> Option<@ast::Item> {
+pub fn parse_item_from_source_str(name: ~str,
+                                  source: ~str,
+                                  cfg: ast::CrateConfig,
+                                  sess: @ParseSess)
+                                  -> Option<@ast::Item> {
     let mut p = new_parser_from_source_str(sess, cfg, name, source);
     let attrs = p.parse_outer_attributes();
     maybe_aborted(p.parse_item(attrs),p)
 }
 
-pub fn parse_meta_from_source_str(
-    name: @str,
-    source: @str,
-    cfg: ast::CrateConfig,
-    sess: @ParseSess
-) -> @ast::MetaItem {
+pub fn parse_meta_from_source_str(name: ~str,
+                                  source: ~str,
+                                  cfg: ast::CrateConfig,
+                                  sess: @ParseSess)
+                                  -> @ast::MetaItem {
     let mut p = new_parser_from_source_str(sess, cfg, name, source);
     maybe_aborted(p.parse_meta_item(),p)
 }
 
-pub fn parse_stmt_from_source_str(
-    name: @str,
-    source: @str,
-    cfg: ast::CrateConfig,
-    attrs: ~[ast::Attribute],
-    sess: @ParseSess
-) -> @ast::Stmt {
+pub fn parse_stmt_from_source_str(name: ~str,
+                                  source: ~str,
+                                  cfg: ast::CrateConfig,
+                                  attrs: ~[ast::Attribute],
+                                  sess: @ParseSess)
+                                  -> @ast::Stmt {
     let mut p = new_parser_from_source_str(
         sess,
         cfg,
@@ -163,12 +157,11 @@ pub fn parse_stmt_from_source_str(
     maybe_aborted(p.parse_stmt(attrs),p)
 }
 
-pub fn parse_tts_from_source_str(
-    name: @str,
-    source: @str,
-    cfg: ast::CrateConfig,
-    sess: @ParseSess
-) -> ~[ast::TokenTree] {
+pub fn parse_tts_from_source_str(name: ~str,
+                                 source: ~str,
+                                 cfg: ast::CrateConfig,
+                                 sess: @ParseSess)
+                                 -> ~[ast::TokenTree] {
     let mut p = new_parser_from_source_str(
         sess,
         cfg,
@@ -183,9 +176,9 @@ pub fn parse_tts_from_source_str(
 // Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: @ParseSess,
                                   cfg: ast::CrateConfig,
-                                  name: @str,
-                                  source: @str)
-                               -> Parser {
+                                  name: ~str,
+                                  source: ~str)
+                                  -> Parser {
     filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
 }
 
@@ -248,20 +241,17 @@ pub fn file_to_filemap(sess: @ParseSess, path: &Path, spanopt: Option<Span>)
     };
     match str::from_utf8_owned(bytes) {
         Some(s) => {
-            return string_to_filemap(sess, s.to_managed(),
-                                     path.as_str().unwrap().to_managed());
-        }
-        None => {
-            err(format!("{} is not UTF-8 encoded", path.display()))
+            return string_to_filemap(sess, s, path.as_str().unwrap().to_str())
         }
+        None => err(format!("{} is not UTF-8 encoded", path.display())),
     }
     unreachable!()
 }
 
 // given a session and a string, add the string to
 // the session's codemap and return the new filemap
-pub fn string_to_filemap(sess: @ParseSess, source: @str, path: @str)
-    -> @FileMap {
+pub fn string_to_filemap(sess: @ParseSess, source: ~str, path: ~str)
+                         -> @FileMap {
     sess.cm.new_filemap(path, source)
 }
 
@@ -324,7 +314,7 @@ mod test {
     }
 
     #[test] fn path_exprs_1() {
-        assert_eq!(string_to_expr(@"a"),
+        assert_eq!(string_to_expr(~"a"),
                    @ast::Expr{
                     id: ast::DUMMY_NODE_ID,
                     node: ast::ExprPath(ast::Path {
@@ -343,7 +333,7 @@ mod test {
     }
 
     #[test] fn path_exprs_2 () {
-        assert_eq!(string_to_expr(@"::a::b"),
+        assert_eq!(string_to_expr(~"::a::b"),
                    @ast::Expr {
                     id: ast::DUMMY_NODE_ID,
                     node: ast::ExprPath(ast::Path {
@@ -368,12 +358,12 @@ mod test {
 
     #[should_fail]
     #[test] fn bad_path_expr_1() {
-        string_to_expr(@"::abc::def::return");
+        string_to_expr(~"::abc::def::return");
     }
 
     // check the token-tree-ization of macros
     #[test] fn string_to_tts_macro () {
-        let tts = string_to_tts(@"macro_rules! zip (($a)=>($a))");
+        let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))");
         match tts {
             [ast::TTTok(_,_),
              ast::TTTok(_,token::NOT),
@@ -417,7 +407,7 @@ mod test {
     }
 
     #[test] fn string_to_tts_1 () {
-        let tts = string_to_tts(@"fn a (b : int) { b; }");
+        let tts = string_to_tts(~"fn a (b : int) { b; }");
         assert_eq!(to_json_str(&tts),
         ~"[\
     {\
@@ -546,7 +536,7 @@ mod test {
     }
 
     #[test] fn ret_expr() {
-        assert_eq!(string_to_expr(@"return d"),
+        assert_eq!(string_to_expr(~"return d"),
                    @ast::Expr{
                     id: ast::DUMMY_NODE_ID,
                     node:ast::ExprRet(Some(@ast::Expr{
@@ -569,7 +559,7 @@ mod test {
     }
 
     #[test] fn parse_stmt_1 () {
-        assert_eq!(string_to_stmt(@"b;"),
+        assert_eq!(string_to_stmt(~"b;"),
                    @Spanned{
                        node: ast::StmtExpr(@ast::Expr {
                            id: ast::DUMMY_NODE_ID,
@@ -595,7 +585,7 @@ mod test {
     }
 
     #[test] fn parse_ident_pat () {
-        let mut parser = string_to_parser(@"b");
+        let mut parser = string_to_parser(~"b");
         assert_eq!(parser.parse_pat(),
                    @ast::Pat{id: ast::DUMMY_NODE_ID,
                              node: ast::PatIdent(
@@ -619,7 +609,7 @@ mod test {
     // check the contents of the tt manually:
     #[test] fn parse_fundecl () {
         // this test depends on the intern order of "fn" and "int"
-        assert_eq!(string_to_item(@"fn a (b : int) { b; }"),
+        assert_eq!(string_to_item(~"fn a (b : int) { b; }"),
                   Some(
                       @ast::Item{ident:str_to_ident("a"),
                             attrs:~[],
@@ -711,12 +701,12 @@ mod test {
 
     #[test] fn parse_exprs () {
         // just make sure that they parse....
-        string_to_expr(@"3 + 4");
-        string_to_expr(@"a::z.froob(b,@(987+3))");
+        string_to_expr(~"3 + 4");
+        string_to_expr(~"a::z.froob(b,@(987+3))");
     }
 
     #[test] fn attrs_fix_bug () {
-        string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+        string_to_item(~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                    -> Result<@Writer, ~str> {
     #[cfg(windows)]
     fn wb() -> c_int {
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index c4887d55e2a..b85d89cf804 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -22,7 +22,6 @@ use codemap::{Span, respan};
 use parse::parser::Parser;
 use parse::token;
 
-use std::str;
 use std::to_bytes;
 
 /// The specific types of unsupported syntax
@@ -45,6 +44,8 @@ pub enum ObsoleteSyntax {
     ObsoleteMultipleImport,
     ObsoleteExternModAttributesInParens,
     ObsoleteManagedPattern,
+    ObsoleteManagedString,
+    ObsoleteManagedVec,
 }
 
 impl to_bytes::IterBytes for ObsoleteSyntax {
@@ -150,6 +151,14 @@ impl ParserObsoleteMethods for Parser {
                 "use a nested `match` expression instead of a managed box \
                  pattern"
             ),
+            ObsoleteManagedString => (
+                "managed string",
+                "use `Rc<~str>` instead of a managed string"
+            ),
+            ObsoleteManagedVec => (
+                "managed vector",
+                "use `Rc<~[T]>` instead of a managed vector"
+            ),
         };
 
         self.report(sp, kind, kind_str, desc);
@@ -178,7 +187,8 @@ impl ParserObsoleteMethods for Parser {
     fn is_obsolete_ident(&mut self, ident: &str) -> bool {
         match self.token {
             token::IDENT(sid, _) => {
-                str::eq_slice(self.id_to_str(sid), ident)
+                let interned_string = token::get_ident(sid.name);
+                interned_string.equiv(&ident)
             }
             _ => false
         }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 642624adfb2..dd7cc3a2314 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -29,7 +29,7 @@ use ast::{ExprField, ExprFnBlock, ExprIf, ExprIndex};
 use ast::{ExprLit, ExprLogLevel, ExprLoop, ExprMac};
 use ast::{ExprMethodCall, ExprParen, ExprPath, ExprProc};
 use ast::{ExprRepeat, ExprRet, ExprStruct, ExprTup, ExprUnary};
-use ast::{ExprVec, ExprVstore, ExprVstoreSlice, ExprVstoreBox};
+use ast::{ExprVec, ExprVstore, ExprVstoreSlice};
 use ast::{ExprVstoreMutSlice, ExprWhile, ExprForLoop, ExternFn, Field, FnDecl};
 use ast::{ExprVstoreUniq, Onceness, Once, Many};
 use ast::{ForeignItem, ForeignItemStatic, ForeignItemFn, ForeignMod};
@@ -71,10 +71,9 @@ use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed};
 use parse::lexer::Reader;
 use parse::lexer::TokenAndSpan;
 use parse::obsolete::*;
-use parse::token::{can_begin_expr, get_ident_interner, ident_to_str, is_ident};
-use parse::token::{is_ident_or_path};
-use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents};
-use parse::token::{token_to_binop};
+use parse::token::{INTERPOLATED, InternedString, can_begin_expr, get_ident};
+use parse::token::{get_ident_interner, is_ident, is_ident_or_path};
+use parse::token::{is_plain_ident, keywords, special_idents, token_to_binop};
 use parse::token;
 use parse::{new_sub_parser_from_file, ParseSess};
 use opt_vec;
@@ -345,7 +344,7 @@ pub struct Parser {
     /// extra detail when the same error is seen twice
     obsolete_set: HashSet<ObsoleteSyntax>,
     /// Used to determine the path to externally loaded source files
-    mod_path_stack: ~[@str],
+    mod_path_stack: ~[InternedString],
     /// Stack of spans of open delimiters. Used for error message.
     open_braces: ~[Span],
     /* do not copy the parser; its state is tied to outside state */
@@ -531,10 +530,11 @@ impl Parser {
     // otherwise, eat it.
     pub fn expect_keyword(&mut self, kw: keywords::Keyword) {
         if !self.eat_keyword(kw) {
-            let id_str = self.id_to_str(kw.to_ident()).to_str();
+            let id_ident = kw.to_ident();
+            let id_interned_str = token::get_ident(id_ident.name);
             let token_str = self.this_token_to_str();
             self.fatal(format!("expected `{}`, found `{}`",
-                               id_str,
+                               id_interned_str.get(),
                                token_str))
         }
     }
@@ -802,8 +802,8 @@ impl Parser {
         self.sess.span_diagnostic.handler().abort_if_errors();
     }
 
-    pub fn id_to_str(&mut self, id: Ident) -> @str {
-        get_ident_interner().get(id.name)
+    pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
+        get_ident(id.name)
     }
 
     // Is the current token one of the keywords that signals a bare function
@@ -1291,7 +1291,7 @@ impl Parser {
         }
 
         // other things are parsed as @/~ + a type.  Note that constructs like
-        // @[] and @str will be resolved during typeck to slices and so forth,
+        // ~[] and ~str will be resolved during typeck to slices and so forth,
         // rather than boxed ptrs.  But the special casing of str/vec is not
         // reflected in the AST type.
         if sigil == OwnedSigil {
@@ -1401,11 +1401,18 @@ impl Parser {
             token::LIT_INT(i, it) => LitInt(i, it),
             token::LIT_UINT(u, ut) => LitUint(u, ut),
             token::LIT_INT_UNSUFFIXED(i) => LitIntUnsuffixed(i),
-            token::LIT_FLOAT(s, ft) => LitFloat(self.id_to_str(s), ft),
-            token::LIT_FLOAT_UNSUFFIXED(s) =>
-                LitFloatUnsuffixed(self.id_to_str(s)),
-            token::LIT_STR(s) => LitStr(self.id_to_str(s), ast::CookedStr),
-            token::LIT_STR_RAW(s, n) => LitStr(self.id_to_str(s), ast::RawStr(n)),
+            token::LIT_FLOAT(s, ft) => {
+                LitFloat(self.id_to_interned_str(s), ft)
+            }
+            token::LIT_FLOAT_UNSUFFIXED(s) => {
+                LitFloatUnsuffixed(self.id_to_interned_str(s))
+            }
+            token::LIT_STR(s) => {
+                LitStr(self.id_to_interned_str(s), ast::CookedStr)
+            }
+            token::LIT_STR_RAW(s, n) => {
+                LitStr(self.id_to_interned_str(s), ast::RawStr(n))
+            }
             token::LPAREN => { self.expect(&token::RPAREN); LitNil },
             _ => { self.unexpected_last(tok); }
         }
@@ -2284,11 +2291,19 @@ impl Parser {
             self.bump();
             let e = self.parse_prefix_expr();
             hi = e.span.hi;
-            // HACK: turn @[...] into a @-vec
+            // HACK: pretending @[] is a (removed) @-vec
             ex = match e.node {
               ExprVec(..) |
-              ExprRepeat(..) => ExprVstore(e, ExprVstoreBox),
-              ExprLit(lit) if lit_is_str(lit) => ExprVstore(e, ExprVstoreBox),
+              ExprRepeat(..) => {
+                  self.obsolete(e.span, ObsoleteManagedVec);
+                  // the above error means that no-one will know we're
+                  // lying... hopefully.
+                  ExprVstore(e, ExprVstoreUniq)
+              }
+              ExprLit(lit) if lit_is_str(lit) => {
+                  self.obsolete(self.last_span, ObsoleteManagedString);
+                  ExprVstore(e, ExprVstoreUniq)
+              }
               _ => self.mk_unary(UnBox, e)
             };
           }
@@ -2806,34 +2821,11 @@ impl Parser {
           token::AT => {
             self.bump();
             let sub = self.parse_pat();
-            hi = sub.span.hi;
-            // HACK: parse @"..." as a literal of a vstore @str
-            pat = match sub.node {
-              PatLit(e) => {
-                  match e.node {
-                      ExprLit(lit) if lit_is_str(lit) => {
-                        let vst = @Expr {
-                            id: ast::DUMMY_NODE_ID,
-                            node: ExprVstore(e, ExprVstoreBox),
-                            span: mk_sp(lo, hi),
-                        };
-                        PatLit(vst)
-                      }
-                      _ => {
-                        self.obsolete(self.span, ObsoleteManagedPattern);
-                        PatUniq(sub)
-                      }
-                  }
-              }
-              _ => {
-                self.obsolete(self.span, ObsoleteManagedPattern);
-                PatUniq(sub)
-              }
-            };
-            hi = self.last_span.hi;
+            self.obsolete(self.span, ObsoleteManagedPattern);
+            let hi = self.last_span.hi;
             return @ast::Pat {
                 id: ast::DUMMY_NODE_ID,
-                node: pat,
+                node: PatUniq(sub),
                 span: mk_sp(lo, hi)
             }
           }
@@ -3429,7 +3421,9 @@ impl Parser {
         loop {
             match self.token {
                 token::LIFETIME(lifetime) => {
-                    if "static" == self.id_to_str(lifetime) {
+                    let lifetime_interned_string =
+                        token::get_ident(lifetime.name);
+                    if lifetime_interned_string.equiv(&("static")) {
                         result.push(RegionTyParamBound);
                     } else {
                         self.span_err(self.span,
@@ -3970,8 +3964,9 @@ impl Parser {
                 fields.push(self.parse_struct_decl_field());
             }
             if fields.len() == 0 {
+                let string = get_ident_interner().get(class_name.name);
                 self.fatal(format!("Unit-like struct definition should be written as `struct {};`",
-                                get_ident_interner().get(class_name.name)));
+                                   string.as_slice()));
             }
             self.bump();
         } else if self.token == token::LPAREN {
@@ -4142,11 +4137,11 @@ impl Parser {
     }
 
     fn push_mod_path(&mut self, id: Ident, attrs: &[Attribute]) {
-        let default_path = token::interner_get(id.name);
+        let default_path = self.id_to_interned_str(id);
         let file_path = match ::attr::first_attr_value_str_by_name(attrs,
                                                                    "path") {
             Some(d) => d,
-            None => default_path
+            None => default_path,
         };
         self.mod_path_stack.push(file_path)
     }
@@ -4169,7 +4164,8 @@ impl Parser {
                 outer_attrs, "path") {
             Some(d) => dir_path.join(d),
             None => {
-                let mod_name = token::interner_get(id.name).to_owned();
+                let mod_string = token::get_ident(id.name);
+                let mod_name = mod_string.get().to_owned();
                 let default_path_str = mod_name + ".rs";
                 let secondary_path_str = mod_name + "/mod.rs";
                 let default_path = dir_path.join(default_path_str.as_slice());
@@ -4524,7 +4520,8 @@ impl Parser {
             token::LIT_STR(s)
             | token::LIT_STR_RAW(s, _) => {
                 self.bump();
-                let the_string = ident_to_str(&s);
+                let identifier_string = token::get_ident(s.name);
+                let the_string = identifier_string.get();
                 let mut abis = AbiSet::empty();
                 for word in the_string.words() {
                     match abi::lookup(word) {
@@ -4860,7 +4857,6 @@ impl Parser {
 
         let first_ident = self.parse_ident();
         let mut path = ~[first_ident];
-        debug!("parsed view path: {}", self.id_to_str(first_ident));
         match self.token {
           token::EQ => {
             // x = foo::bar
@@ -5119,17 +5115,20 @@ impl Parser {
         }
     }
 
-    pub fn parse_optional_str(&mut self) -> Option<(@str, ast::StrStyle)> {
+    pub fn parse_optional_str(&mut self)
+                              -> Option<(InternedString, ast::StrStyle)> {
         let (s, style) = match self.token {
-            token::LIT_STR(s) => (s, ast::CookedStr),
-            token::LIT_STR_RAW(s, n) => (s, ast::RawStr(n)),
+            token::LIT_STR(s) => (self.id_to_interned_str(s), ast::CookedStr),
+            token::LIT_STR_RAW(s, n) => {
+                (self.id_to_interned_str(s), ast::RawStr(n))
+            }
             _ => return None
         };
         self.bump();
-        Some((ident_to_str(&s), style))
+        Some((s, style))
     }
 
-    pub fn parse_str(&mut self) -> (@str, StrStyle) {
+    pub fn parse_str(&mut self) -> (InternedString, StrStyle) {
         match self.parse_optional_str() {
             Some(s) => { s }
             _ =>  self.fatal("expected string literal")
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 68e2f44ebb1..d6edccd33a4 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -12,12 +12,15 @@ use ast;
 use ast::{P, Name, Mrk};
 use ast_util;
 use parse::token;
-use util::interner::StrInterner;
+use util::interner::{RcStr, StrInterner};
 use util::interner;
 
+use extra::serialize::{Decodable, Decoder, Encodable, Encoder};
 use std::cast;
 use std::char;
+use std::fmt;
 use std::local_data;
+use std::path::BytesContainer;
 
 #[allow(non_camel_case_types)]
 #[deriving(Clone, Encodable, Decodable, Eq, IterBytes)]
@@ -185,32 +188,44 @@ pub fn to_str(input: @IdentInterner, t: &Token) -> ~str {
       }
       LIT_INT_UNSUFFIXED(i) => { i.to_str() }
       LIT_FLOAT(ref s, t) => {
-        let mut body = ident_to_str(s).to_owned();
+        let body_string = get_ident(s.name);
+        let mut body = body_string.get().to_str();
         if body.ends_with(".") {
             body.push_char('0');  // `10.f` is not a float literal
         }
         body + ast_util::float_ty_to_str(t)
       }
       LIT_FLOAT_UNSUFFIXED(ref s) => {
-        let mut body = ident_to_str(s).to_owned();
+        let body_string = get_ident(s.name);
+        let mut body = body_string.get().to_owned();
         if body.ends_with(".") {
             body.push_char('0');  // `10.f` is not a float literal
         }
         body
       }
-      LIT_STR(ref s) => { format!("\"{}\"", ident_to_str(s).escape_default()) }
+      LIT_STR(ref s) => {
+          let literal_string = get_ident(s.name);
+          format!("\"{}\"", literal_string.get().escape_default())
+      }
       LIT_STR_RAW(ref s, n) => {
+          let literal_string = get_ident(s.name);
           format!("r{delim}\"{string}\"{delim}",
-                  delim="#".repeat(n), string=ident_to_str(s))
+                  delim="#".repeat(n), string=literal_string.get())
       }
 
       /* Name components */
-      IDENT(s, _) => input.get(s.name).to_owned(),
-      LIFETIME(s) => format!("'{}", input.get(s.name)),
+      IDENT(s, _) => input.get(s.name).into_owned(),
+      LIFETIME(s) => {
+          let name = input.get(s.name);
+          format!("'{}", name.as_slice())
+      }
       UNDERSCORE => ~"_",
 
       /* Other */
-      DOC_COMMENT(ref s) => ident_to_str(s).to_owned(),
+      DOC_COMMENT(ref s) => {
+          let comment_string = get_ident(s.name);
+          comment_string.get().to_str()
+      }
       EOF => ~"<eof>",
       INTERPOLATED(ref nt) => {
         match nt {
@@ -525,6 +540,93 @@ pub fn get_ident_interner() -> @IdentInterner {
     }
 }
 
+/// Represents a string stored in the task-local interner. Because the
+/// interner lives for the life of the task, this can be safely treated as an
+/// immortal string, as long as it never crosses between tasks.
+///
+/// FIXME(pcwalton): You must be careful about what you do in the destructors
+/// of objects stored in TLS, because they may run after the interner is
+/// destroyed. In particular, they must not access string contents. This can
+/// be fixed in the future by just leaking all strings until task death
+/// somehow.
+#[deriving(Clone, Eq, IterBytes, Ord, TotalEq, TotalOrd)]
+pub struct InternedString {
+    priv string: RcStr,
+}
+
+impl InternedString {
+    #[inline]
+    pub fn new(string: &'static str) -> InternedString {
+        InternedString {
+            string: RcStr::new(string),
+        }
+    }
+
+    #[inline]
+    fn new_from_rc_str(string: RcStr) -> InternedString {
+        InternedString {
+            string: string,
+        }
+    }
+
+    #[inline]
+    pub fn get<'a>(&'a self) -> &'a str {
+        self.string.as_slice()
+    }
+}
+
+impl BytesContainer for InternedString {
+    fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
+        // FIXME(pcwalton): This is a workaround for the incorrect signature
+        // of `BytesContainer`, which is itself a workaround for the lack of
+        // DST.
+        unsafe {
+            let this = self.get();
+            cast::transmute(this.container_as_bytes())
+        }
+    }
+}
+
+impl fmt::Default for InternedString {
+    fn fmt(obj: &InternedString, f: &mut fmt::Formatter) {
+        write!(f.buf, "{}", obj.string.as_slice());
+    }
+}
+
+impl<'a> Equiv<&'a str> for InternedString {
+    fn equiv(&self, other: & &'a str) -> bool {
+        (*other) == self.string.as_slice()
+    }
+}
+
+impl<D:Decoder> Decodable<D> for InternedString {
+    fn decode(d: &mut D) -> InternedString {
+        let interner = get_ident_interner();
+        get_ident(interner.intern(d.read_str()))
+    }
+}
+
+impl<E:Encoder> Encodable<E> for InternedString {
+    fn encode(&self, e: &mut E) {
+        e.emit_str(self.string.as_slice())
+    }
+}
+
+/// Returns the string contents of an identifier, using the task-local
+/// interner.
+#[inline]
+pub fn get_ident(idx: Name) -> InternedString {
+    let interner = get_ident_interner();
+    InternedString::new_from_rc_str(interner.get(idx))
+}
+
+/// Interns and returns the string contents of an identifier, using the
+/// task-local interner.
+#[inline]
+pub fn intern_and_get_ident(s: &str) -> InternedString {
+    get_ident(intern(s))
+}
+
 /* for when we don't care about the contents; doesn't interact with TLD or
    serialization */
 pub fn mk_fake_ident_interner() -> @IdentInterner {
@@ -532,6 +634,7 @@ pub fn mk_fake_ident_interner() -> @IdentInterner {
 }
 
 // maps a string to its interned representation
+#[inline]
 pub fn intern(str : &str) -> Name {
     let interner = get_ident_interner();
     interner.intern(str)
@@ -543,16 +646,6 @@ pub fn gensym(str : &str) -> Name {
     interner.gensym(str)
 }
 
-// map an interned representation back to a string
-pub fn interner_get(name : Name) -> @str {
-    get_ident_interner().get(name)
-}
-
-// maps an identifier to the string that it corresponds to
-pub fn ident_to_str(id : &ast::Ident) -> @str {
-    interner_get(id.name)
-}
-
 // maps a string to an identifier with an empty syntax context
 pub fn str_to_ident(str : &str) -> ast::Ident {
     ast::Ident::new(intern(str))
@@ -576,28 +669,6 @@ pub fn fresh_name(src : &ast::Ident) -> Name {
     gensym(format!("{}_{}",ident_to_str(src),num))*/
 }
 
-// it looks like there oughta be a str_ptr_eq fn, but no one bothered to implement it?
-
-// determine whether two @str values are pointer-equal
-pub fn str_ptr_eq(a : @str, b : @str) -> bool {
-    unsafe {
-        let p : uint = cast::transmute(a);
-        let q : uint = cast::transmute(b);
-        let result = p == q;
-        // got to transmute them back, to make sure the ref count is correct:
-        let _junk1 : @str = cast::transmute(p);
-        let _junk2 : @str = cast::transmute(q);
-        result
-    }
-}
-
-// return true when two identifiers refer (through the intern table) to the same ptr_eq
-// string. This is used to compare identifiers in places where hygienic comparison is
-// not wanted (i.e. not lexical vars).
-pub fn ident_spelling_eq(a : &ast::Ident, b : &ast::Ident) -> bool {
-    str_ptr_eq(interner_get(a.name),interner_get(b.name))
-}
-
 // create a fresh mark.
 pub fn fresh_mark() -> Mrk {
     gensym("mark")
@@ -669,23 +740,4 @@ mod test {
         let a1 = mark_ident(a,92);
         assert!(mtwt_token_eq(&IDENT(a,true),&IDENT(a1,false)));
     }
-
-
-    #[test] fn str_ptr_eq_tests(){
-        let a = @"abc";
-        let b = @"abc";
-        let c = a;
-        assert!(str_ptr_eq(a,c));
-        assert!(!str_ptr_eq(a,b));
-    }
-
-    #[test] fn fresh_name_pointer_sharing() {
-        let ghi = str_to_ident("ghi");
-        assert_eq!(ident_to_str(&ghi),@"ghi");
-        assert!(str_ptr_eq(ident_to_str(&ghi),ident_to_str(&ghi)))
-        let fresh = ast::Ident::new(fresh_name(&ghi));
-        assert_eq!(ident_to_str(&fresh),@"ghi");
-        assert!(str_ptr_eq(ident_to_str(&ghi),ident_to_str(&fresh)));
-    }
-
 }
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 902d9e1c284..3e1f5b4cfb3 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -84,7 +84,7 @@ pub struct BeginToken {
 
 #[deriving(Clone)]
 pub enum Token {
-    String(@str, int),
+    String(~str, int),
     Break(BreakToken),
     Begin(BeginToken),
     End,
@@ -131,7 +131,7 @@ pub fn buf_str(toks: ~[Token], szs: ~[int], left: uint, right: uint,
         if i != left {
             s.push_str(", ");
         }
-        s.push_str(format!("{}={}", szs[i], tok_str(toks[i])));
+        s.push_str(format!("{}={}", szs[i], tok_str(toks[i].clone())));
         i += 1u;
         i %= n;
     }
@@ -285,7 +285,9 @@ pub struct Printer {
 }
 
 impl Printer {
-    pub fn last_token(&mut self) -> Token { self.token[self.right] }
+    pub fn last_token(&mut self) -> Token {
+        self.token[self.right].clone()
+    }
     // be very careful with this!
     pub fn replace_last_token(&mut self, t: Token) {
         self.token[self.right] = t;
@@ -296,8 +298,8 @@ impl Printer {
           Eof => {
             if !self.scan_stack_empty {
                 self.check_stack(0);
-                self.advance_left(self.token[self.left],
-                                  self.size[self.left]);
+                let left = self.token[self.left].clone();
+                self.advance_left(left, self.size[self.left]);
             }
             self.indent(0);
           }
@@ -341,16 +343,16 @@ impl Printer {
             self.size[self.right] = -self.right_total;
             self.right_total += b.blank_space;
           }
-          String(s, len) => {
+          String(ref s, len) => {
             if self.scan_stack_empty {
                 debug!("pp String('{}')/print ~[{},{}]",
-                       s, self.left, self.right);
-                self.print(t, len);
+                       *s, self.left, self.right);
+                self.print(t.clone(), len);
             } else {
                 debug!("pp String('{}')/buffer ~[{},{}]",
-                       s, self.left, self.right);
+                       *s, self.left, self.right);
                 self.advance_right();
-                self.token[self.right] = t;
+                self.token[self.right] = t.clone();
                 self.size[self.right] = len;
                 self.right_total += len;
                 self.check_stream();
@@ -370,7 +372,8 @@ impl Printer {
                     self.size[self.scan_pop_bottom()] = SIZE_INFINITY;
                 }
             }
-            self.advance_left(self.token[self.left], self.size[self.left]);
+            let left = self.token[self.left].clone();
+            self.advance_left(left, self.size[self.left]);
             if self.left != self.right { self.check_stream(); }
         }
     }
@@ -414,7 +417,7 @@ impl Printer {
         debug!("advnce_left ~[{},{}], sizeof({})={}", self.left, self.right,
                self.left, L);
         if L >= 0 {
-            self.print(x, L);
+            self.print(x.clone(), L);
             match x {
               Break(b) => self.left_total += b.blank_space,
               String(_, len) => {
@@ -425,8 +428,8 @@ impl Printer {
             if self.left != self.right {
                 self.left += 1u;
                 self.left %= self.buf_len;
-                self.advance_left(self.token[self.left],
-                                  self.size[self.left]);
+                let left = self.token[self.left].clone();
+                self.advance_left(left, self.size[self.left]);
             }
         }
     }
@@ -483,7 +486,7 @@ impl Printer {
         write!(self.out, "{}", s);
     }
     pub fn print(&mut self, x: Token, L: int) {
-        debug!("print {} {} (remaining line space={})", tok_str(x), L,
+        debug!("print {} {} (remaining line space={})", tok_str(x.clone()), L,
                self.space);
         debug!("{}", buf_str(self.token.clone(),
                              self.size.clone(),
@@ -583,15 +586,15 @@ pub fn end(p: &mut Printer) { p.pretty_print(End); }
 pub fn eof(p: &mut Printer) { p.pretty_print(Eof); }
 
 pub fn word(p: &mut Printer, wrd: &str) {
-    p.pretty_print(String(/* bad */ wrd.to_managed(), wrd.len() as int));
+    p.pretty_print(String(/* bad */ wrd.to_str(), wrd.len() as int));
 }
 
 pub fn huge_word(p: &mut Printer, wrd: &str) {
-    p.pretty_print(String(/* bad */ wrd.to_managed(), SIZE_INFINITY));
+    p.pretty_print(String(/* bad */ wrd.to_str(), SIZE_INFINITY));
 }
 
 pub fn zero_word(p: &mut Printer, wrd: &str) {
-    p.pretty_print(String(/* bad */ wrd.to_managed(), 0));
+    p.pretty_print(String(/* bad */ wrd.to_str(), 0));
 }
 
 pub fn spaces(p: &mut Printer, n: uint) { break_offset(p, n, 0); }
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 2783284ea8b..037c69eb918 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -19,7 +19,7 @@ use codemap::{CodeMap, BytePos};
 use codemap;
 use diagnostic;
 use parse::classify::expr_is_simple_block;
-use parse::token::{IdentInterner, ident_to_str, interner_get};
+use parse::token::IdentInterner;
 use parse::{comments, token};
 use parse;
 use print::pp::{break_offset, word, space, zerobreak, hardbreak};
@@ -117,7 +117,7 @@ pub fn print_crate(cm: @CodeMap,
                    intr: @IdentInterner,
                    span_diagnostic: @diagnostic::SpanHandler,
                    crate: &ast::Crate,
-                   filename: @str,
+                   filename: ~str,
                    input: &mut io::Reader,
                    out: ~io::Writer,
                    ann: @PpAnn,
@@ -897,7 +897,7 @@ pub fn print_attribute(s: &mut State, attr: &ast::Attribute) {
     maybe_print_comment(s, attr.span.lo);
     if attr.node.is_sugared_doc {
         let comment = attr.value_str().unwrap();
-        word(&mut s.s, comment);
+        word(&mut s.s, comment.get());
     } else {
         word(&mut s.s, "#[");
         print_meta_item(s, attr.meta());
@@ -1058,23 +1058,9 @@ pub fn print_mac(s: &mut State, m: &ast::Mac) {
     }
 }
 
-pub fn print_vstore(s: &mut State, t: ast::Vstore) {
-    match t {
-        ast::VstoreFixed(Some(i)) => word(&mut s.s, format!("{}", i)),
-        ast::VstoreFixed(None) => word(&mut s.s, "_"),
-        ast::VstoreUniq => word(&mut s.s, "~"),
-        ast::VstoreBox => word(&mut s.s, "@"),
-        ast::VstoreSlice(ref r) => {
-            word(&mut s.s, "&");
-            print_opt_lifetime(s, r);
-        }
-    }
-}
-
 pub fn print_expr_vstore(s: &mut State, t: ast::ExprVstore) {
     match t {
       ast::ExprVstoreUniq => word(&mut s.s, "~"),
-      ast::ExprVstoreBox => word(&mut s.s, "@"),
       ast::ExprVstoreSlice => word(&mut s.s, "&"),
       ast::ExprVstoreMutSlice => {
         word(&mut s.s, "&");
@@ -1466,25 +1452,25 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) {
             word(&mut s.s, "asm!");
         }
         popen(s);
-        print_string(s, a.asm, a.asm_str_style);
+        print_string(s, a.asm.get(), a.asm_str_style);
         word_space(s, ":");
-        for &(co, o) in a.outputs.iter() {
-            print_string(s, co, ast::CookedStr);
+        for &(ref co, o) in a.outputs.iter() {
+            print_string(s, co.get(), ast::CookedStr);
             popen(s);
             print_expr(s, o);
             pclose(s);
             word_space(s, ",");
         }
         word_space(s, ":");
-        for &(co, o) in a.inputs.iter() {
-            print_string(s, co, ast::CookedStr);
+        for &(ref co, o) in a.inputs.iter() {
+            print_string(s, co.get(), ast::CookedStr);
             popen(s);
             print_expr(s, o);
             pclose(s);
             word_space(s, ",");
         }
         word_space(s, ":");
-        print_string(s, a.clobbers, ast::CookedStr);
+        print_string(s, a.clobbers.get(), ast::CookedStr);
         pclose(s);
       }
       ast::ExprMac(ref m) => print_mac(s, m),
@@ -1539,11 +1525,13 @@ pub fn print_decl(s: &mut State, decl: &ast::Decl) {
 }
 
 pub fn print_ident(s: &mut State, ident: ast::Ident) {
-    word(&mut s.s, ident_to_str(&ident));
+    let string = token::get_ident(ident.name);
+    word(&mut s.s, string.get());
 }
 
 pub fn print_name(s: &mut State, name: ast::Name) {
-    word(&mut s.s, interner_get(name));
+    let string = token::get_ident(name);
+    word(&mut s.s, string.get());
 }
 
 pub fn print_for_decl(s: &mut State, loc: &ast::Local, coll: &ast::Expr) {
@@ -1930,14 +1918,14 @@ pub fn print_generics(s: &mut State, generics: &ast::Generics) {
 pub fn print_meta_item(s: &mut State, item: &ast::MetaItem) {
     ibox(s, indent_unit);
     match item.node {
-      ast::MetaWord(name) => word(&mut s.s, name),
-      ast::MetaNameValue(name, value) => {
-        word_space(s, name);
+      ast::MetaWord(ref name) => word(&mut s.s, name.get()),
+      ast::MetaNameValue(ref name, ref value) => {
+        word_space(s, name.get());
         word_space(s, "=");
-        print_literal(s, &value);
+        print_literal(s, value);
       }
-      ast::MetaList(name, ref items) => {
-        word(&mut s.s, name);
+      ast::MetaList(ref name, ref items) => {
+        word(&mut s.s, name.get());
         popen(s);
         commasep(s,
                  Consistent,
@@ -1998,7 +1986,7 @@ pub fn print_view_item(s: &mut State, item: &ast::ViewItem) {
                 space(&mut s.s);
                 word(&mut s.s, "=");
                 space(&mut s.s);
-                print_string(s, *p, style);
+                print_string(s, p.get(), style);
             }
         }
 
@@ -2172,7 +2160,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) {
       _ => ()
     }
     match lit.node {
-      ast::LitStr(st, style) => print_string(s, st, style),
+      ast::LitStr(ref st, style) => print_string(s, st.get(), style),
       ast::LitChar(ch) => {
           let mut res = ~"'";
           char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c));
@@ -2202,18 +2190,18 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) {
             word(&mut s.s, (i as u64).to_str_radix(10u));
         }
       }
-      ast::LitFloat(f, t) => {
-        word(&mut s.s, f.to_owned() + ast_util::float_ty_to_str(t));
+      ast::LitFloat(ref f, t) => {
+        word(&mut s.s, f.get() + ast_util::float_ty_to_str(t));
       }
-      ast::LitFloatUnsuffixed(f) => word(&mut s.s, f),
+      ast::LitFloatUnsuffixed(ref f) => word(&mut s.s, f.get()),
       ast::LitNil => word(&mut s.s, "()"),
       ast::LitBool(val) => {
         if val { word(&mut s.s, "true"); } else { word(&mut s.s, "false"); }
       }
-      ast::LitBinary(arr) => {
+      ast::LitBinary(ref arr) => {
         ibox(s, indent_unit);
         word(&mut s.s, "[");
-        commasep_cmnt(s, Inconsistent, arr, |s, u| word(&mut s.s, format!("{}", *u)),
+        commasep_cmnt(s, Inconsistent, *arr.borrow(), |s, u| word(&mut s.s, format!("{}", *u)),
                       |_| lit.span);
         word(&mut s.s, "]");
         end(s);
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
index fdc54f1f140..fc3e55dcde2 100644
--- a/src/libsyntax/util/interner.rs
+++ b/src/libsyntax/util/interner.rs
@@ -14,9 +14,11 @@
 
 use ast::Name;
 
+use std::cast;
 use std::cell::RefCell;
 use std::cmp::Equiv;
 use std::hashmap::HashMap;
+use std::rc::Rc;
 
 pub struct Interner<T> {
     priv map: @RefCell<HashMap<T, Name>>,
@@ -82,11 +84,49 @@ impl<T:Eq + IterBytes + Hash + Freeze + Clone + 'static> Interner<T> {
     }
 }
 
+#[deriving(Clone, Eq, IterBytes, Ord)]
+pub struct RcStr {
+    priv string: Rc<~str>,
+}
+
+impl TotalEq for RcStr {
+    fn equals(&self, other: &RcStr) -> bool {
+        self.as_slice().equals(&other.as_slice())
+    }
+}
+
+impl TotalOrd for RcStr {
+    fn cmp(&self, other: &RcStr) -> Ordering {
+        self.as_slice().cmp(&other.as_slice())
+    }
+}
+
+impl Str for RcStr {
+    #[inline]
+    fn as_slice<'a>(&'a self) -> &'a str {
+        let s: &'a str = *self.string.borrow();
+        s
+    }
+
+    #[inline]
+    fn into_owned(self) -> ~str {
+        self.string.borrow().to_owned()
+    }
+}
+
+impl RcStr {
+    pub fn new(string: &str) -> RcStr {
+        RcStr {
+            string: Rc::new(string.to_owned()),
+        }
+    }
+}
+
 // A StrInterner differs from Interner<String> in that it accepts
 // references rather than @ ones, resulting in less allocation.
 pub struct StrInterner {
-    priv map: @RefCell<HashMap<@str, Name>>,
-    priv vect: @RefCell<~[@str]>,
+    priv map: @RefCell<HashMap<RcStr, Name>>,
+    priv vect: @RefCell<~[RcStr]>,
 }
 
 // when traits can extend traits, we should extend index<Name,T> to get []
@@ -112,8 +152,8 @@ impl StrInterner {
         }
 
         let new_idx = self.len() as Name;
-        let val = val.to_managed();
-        map.get().insert(val, new_idx);
+        let val = RcStr::new(val);
+        map.get().insert(val.clone(), new_idx);
         let mut vect = self.vect.borrow_mut();
         vect.get().push(val);
         new_idx
@@ -123,7 +163,7 @@ impl StrInterner {
         let new_idx = self.len() as Name;
         // leave out of .map to avoid colliding
         let mut vect = self.vect.borrow_mut();
-        vect.get().push(val.to_managed());
+        vect.get().push(RcStr::new(val));
         new_idx
     }
 
@@ -141,14 +181,24 @@ impl StrInterner {
         let new_idx = self.len() as Name;
         // leave out of map to avoid colliding
         let mut vect = self.vect.borrow_mut();
-        let existing = vect.get()[idx];
+        let existing = vect.get()[idx].clone();
         vect.get().push(existing);
         new_idx
     }
 
-    pub fn get(&self, idx: Name) -> @str {
+    pub fn get(&self, idx: Name) -> RcStr {
         let vect = self.vect.borrow();
-        vect.get()[idx]
+        vect.get()[idx].clone()
+    }
+
+    /// Returns this string with lifetime tied to the interner. Since
+    /// strings may never be removed from the interner, this is safe.
+    pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str {
+        let vect = self.vect.borrow();
+        let s: &str = vect.get()[idx].as_slice();
+        unsafe {
+            cast::transmute(s)
+        }
     }
 
     pub fn len(&self) -> uint {
@@ -156,7 +206,7 @@ impl StrInterner {
         vect.get().len()
     }
 
-    pub fn find_equiv<Q:Hash + IterBytes + Equiv<@str>>(&self, val: &Q)
+    pub fn find_equiv<Q:Hash + IterBytes + Equiv<RcStr>>(&self, val: &Q)
                                                          -> Option<Name> {
         let map = self.map.borrow();
         match map.get().find_equiv(val) {
@@ -172,42 +222,46 @@ mod tests {
     #[test]
     #[should_fail]
     fn i1 () {
-        let i : Interner<@str> = Interner::new();
+        let i : Interner<RcStr> = Interner::new();
         i.get(13);
     }
 
     #[test]
     fn interner_tests () {
-        let i : Interner<@str> = Interner::new();
+        let i : Interner<RcStr> = Interner::new();
         // first one is zero:
-        assert_eq!(i.intern(@"dog"), 0);
+        assert_eq!(i.intern(RcStr::new("dog")), 0);
         // re-use gets the same entry:
-        assert_eq!(i.intern(@"dog"), 0);
+        assert_eq!(i.intern(RcStr::new("dog")), 0);
         // different string gets a different #:
-        assert_eq!(i.intern(@"cat"), 1);
-        assert_eq!(i.intern(@"cat"), 1);
+        assert_eq!(i.intern(RcStr::new("cat")), 1);
+        assert_eq!(i.intern(RcStr::new("cat")), 1);
         // dog is still at zero
-        assert_eq!(i.intern(@"dog"), 0);
+        assert_eq!(i.intern(RcStr::new("dog")), 0);
         // gensym gets 3
-        assert_eq!(i.gensym(@"zebra" ), 2);
+        assert_eq!(i.gensym(RcStr::new("zebra") ), 2);
         // gensym of same string gets new number :
-        assert_eq!(i.gensym (@"zebra" ), 3);
+        assert_eq!(i.gensym (RcStr::new("zebra") ), 3);
         // gensym of *existing* string gets new number:
-        assert_eq!(i.gensym(@"dog"), 4);
-        assert_eq!(i.get(0), @"dog");
-        assert_eq!(i.get(1), @"cat");
-        assert_eq!(i.get(2), @"zebra");
-        assert_eq!(i.get(3), @"zebra");
-        assert_eq!(i.get(4), @"dog");
+        assert_eq!(i.gensym(RcStr::new("dog")), 4);
+        assert_eq!(i.get(0), RcStr::new("dog"));
+        assert_eq!(i.get(1), RcStr::new("cat"));
+        assert_eq!(i.get(2), RcStr::new("zebra"));
+        assert_eq!(i.get(3), RcStr::new("zebra"));
+        assert_eq!(i.get(4), RcStr::new("dog"));
     }
 
     #[test]
     fn i3 () {
-        let i : Interner<@str> = Interner::prefill([@"Alan",@"Bob",@"Carol"]);
-        assert_eq!(i.get(0), @"Alan");
-        assert_eq!(i.get(1), @"Bob");
-        assert_eq!(i.get(2), @"Carol");
-        assert_eq!(i.intern(@"Bob"), 1);
+        let i : Interner<RcStr> = Interner::prefill([
+            RcStr::new("Alan"),
+            RcStr::new("Bob"),
+            RcStr::new("Carol")
+        ]);
+        assert_eq!(i.get(0), RcStr::new("Alan"));
+        assert_eq!(i.get(1), RcStr::new("Bob"));
+        assert_eq!(i.get(2), RcStr::new("Carol"));
+        assert_eq!(i.intern(RcStr::new("Bob")), 1);
     }
 
     #[test]
@@ -230,13 +284,13 @@ mod tests {
         assert_eq!(i.gensym("dog"), 4);
         // gensym tests again with gensym_copy:
         assert_eq!(i.gensym_copy(2), 5);
-        assert_eq!(i.get(5), @"zebra");
+        assert_eq!(i.get(5), RcStr::new("zebra"));
         assert_eq!(i.gensym_copy(2), 6);
-        assert_eq!(i.get(6), @"zebra");
-        assert_eq!(i.get(0), @"dog");
-        assert_eq!(i.get(1), @"cat");
-        assert_eq!(i.get(2), @"zebra");
-        assert_eq!(i.get(3), @"zebra");
-        assert_eq!(i.get(4), @"dog");
+        assert_eq!(i.get(6), RcStr::new("zebra"));
+        assert_eq!(i.get(0), RcStr::new("dog"));
+        assert_eq!(i.get(1), RcStr::new("cat"));
+        assert_eq!(i.get(2), RcStr::new("zebra"));
+        assert_eq!(i.get(3), RcStr::new("zebra"));
+        assert_eq!(i.get(4), RcStr::new("dog"));
     }
 }
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index dd3ae168149..58c2bed7a45 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -17,29 +17,29 @@ use parse::token;
 
 // map a string to tts, using a made-up filename: return both the TokenTree's
 // and the ParseSess
-pub fn string_to_tts_and_sess (source_str : @str) -> (~[ast::TokenTree], @ParseSess) {
+pub fn string_to_tts_and_sess (source_str : ~str) -> (~[ast::TokenTree], @ParseSess) {
     let ps = new_parse_sess(None);
-    (filemap_to_tts(ps,string_to_filemap(ps,source_str,@"bogofile")),ps)
+    (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps)
 }
 
 // map a string to tts, using a made-up filename:
-pub fn string_to_tts(source_str : @str) -> ~[ast::TokenTree] {
+pub fn string_to_tts(source_str : ~str) -> ~[ast::TokenTree] {
     let (tts,_) = string_to_tts_and_sess(source_str);
     tts
 }
 
-pub fn string_to_parser_and_sess(source_str: @str) -> (Parser,@ParseSess) {
+pub fn string_to_parser_and_sess(source_str: ~str) -> (Parser,@ParseSess) {
     let ps = new_parse_sess(None);
-    (new_parser_from_source_str(ps,~[],@"bogofile",source_str),ps)
+    (new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps)
 }
 
 // map string to parser (via tts)
-pub fn string_to_parser(source_str: @str) -> Parser {
+pub fn string_to_parser(source_str: ~str) -> Parser {
     let (p,_) = string_to_parser_and_sess(source_str);
     p
 }
 
-fn with_error_checking_parse<T>(s: @str, f: |&mut Parser| -> T) -> T {
+fn with_error_checking_parse<T>(s: ~str, f: |&mut Parser| -> T) -> T {
     let mut p = string_to_parser(s);
     let x = f(&mut p);
     p.abort_if_errors();
@@ -47,34 +47,34 @@ fn with_error_checking_parse<T>(s: @str, f: |&mut Parser| -> T) -> T {
 }
 
 // parse a string, return a crate.
-pub fn string_to_crate (source_str : @str) -> ast::Crate {
+pub fn string_to_crate (source_str : ~str) -> ast::Crate {
     with_error_checking_parse(source_str, |p| {
         p.parse_crate_mod()
     })
 }
 
 // parse a string, return a crate and the ParseSess
-pub fn string_to_crate_and_sess (source_str : @str) -> (ast::Crate,@ParseSess) {
+pub fn string_to_crate_and_sess (source_str : ~str) -> (ast::Crate,@ParseSess) {
     let (mut p,ps) = string_to_parser_and_sess(source_str);
     (p.parse_crate_mod(),ps)
 }
 
 // parse a string, return an expr
-pub fn string_to_expr (source_str : @str) -> @ast::Expr {
+pub fn string_to_expr (source_str : ~str) -> @ast::Expr {
     with_error_checking_parse(source_str, |p| {
         p.parse_expr()
     })
 }
 
 // parse a string, return an item
-pub fn string_to_item (source_str : @str) -> Option<@ast::Item> {
+pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> {
     with_error_checking_parse(source_str, |p| {
         p.parse_item(~[])
     })
 }
 
 // parse a string, return a stmt
-pub fn string_to_stmt(source_str : @str) -> @ast::Stmt {
+pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt {
     with_error_checking_parse(source_str, |p| {
         p.parse_stmt(~[])
     })
@@ -82,7 +82,7 @@ pub fn string_to_stmt(source_str : @str) -> @ast::Stmt {
 
 // parse a string, return a pat. Uses "irrefutable"... which doesn't
 // (currently) affect parsing.
-pub fn string_to_pat(source_str : @str) -> @ast::Pat {
+pub fn string_to_pat(source_str : ~str) -> @ast::Pat {
     string_to_parser(source_str).parse_pat()
 }