author     bors <bors@rust-lang.org>      2016-11-21 08:08:47 -0600
committer  GitHub <noreply@github.com>    2016-11-21 08:08:47 -0600
commit     ebec55406ba94faf8b2cd23b27a8f74df97d1ca4 (patch)
tree       446c06caed20d58ba039ba0e7739a89a136e74aa /src/libsyntax
parent     59b87b3975c97820b32ba6ebee8eac2a13ab883b (diff)
parent     a8e86f0f816c9666915c73e80969dbf85a5afd56 (diff)
Auto merge of #37824 - jseyfried:symbols, r=eddyb
Clean up `ast::Attribute`, `ast::CrateConfig`, and string interning

This PR
 - removes `ast::Attribute_` (changing `Attribute` from `Spanned<Attribute_>` to a struct),
 - moves a `MetaItem`'s name from the `MetaItemKind` variants to a field of `MetaItem`,
 - avoids needlessly wrapping `ast::MetaItem` with `P`,
 - moves string interning into `syntax::symbol` (`ast::Name` is a reexport of `symbol::Symbol` for now),
 - replaces `InternedString` with `Symbol` in the AST, HIR, and various other places, and
 - refactors `ast::CrateConfig` from a `Vec` to a `HashSet`.

r? @eddyb
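
For orientation before the hunks, here is a compact sketch of the resulting shapes. The field and variant names are taken directly from the diff below; the stand-in definitions for `Symbol`, `Span`, `Lit`, `AttrId`, and friends are placeholders only, so the snippet compiles on its own (the real types live in libsyntax):

```rust
#![allow(dead_code)]
use std::collections::HashSet;

// Stand-ins so this sketch is self-contained; not the real definitions.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct Symbol(u32);            // interned string index (symbol::Symbol)
pub type Name = Symbol;            // ast::Name is now a reexport of Symbol
#[derive(Clone, Copy, Debug)] pub struct Span;
#[derive(Clone, Copy, Debug)] pub struct AttrId(pub usize);
#[derive(Clone, Copy, Debug)] pub enum AttrStyle { Outer, Inner }
#[derive(Clone, Debug)] pub struct Lit;
#[derive(Clone, Debug)] pub struct NestedMetaItem;

// The name moved out of the MetaItemKind variants and onto MetaItem itself,
// which is now a plain struct instead of Spanned<MetaItemKind>.
#[derive(Clone, Debug)]
pub struct MetaItem {
    pub name: Name,
    pub node: MetaItemKind,
    pub span: Span,
}

#[derive(Clone, Debug)]
pub enum MetaItemKind {
    Word,
    List(Vec<NestedMetaItem>),
    NameValue(Lit),
}

// Attribute is a plain struct now (previously Spanned<Attribute_>), and its
// MetaItem is stored by value instead of behind P<MetaItem>.
#[derive(Clone, Debug)]
pub struct Attribute {
    pub id: AttrId,
    pub style: AttrStyle,
    pub value: MetaItem,
    pub is_sugared_doc: bool,
    pub span: Span,
}

// CrateConfig goes from Vec<P<MetaItem>> to a set of (name, optional value) pairs,
// so cfg lookup becomes `sess.config.contains(&(cfg.name(), cfg.value_str()))`.
pub type CrateConfig = HashSet<(Name, Option<Symbol>)>;

fn main() {}
```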
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                   | 117
-rw-r--r--  src/libsyntax/attr.rs                  | 233
-rw-r--r--  src/libsyntax/codemap.rs               |  12
-rw-r--r--  src/libsyntax/config.rs                |  15
-rw-r--r--  src/libsyntax/diagnostics/plugin.rs    |   9
-rw-r--r--  src/libsyntax/entry.rs                 |   2
-rw-r--r--  src/libsyntax/ext/base.rs              |  14
-rw-r--r--  src/libsyntax/ext/build.rs             |  45
-rw-r--r--  src/libsyntax/ext/expand.rs            |  15
-rw-r--r--  src/libsyntax/ext/placeholders.rs      |   4
-rw-r--r--  src/libsyntax/ext/proc_macro_shim.rs   |   3
-rw-r--r--  src/libsyntax/ext/quote.rs             |  24
-rw-r--r--  src/libsyntax/ext/source_util.rs       |  14
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs    |  18
-rw-r--r--  src/libsyntax/feature_gate.rs          |  25
-rw-r--r--  src/libsyntax/fold.rs                  |  53
-rw-r--r--  src/libsyntax/lib.rs                   |   3
-rw-r--r--  src/libsyntax/parse/attr.rs            |  61
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs       |  74
-rw-r--r--  src/libsyntax/parse/mod.rs             | 103
-rw-r--r--  src/libsyntax/parse/parser.rs          |  75
-rw-r--r--  src/libsyntax/parse/token.rs           | 274
-rw-r--r--  src/libsyntax/print/pprust.rs          |  57
-rw-r--r--  src/libsyntax/std_inject.rs            |  29
-rw-r--r--  src/libsyntax/symbol.rs                | 303
-rw-r--r--  src/libsyntax/test.rs                  |  80
-rw-r--r--  src/libsyntax/tokenstream.rs           |  42
-rw-r--r--  src/libsyntax/util/interner.rs         | 111
-rw-r--r--  src/libsyntax/util/lev_distance.rs     |  11
-rw-r--r--  src/libsyntax/util/parser.rs           |   3
-rw-r--r--  src/libsyntax/util/parser_testing.rs   |   9
31 files changed, 821 insertions(+), 1017 deletions(-)
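
The diffstat shows the core of the move: `src/libsyntax/symbol.rs` (new, 303 lines) replaces `src/libsyntax/util/interner.rs` (removed, 111 lines), and the hunks below mostly substitute `Symbol::intern`/`Ident::from_str` for `token::intern`, `token::intern_and_get_ident`, and `str_to_ident`. As an illustration of the interning idea only, and not the actual libsyntax implementation (the real module also provides `Symbol::gensym` and the pre-interned `symbol::keywords`, both visible in the hunks), a minimal interner looks roughly like this:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Toy stand-in for symbol::Symbol: a cheap, copyable index into an interner table.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Symbol(u32);

#[derive(Default)]
struct Interner {
    names: HashMap<String, Symbol>,
    strings: Vec<String>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> Symbol {
        if let Some(&sym) = self.names.get(s) {
            return sym; // already interned: reuse the existing index
        }
        let sym = Symbol(self.strings.len() as u32);
        self.strings.push(s.to_owned());
        self.names.insert(s.to_owned(), sym);
        sym
    }

    fn get(&self, sym: Symbol) -> &str {
        &self.strings[sym.0 as usize]
    }
}

thread_local!(static INTERNER: RefCell<Interner> = RefCell::new(Interner::default()));

impl Symbol {
    fn intern(s: &str) -> Symbol {
        INTERNER.with(|i| i.borrow_mut().intern(s))
    }

    // The real as_str returns a handle borrowing the interner; returning a
    // String keeps this sketch simple.
    fn as_string(self) -> String {
        INTERNER.with(|i| i.borrow().get(self).to_owned())
    }
}

fn main() {
    let a = Symbol::intern("doc");
    let b = Symbol::intern("doc");
    assert_eq!(a, b);                 // same index => same string, O(1) comparison
    assert_eq!(a.as_string(), "doc");
}
```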
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index f5cd089e923..bb07efdd9e7 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -14,71 +14,43 @@ pub use self::TyParamBound::*;
 pub use self::UnsafeSource::*;
 pub use self::ViewPath_::*;
 pub use self::PathParameters::*;
+pub use symbol::Symbol as Name;
 pub use util::ThinVec;
 
 use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId};
 use codemap::{respan, Spanned};
 use abi::Abi;
 use ext::hygiene::SyntaxContext;
-use parse::token::{self, keywords, InternedString};
 use print::pprust;
 use ptr::P;
+use symbol::{Symbol, keywords};
 use tokenstream::{TokenTree};
 
+use std::collections::HashSet;
 use std::fmt;
 use std::rc::Rc;
 use std::u32;
 
 use serialize::{self, Encodable, Decodable, Encoder, Decoder};
 
-/// A name is a part of an identifier, representing a string or gensym. It's
-/// the result of interning.
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Name(pub u32);
-
 /// An identifier contains a Name (index into the interner
 /// table) and a SyntaxContext to track renaming and
 /// macro expansion per Flatt et al., "Macros That Work Together"
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Ident {
-    pub name: Name,
+    pub name: Symbol,
     pub ctxt: SyntaxContext
 }
 
-impl Name {
-    pub fn as_str(self) -> token::InternedString {
-        token::InternedString::new_from_name(self)
-    }
-}
-
-impl fmt::Debug for Name {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}({})", self, self.0)
-    }
-}
-
-impl fmt::Display for Name {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.as_str(), f)
-    }
-}
-
-impl Encodable for Name {
-    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        s.emit_str(&self.as_str())
-    }
-}
-
-impl Decodable for Name {
-    fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
-        Ok(token::intern(&d.read_str()?))
-    }
-}
-
 impl Ident {
     pub const fn with_empty_ctxt(name: Name) -> Ident {
         Ident { name: name, ctxt: SyntaxContext::empty() }
     }
+
+   /// Maps a string to an identifier with an empty syntax context.
+   pub fn from_str(s: &str) -> Ident {
+       Ident::with_empty_ctxt(Symbol::intern(s))
+   }
 }
 
 impl fmt::Debug for Ident {
@@ -401,7 +373,7 @@ impl Generics {
     }
     pub fn span_for_name(&self, name: &str) -> Option<Span> {
         for t in &self.ty_params {
-            if t.ident.name.as_str() == name {
+            if t.ident.name == name {
                 return Some(t.span);
             }
         }
@@ -479,7 +451,7 @@ pub struct WhereEqPredicate {
 
 /// The set of MetaItems that define the compilation environment of the crate,
 /// used to drive conditional compilation
-pub type CrateConfig = Vec<P<MetaItem>>;
+pub type CrateConfig = HashSet<(Name, Option<Symbol>)>;
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct Crate {
@@ -498,7 +470,7 @@ pub type NestedMetaItem = Spanned<NestedMetaItemKind>;
 #[derive(Clone, Eq, RustcEncodable, RustcDecodable, Hash, Debug, PartialEq)]
 pub enum NestedMetaItemKind {
     /// A full MetaItem, for recursive meta items.
-    MetaItem(P<MetaItem>),
+    MetaItem(MetaItem),
     /// A literal.
     ///
     /// E.g. "foo", 64, true
@@ -508,53 +480,30 @@ pub enum NestedMetaItemKind {
 /// A spanned compile-time attribute item.
 ///
 /// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`
-pub type MetaItem = Spanned<MetaItemKind>;
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct MetaItem {
+    pub name: Name,
+    pub node: MetaItemKind,
+    pub span: Span,
+}
 
 /// A compile-time attribute item.
 ///
 /// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`
-#[derive(Clone, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum MetaItemKind {
     /// Word meta item.
     ///
     /// E.g. `test` as in `#[test]`
-    Word(InternedString),
+    Word,
     /// List meta item.
     ///
     /// E.g. `derive(..)` as in `#[derive(..)]`
-    List(InternedString, Vec<NestedMetaItem>),
+    List(Vec<NestedMetaItem>),
     /// Name value meta item.
     ///
     /// E.g. `feature = "foo"` as in `#[feature = "foo"]`
-    NameValue(InternedString, Lit),
-}
-
-// can't be derived because the MetaItemKind::List requires an unordered comparison
-impl PartialEq for MetaItemKind {
-    fn eq(&self, other: &MetaItemKind) -> bool {
-        use self::MetaItemKind::*;
-        match *self {
-            Word(ref ns) => match *other {
-                Word(ref no) => (*ns) == (*no),
-                _ => false
-            },
-            List(ref ns, ref miss) => match *other {
-                List(ref no, ref miso) => {
-                    ns == no &&
-                        miss.iter().all(|mi| {
-                            miso.iter().any(|x| x.node == mi.node)
-                        })
-                }
-                _ => false
-            },
-            NameValue(ref ns, ref vs) => match *other {
-                NameValue(ref no, ref vo) => {
-                    (*ns) == (*no) && vs.node == vo.node
-                }
-                _ => false
-            },
-        }
-    }
+    NameValue(Lit)
 }
 
 /// A Block (`{ .. }`).
@@ -1149,7 +1098,7 @@ pub enum LitIntType {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum LitKind {
     /// A string literal (`"foo"`)
-    Str(InternedString, StrStyle),
+    Str(Symbol, StrStyle),
     /// A byte string (`b"foo"`)
     ByteStr(Rc<Vec<u8>>),
     /// A byte char (`b'f'`)
@@ -1159,9 +1108,9 @@ pub enum LitKind {
     /// An integer literal (`1`)
     Int(u64, LitIntType),
     /// A float literal (`1f64` or `1E10f64`)
-    Float(InternedString, FloatTy),
+    Float(Symbol, FloatTy),
     /// A float literal without a suffix (`1.0 or 1.0E10`)
-    FloatUnsuffixed(InternedString),
+    FloatUnsuffixed(Symbol),
     /// A boolean literal
     Bool(bool),
 }
@@ -1493,7 +1442,7 @@ pub enum AsmDialect {
 /// E.g. `"={eax}"(result)` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")``
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct InlineAsmOutput {
-    pub constraint: InternedString,
+    pub constraint: Symbol,
     pub expr: P<Expr>,
     pub is_rw: bool,
     pub is_indirect: bool,
@@ -1504,11 +1453,11 @@ pub struct InlineAsmOutput {
 /// E.g. `asm!("NOP");`
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct InlineAsm {
-    pub asm: InternedString,
+    pub asm: Symbol,
     pub asm_str_style: StrStyle,
     pub outputs: Vec<InlineAsmOutput>,
-    pub inputs: Vec<(InternedString, P<Expr>)>,
-    pub clobbers: Vec<InternedString>,
+    pub inputs: Vec<(Symbol, P<Expr>)>,
+    pub clobbers: Vec<Symbol>,
     pub volatile: bool,
     pub alignstack: bool,
     pub dialect: AsmDialect,
@@ -1755,8 +1704,6 @@ impl ViewPath_ {
     }
 }
 
-/// Meta-data associated with an item
-pub type Attribute = Spanned<Attribute_>;
 
 /// Distinguishes between Attributes that decorate items and Attributes that
 /// are contained as statements within items. These two cases need to be
@@ -1770,13 +1717,15 @@ pub enum AttrStyle {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub struct AttrId(pub usize);
 
+/// Meta-data associated with an item
 /// Doc-comments are promoted to attributes that have is_sugared_doc = true
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct Attribute_ {
+pub struct Attribute {
     pub id: AttrId,
     pub style: AttrStyle,
-    pub value: P<MetaItem>,
+    pub value: MetaItem,
     pub is_sugared_doc: bool,
+    pub span: Span,
 }
 
 /// TraitRef's appear in impls.
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 2977e340a3c..45c120e0b95 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -15,17 +15,17 @@ pub use self::ReprAttr::*;
 pub use self::IntType::*;
 
 use ast;
-use ast::{AttrId, Attribute, Attribute_};
+use ast::{AttrId, Attribute, Name};
 use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
 use ast::{Lit, Expr, Item, Local, Stmt, StmtKind};
-use codemap::{respan, spanned, dummy_spanned};
+use codemap::{spanned, dummy_spanned, mk_sp};
 use syntax_pos::{Span, BytePos, DUMMY_SP};
 use errors::Handler;
 use feature_gate::{Features, GatedCfg};
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::token::InternedString;
-use parse::{ParseSess, token};
+use parse::ParseSess;
 use ptr::P;
+use symbol::Symbol;
 use util::ThinVec;
 
 use std::cell::{RefCell, Cell};
@@ -37,8 +37,8 @@ thread_local! {
 }
 
 enum AttrError {
-    MultipleItem(InternedString),
-    UnknownMetaItem(InternedString),
+    MultipleItem(Name),
+    UnknownMetaItem(Name),
     MissingSince,
     MissingFeature,
     MultipleStabilityLevels,
@@ -61,7 +61,7 @@ fn handle_errors(diag: &Handler, span: Span, error: AttrError) {
 
 pub fn mark_used(attr: &Attribute) {
     debug!("Marking {:?} as used.", attr);
-    let AttrId(id) = attr.node.id;
+    let AttrId(id) = attr.id;
     USED_ATTRS.with(|slot| {
         let idx = (id / 64) as usize;
         let shift = id % 64;
@@ -73,7 +73,7 @@ pub fn mark_used(attr: &Attribute) {
 }
 
 pub fn is_used(attr: &Attribute) -> bool {
-    let AttrId(id) = attr.node.id;
+    let AttrId(id) = attr.id;
     USED_ATTRS.with(|slot| {
         let idx = (id / 64) as usize;
         let shift = id % 64;
@@ -84,7 +84,7 @@ pub fn is_used(attr: &Attribute) -> bool {
 
 pub fn mark_known(attr: &Attribute) {
     debug!("Marking {:?} as known.", attr);
-    let AttrId(id) = attr.node.id;
+    let AttrId(id) = attr.id;
     KNOWN_ATTRS.with(|slot| {
         let idx = (id / 64) as usize;
         let shift = id % 64;
@@ -96,7 +96,7 @@ pub fn mark_known(attr: &Attribute) {
 }
 
 pub fn is_known(attr: &Attribute) -> bool {
-    let AttrId(id) = attr.node.id;
+    let AttrId(id) = attr.id;
     KNOWN_ATTRS.with(|slot| {
         let idx = (id / 64) as usize;
         let shift = id % 64;
@@ -107,7 +107,7 @@ pub fn is_known(attr: &Attribute) -> bool {
 
 impl NestedMetaItem {
     /// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem.
-    pub fn meta_item(&self) -> Option<&P<MetaItem>> {
+    pub fn meta_item(&self) -> Option<&MetaItem> {
         match self.node {
             NestedMetaItemKind::MetaItem(ref item) => Some(&item),
             _ => None
@@ -134,18 +134,18 @@ impl NestedMetaItem {
 
     /// Returns the name of the meta item, e.g. `foo` in `#[foo]`,
     /// `#[foo="bar"]` and `#[foo(bar)]`, if self is a MetaItem
-    pub fn name(&self) -> Option<InternedString> {
+    pub fn name(&self) -> Option<Name> {
         self.meta_item().and_then(|meta_item| Some(meta_item.name()))
     }
 
     /// Gets the string value if self is a MetaItem and the MetaItem is a
     /// MetaItemKind::NameValue variant containing a string, otherwise None.
-    pub fn value_str(&self) -> Option<InternedString> {
+    pub fn value_str(&self) -> Option<Symbol> {
         self.meta_item().and_then(|meta_item| meta_item.value_str())
     }
 
     /// Returns a MetaItem if self is a MetaItem with Kind Word.
-    pub fn word(&self) -> Option<&P<MetaItem>> {
+    pub fn word(&self) -> Option<&MetaItem> {
         self.meta_item().and_then(|meta_item| if meta_item.is_word() {
             Some(meta_item)
         } else {
@@ -186,16 +186,16 @@ impl NestedMetaItem {
 
 impl Attribute {
     pub fn check_name(&self, name: &str) -> bool {
-        let matches = name == &self.name()[..];
+        let matches = self.name() == name;
         if matches {
             mark_used(self);
         }
         matches
     }
 
-    pub fn name(&self) -> InternedString { self.meta().name() }
+    pub fn name(&self) -> Name { self.meta().name() }
 
-    pub fn value_str(&self) -> Option<InternedString> {
+    pub fn value_str(&self) -> Option<Symbol> {
         self.meta().value_str()
     }
 
@@ -218,17 +218,13 @@ impl Attribute {
 }
 
 impl MetaItem {
-    pub fn name(&self) -> InternedString {
-        match self.node {
-            MetaItemKind::Word(ref n) => (*n).clone(),
-            MetaItemKind::NameValue(ref n, _) => (*n).clone(),
-            MetaItemKind::List(ref n, _) => (*n).clone(),
-        }
+    pub fn name(&self) -> Name {
+        self.name
     }
 
-    pub fn value_str(&self) -> Option<InternedString> {
+    pub fn value_str(&self) -> Option<Symbol> {
         match self.node {
-            MetaItemKind::NameValue(_, ref v) => {
+            MetaItemKind::NameValue(ref v) => {
                 match v.node {
                     ast::LitKind::Str(ref s, _) => Some((*s).clone()),
                     _ => None,
@@ -240,14 +236,14 @@ impl MetaItem {
 
     pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> {
         match self.node {
-            MetaItemKind::List(_, ref l) => Some(&l[..]),
+            MetaItemKind::List(ref l) => Some(&l[..]),
             _ => None
         }
     }
 
     pub fn is_word(&self) -> bool {
         match self.node {
-            MetaItemKind::Word(_) => true,
+            MetaItemKind::Word => true,
             _ => false,
         }
     }
@@ -255,7 +251,7 @@ impl MetaItem {
     pub fn span(&self) -> Span { self.span }
 
     pub fn check_name(&self, name: &str) -> bool {
-        name == &self.name()[..]
+        self.name() == name
     }
 
     pub fn is_value_str(&self) -> bool {
@@ -270,7 +266,7 @@ impl MetaItem {
 impl Attribute {
     /// Extract the MetaItem from inside this Attribute.
     pub fn meta(&self) -> &MetaItem {
-        &self.node.value
+        &self.value
     }
 
     /// Convert self to a normal #[doc="foo"] comment, if it is a
@@ -279,16 +275,15 @@ impl Attribute {
     pub fn with_desugared_doc<T, F>(&self, f: F) -> T where
         F: FnOnce(&Attribute) -> T,
     {
-        if self.node.is_sugared_doc {
+        if self.is_sugared_doc {
             let comment = self.value_str().unwrap();
             let meta = mk_name_value_item_str(
-                InternedString::new("doc"),
-                token::intern_and_get_ident(&strip_doc_comment_decoration(
-                        &comment)));
-            if self.node.style == ast::AttrStyle::Outer {
-                f(&mk_attr_outer(self.node.id, meta))
+                Symbol::intern("doc"),
+                Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())));
+            if self.style == ast::AttrStyle::Outer {
+                f(&mk_attr_outer(self.id, meta))
             } else {
-                f(&mk_attr_inner(self.node.id, meta))
+                f(&mk_attr_inner(self.id, meta))
             }
         } else {
             f(self)
@@ -298,41 +293,37 @@ impl Attribute {
 
 /* Constructors */
 
-pub fn mk_name_value_item_str(name: InternedString, value: InternedString)
-                              -> P<MetaItem> {
+pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem {
     let value_lit = dummy_spanned(ast::LitKind::Str(value, ast::StrStyle::Cooked));
     mk_spanned_name_value_item(DUMMY_SP, name, value_lit)
 }
 
-pub fn mk_name_value_item(name: InternedString, value: ast::Lit)
-                          -> P<MetaItem> {
+pub fn mk_name_value_item(name: Name, value: ast::Lit) -> MetaItem {
     mk_spanned_name_value_item(DUMMY_SP, name, value)
 }
 
-pub fn mk_list_item(name: InternedString, items: Vec<NestedMetaItem>) -> P<MetaItem> {
+pub fn mk_list_item(name: Name, items: Vec<NestedMetaItem>) -> MetaItem {
     mk_spanned_list_item(DUMMY_SP, name, items)
 }
 
-pub fn mk_list_word_item(name: InternedString) -> ast::NestedMetaItem {
+pub fn mk_list_word_item(name: Name) -> ast::NestedMetaItem {
     dummy_spanned(NestedMetaItemKind::MetaItem(mk_spanned_word_item(DUMMY_SP, name)))
 }
 
-pub fn mk_word_item(name: InternedString) -> P<MetaItem> {
+pub fn mk_word_item(name: Name) -> MetaItem {
     mk_spanned_word_item(DUMMY_SP, name)
 }
 
-pub fn mk_spanned_name_value_item(sp: Span, name: InternedString, value: ast::Lit)
-                          -> P<MetaItem> {
-    P(respan(sp, MetaItemKind::NameValue(name, value)))
+pub fn mk_spanned_name_value_item(sp: Span, name: Name, value: ast::Lit) -> MetaItem {
+    MetaItem { span: sp, name: name, node: MetaItemKind::NameValue(value) }
 }
 
-pub fn mk_spanned_list_item(sp: Span, name: InternedString, items: Vec<NestedMetaItem>)
-                            -> P<MetaItem> {
-    P(respan(sp, MetaItemKind::List(name, items)))
+pub fn mk_spanned_list_item(sp: Span, name: Name, items: Vec<NestedMetaItem>) -> MetaItem {
+    MetaItem { span: sp, name: name, node: MetaItemKind::List(items) }
 }
 
-pub fn mk_spanned_word_item(sp: Span, name: InternedString) -> P<MetaItem> {
-    P(respan(sp, MetaItemKind::Word(name)))
+pub fn mk_spanned_word_item(sp: Span, name: Name) -> MetaItem {
+    MetaItem { span: sp, name: name, node: MetaItemKind::Word }
 }
 
 
@@ -349,71 +340,63 @@ pub fn mk_attr_id() -> AttrId {
 }
 
 /// Returns an inner attribute with the given value.
-pub fn mk_attr_inner(id: AttrId, item: P<MetaItem>) -> Attribute {
+pub fn mk_attr_inner(id: AttrId, item: MetaItem) -> Attribute {
     mk_spanned_attr_inner(DUMMY_SP, id, item)
 }
 
 /// Returns an innter attribute with the given value and span.
-pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: P<MetaItem>) -> Attribute {
-    respan(sp,
-           Attribute_ {
-            id: id,
-            style: ast::AttrStyle::Inner,
-            value: item,
-            is_sugared_doc: false,
-          })
+pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: MetaItem) -> Attribute {
+    Attribute {
+        id: id,
+        style: ast::AttrStyle::Inner,
+        value: item,
+        is_sugared_doc: false,
+        span: sp,
+    }
 }
 
 
 /// Returns an outer attribute with the given value.
-pub fn mk_attr_outer(id: AttrId, item: P<MetaItem>) -> Attribute {
+pub fn mk_attr_outer(id: AttrId, item: MetaItem) -> Attribute {
     mk_spanned_attr_outer(DUMMY_SP, id, item)
 }
 
 /// Returns an outer attribute with the given value and span.
-pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: P<MetaItem>) -> Attribute {
-    respan(sp,
-           Attribute_ {
-            id: id,
-            style: ast::AttrStyle::Outer,
-            value: item,
-            is_sugared_doc: false,
-          })
+pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute {
+    Attribute {
+        id: id,
+        style: ast::AttrStyle::Outer,
+        value: item,
+        is_sugared_doc: false,
+        span: sp,
+    }
 }
 
-pub fn mk_doc_attr_outer(id: AttrId, item: P<MetaItem>, is_sugared_doc: bool) -> Attribute {
-    dummy_spanned(Attribute_ {
+pub fn mk_doc_attr_outer(id: AttrId, item: MetaItem, is_sugared_doc: bool) -> Attribute {
+    Attribute {
         id: id,
         style: ast::AttrStyle::Outer,
         value: item,
         is_sugared_doc: is_sugared_doc,
-    })
+        span: DUMMY_SP,
+    }
 }
 
-pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos,
-                           hi: BytePos)
+pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos)
                            -> Attribute {
-    let style = doc_comment_style(&text);
+    let style = doc_comment_style(&text.as_str());
     let lit = spanned(lo, hi, ast::LitKind::Str(text, ast::StrStyle::Cooked));
-    let attr = Attribute_ {
+    Attribute {
         id: id,
         style: style,
-        value: P(spanned(lo, hi, MetaItemKind::NameValue(InternedString::new("doc"), lit))),
-        is_sugared_doc: true
-    };
-    spanned(lo, hi, attr)
-}
-
-/* Searching */
-/// Check if `needle` occurs in `haystack` by a structural
-/// comparison. This is slightly subtle, and relies on ignoring the
-/// span included in the `==` comparison a plain MetaItem.
-pub fn contains(haystack: &[P<MetaItem>], needle: &MetaItem) -> bool {
-    debug!("attr::contains (name={})", needle.name());
-    haystack.iter().any(|item| {
-        debug!("  testing: {}", item.name());
-        item.node == needle.node
-    })
+        value: MetaItem {
+            span: mk_sp(lo, hi),
+            name: Symbol::intern("doc"),
+            node: MetaItemKind::NameValue(lit),
+        },
+        is_sugared_doc: true,
+        span: mk_sp(lo, hi),
+    }
 }
 
 pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool {
@@ -432,15 +415,13 @@ pub fn contains_name(attrs: &[Attribute], name: &str) -> bool {
     })
 }
 
-pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
-                                 -> Option<InternedString> {
+pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) -> Option<Symbol> {
     attrs.iter()
         .find(|at| at.check_name(name))
         .and_then(|at| at.value_str())
 }
 
-pub fn last_meta_item_value_str_by_name(items: &[P<MetaItem>], name: &str)
-                                     -> Option<InternedString> {
+pub fn last_meta_item_value_str_by_name(items: &[MetaItem], name: &str) -> Option<Symbol> {
     items.iter()
          .rev()
          .find(|mi| mi.check_name(name))
@@ -449,12 +430,12 @@ pub fn last_meta_item_value_str_by_name(items: &[P<MetaItem>], name: &str)
 
 /* Higher-level applications */
 
-pub fn find_crate_name(attrs: &[Attribute]) -> Option<InternedString> {
+pub fn find_crate_name(attrs: &[Attribute]) -> Option<Symbol> {
     first_attr_value_str_by_name(attrs, "crate_name")
 }
 
 /// Find the value of #[export_name=*] attribute and check its validity.
-pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option<InternedString> {
+pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option<Symbol> {
     attrs.iter().fold(None, |ia,attr| {
         if attr.check_name("export_name") {
             if let s@Some(_) = attr.value_str() {
@@ -488,13 +469,14 @@ pub enum InlineAttr {
 
 /// Determine what `#[inline]` attribute is present in `attrs`, if any.
 pub fn find_inline_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> InlineAttr {
-    attrs.iter().fold(InlineAttr::None, |ia,attr| {
-        match attr.node.value.node {
-            MetaItemKind::Word(ref n) if n == "inline" => {
+    attrs.iter().fold(InlineAttr::None, |ia, attr| {
+        match attr.value.node {
+            _ if attr.value.name != "inline" => ia,
+            MetaItemKind::Word => {
                 mark_used(attr);
                 InlineAttr::Hint
             }
-            MetaItemKind::List(ref n, ref items) if n == "inline" => {
+            MetaItemKind::List(ref items) => {
                 mark_used(attr);
                 if items.len() != 1 {
                     diagnostic.map(|d|{ span_err!(d, attr.span, E0534, "expected one argument"); });
@@ -527,7 +509,7 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool {
 /// Tests if a cfg-pattern matches the cfg set
 pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Features>) -> bool {
     match cfg.node {
-        ast::MetaItemKind::List(ref pred, ref mis) => {
+        ast::MetaItemKind::List(ref mis) => {
             for mi in mis.iter() {
                 if !mi.is_meta_item() {
                     handle_errors(&sess.span_diagnostic, mi.span, AttrError::UnsupportedLiteral);
@@ -537,7 +519,7 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat
 
             // The unwraps below may look dangerous, but we've already asserted
             // that they won't fail with the loop above.
-            match &pred[..] {
+            match &*cfg.name.as_str() {
                 "any" => mis.iter().any(|mi| {
                     cfg_matches(mi.meta_item().unwrap(), sess, features)
                 }),
@@ -558,11 +540,11 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat
                 }
             }
         },
-        ast::MetaItemKind::Word(_) | ast::MetaItemKind::NameValue(..) => {
+        ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) => {
             if let (Some(feats), Some(gated_cfg)) = (features, GatedCfg::gate(cfg)) {
                 gated_cfg.check_and_emit(sess, feats);
             }
-            contains(&sess.config, cfg)
+            sess.config.contains(&(cfg.name(), cfg.value_str()))
         }
     }
 }
@@ -571,7 +553,7 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat
 #[derive(RustcEncodable, RustcDecodable, Clone, Debug, PartialEq, Eq, Hash)]
 pub struct Stability {
     pub level: StabilityLevel,
-    pub feature: InternedString,
+    pub feature: Symbol,
     pub rustc_depr: Option<RustcDeprecation>,
 }
 
@@ -579,20 +561,20 @@ pub struct Stability {
 #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)]
 pub enum StabilityLevel {
     // Reason for the current stability level and the relevant rust-lang issue
-    Unstable { reason: Option<InternedString>, issue: u32 },
-    Stable { since: InternedString },
+    Unstable { reason: Option<Symbol>, issue: u32 },
+    Stable { since: Symbol },
 }
 
 #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)]
 pub struct RustcDeprecation {
-    pub since: InternedString,
-    pub reason: InternedString,
+    pub since: Symbol,
+    pub reason: Symbol,
 }
 
 #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)]
 pub struct Deprecation {
-    pub since: Option<InternedString>,
-    pub note: Option<InternedString>,
+    pub since: Option<Symbol>,
+    pub note: Option<Symbol>,
 }
 
 impl StabilityLevel {
@@ -611,7 +593,6 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
 
     'outer: for attr in attrs_iter {
         let tag = attr.name();
-        let tag = &*tag;
         if tag != "rustc_deprecated" && tag != "unstable" && tag != "stable" {
             continue // not a stability level
         }
@@ -619,7 +600,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
         mark_used(attr);
 
         if let Some(metas) = attr.meta_item_list() {
-            let get = |meta: &MetaItem, item: &mut Option<InternedString>| {
+            let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                 if item.is_some() {
                     handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
                     return false
@@ -633,7 +614,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                 }
             };
 
-            match tag {
+            match &*tag.as_str() {
                 "rustc_deprecated" => {
                     if rustc_depr.is_some() {
                         span_err!(diagnostic, item_sp, E0540,
@@ -645,7 +626,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                     let mut reason = None;
                     for meta in metas {
                         if let Some(mi) = meta.meta_item() {
-                            match &*mi.name() {
+                            match &*mi.name().as_str() {
                                 "since" => if !get(mi, &mut since) { continue 'outer },
                                 "reason" => if !get(mi, &mut reason) { continue 'outer },
                                 _ => {
@@ -688,7 +669,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                     let mut issue = None;
                     for meta in metas {
                         if let Some(mi) = meta.meta_item() {
-                            match &*mi.name() {
+                            match &*mi.name().as_str() {
                                 "feature" => if !get(mi, &mut feature) { continue 'outer },
                                 "reason" => if !get(mi, &mut reason) { continue 'outer },
                                 "issue" => if !get(mi, &mut issue) { continue 'outer },
@@ -710,7 +691,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                                 level: Unstable {
                                     reason: reason,
                                     issue: {
-                                        if let Ok(issue) = issue.parse() {
+                                        if let Ok(issue) = issue.as_str().parse() {
                                             issue
                                         } else {
                                             span_err!(diagnostic, attr.span(), E0545,
@@ -743,7 +724,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                     let mut since = None;
                     for meta in metas {
                         if let NestedMetaItemKind::MetaItem(ref mi) = meta.node {
-                            match &*mi.name() {
+                            match &*mi.name().as_str() {
                                 "feature" => if !get(mi, &mut feature) { continue 'outer },
                                 "since" => if !get(mi, &mut since) { continue 'outer },
                                 _ => {
@@ -821,7 +802,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
         }
 
         depr = if let Some(metas) = attr.meta_item_list() {
-            let get = |meta: &MetaItem, item: &mut Option<InternedString>| {
+            let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                 if item.is_some() {
                     handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
                     return false
@@ -839,7 +820,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
             let mut note = None;
             for meta in metas {
                 if let NestedMetaItemKind::MetaItem(ref mi) = meta.node {
-                    match &*mi.name() {
+                    match &*mi.name().as_str() {
                         "since" => if !get(mi, &mut since) { continue 'outer },
                         "note" => if !get(mi, &mut note) { continue 'outer },
                         _ => {
@@ -875,7 +856,7 @@ pub fn find_deprecation(diagnostic: &Handler, attrs: &[Attribute],
     find_deprecation_generic(diagnostic, attrs.iter(), item_sp)
 }
 
-pub fn require_unique_names(diagnostic: &Handler, metas: &[P<MetaItem>]) {
+pub fn require_unique_names(diagnostic: &Handler, metas: &[MetaItem]) {
     let mut set = HashSet::new();
     for meta in metas {
         let name = meta.name();
@@ -896,8 +877,8 @@ pub fn require_unique_names(diagnostic: &Handler, metas: &[P<MetaItem>]) {
 /// structure layout, and `packed` to remove padding.
 pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec<ReprAttr> {
     let mut acc = Vec::new();
-    match attr.node.value.node {
-        ast::MetaItemKind::List(ref s, ref items) if s == "repr" => {
+    match attr.value.node {
+        ast::MetaItemKind::List(ref items) if attr.value.name == "repr" => {
             mark_used(attr);
             for item in items {
                 if !item.is_meta_item() {
@@ -906,7 +887,7 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec<ReprAttr>
                 }
 
                 if let Some(mi) = item.word() {
-                    let word = &*mi.name();
+                    let word = &*mi.name().as_str();
                     let hint = match word {
                         // Can't use "extern" because it's not a lexical identifier.
                         "C" => Some(ReprExtern),
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 49012ad036a..3cdfa718eab 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -871,6 +871,7 @@ impl CodeMapper for CodeMap {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use symbol::keywords;
     use std::rc::Rc;
 
     #[test]
@@ -1097,10 +1098,9 @@ mod tests {
     #[test]
     fn t11() {
         // Test span_to_expanded_string works with expansion
-        use ast::Name;
         let cm = init_code_map();
         let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
-        let format = ExpnFormat::MacroBang(Name(0u32));
+        let format = ExpnFormat::MacroBang(keywords::Invalid.name());
         let callee = NameAndSpan { format: format,
                                    allow_internal_unstable: false,
                                    span: None };
@@ -1197,11 +1197,9 @@ mod tests {
     fn init_expansion_chain(cm: &CodeMap) -> Span {
         // Creates an expansion chain containing two recursive calls
         // root -> expA -> expA -> expB -> expB -> end
-        use ast::Name;
-
         let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
 
-        let format_root = ExpnFormat::MacroBang(Name(0u32));
+        let format_root = ExpnFormat::MacroBang(keywords::Invalid.name());
         let callee_root = NameAndSpan { format: format_root,
                                         allow_internal_unstable: false,
                                         span: Some(root) };
@@ -1210,7 +1208,7 @@ mod tests {
         let id_a1 = cm.record_expansion(info_a1);
         let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 };
 
-        let format_a = ExpnFormat::MacroBang(Name(1u32));
+        let format_a = ExpnFormat::MacroBang(keywords::As.name());
         let callee_a = NameAndSpan { format: format_a,
                                       allow_internal_unstable: false,
                                       span: Some(span_a1) };
@@ -1223,7 +1221,7 @@ mod tests {
         let id_b1 = cm.record_expansion(info_b1);
         let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 };
 
-        let format_b = ExpnFormat::MacroBang(Name(2u32));
+        let format_b = ExpnFormat::MacroBang(keywords::Box.name());
         let callee_b = NameAndSpan { format: format_b,
                                      allow_internal_unstable: false,
                                      span: None };
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index 02429f02738..89eea3f6f8b 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -12,7 +12,7 @@ use attr::HasAttrs;
 use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue};
 use {fold, attr};
 use ast;
-use codemap::{Spanned, respan};
+use codemap::Spanned;
 use parse::ParseSess;
 use ptr::P;
 
@@ -106,12 +106,13 @@ impl<'a> StripUnconfigured<'a> {
         match (cfg.meta_item(), mi.meta_item()) {
             (Some(cfg), Some(mi)) =>
                 if cfg_matches(&cfg, self.sess, self.features) {
-                    self.process_cfg_attr(respan(mi.span, ast::Attribute_ {
+                    self.process_cfg_attr(ast::Attribute {
                         id: attr::mk_attr_id(),
-                        style: attr.node.style,
+                        style: attr.style,
                         value: mi.clone(),
                         is_sugared_doc: false,
-                    }))
+                        span: mi.span,
+                    })
                 } else {
                     None
                 },
@@ -131,8 +132,8 @@ impl<'a> StripUnconfigured<'a> {
                 return false;
             }
 
-            let mis = match attr.node.value.node {
-                ast::MetaItemKind::List(_, ref mis) if is_cfg(&attr) => mis,
+            let mis = match attr.value.node {
+                ast::MetaItemKind::List(ref mis) if is_cfg(&attr) => mis,
                 _ => return true
             };
 
@@ -160,7 +161,7 @@ impl<'a> StripUnconfigured<'a> {
                                           attr.span,
                                           GateIssue::Language,
                                           EXPLAIN_STMT_ATTR_SYNTAX);
-                if attr.node.is_sugared_doc {
+                if attr.is_sugared_doc {
                     err.help("`///` is for documentation comments. For a plain comment, use `//`.");
                 }
                 err.emit();
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index 81c8e0bdb82..fe5cb87ad59 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -19,6 +19,7 @@ use ext::base::{ExtCtxt, MacEager, MacResult};
 use ext::build::AstBuilder;
 use parse::token;
 use ptr::P;
+use symbol::Symbol;
 use tokenstream::{TokenTree};
 use util::small_vector::SmallVector;
 
@@ -141,7 +142,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
             ));
         }
     });
-    let sym = Ident::with_empty_ctxt(token::gensym(&format!(
+    let sym = Ident::with_empty_ctxt(Symbol::gensym(&format!(
         "__register_diagnostic_{}", code
     )));
     MacEager::items(SmallVector::many(vec![
@@ -194,11 +195,11 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
     let (count, expr) =
         with_registered_diagnostics(|diagnostics| {
             let descriptions: Vec<P<ast::Expr>> =
-                diagnostics.iter().filter_map(|(code, info)| {
+                diagnostics.iter().filter_map(|(&code, info)| {
                     info.description.map(|description| {
                         ecx.expr_tuple(span, vec![
-                            ecx.expr_str(span, code.as_str()),
-                            ecx.expr_str(span, description.as_str())
+                            ecx.expr_str(span, code),
+                            ecx.expr_str(span, description)
                         ])
                     })
                 }).collect();
diff --git a/src/libsyntax/entry.rs b/src/libsyntax/entry.rs
index 7014e576e2b..93ca1948ed8 100644
--- a/src/libsyntax/entry.rs
+++ b/src/libsyntax/entry.rs
@@ -28,7 +28,7 @@ pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType {
                 EntryPointType::Start
             } else if attr::contains_name(&item.attrs, "main") {
                 EntryPointType::MainAttr
-            } else if item.ident.name.as_str() == "main" {
+            } else if item.ident.name == "main" {
                 if depth == 1 {
                     // This is a top-level function so can be 'main'
                     EntryPointType::MainNamed
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 7f66b060052..ddf4cf11f20 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -20,8 +20,8 @@ use ext::hygiene::Mark;
 use fold::{self, Folder};
 use parse::{self, parser};
 use parse::token;
-use parse::token::{InternedString, str_to_ident};
 use ptr::P;
+use symbol::Symbol;
 use util::small_vector::SmallVector;
 
 use std::path::PathBuf;
@@ -643,7 +643,7 @@ impl<'a> ExtCtxt<'a> {
         loop {
             if self.codemap().with_expn_info(expn_id, |info| {
                 info.map_or(None, |i| {
-                    if i.callee.name().as_str() == "include" {
+                    if i.callee.name() == "include" {
                         // Stop going up the backtrace once include! is encountered
                         return None;
                     }
@@ -735,7 +735,7 @@ impl<'a> ExtCtxt<'a> {
         self.ecfg.trace_mac = x
     }
     pub fn ident_of(&self, st: &str) -> ast::Ident {
-        str_to_ident(st)
+        ast::Ident::from_str(st)
     }
     pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
         let mut v = Vec::new();
@@ -746,7 +746,7 @@ impl<'a> ExtCtxt<'a> {
         return v
     }
     pub fn name_of(&self, st: &str) -> ast::Name {
-        token::intern(st)
+        Symbol::intern(st)
     }
 }
 
@@ -754,7 +754,7 @@ impl<'a> ExtCtxt<'a> {
 /// emitting `err_msg` if `expr` is not a string literal. This does not stop
 /// compilation on error, merely emits a non-fatal error and returns None.
 pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
-                              -> Option<Spanned<(InternedString, ast::StrStyle)>> {
+                              -> Option<Spanned<(Symbol, ast::StrStyle)>> {
     // Update `expr.span`'s expn_id now in case expr is an `include!` macro invocation.
     let expr = expr.map(|mut expr| {
         expr.span.expn_id = cx.backtrace();
@@ -765,7 +765,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &st
     let expr = cx.expander().fold_expr(expr);
     match expr.node {
         ast::ExprKind::Lit(ref l) => match l.node {
-            ast::LitKind::Str(ref s, style) => return Some(respan(expr.span, (s.clone(), style))),
+            ast::LitKind::Str(s, style) => return Some(respan(expr.span, (s, style))),
             _ => cx.span_err(l.span, err_msg)
         },
         _ => cx.span_err(expr.span, err_msg)
@@ -774,7 +774,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &st
 }
 
 pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
-                      -> Option<(InternedString, ast::StrStyle)> {
+                      -> Option<(Symbol, ast::StrStyle)> {
     expr_to_spanned_string(cx, expr, err_msg).map(|s| s.node)
 }
 
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index c3e28cbb006..324afc20051 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -14,8 +14,8 @@ use attr;
 use syntax_pos::{Span, DUMMY_SP, Pos};
 use codemap::{dummy_spanned, respan, Spanned};
 use ext::base::ExtCtxt;
-use parse::token::{self, keywords, InternedString};
 use ptr::P;
+use symbol::{Symbol, keywords};
 
 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {
@@ -149,7 +149,7 @@ pub trait AstBuilder {
     fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
     fn expr_vec_ng(&self, sp: Span) -> P<ast::Expr>;
     fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
-    fn expr_str(&self, sp: Span, s: InternedString) -> P<ast::Expr>;
+    fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr>;
 
     fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
     fn expr_none(&self, sp: Span) -> P<ast::Expr>;
@@ -158,7 +158,7 @@ pub trait AstBuilder {
 
     fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
 
-    fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr>;
+    fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr>;
     fn expr_unreachable(&self, span: Span) -> P<ast::Expr>;
 
     fn expr_ok(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
@@ -275,22 +275,22 @@ pub trait AstBuilder {
                     generics: Generics) -> P<ast::Item>;
     fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> P<ast::Item>;
 
-    fn attribute(&self, sp: Span, mi: P<ast::MetaItem>) -> ast::Attribute;
+    fn attribute(&self, sp: Span, mi: ast::MetaItem) -> ast::Attribute;
 
-    fn meta_word(&self, sp: Span, w: InternedString) -> P<ast::MetaItem>;
+    fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem;
 
-    fn meta_list_item_word(&self, sp: Span, w: InternedString) -> ast::NestedMetaItem;
+    fn meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem;
 
     fn meta_list(&self,
                  sp: Span,
-                 name: InternedString,
+                 name: ast::Name,
                  mis: Vec<ast::NestedMetaItem> )
-                 -> P<ast::MetaItem>;
+                 -> ast::MetaItem;
     fn meta_name_value(&self,
                        sp: Span,
-                       name: InternedString,
+                       name: ast::Name,
                        value: ast::LitKind)
-                       -> P<ast::MetaItem>;
+                       -> ast::MetaItem;
 
     fn item_use(&self, sp: Span,
                 vis: ast::Visibility, vp: P<ast::ViewPath>) -> P<ast::Item>;
@@ -755,7 +755,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
         self.expr_addr_of(sp, self.expr_vec(sp, exprs))
     }
-    fn expr_str(&self, sp: Span, s: InternedString) -> P<ast::Expr> {
+    fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
         self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
     }
 
@@ -785,10 +785,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.expr(sp, ast::ExprKind::Tup(exprs))
     }
 
-    fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
+    fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
         let loc = self.codemap().lookup_char_pos(span.lo);
-        let expr_file = self.expr_str(span,
-                                      token::intern_and_get_ident(&loc.file.name));
+        let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name));
         let expr_line = self.expr_u32(span, loc.line as u32);
         let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]);
         let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
@@ -801,9 +800,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     }
 
     fn expr_unreachable(&self, span: Span) -> P<ast::Expr> {
-        self.expr_fail(span,
-                       InternedString::new(
-                           "internal error: entered unreachable code"))
+        self.expr_fail(span, Symbol::intern("internal error: entered unreachable code"))
     }
 
     fn expr_ok(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
@@ -1146,25 +1143,25 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.item_ty_poly(span, name, ty, Generics::default())
     }
 
-    fn attribute(&self, sp: Span, mi: P<ast::MetaItem>) -> ast::Attribute {
+    fn attribute(&self, sp: Span, mi: ast::MetaItem) -> ast::Attribute {
         attr::mk_spanned_attr_outer(sp, attr::mk_attr_id(), mi)
     }
 
-    fn meta_word(&self, sp: Span, w: InternedString) -> P<ast::MetaItem> {
+    fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem {
         attr::mk_spanned_word_item(sp, w)
     }
 
-    fn meta_list_item_word(&self, sp: Span, w: InternedString) -> ast::NestedMetaItem {
+    fn meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem {
         respan(sp, ast::NestedMetaItemKind::MetaItem(attr::mk_spanned_word_item(sp, w)))
     }
 
-    fn meta_list(&self, sp: Span, name: InternedString, mis: Vec<ast::NestedMetaItem>)
-                 -> P<ast::MetaItem> {
+    fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec<ast::NestedMetaItem>)
+                 -> ast::MetaItem {
         attr::mk_spanned_list_item(sp, name, mis)
     }
 
-    fn meta_name_value(&self, sp: Span, name: InternedString, value: ast::LitKind)
-                       -> P<ast::MetaItem> {
+    fn meta_name_value(&self, sp: Span, name: ast::Name, value: ast::LitKind)
+                       -> ast::MetaItem {
         attr::mk_spanned_name_value_item(sp, name, respan(sp, value))
     }
 
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 8e0c3ce8448..844fb77e29d 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -23,10 +23,11 @@ use fold;
 use fold::*;
 use parse::{ParseSess, PResult, lexer};
 use parse::parser::Parser;
-use parse::token::{self, intern, keywords};
+use parse::token;
 use print::pprust;
 use ptr::P;
 use std_inject;
+use symbol::keywords;
 use tokenstream::{TokenTree, TokenStream};
 use util::small_vector::SmallVector;
 use visit::Visitor;
@@ -190,7 +191,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
     pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
         self.cx.crate_root = std_inject::injected_crate_name(&krate);
         let mut module = ModuleData {
-            mod_path: vec![token::str_to_ident(&self.cx.ecfg.crate_name)],
+            mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)],
             directory: PathBuf::from(self.cx.codemap().span_to_filename(krate.span)),
         };
         module.directory.pop();
@@ -246,7 +247,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     self.cx.resolver.resolve_macro(scope, &mac.node.path, force)
                 }
                 InvocationKind::Attr { ref attr, .. } => {
-                    let ident = ast::Ident::with_empty_ctxt(intern(&*attr.name()));
+                    let ident = Ident::with_empty_ctxt(attr.name());
                     let path = ast::Path::from_ident(attr.span, ident);
                     self.cx.resolver.resolve_macro(scope, &path, force)
                 }
@@ -341,7 +342,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         };
 
         attr::mark_used(&attr);
-        let name = intern(&attr.name());
+        let name = attr.name();
         self.cx.bt_push(ExpnInfo {
             call_site: attr.span,
             callee: NameAndSpan {
@@ -353,12 +354,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 
         match *ext {
             MultiModifier(ref mac) => {
-                let item = mac.expand(self.cx, attr.span, &attr.node.value, item);
+                let item = mac.expand(self.cx, attr.span, &attr.value, item);
                 kind.expect_from_annotatables(item)
             }
             MultiDecorator(ref mac) => {
                 let mut items = Vec::new();
-                mac.expand(self.cx, attr.span, &attr.node.value, &item,
+                mac.expand(self.cx, attr.span, &attr.value, &item,
                            &mut |item| items.push(item));
                 items.push(item);
                 kind.expect_from_annotatables(items)
@@ -779,7 +780,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                 if inline_module {
                     if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") {
                         self.cx.current_expansion.no_noninline_mod = false;
-                        module.directory.push(&*path);
+                        module.directory.push(&*path.as_str());
                     } else {
                         module.directory.push(&*item.ident.name.as_str());
                     }
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
index e323dd2f623..4fe57a8345e 100644
--- a/src/libsyntax/ext/placeholders.rs
+++ b/src/libsyntax/ext/placeholders.rs
@@ -13,8 +13,8 @@ use codemap::{DUMMY_SP, dummy_spanned};
 use ext::base::ExtCtxt;
 use ext::expand::{Expansion, ExpansionKind};
 use fold::*;
-use parse::token::{intern, keywords};
 use ptr::P;
+use symbol::{Symbol, keywords};
 use util::move_map::MoveMap;
 use util::small_vector::SmallVector;
 
@@ -227,7 +227,7 @@ pub fn reconstructed_macro_rules(def: &ast::MacroDef) -> Expansion {
                     span: DUMMY_SP,
                     global: false,
                     segments: vec![ast::PathSegment {
-                        identifier: ast::Ident::with_empty_ctxt(intern("macro_rules")),
+                        identifier: ast::Ident::with_empty_ctxt(Symbol::intern("macro_rules")),
                         parameters: ast::PathParameters::none(),
                     }],
                 },
diff --git a/src/libsyntax/ext/proc_macro_shim.rs b/src/libsyntax/ext/proc_macro_shim.rs
index dc3a01f41bc..21ce89a6dd5 100644
--- a/src/libsyntax/ext/proc_macro_shim.rs
+++ b/src/libsyntax/ext/proc_macro_shim.rs
@@ -66,6 +66,7 @@ pub mod prelude {
     pub use ast::Ident;
     pub use codemap::{DUMMY_SP, Span};
     pub use ext::base::{ExtCtxt, MacResult};
-    pub use parse::token::{self, Token, DelimToken, keywords, str_to_ident};
+    pub use parse::token::{self, Token, DelimToken};
+    pub use symbol::keywords;
     pub use tokenstream::{TokenTree, TokenStream};
 }
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 969cfa292ce..aa777a19a9b 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -33,6 +33,7 @@ pub mod rt {
     use parse::{self, token, classify};
     use ptr::P;
     use std::rc::Rc;
+    use symbol::Symbol;
 
     use tokenstream::{self, TokenTree};
 
@@ -211,7 +212,7 @@ pub mod rt {
     impl_to_tokens_slice! { P<ast::Item>, [] }
     impl_to_tokens_slice! { ast::Arg, [TokenTree::Token(DUMMY_SP, token::Comma)] }
 
-    impl ToTokens for P<ast::MetaItem> {
+    impl ToTokens for ast::MetaItem {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             let nt = token::NtMeta(self.clone());
             vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
@@ -223,13 +224,13 @@ pub mod rt {
             let mut r = vec![];
             // FIXME: The spans could be better
             r.push(TokenTree::Token(self.span, token::Pound));
-            if self.node.style == ast::AttrStyle::Inner {
+            if self.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
             r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
                 open_span: self.span,
-                tts: self.node.value.to_tokens(cx),
+                tts: self.value.to_tokens(cx),
                 close_span: self.span,
             })));
             r
@@ -238,8 +239,7 @@ pub mod rt {
 
     impl ToTokens for str {
         fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
-            let lit = ast::LitKind::Str(
-                token::intern_and_get_ident(self), ast::StrStyle::Cooked);
+            let lit = ast::LitKind::Str(Symbol::intern(self), ast::StrStyle::Cooked);
             dummy_spanned(lit).to_tokens(cx)
         }
     }
@@ -405,7 +405,7 @@ pub fn parse_block_panic(parser: &mut Parser) -> P<Block> {
     panictry!(parser.parse_block())
 }
 
-pub fn parse_meta_item_panic(parser: &mut Parser) -> P<ast::MetaItem> {
+pub fn parse_meta_item_panic(parser: &mut Parser) -> ast::MetaItem {
     panictry!(parser.parse_meta_item())
 }
 
@@ -527,17 +527,17 @@ pub fn expand_quote_matcher(cx: &mut ExtCtxt,
     base::MacEager::expr(expanded)
 }
 
-fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
-    strs.iter().map(|str| str_to_ident(&(*str))).collect()
+fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
+    strs.iter().map(|s| ast::Ident::from_str(s)).collect()
 }
 
-fn id_ext(str: &str) -> ast::Ident {
-    str_to_ident(str)
+fn id_ext(s: &str) -> ast::Ident {
+    ast::Ident::from_str(s)
 }
 
 // Lift an ident to the expr that evaluates to that ident.
 fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
-    let e_str = cx.expr_str(sp, ident.name.as_str());
+    let e_str = cx.expr_str(sp, ident.name);
     cx.expr_method_call(sp,
                         cx.expr_ident(sp, id_ext("ext_cx")),
                         id_ext("ident_of"),
@@ -546,7 +546,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
 
 // Lift a name to the expr that evaluates to that name
 fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
-    let e_str = cx.expr_str(sp, ident.name.as_str());
+    let e_str = cx.expr_str(sp, ident.name);
     cx.expr_method_call(sp,
                         cx.expr_ident(sp, id_ext("ext_cx")),
                         id_ext("name_of"),
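
In ext/quote.rs the string-literal and identifier helpers now traffic in `Symbol` directly: `ToTokens for str` interns with `Symbol::intern`, and `cx.expr_str` accepts the `Symbol` stored in `ident.name` without an `as_str()` round trip. A hedged sketch of the calling pattern, reusing only the builder calls visible in this hunk (`cx: &ExtCtxt`, `sp: Span`, and `ident: ast::Ident` are assumed to be in scope):

    // A cooked string literal is just an interned Symbol plus a style.
    let lit = ast::LitKind::Str(Symbol::intern("zz"), ast::StrStyle::Cooked);
    // expr_str takes the Symbol itself; no InternedString conversion is needed.
    let e_str = cx.expr_str(sp, ident.name);
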
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index bda84cdaf39..320d49b6463 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -17,6 +17,7 @@ use parse::token;
 use parse;
 use print::pprust;
 use ptr::P;
+use symbol::Symbol;
 use tokenstream;
 use util::small_vector::SmallVector;
 
@@ -60,15 +61,13 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 
     let topmost = cx.expansion_cause();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
-    let filename = token::intern_and_get_ident(&loc.file.name);
-    base::MacEager::expr(cx.expr_str(topmost, filename))
+    base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name)))
 }
 
 pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
                         -> Box<base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
-    base::MacEager::expr(cx.expr_str(sp,
-                                   token::intern_and_get_ident(&s[..])))
+    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
 }
 
 pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
@@ -77,9 +76,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     let mod_path = &cx.current_expansion.module.mod_path;
     let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");
 
-    base::MacEager::expr(cx.expr_str(
-            sp,
-            token::intern_and_get_ident(&string[..])))
+    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string)))
 }
 
 /// include! : parse the given file as an expr
@@ -144,10 +141,9 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
             // Add this input file to the code map to make it available as
             // dependency information
             let filename = format!("{}", file.display());
-            let interned = token::intern_and_get_ident(&src[..]);
             cx.codemap().new_filemap_and_lines(&filename, None, &src);
 
-            base::MacEager::expr(cx.expr_str(sp, interned))
+            base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
         }
         Err(_) => {
             cx.span_err(sp,
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 552d4de9617..59b8b50e88c 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -20,9 +20,10 @@ use ext::tt::macro_parser::{parse, parse_failure_msg};
 use parse::ParseSess;
 use parse::lexer::new_tt_reader;
 use parse::parser::{Parser, Restrictions};
-use parse::token::{self, gensym_ident, NtTT, Token};
+use parse::token::{self, NtTT, Token};
 use parse::token::Token::*;
 use print;
+use symbol::Symbol;
 use tokenstream::{self, TokenTree};
 
 use std::collections::{HashMap};
@@ -187,16 +188,16 @@ impl IdentMacroExpander for MacroRulesExpander {
 
 /// Converts a `macro_rules!` invocation into a syntax extension.
 pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
-    let lhs_nm =  gensym_ident("lhs");
-    let rhs_nm =  gensym_ident("rhs");
+    let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
+    let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
 
     // The pattern that macro_rules matches.
     // The grammar for macro_rules! is:
     // $( $lhs:tt => $rhs:tt );+
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
-    let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
-    let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
+    let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
+    let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
     let argument_gram = vec![
         TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![
@@ -790,8 +791,7 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
             "pat" => {
                 match *tok {
                     FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
-                    Ident(i) if (i.name.as_str() == "if" ||
-                                 i.name.as_str() == "in") => Ok(true),
+                    Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
                     _ => Ok(false)
                 }
             },
@@ -799,8 +799,8 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
                 match *tok {
                     OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
                     Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
-                    MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true),
-                    Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true),
+                    MatchNt(_, ref frag) if frag.name == "block" => Ok(true),
+                    Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
                     _ => Ok(false)
                 }
             },
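
`compile` now spells out the difference between the two constructors: `Symbol::gensym` mints a fresh symbol on every call (used here for the `lhs`/`rhs` matcher names so they cannot collide with user identifiers), while `Symbol::intern` deduplicates. Name checks also compare a `Symbol` against a string literal directly. A small sketch of both behaviours, assuming the API surface shown in this hunk:

    use symbol::Symbol;

    // Interned symbols for equal strings compare equal...
    assert!(Symbol::intern("tt") == Symbol::intern("tt"));
    // ...while gensyms are distinct even for the same string.
    assert!(Symbol::gensym("lhs") != Symbol::gensym("lhs"));

    // Symbols compare against string literals without .as_str():
    let name = Symbol::intern("if");
    assert!(name == "if");
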
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index dbdf56e4d78..16d4adf1705 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -33,7 +33,7 @@ use syntax_pos::Span;
 use errors::{DiagnosticBuilder, Handler};
 use visit::{self, FnKind, Visitor};
 use parse::ParseSess;
-use parse::token::InternedString;
+use symbol::Symbol;
 
 use std::ascii::AsciiExt;
 use std::env;
@@ -59,9 +59,9 @@ macro_rules! declare_features {
         /// A set of features to be used by later passes.
         pub struct Features {
             /// #![feature] attrs for stable language features, for error reporting
-            pub declared_stable_lang_features: Vec<(InternedString, Span)>,
+            pub declared_stable_lang_features: Vec<(Symbol, Span)>,
             /// #![feature] attrs for non-language (library) features
-            pub declared_lib_features: Vec<(InternedString, Span)>,
+            pub declared_lib_features: Vec<(Symbol, Span)>,
             $(pub $feature: bool),+
         }
 
@@ -755,7 +755,7 @@ pub struct GatedCfg {
 
 impl GatedCfg {
     pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> {
-        let name = cfg.name();
+        let name = &*cfg.name().as_str();
         GATED_CFGS.iter()
                   .position(|info| info.0 == name)
                   .map(|idx| {
@@ -802,7 +802,7 @@ macro_rules! gate_feature {
 impl<'a> Context<'a> {
     fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
         debug!("check_attribute(attr = {:?})", attr);
-        let name = &*attr.name();
+        let name = &*attr.name().as_str();
         for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
             if n == name {
                 if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
@@ -989,11 +989,11 @@ fn contains_novel_literal(item: &ast::MetaItem) -> bool {
     use ast::NestedMetaItemKind::*;
 
     match item.node {
-        Word(..) => false,
-        NameValue(_, ref lit) => !lit.node.is_str(),
-        List(_, ref list) => list.iter().any(|li| {
+        Word => false,
+        NameValue(ref lit) => !lit.node.is_str(),
+        List(ref list) => list.iter().any(|li| {
             match li.node {
-                MetaItem(ref mi) => contains_novel_literal(&**mi),
+                MetaItem(ref mi) => contains_novel_literal(&mi),
                 Literal(_) => true,
             }
         }),
@@ -1011,7 +1011,7 @@ impl<'a> Visitor for PostExpansionVisitor<'a> {
             self.context.check_attribute(attr, false);
         }
 
-        if contains_novel_literal(&*(attr.node.value)) {
+        if contains_novel_literal(&attr.value) {
             gate_feature_post!(&self, attr_literals, attr.span,
                                "non-string literals in attributes, or string \
                                literals in top-level positions, are experimental");
@@ -1119,9 +1119,8 @@ impl<'a> Visitor for PostExpansionVisitor<'a> {
     }
 
     fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
-        let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs,
-                                                                     "link_name") {
-            Some(val) => val.starts_with("llvm."),
+        let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs, "link_name") {
+            Some(val) => val.as_str().starts_with("llvm."),
             _ => false
         };
         if links_to_llvm {
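
With the name moved from the `MetaItemKind` variants onto `MetaItem` itself, matches like `contains_novel_literal` bind only the payload. A sketch of consuming the new shape (the helper function is hypothetical, but it uses only the fields and variants shown above):

    fn kind_of(item: &ast::MetaItem) -> &'static str {
        // item.name is a Symbol and is available for every variant.
        let _name = item.name;
        match item.node {
            ast::MetaItemKind::Word => "word",
            ast::MetaItemKind::NameValue(..) => "name = value",
            ast::MetaItemKind::List(..) => "list",
        }
    }
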
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 2e62f23578d..ff0255a2f21 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -22,8 +22,9 @@ use ast::*;
 use ast;
 use syntax_pos::Span;
 use codemap::{Spanned, respan};
-use parse::token::{self, keywords};
+use parse::token;
 use ptr::P;
+use symbol::keywords;
 use tokenstream::*;
 use util::small_vector::SmallVector;
 use util::move_map::MoveMap;
@@ -43,7 +44,7 @@ pub trait Folder : Sized {
         noop_fold_crate(c, self)
     }
 
-    fn fold_meta_items(&mut self, meta_items: Vec<P<MetaItem>>) -> Vec<P<MetaItem>> {
+    fn fold_meta_items(&mut self, meta_items: Vec<MetaItem>) -> Vec<MetaItem> {
         noop_fold_meta_items(meta_items, self)
     }
 
@@ -51,7 +52,7 @@ pub trait Folder : Sized {
         noop_fold_meta_list_item(list_item, self)
     }
 
-    fn fold_meta_item(&mut self, meta_item: P<MetaItem>) -> P<MetaItem> {
+    fn fold_meta_item(&mut self, meta_item: MetaItem) -> MetaItem {
         noop_fold_meta_item(meta_item, self)
     }
 
@@ -293,8 +294,7 @@ pub trait Folder : Sized {
     }
 }
 
-pub fn noop_fold_meta_items<T: Folder>(meta_items: Vec<P<MetaItem>>, fld: &mut T)
-                                       -> Vec<P<MetaItem>> {
+pub fn noop_fold_meta_items<T: Folder>(meta_items: Vec<MetaItem>, fld: &mut T) -> Vec<MetaItem> {
     meta_items.move_map(|x| fld.fold_meta_item(x))
 }
 
@@ -486,16 +486,13 @@ pub fn noop_fold_local<T: Folder>(l: P<Local>, fld: &mut T) -> P<Local> {
     })
 }
 
-pub fn noop_fold_attribute<T: Folder>(at: Attribute, fld: &mut T) -> Option<Attribute> {
-    let Spanned {node: Attribute_ {id, style, value, is_sugared_doc}, span} = at;
-    Some(Spanned {
-        node: Attribute_ {
-            id: id,
-            style: style,
-            value: fld.fold_meta_item(value),
-            is_sugared_doc: is_sugared_doc
-        },
-        span: fld.new_span(span)
+pub fn noop_fold_attribute<T: Folder>(attr: Attribute, fld: &mut T) -> Option<Attribute> {
+    Some(Attribute {
+        id: attr.id,
+        style: attr.style,
+        value: fld.fold_meta_item(attr.value),
+        is_sugared_doc: attr.is_sugared_doc,
+        span: fld.new_span(attr.span),
     })
 }
 
@@ -522,17 +519,18 @@ pub fn noop_fold_meta_list_item<T: Folder>(li: NestedMetaItem, fld: &mut T)
     }
 }
 
-pub fn noop_fold_meta_item<T: Folder>(mi: P<MetaItem>, fld: &mut T) -> P<MetaItem> {
-    mi.map(|Spanned {node, span}| Spanned {
-        node: match node {
-            MetaItemKind::Word(id) => MetaItemKind::Word(id),
-            MetaItemKind::List(id, mis) => {
-                MetaItemKind::List(id, mis.move_map(|e| fld.fold_meta_list_item(e)))
-            }
-            MetaItemKind::NameValue(id, s) => MetaItemKind::NameValue(id, s)
+pub fn noop_fold_meta_item<T: Folder>(mi: MetaItem, fld: &mut T) -> MetaItem {
+    MetaItem {
+        name: mi.name,
+        node: match mi.node {
+            MetaItemKind::Word => MetaItemKind::Word,
+            MetaItemKind::List(mis) => {
+                MetaItemKind::List(mis.move_map(|e| fld.fold_meta_list_item(e)))
+            },
+            MetaItemKind::NameValue(s) => MetaItemKind::NameValue(s),
         },
-        span: fld.new_span(span)
-    })
+        span: fld.new_span(mi.span)
+    }
 }
 
 pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
@@ -1334,9 +1332,8 @@ pub fn noop_fold_vis<T: Folder>(vis: Visibility, folder: &mut T) -> Visibility {
 #[cfg(test)]
 mod tests {
     use std::io;
-    use ast;
+    use ast::{self, Ident};
     use util::parser_testing::{string_to_crate, matches_codepattern};
-    use parse::token;
     use print::pprust;
     use fold;
     use super::*;
@@ -1352,7 +1349,7 @@ mod tests {
 
     impl Folder for ToZzIdentFolder {
         fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident {
-            token::str_to_ident("zz")
+            Ident::from_str("zz")
         }
         fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
             fold::noop_fold_mac(mac, self)
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index 34280812421..5a1b0d4005e 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -27,6 +27,7 @@
 #![feature(associated_consts)]
 #![feature(const_fn)]
 #![feature(libc)]
+#![feature(optin_builtin_traits)]
 #![feature(rustc_private)]
 #![feature(staged_api)]
 #![feature(str_escape)]
@@ -83,7 +84,6 @@ pub mod diagnostics {
 pub mod diagnostic_list;
 
 pub mod util {
-    pub mod interner;
     pub mod lev_distance;
     pub mod node_count;
     pub mod parser;
@@ -118,6 +118,7 @@ pub mod ptr;
 pub mod show_span;
 pub mod std_inject;
 pub mod str;
+pub mod symbol;
 pub mod test;
 pub mod tokenstream;
 pub mod visit;
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 983c882eafc..ded676da3c6 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -11,12 +11,11 @@
 use attr;
 use ast;
 use syntax_pos::{mk_sp, Span};
-use codemap::{spanned, Spanned};
+use codemap::spanned;
 use parse::common::SeqSep;
 use parse::PResult;
 use parse::token;
 use parse::parser::{Parser, TokenType};
-use ptr::P;
 
 #[derive(PartialEq, Eq, Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -49,13 +48,9 @@ impl<'a> Parser<'a> {
                     just_parsed_doc_comment = false;
                 }
                 token::DocComment(s) => {
-                    let attr = ::attr::mk_sugared_doc_attr(
-                        attr::mk_attr_id(),
-                        self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)),
-                        self.span.lo,
-                        self.span.hi
-                    );
-                    if attr.node.style != ast::AttrStyle::Outer {
+                    let Span { lo, hi, .. } = self.span;
+                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
+                    if attr.style != ast::AttrStyle::Outer {
                         let mut err = self.fatal("expected outer doc comment");
                         err.note("inner doc comments like this (starting with \
                                   `//!` or `/*!`) can only appear before items");
@@ -145,14 +140,12 @@ impl<'a> Parser<'a> {
             style = ast::AttrStyle::Inner;
         }
 
-        Ok(Spanned {
+        Ok(ast::Attribute {
+            id: attr::mk_attr_id(),
+            style: style,
+            value: value,
+            is_sugared_doc: false,
             span: span,
-            node: ast::Attribute_ {
-                id: attr::mk_attr_id(),
-                style: style,
-                value: value,
-                is_sugared_doc: false,
-            },
         })
     }
 
@@ -172,15 +165,14 @@ impl<'a> Parser<'a> {
                     }
 
                     let attr = self.parse_attribute(true)?;
-                    assert!(attr.node.style == ast::AttrStyle::Inner);
+                    assert!(attr.style == ast::AttrStyle::Inner);
                     attrs.push(attr);
                 }
                 token::DocComment(s) => {
                     // we need to get the position of this token before we bump.
                     let Span { lo, hi, .. } = self.span;
-                    let str = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
-                    if attr.node.style == ast::AttrStyle::Inner {
+                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
+                    if attr.style == ast::AttrStyle::Inner {
                         attrs.push(attr);
                         self.bump();
                     } else {
@@ -213,7 +205,7 @@ impl<'a> Parser<'a> {
     ///
     /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
-    pub fn parse_meta_item(&mut self) -> PResult<'a, P<ast::MetaItem>> {
+    pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
@@ -229,24 +221,15 @@ impl<'a> Parser<'a> {
 
         let lo = self.span.lo;
         let ident = self.parse_ident()?;
-        let name = self.id_to_interned_str(ident);
-        match self.token {
-            token::Eq => {
-                self.bump();
-                let lit = self.parse_unsuffixed_lit()?;
-                let hi = self.prev_span.hi;
-                Ok(P(spanned(lo, hi, ast::MetaItemKind::NameValue(name, lit))))
-            }
-            token::OpenDelim(token::Paren) => {
-                let inner_items = self.parse_meta_seq()?;
-                let hi = self.prev_span.hi;
-                Ok(P(spanned(lo, hi, ast::MetaItemKind::List(name, inner_items))))
-            }
-            _ => {
-                let hi = self.prev_span.hi;
-                Ok(P(spanned(lo, hi, ast::MetaItemKind::Word(name))))
-            }
-        }
+        let node = if self.eat(&token::Eq) {
+            ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
+        } else if self.token == token::OpenDelim(token::Paren) {
+            ast::MetaItemKind::List(self.parse_meta_seq()?)
+        } else {
+            ast::MetaItemKind::Word
+        };
+        let hi = self.prev_span.hi;
+        Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
     }
 
     /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
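
Since `ast::Attribute` is now a plain struct (its span is an ordinary field rather than a `Spanned` wrapper) and `parse_meta_item` returns `ast::MetaItem` by value, construction and field access both lose a layer. A sketch of the shape, assuming a `value: ast::MetaItem` and a `span: Span` already in scope:

    let attr = ast::Attribute {
        id: attr::mk_attr_id(),
        style: ast::AttrStyle::Outer,
        value: value,                  // ast::MetaItem, no longer behind P<>
        is_sugared_doc: false,
        span: span,
    };
    // Fields are read directly, not through attr.node.
    assert!(attr.style == ast::AttrStyle::Outer);
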
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index cf48c445c80..681dec0ab56 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -8,13 +8,14 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast;
+use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span};
 use codemap::CodeMap;
 use errors::{FatalError, Handler, DiagnosticBuilder};
 use ext::tt::transcribe::tt_next_token;
-use parse::token::{self, keywords, str_to_ident};
+use parse::token;
 use str::char_at;
+use symbol::{Symbol, keywords};
 use rustc_unicode::property::Pattern_White_Space;
 
 use std::borrow::Cow;
@@ -350,13 +351,13 @@ impl<'a> StringReader<'a> {
     /// single-byte delimiter).
     pub fn name_from(&self, start: BytePos) -> ast::Name {
         debug!("taking an ident from {:?} to {:?}", start, self.pos);
-        self.with_str_from(start, token::intern)
+        self.with_str_from(start, Symbol::intern)
     }
 
     /// As name_from, with an explicit endpoint.
     pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
         debug!("taking an ident from {:?} to {:?}", start, end);
-        self.with_str_from_to(start, end, token::intern)
+        self.with_str_from_to(start, end, Symbol::intern)
     }
 
     /// Calls `f` with a string slice of the source text spanning from `start`
@@ -492,7 +493,7 @@ impl<'a> StringReader<'a> {
             if string == "_" {
                 None
             } else {
-                Some(token::intern(string))
+                Some(Symbol::intern(string))
             }
         })
     }
@@ -540,7 +541,7 @@ impl<'a> StringReader<'a> {
                         self.with_str_from(start_bpos, |string| {
                             // comments with only more "/"s are not doc comments
                             let tok = if is_doc_comment(string) {
-                                token::DocComment(token::intern(string))
+                                token::DocComment(Symbol::intern(string))
                             } else {
                                 token::Comment
                             };
@@ -669,7 +670,7 @@ impl<'a> StringReader<'a> {
                 } else {
                     string.into()
                 };
-                token::DocComment(token::intern(&string[..]))
+                token::DocComment(Symbol::intern(&string[..]))
             } else {
                 token::Comment
             };
@@ -758,7 +759,7 @@ impl<'a> StringReader<'a> {
             self.err_span_(start_bpos,
                            self.pos,
                            "no valid digits found for number");
-            return token::Integer(token::intern("0"));
+            return token::Integer(Symbol::intern("0"));
         }
 
         // might be a float, but don't be greedy if this is actually an
@@ -1097,7 +1098,7 @@ impl<'a> StringReader<'a> {
                     token::Underscore
                 } else {
                     // FIXME: perform NFKC normalization here. (Issue #2253)
-                    token::Ident(str_to_ident(string))
+                    token::Ident(Ident::from_str(string))
                 }
             }));
         }
@@ -1277,13 +1278,13 @@ impl<'a> StringReader<'a> {
                     // expansion purposes. See #12512 for the gory details of why
                     // this is necessary.
                     let ident = self.with_str_from(start, |lifetime_name| {
-                        str_to_ident(&format!("'{}", lifetime_name))
+                        Ident::from_str(&format!("'{}", lifetime_name))
                     });
 
                     // Conjure up a "keyword checking ident" to make sure that
                     // the lifetime name is not a keyword.
                     let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
-                        str_to_ident(lifetime_name)
+                        Ident::from_str(lifetime_name)
                     });
                     let keyword_checking_token = &token::Ident(keyword_checking_ident);
                     let last_bpos = self.pos;
@@ -1310,7 +1311,7 @@ impl<'a> StringReader<'a> {
                 let id = if valid {
                     self.name_from(start)
                 } else {
-                    token::intern("0")
+                    Symbol::intern("0")
                 };
                 self.bump(); // advance ch past token
                 let suffix = self.scan_optional_raw_name();
@@ -1352,7 +1353,7 @@ impl<'a> StringReader<'a> {
                 let id = if valid {
                     self.name_from(start_bpos + BytePos(1))
                 } else {
-                    token::intern("??")
+                    Symbol::intern("??")
                 };
                 self.bump();
                 let suffix = self.scan_optional_raw_name();
@@ -1424,7 +1425,7 @@ impl<'a> StringReader<'a> {
                 let id = if valid {
                     self.name_from_to(content_start_bpos, content_end_bpos)
                 } else {
-                    token::intern("??")
+                    Symbol::intern("??")
                 };
                 let suffix = self.scan_optional_raw_name();
                 return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
@@ -1551,7 +1552,7 @@ impl<'a> StringReader<'a> {
         let id = if valid {
             self.name_from(start)
         } else {
-            token::intern("?")
+            Symbol::intern("?")
         };
         self.bump(); // advance ch past token
         return token::Byte(id);
@@ -1584,7 +1585,7 @@ impl<'a> StringReader<'a> {
         let id = if valid {
             self.name_from(start)
         } else {
-            token::intern("??")
+            Symbol::intern("??")
         };
         self.bump();
         return token::ByteStr(id);
@@ -1700,11 +1701,12 @@ fn ident_continue(c: Option<char>) -> bool {
 mod tests {
     use super::*;
 
+    use ast::Ident;
+    use symbol::Symbol;
     use syntax_pos::{BytePos, Span, NO_EXPANSION};
     use codemap::CodeMap;
     use errors;
     use parse::token;
-    use parse::token::str_to_ident;
     use std::io;
     use std::rc::Rc;
 
@@ -1732,7 +1734,7 @@ mod tests {
                                       &sh,
                                       "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                           .to_string());
-        let id = str_to_ident("fn");
+        let id = Ident::from_str("fn");
         assert_eq!(string_reader.next_token().tok, token::Comment);
         assert_eq!(string_reader.next_token().tok, token::Whitespace);
         let tok1 = string_reader.next_token();
@@ -1751,7 +1753,7 @@ mod tests {
         // read another token:
         let tok3 = string_reader.next_token();
         let tok4 = TokenAndSpan {
-            tok: token::Ident(str_to_ident("main")),
+            tok: token::Ident(Ident::from_str("main")),
             sp: Span {
                 lo: BytePos(24),
                 hi: BytePos(28),
@@ -1773,7 +1775,7 @@ mod tests {
 
     // make the identifier by looking up the string in the interner
     fn mk_ident(id: &str) -> token::Token {
-        token::Ident(str_to_ident(id))
+        token::Ident(Ident::from_str(id))
     }
 
     #[test]
@@ -1813,7 +1815,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
-                   token::Literal(token::Char(token::intern("a")), None));
+                   token::Literal(token::Char(Symbol::intern("a")), None));
     }
 
     #[test]
@@ -1821,7 +1823,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
-                   token::Literal(token::Char(token::intern(" ")), None));
+                   token::Literal(token::Char(Symbol::intern(" ")), None));
     }
 
     #[test]
@@ -1829,7 +1831,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
-                   token::Literal(token::Char(token::intern("\\n")), None));
+                   token::Literal(token::Char(Symbol::intern("\\n")), None));
     }
 
     #[test]
@@ -1837,7 +1839,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
-                   token::Lifetime(token::str_to_ident("'abc")));
+                   token::Lifetime(Ident::from_str("'abc")));
     }
 
     #[test]
@@ -1847,7 +1849,7 @@ mod tests {
         assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
                        .next_token()
                        .tok,
-                   token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None));
+                   token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
     }
 
     #[test]
@@ -1857,11 +1859,11 @@ mod tests {
         macro_rules! test {
             ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                 assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
-                           token::Literal(token::$tok_type(token::intern($tok_contents)),
-                                          Some(token::intern("suffix"))));
+                           token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
+                                          Some(Symbol::intern("suffix"))));
                 // with a whitespace separator:
                 assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
-                           token::Literal(token::$tok_type(token::intern($tok_contents)),
+                           token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
                                           None));
             }}
         }
@@ -1877,14 +1879,14 @@ mod tests {
         test!("1.0e10", Float, "1.0e10");
 
         assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
-                   token::Literal(token::Integer(token::intern("2")),
-                                  Some(token::intern("us"))));
+                   token::Literal(token::Integer(Symbol::intern("2")),
+                                  Some(Symbol::intern("us"))));
         assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
-                   token::Literal(token::StrRaw(token::intern("raw"), 3),
-                                  Some(token::intern("suffix"))));
+                   token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
+                                  Some(Symbol::intern("suffix"))));
         assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
-                   token::Literal(token::ByteStrRaw(token::intern("raw"), 3),
-                                  Some(token::intern("suffix"))));
+                   token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
+                                  Some(Symbol::intern("suffix"))));
     }
 
     #[test]
@@ -1904,7 +1906,7 @@ mod tests {
             _ => panic!("expected a comment!"),
         }
         assert_eq!(lexer.next_token().tok,
-                   token::Literal(token::Char(token::intern("a")), None));
+                   token::Literal(token::Char(Symbol::intern("a")), None));
     }
 
     #[test]
@@ -1917,6 +1919,6 @@ mod tests {
         assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7)));
         assert_eq!(lexer.next_token().tok, token::Whitespace);
         assert_eq!(lexer.next_token().tok,
-                   token::DocComment(token::intern("/// test")));
+                   token::DocComment(Symbol::intern("/// test")));
     }
 }
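
The lexer and its tests now build identifiers and literal payloads with `Ident::from_str` and `Symbol::intern` instead of the removed `token::str_to_ident`/`token::intern`. A sketch of the token-construction idiom used by the tests above (presumably `Ident::from_str(s)` is shorthand for `Ident::with_empty_ctxt(Symbol::intern(s))`, though this hunk only shows its uses):

    use ast::Ident;
    use parse::token;
    use symbol::Symbol;

    let fn_tok = token::Ident(Ident::from_str("fn"));
    let char_tok = token::Literal(token::Char(Symbol::intern("a")), None);
    let doc_tok = token::DocComment(Symbol::intern("/// test"));
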
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 12408c7d3c9..be340a5b5aa 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -16,12 +16,13 @@ use syntax_pos::{self, Span, FileMap};
 use errors::{Handler, ColorConfig, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
-use parse::token::InternedString;
 use ptr::P;
 use str::char_at;
+use symbol::Symbol;
 use tokenstream;
 
 use std::cell::RefCell;
+use std::collections::HashSet;
 use std::iter;
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
@@ -64,7 +65,7 @@ impl ParseSess {
         ParseSess {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
-            config: Vec::new(),
+            config: HashSet::new(),
             included_mod_stack: RefCell::new(vec![]),
             code_map: code_map
         }
@@ -116,7 +117,7 @@ pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a Pa
 }
 
 pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, P<ast::MetaItem>> {
+                                      -> PResult<'a, ast::MetaItem> {
     new_parser_from_source_str(sess, name, source).parse_meta_item()
 }
 
@@ -371,13 +372,18 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
         s[1..].chars().all(|c| '0' <= c && c <= '9')
 }
 
-fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
-                      sd: &Handler, sp: Span) -> ast::LitKind {
+fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, sd: &Handler, sp: Span)
+                      -> ast::LitKind {
     debug!("filtered_float_lit: {}, {:?}", data, suffix);
-    match suffix.as_ref().map(|s| &**s) {
-        Some("f32") => ast::LitKind::Float(data, ast::FloatTy::F32),
-        Some("f64") => ast::LitKind::Float(data, ast::FloatTy::F64),
-        Some(suf) => {
+    let suffix = match suffix {
+        Some(suffix) => suffix,
+        None => return ast::LitKind::FloatUnsuffixed(data),
+    };
+
+    match &*suffix.as_str() {
+        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
+        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
+        suf => {
             if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
                 // if it looks like a width, lets try to be helpful.
                 sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..]))
@@ -391,16 +397,13 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
 
             ast::LitKind::FloatUnsuffixed(data)
         }
-        None => ast::LitKind::FloatUnsuffixed(data)
     }
 }
-pub fn float_lit(s: &str, suffix: Option<InternedString>,
-                 sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn float_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
     debug!("float_lit: {:?}, {:?}", s, suffix);
     // FIXME #2252: bounds checking float literals is deferred until trans
     let s = s.chars().filter(|&c| c != '_').collect::<String>();
-    let data = token::intern_and_get_ident(&s);
-    filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp)
+    filtered_float_lit(Symbol::intern(&s), suffix, sd, sp)
 }
 
 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
@@ -495,11 +498,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     Rc::new(res)
 }
 
-pub fn integer_lit(s: &str,
-                   suffix: Option<InternedString>,
-                   sd: &Handler,
-                   sp: Span)
-                   -> ast::LitKind {
+pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
     // s can only be ascii, byte indexing is fine
 
     let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
@@ -521,16 +520,15 @@ pub fn integer_lit(s: &str,
     }
 
     // 1f64 and 2f32 etc. are valid float literals.
-    if let Some(ref suf) = suffix {
-        if looks_like_width_suffix(&['f'], suf) {
+    if let Some(suf) = suffix {
+        if looks_like_width_suffix(&['f'], &suf.as_str()) {
             match base {
                 16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
                 8 => sd.span_err(sp, "octal float literal is not supported"),
                 2 => sd.span_err(sp, "binary float literal is not supported"),
                 _ => ()
             }
-            let ident = token::intern_and_get_ident(&s);
-            return filtered_float_lit(ident, Some(&suf), sd, sp)
+            return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp)
         }
     }
 
@@ -538,9 +536,9 @@ pub fn integer_lit(s: &str,
         s = &s[2..];
     }
 
-    if let Some(ref suf) = suffix {
-        if suf.is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
-        ty = match &**suf {
+    if let Some(suf) = suffix {
+        if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
+        ty = match &*suf.as_str() {
             "isize" => ast::LitIntType::Signed(ast::IntTy::Is),
             "i8"  => ast::LitIntType::Signed(ast::IntTy::I8),
             "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
@@ -551,7 +549,7 @@ pub fn integer_lit(s: &str,
             "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
             "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
             "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
-            _ => {
+            suf => {
                 // i<digits> and u<digits> look like widths, so lets
                 // give an error message along those lines
                 if looks_like_width_suffix(&['i', 'u'], suf) {
@@ -599,12 +597,11 @@ mod tests {
     use std::rc::Rc;
     use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
-    use ast::{self, PatKind};
+    use ast::{self, Ident, PatKind};
     use abi::Abi;
     use attr::first_attr_value_str_by_name;
     use parse;
     use parse::parser::Parser;
-    use parse::token::{str_to_ident};
     use print::pprust::item_to_string;
     use ptr::P;
     use tokenstream::{self, TokenTree};
@@ -626,7 +623,7 @@ mod tests {
                         global: false,
                         segments: vec![
                             ast::PathSegment {
-                                identifier: str_to_ident("a"),
+                                identifier: Ident::from_str("a"),
                                 parameters: ast::PathParameters::none(),
                             }
                         ],
@@ -645,11 +642,11 @@ mod tests {
                             global: true,
                             segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("a"),
+                                    identifier: Ident::from_str("a"),
                                     parameters: ast::PathParameters::none(),
                                 },
                                 ast::PathSegment {
-                                    identifier: str_to_ident("b"),
+                                    identifier: Ident::from_str("b"),
                                     parameters: ast::PathParameters::none(),
                                 }
                             ]
@@ -678,8 +675,8 @@ mod tests {
                 Some(&TokenTree::Token(_, token::Ident(name_zip))),
                 Some(&TokenTree::Delimited(_, ref macro_delimed)),
             )
-            if name_macro_rules.name.as_str() == "macro_rules"
-            && name_zip.name.as_str() == "zip" => {
+            if name_macro_rules.name == "macro_rules"
+            && name_zip.name == "zip" => {
                 let tts = &macro_delimed.tts[..];
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
@@ -696,8 +693,7 @@ mod tests {
                                 Some(&TokenTree::Token(_, token::Dollar)),
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
-                            if first_delimed.delim == token::Paren
-                            && ident.name.as_str() == "a" => {},
+                            if first_delimed.delim == token::Paren && ident.name == "a" => {},
                             _ => panic!("value 3: {:?}", **first_delimed),
                         }
                         let tts = &second_delimed.tts[..];
@@ -708,7 +704,7 @@ mod tests {
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if second_delimed.delim == token::Paren
-                            && ident.name.as_str() == "a" => {},
+                            && ident.name == "a" => {},
                             _ => panic!("value 4: {:?}", **second_delimed),
                         }
                     },
@@ -724,17 +720,17 @@ mod tests {
         let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
 
         let expected = vec![
-            TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
-            TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
+            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
+            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
                 Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(8, 9), token::Colon),
-                        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
+                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
                     ],
                     close_span: sp(13, 14),
                 })),
@@ -744,7 +740,7 @@ mod tests {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
@@ -765,7 +761,7 @@ mod tests {
                             global: false,
                             segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("d"),
+                                    identifier: Ident::from_str("d"),
                                     parameters: ast::PathParameters::none(),
                                 }
                             ],
@@ -788,7 +784,7 @@ mod tests {
                                global:false,
                                segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("b"),
+                                    identifier: Ident::from_str("b"),
                                     parameters: ast::PathParameters::none(),
                                 }
                                ],
@@ -812,7 +808,7 @@ mod tests {
                 id: ast::DUMMY_NODE_ID,
                 node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
                                     Spanned{ span:sp(0, 1),
-                                             node: str_to_ident("b")
+                                             node: Ident::from_str("b")
                     },
                                     None),
                 span: sp(0,1)}));
@@ -824,7 +820,7 @@ mod tests {
         // this test depends on the intern order of "fn" and "i32"
         assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
                   Some(
-                      P(ast::Item{ident:str_to_ident("a"),
+                      P(ast::Item{ident:Ident::from_str("a"),
                             attrs:Vec::new(),
                             id: ast::DUMMY_NODE_ID,
                             node: ast::ItemKind::Fn(P(ast::FnDecl {
@@ -835,8 +831,7 @@ mod tests {
                                         global:false,
                                         segments: vec![
                                             ast::PathSegment {
-                                                identifier:
-                                                    str_to_ident("i32"),
+                                                identifier: Ident::from_str("i32"),
                                                 parameters: ast::PathParameters::none(),
                                             }
                                         ],
@@ -849,7 +844,7 @@ mod tests {
                                             ast::BindingMode::ByValue(ast::Mutability::Immutable),
                                                 Spanned{
                                                     span: sp(6,7),
-                                                    node: str_to_ident("b")},
+                                                    node: Ident::from_str("b")},
                                                 None
                                                     ),
                                             span: sp(6,7)
@@ -884,9 +879,7 @@ mod tests {
                                                         global:false,
                                                         segments: vec![
                                                             ast::PathSegment {
-                                                                identifier:
-                                                                str_to_ident(
-                                                                    "b"),
+                                                                identifier: Ident::from_str("b"),
                                                                 parameters:
                                                                 ast::PathParameters::none(),
                                                             }
@@ -998,12 +991,12 @@ mod tests {
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(&doc[..], "/// doc comment");
+        assert_eq!(doc, "/// doc comment");
 
         let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
-        let docs = item.attrs.iter().filter(|a| &*a.name() == "doc")
+        let docs = item.attrs.iter().filter(|a| a.name() == "doc")
                     .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
         assert_eq!(&docs[..], b);
@@ -1011,7 +1004,7 @@ mod tests {
         let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(&doc[..], "/** doc comment\n *  with CRLF */");
+        assert_eq!(doc, "/** doc comment\n *  with CRLF */");
     }
 
     #[test]
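
Literal suffixes now travel as `Option<Symbol>` all the way into `float_lit`/`integer_lit`, and dispatch is a match on the interned string. A condensed sketch of the idiom from `filtered_float_lit`, assuming a `suffix: Option<Symbol>` (error reporting elided):

    let float_ty = match suffix {
        None => None,
        Some(suf) => match &*suf.as_str() {
            "f32" => Some(ast::FloatTy::F32),
            "f64" => Some(ast::FloatTy::F64),
            _ => None,    // the real code emits a diagnostic for unknown suffixes
        },
    };
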
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 2e38ca82d5d..4997e464c2b 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -48,13 +48,14 @@ use parse::classify;
 use parse::common::SeqSep;
 use parse::lexer::{Reader, TokenAndSpan};
 use parse::obsolete::ObsoleteSyntax;
-use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString};
+use parse::token::{self, MatchNt, SubstNt};
 use parse::{new_sub_parser_from_file, ParseSess};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
 use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
+use symbol::{Symbol, keywords};
 use util::ThinVec;
 
 use std::collections::HashSet;
@@ -998,10 +999,6 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }
 
-    pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
-        id.name.as_str()
-    }
-
     /// Is the current token one of the keywords that signals a bare function
     /// type?
     pub fn token_is_bare_fn_keyword(&mut self) -> bool {
@@ -1523,34 +1520,28 @@ impl<'a> Parser<'a> {
                     // float literals, so all the handling is done
                     // internally.
                     token::Integer(s) => {
-                        (false, parse::integer_lit(&s.as_str(),
-                                                   suf.as_ref().map(|s| s.as_str()),
-                                                   &self.sess.span_diagnostic,
-                                                   self.span))
+                        let diag = &self.sess.span_diagnostic;
+                        (false, parse::integer_lit(&s.as_str(), suf, diag, self.span))
                     }
                     token::Float(s) => {
-                        (false, parse::float_lit(&s.as_str(),
-                                                 suf.as_ref().map(|s| s.as_str()),
-                                                  &self.sess.span_diagnostic,
-                                                 self.span))
+                        let diag = &self.sess.span_diagnostic;
+                        (false, parse::float_lit(&s.as_str(), suf, diag, self.span))
                     }
 
                     token::Str_(s) => {
-                        (true,
-                         LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
-                                      ast::StrStyle::Cooked))
+                        let s = Symbol::intern(&parse::str_lit(&s.as_str()));
+                        (true, LitKind::Str(s, ast::StrStyle::Cooked))
                     }
                     token::StrRaw(s, n) => {
-                        (true,
-                         LitKind::Str(
-                            token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
-                            ast::StrStyle::Raw(n)))
+                        let s = Symbol::intern(&parse::raw_str_lit(&s.as_str()));
+                        (true, LitKind::Str(s, ast::StrStyle::Raw(n)))
+                    }
+                    token::ByteStr(i) => {
+                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str())))
+                    }
+                    token::ByteStrRaw(i, _) => {
+                        (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))
                     }
-                    token::ByteStr(i) =>
-                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str()))),
-                    token::ByteStrRaw(i, _) =>
-                        (true,
-                         LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))),
                 };
 
                 if suffix_illegal {
@@ -2544,7 +2535,7 @@ impl<'a> Parser<'a> {
                     let prev_span = self.prev_span;
                     let fstr = n.as_str();
                     let mut err = self.diagnostic().struct_span_err(prev_span,
-                        &format!("unexpected token: `{}`", n.as_str()));
+                        &format!("unexpected token: `{}`", n));
                     if fstr.chars().all(|x| "0123456789.".contains(x)) {
                         let float = match fstr.parse::<f64>().ok() {
                             Some(f) => f,
@@ -2627,7 +2618,7 @@ impl<'a> Parser<'a> {
                                       })));
                 } else if self.token.is_keyword(keywords::Crate) {
                     let ident = match self.token {
-                        token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id },
+                        token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id },
                         _ => unreachable!(),
                     };
                     self.bump();
@@ -3751,9 +3742,7 @@ impl<'a> Parser<'a> {
     /// Emit an expected item after attributes error.
     fn expected_item_err(&self, attrs: &[Attribute]) {
         let message = match attrs.last() {
-            Some(&Attribute { node: ast::Attribute_ { is_sugared_doc: true, .. }, .. }) => {
-                "expected item after doc comment"
-            }
+            Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
             _ => "expected item after attributes",
         };
 
@@ -4837,7 +4826,7 @@ impl<'a> Parser<'a> {
             Visibility::Inherited => (),
             _ => {
                 let is_macro_rules: bool = match self.token {
-                    token::Ident(sid) => sid.name == intern("macro_rules"),
+                    token::Ident(sid) => sid.name == Symbol::intern("macro_rules"),
                     _ => false,
                 };
                 if is_macro_rules {
@@ -5304,17 +5293,16 @@ impl<'a> Parser<'a> {
 
     fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) -> Restrictions {
         if let Some(path) = ::attr::first_attr_value_str_by_name(attrs, "path") {
-            self.directory.push(&*path);
+            self.directory.push(&*path.as_str());
             self.restrictions - Restrictions::NO_NONINLINE_MOD
         } else {
-            let default_path = self.id_to_interned_str(id);
-            self.directory.push(&*default_path);
+            self.directory.push(&*id.name.as_str());
             self.restrictions
         }
     }
 
     pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
-        ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d))
+        ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str()))
     }
 
     /// Returns either a path to a module, or .
@@ -6128,26 +6116,17 @@ impl<'a> Parser<'a> {
         })
     }
 
-    pub fn parse_optional_str(&mut self)
-                              -> Option<(InternedString,
-                                         ast::StrStyle,
-                                         Option<ast::Name>)> {
+    pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
         let ret = match self.token {
-            token::Literal(token::Str_(s), suf) => {
-                let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                (s, ast::StrStyle::Cooked, suf)
-            }
-            token::Literal(token::StrRaw(s, n), suf) => {
-                let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                (s, ast::StrStyle::Raw(n), suf)
-            }
+            token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
+            token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
             _ => return None
         };
         self.bump();
         Some(ret)
     }
 
-    pub fn parse_str(&mut self) -> PResult<'a, (InternedString, StrStyle)> {
+    pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
         match self.parse_optional_str() {
             Some((s, style, suf)) => {
                 let sp = self.prev_span;
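
A `Symbol` compares against string literals directly, but APIs that need a genuine `&str` (such as `PathBuf::push` in `push_directory`) still go through `.as_str()`. A brief sketch, assuming a `path: Symbol` like the one returned by `attr::first_attr_value_str_by_name`:

    let mut dir = ::std::path::PathBuf::from("src");
    // Deref the value returned by as_str() to get a &str.
    dir.push(&*path.as_str());
    // Direct comparison needs no conversion at all.
    if path == "lib.rs" { /* ... */ }
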
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 0198ee073d2..8ac39dd462e 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -16,13 +16,10 @@ pub use self::Token::*;
 
 use ast::{self};
 use ptr::P;
-use util::interner::Interner;
+use symbol::keywords;
 use tokenstream;
 
-use serialize::{Decodable, Decoder, Encodable, Encoder};
-use std::cell::RefCell;
 use std::fmt;
-use std::ops::Deref;
 use std::rc::Rc;
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
@@ -301,7 +298,7 @@ pub enum Nonterminal {
     NtTy(P<ast::Ty>),
     NtIdent(ast::SpannedIdent),
     /// Stuff inside brackets for attributes
-    NtMeta(P<ast::MetaItem>),
+    NtMeta(ast::MetaItem),
     NtPath(ast::Path),
     NtTT(tokenstream::TokenTree),
     // These are not exposed to macros, but are used by quasiquote.
@@ -335,270 +332,3 @@ impl fmt::Debug for Nonterminal {
         }
     }
 }
-
-// In this macro, there is the requirement that the name (the number) must be monotonically
-// increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
-// except starting from the next number instead of zero.
-macro_rules! declare_keywords {(
-    $( ($index: expr, $konst: ident, $string: expr) )*
-) => {
-    pub mod keywords {
-        use ast;
-        #[derive(Clone, Copy, PartialEq, Eq)]
-        pub struct Keyword {
-            ident: ast::Ident,
-        }
-        impl Keyword {
-            #[inline] pub fn ident(self) -> ast::Ident { self.ident }
-            #[inline] pub fn name(self) -> ast::Name { self.ident.name }
-        }
-        $(
-            #[allow(non_upper_case_globals)]
-            pub const $konst: Keyword = Keyword {
-                ident: ast::Ident::with_empty_ctxt(ast::Name($index))
-            };
-        )*
-    }
-
-    fn mk_fresh_ident_interner() -> IdentInterner {
-        Interner::prefill(&[$($string,)*])
-    }
-}}
-
-// NB: leaving holes in the ident table is bad! a different ident will get
-// interned with the id from the hole, but it will be between the min and max
-// of the reserved words, and thus tagged as "reserved".
-// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
-// this should be rarely necessary though if the keywords are kept in alphabetic order.
-declare_keywords! {
-    // Invalid identifier
-    (0,  Invalid,        "")
-
-    // Strict keywords used in the language.
-    (1,  As,             "as")
-    (2,  Box,            "box")
-    (3,  Break,          "break")
-    (4,  Const,          "const")
-    (5,  Continue,       "continue")
-    (6,  Crate,          "crate")
-    (7,  Else,           "else")
-    (8,  Enum,           "enum")
-    (9,  Extern,         "extern")
-    (10, False,          "false")
-    (11, Fn,             "fn")
-    (12, For,            "for")
-    (13, If,             "if")
-    (14, Impl,           "impl")
-    (15, In,             "in")
-    (16, Let,            "let")
-    (17, Loop,           "loop")
-    (18, Match,          "match")
-    (19, Mod,            "mod")
-    (20, Move,           "move")
-    (21, Mut,            "mut")
-    (22, Pub,            "pub")
-    (23, Ref,            "ref")
-    (24, Return,         "return")
-    (25, SelfValue,      "self")
-    (26, SelfType,       "Self")
-    (27, Static,         "static")
-    (28, Struct,         "struct")
-    (29, Super,          "super")
-    (30, Trait,          "trait")
-    (31, True,           "true")
-    (32, Type,           "type")
-    (33, Unsafe,         "unsafe")
-    (34, Use,            "use")
-    (35, Where,          "where")
-    (36, While,          "while")
-
-    // Keywords reserved for future use.
-    (37, Abstract,       "abstract")
-    (38, Alignof,        "alignof")
-    (39, Become,         "become")
-    (40, Do,             "do")
-    (41, Final,          "final")
-    (42, Macro,          "macro")
-    (43, Offsetof,       "offsetof")
-    (44, Override,       "override")
-    (45, Priv,           "priv")
-    (46, Proc,           "proc")
-    (47, Pure,           "pure")
-    (48, Sizeof,         "sizeof")
-    (49, Typeof,         "typeof")
-    (50, Unsized,        "unsized")
-    (51, Virtual,        "virtual")
-    (52, Yield,          "yield")
-
-    // Weak keywords, have special meaning only in specific contexts.
-    (53, Default,        "default")
-    (54, StaticLifetime, "'static")
-    (55, Union,          "union")
-}
-
-// looks like we can get rid of this completely...
-pub type IdentInterner = Interner;
-
-// if an interner exists in TLS, return it. Otherwise, prepare a
-// fresh one.
-// FIXME(eddyb) #8726 This should probably use a thread-local reference.
-pub fn with_ident_interner<T, F: FnOnce(&mut IdentInterner) -> T>(f: F) -> T {
-    thread_local!(static KEY: RefCell<IdentInterner> = {
-        RefCell::new(mk_fresh_ident_interner())
-    });
-    KEY.with(|interner| f(&mut *interner.borrow_mut()))
-}
-
-/// Reset the ident interner to its initial state.
-pub fn reset_ident_interner() {
-    with_ident_interner(|interner| *interner = mk_fresh_ident_interner());
-}
-
-pub fn clear_ident_interner() {
-    with_ident_interner(|interner| *interner = IdentInterner::new());
-}
-
-/// Represents a string stored in the thread-local interner. Because the
-/// interner lives for the life of the thread, this can be safely treated as an
-/// immortal string, as long as it never crosses between threads.
-///
-/// FIXME(pcwalton): You must be careful about what you do in the destructors
-/// of objects stored in TLS, because they may run after the interner is
-/// destroyed. In particular, they must not access string contents. This can
-/// be fixed in the future by just leaking all strings until thread death
-/// somehow.
-#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
-pub struct InternedString {
-    string: Rc<str>,
-}
-
-impl InternedString {
-    #[inline]
-    pub fn new(string: &'static str) -> InternedString {
-        InternedString {
-            string: Rc::__from_str(string),
-        }
-    }
-
-    #[inline]
-    pub fn new_from_name(name: ast::Name) -> InternedString {
-        with_ident_interner(|interner| InternedString { string: interner.get(name) })
-    }
-}
-
-impl Deref for InternedString {
-    type Target = str;
-
-    fn deref(&self) -> &str { &self.string }
-}
-
-impl fmt::Debug for InternedString {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Debug::fmt(&self.string, f)
-    }
-}
-
-impl fmt::Display for InternedString {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.string, f)
-    }
-}
-
-impl<'a> PartialEq<&'a str> for InternedString {
-    #[inline(always)]
-    fn eq(&self, other: & &'a str) -> bool {
-        PartialEq::eq(&self.string[..], *other)
-    }
-    #[inline(always)]
-    fn ne(&self, other: & &'a str) -> bool {
-        PartialEq::ne(&self.string[..], *other)
-    }
-}
-
-impl<'a> PartialEq<InternedString> for &'a str {
-    #[inline(always)]
-    fn eq(&self, other: &InternedString) -> bool {
-        PartialEq::eq(*self, &other.string[..])
-    }
-    #[inline(always)]
-    fn ne(&self, other: &InternedString) -> bool {
-        PartialEq::ne(*self, &other.string[..])
-    }
-}
-
-impl PartialEq<str> for InternedString {
-    #[inline(always)]
-    fn eq(&self, other: &str) -> bool {
-        PartialEq::eq(&self.string[..], other)
-    }
-    #[inline(always)]
-    fn ne(&self, other: &str) -> bool {
-        PartialEq::ne(&self.string[..], other)
-    }
-}
-
-impl PartialEq<InternedString> for str {
-    #[inline(always)]
-    fn eq(&self, other: &InternedString) -> bool {
-        PartialEq::eq(self, &other.string[..])
-    }
-    #[inline(always)]
-    fn ne(&self, other: &InternedString) -> bool {
-        PartialEq::ne(self, &other.string[..])
-    }
-}
-
-impl Decodable for InternedString {
-    fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
-        Ok(intern(&d.read_str()?).as_str())
-    }
-}
-
-impl Encodable for InternedString {
-    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        s.emit_str(&self.string)
-    }
-}
-
-/// Interns and returns the string contents of an identifier, using the
-/// thread-local interner.
-#[inline]
-pub fn intern_and_get_ident(s: &str) -> InternedString {
-    intern(s).as_str()
-}
-
-/// Maps a string to its interned representation.
-#[inline]
-pub fn intern(s: &str) -> ast::Name {
-    with_ident_interner(|interner| interner.intern(s))
-}
-
-/// gensym's a new usize, using the current interner.
-#[inline]
-pub fn gensym(s: &str) -> ast::Name {
-    with_ident_interner(|interner| interner.gensym(s))
-}
-
-/// Maps a string to an identifier with an empty syntax context.
-#[inline]
-pub fn str_to_ident(s: &str) -> ast::Ident {
-    ast::Ident::with_empty_ctxt(intern(s))
-}
-
-/// Maps a string to a gensym'ed identifier.
-#[inline]
-pub fn gensym_ident(s: &str) -> ast::Ident {
-    ast::Ident::with_empty_ctxt(gensym(s))
-}
-
-// create a fresh name that maps to the same string as the old one.
-// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
-// that is, that the new name and the old one are connected to ptr_eq strings.
-pub fn fresh_name(src: ast::Ident) -> ast::Name {
-    with_ident_interner(|interner| interner.gensym_copy(src.name))
-    // following: debug version. Could work in final except that it's incompatible with
-    // good error messages and uses of struct names in ambiguous could-be-binding
-    // locations. Also definitely destroys the guarantee given above about ptr_eq.
-    /*let num = rand::thread_rng().gen_uint_range(0,0xffff);
-    gensym(format!("{}_{}",ident_to_string(src),num))*/
-}
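Note: the keyword table and interner plumbing removed here reappear in the new `syntax::symbol` module (see below), so downstream code imports `keywords` from there instead of from `parse::token`. A small sketch of how the relocated constants are used, relying only on the `Keyword` accessors defined in that module (the function name is illustrative):

    use symbol::keywords;

    fn demo_keywords() {
        // Each keyword constant exposes an `Ident` with an empty syntax
        // context plus its interned `Name`.
        let kw = keywords::Fn;
        assert_eq!(kw.ident().name, kw.name());
        assert_eq!(&*kw.name().as_str(), "fn");
    }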
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 203c19285ac..3820f5ea90c 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -19,7 +19,7 @@ use attr;
 use codemap::{self, CodeMap};
 use syntax_pos::{self, BytePos};
 use errors;
-use parse::token::{self, keywords, BinOpToken, Token, InternedString};
+use parse::token::{self, BinOpToken, Token};
 use parse::lexer::comments;
 use parse;
 use print::pp::{self, break_offset, word, space, zerobreak, hardbreak};
@@ -27,6 +27,7 @@ use print::pp::{Breaks, eof};
 use print::pp::Breaks::{Consistent, Inconsistent};
 use ptr::P;
 use std_inject;
+use symbol::{Symbol, keywords};
 use tokenstream::{self, TokenTree};
 
 use std::ascii;
@@ -119,14 +120,13 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
         // of the feature gate, so we fake them up here.
 
         // #![feature(prelude_import)]
-        let prelude_import_meta = attr::mk_list_word_item(InternedString::new("prelude_import"));
-        let list = attr::mk_list_item(InternedString::new("feature"),
-                                      vec![prelude_import_meta]);
+        let prelude_import_meta = attr::mk_list_word_item(Symbol::intern("prelude_import"));
+        let list = attr::mk_list_item(Symbol::intern("feature"), vec![prelude_import_meta]);
         let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), list);
         try!(s.print_attribute(&fake_attr));
 
         // #![no_std]
-        let no_std_meta = attr::mk_word_item(InternedString::new("no_std"));
+        let no_std_meta = attr::mk_word_item(Symbol::intern("no_std"));
         let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), no_std_meta);
         try!(s.print_attribute(&fake_attr));
     }
@@ -630,7 +630,7 @@ pub trait PrintState<'a> {
             _ => ()
         }
         match lit.node {
-            ast::LitKind::Str(ref st, style) => self.print_string(&st, style),
+            ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
             ast::LitKind::Byte(byte) => {
                 let mut res = String::from("b'");
                 res.extend(ascii::escape_default(byte).map(|c| c as char));
@@ -664,7 +664,7 @@ pub trait PrintState<'a> {
                          &f,
                          t.ty_to_string()))
             }
-            ast::LitKind::FloatUnsuffixed(ref f) => word(self.writer(), &f[..]),
+            ast::LitKind::FloatUnsuffixed(ref f) => word(self.writer(), &f.as_str()),
             ast::LitKind::Bool(val) => {
                 if val { word(self.writer(), "true") } else { word(self.writer(), "false") }
             }
@@ -727,7 +727,7 @@ pub trait PrintState<'a> {
                               trailing_hardbreak: bool) -> io::Result<()> {
         let mut count = 0;
         for attr in attrs {
-            if attr.node.style == kind {
+            if attr.style == kind {
                 try!(self.print_attribute_inline(attr, is_inline));
                 if is_inline {
                     try!(self.nbsp());
@@ -751,11 +751,11 @@ pub trait PrintState<'a> {
             try!(self.hardbreak_if_not_bol());
         }
         try!(self.maybe_print_comment(attr.span.lo));
-        if attr.node.is_sugared_doc {
-            try!(word(self.writer(), &attr.value_str().unwrap()));
+        if attr.is_sugared_doc {
+            try!(word(self.writer(), &attr.value_str().unwrap().as_str()));
             hardbreak(self.writer())
         } else {
-            match attr.node.style {
+            match attr.style {
                 ast::AttrStyle::Inner => try!(word(self.writer(), "#![")),
                 ast::AttrStyle::Outer => try!(word(self.writer(), "#[")),
             }
@@ -778,16 +778,16 @@ pub trait PrintState<'a> {
     fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
         try!(self.ibox(INDENT_UNIT));
         match item.node {
-            ast::MetaItemKind::Word(ref name) => {
-                try!(word(self.writer(), &name));
+            ast::MetaItemKind::Word => {
+                try!(word(self.writer(), &item.name.as_str()));
             }
-            ast::MetaItemKind::NameValue(ref name, ref value) => {
-                try!(self.word_space(&name[..]));
+            ast::MetaItemKind::NameValue(ref value) => {
+                try!(self.word_space(&item.name.as_str()));
                 try!(self.word_space("="));
                 try!(self.print_literal(value));
             }
-            ast::MetaItemKind::List(ref name, ref items) => {
-                try!(word(self.writer(), &name));
+            ast::MetaItemKind::List(ref items) => {
+                try!(word(self.writer(), &item.name.as_str()));
                 try!(self.popen());
                 try!(self.commasep(Consistent,
                               &items[..],
@@ -2220,19 +2220,18 @@ impl<'a> State<'a> {
             ast::ExprKind::InlineAsm(ref a) => {
                 try!(word(&mut self.s, "asm!"));
                 try!(self.popen());
-                try!(self.print_string(&a.asm, a.asm_str_style));
+                try!(self.print_string(&a.asm.as_str(), a.asm_str_style));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, &a.outputs,
-                                   |s, out| {
-                    let mut ch = out.constraint.chars();
+                try!(self.commasep(Inconsistent, &a.outputs, |s, out| {
+                    let constraint = out.constraint.as_str();
+                    let mut ch = constraint.chars();
                     match ch.next() {
                         Some('=') if out.is_rw => {
                             try!(s.print_string(&format!("+{}", ch.as_str()),
                                            ast::StrStyle::Cooked))
                         }
-                        _ => try!(s.print_string(&out.constraint,
-                                            ast::StrStyle::Cooked))
+                        _ => try!(s.print_string(&constraint, ast::StrStyle::Cooked))
                     }
                     try!(s.popen());
                     try!(s.print_expr(&out.expr));
@@ -2242,9 +2241,8 @@ impl<'a> State<'a> {
                 try!(space(&mut self.s));
                 try!(self.word_space(":"));
 
-                try!(self.commasep(Inconsistent, &a.inputs,
-                                   |s, &(ref co, ref o)| {
-                    try!(s.print_string(&co, ast::StrStyle::Cooked));
+                try!(self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
+                    try!(s.print_string(&co.as_str(), ast::StrStyle::Cooked));
                     try!(s.popen());
                     try!(s.print_expr(&o));
                     try!(s.pclose());
@@ -2255,7 +2253,7 @@ impl<'a> State<'a> {
 
                 try!(self.commasep(Inconsistent, &a.clobbers,
                                    |s, co| {
-                    try!(s.print_string(&co, ast::StrStyle::Cooked));
+                    try!(s.print_string(&co.as_str(), ast::StrStyle::Cooked));
                     Ok(())
                 }));
 
@@ -3082,12 +3080,11 @@ mod tests {
 
     use ast;
     use codemap;
-    use parse::token;
     use syntax_pos;
 
     #[test]
     fn test_fun_to_string() {
-        let abba_ident = token::str_to_ident("abba");
+        let abba_ident = ast::Ident::from_str("abba");
 
         let decl = ast::FnDecl {
             inputs: Vec::new(),
@@ -3103,7 +3100,7 @@ mod tests {
 
     #[test]
     fn test_variant_to_string() {
-        let ident = token::str_to_ident("principal_skinner");
+        let ident = ast::Ident::from_str("principal_skinner");
 
         let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
             name: ident,
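Note: since a `MetaItem` now carries its own `name` and the `MetaItemKind` variants no longer repeat it, matching code follows the shape used in `print_meta_item` above. A hedged sketch of that pattern (the helper name is made up):

    use ast;

    // The name is read from the item itself; the variants only carry the payload.
    fn describe_meta_item(item: &ast::MetaItem) -> String {
        match item.node {
            ast::MetaItemKind::Word => format!("word `{}`", item.name),
            ast::MetaItemKind::NameValue(..) => format!("`{} = ...`", item.name),
            ast::MetaItemKind::List(ref items) => {
                format!("`{}(...)` with {} nested items", item.name, items.len())
            }
        }
    }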
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index 1b63a2b7076..6a291ad9c40 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -10,10 +10,10 @@
 
 use ast;
 use attr;
+use symbol::{Symbol, keywords};
 use syntax_pos::{DUMMY_SP, Span};
 use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute};
-use parse::token::{intern, InternedString, keywords};
-use parse::{token, ParseSess};
+use parse::ParseSess;
 use ptr::P;
 
 /// Craft a span that will be ignored by the stability lint's
@@ -23,7 +23,7 @@ fn ignored_span(sess: &ParseSess, sp: Span) -> Span {
     let info = ExpnInfo {
         call_site: DUMMY_SP,
         callee: NameAndSpan {
-            format: MacroAttribute(intern("std_inject")),
+            format: MacroAttribute(Symbol::intern("std_inject")),
             span: None,
             allow_internal_unstable: true,
         }
@@ -53,14 +53,14 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
         None => return krate,
     };
 
-    let crate_name = token::intern(&alt_std_name.unwrap_or(name.to_string()));
+    let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
 
     krate.module.items.insert(0, P(ast::Item {
         attrs: vec![attr::mk_attr_outer(attr::mk_attr_id(),
-                                        attr::mk_word_item(InternedString::new("macro_use")))],
+                                        attr::mk_word_item(Symbol::intern("macro_use")))],
         vis: ast::Visibility::Inherited,
         node: ast::ItemKind::ExternCrate(Some(crate_name)),
-        ident: token::str_to_ident(name),
+        ident: ast::Ident::from_str(name),
         id: ast::DUMMY_NODE_ID,
         span: DUMMY_SP,
     }));
@@ -68,22 +68,21 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
     let span = ignored_span(sess, DUMMY_SP);
     krate.module.items.insert(0, P(ast::Item {
         attrs: vec![ast::Attribute {
-            node: ast::Attribute_ {
-                style: ast::AttrStyle::Outer,
-                value: P(ast::MetaItem {
-                    node: ast::MetaItemKind::Word(token::intern_and_get_ident("prelude_import")),
-                    span: span,
-                }),
-                id: attr::mk_attr_id(),
-                is_sugared_doc: false,
+            style: ast::AttrStyle::Outer,
+            value: ast::MetaItem {
+                name: Symbol::intern("prelude_import"),
+                node: ast::MetaItemKind::Word,
+                span: span,
             },
+            id: attr::mk_attr_id(),
+            is_sugared_doc: false,
             span: span,
         }],
         vis: ast::Visibility::Inherited,
         node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path {
             global: false,
             segments: vec![name, "prelude", "v1"].into_iter().map(|name| ast::PathSegment {
-                identifier: token::str_to_ident(name),
+                identifier: ast::Ident::from_str(name),
                 parameters: ast::PathParameters::none(),
             }).collect(),
             span: span,
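Note: with `Attribute_` gone, attributes are plain structs and their `MetaItem` values are no longer wrapped in `P`, as the rewritten `prelude_import` injection above shows. Most call sites can stay short by using the `attr` helpers; a minimal sketch built only from helpers that appear elsewhere in this diff (the function itself is hypothetical):

    use ast;
    use attr;
    use symbol::Symbol;

    // Builds an inner word attribute such as `#![no_std]` under the new layout:
    // the helper takes a `Symbol` and the resulting `MetaItem` is passed by value.
    fn make_inner_word_attr(name: &str) -> ast::Attribute {
        let meta = attr::mk_word_item(Symbol::intern(name));
        attr::mk_attr_inner(attr::mk_attr_id(), meta)
    }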
diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs
new file mode 100644
index 00000000000..fe9a176179c
--- /dev/null
+++ b/src/libsyntax/symbol.rs
@@ -0,0 +1,303 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! An "interner" is a data structure that associates values with usize tags and
+//! allows bidirectional lookup; i.e. given a value, one can easily find the
+//! tag, and vice versa.
+
+use serialize::{Decodable, Decoder, Encodable, Encoder};
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::fmt;
+
+/// A symbol is an interned or gensymed string.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Symbol(u32);
+
+// The interner is thread-local, so `Symbol` shouldn't move between threads.
+impl !Send for Symbol { }
+
+impl Symbol {
+    /// Maps a string to its interned representation.
+    pub fn intern(string: &str) -> Self {
+        with_interner(|interner| interner.intern(string))
+    }
+
+    /// gensym's a new usize, using the current interner.
+    pub fn gensym(string: &str) -> Self {
+        with_interner(|interner| interner.gensym(string))
+    }
+
+    pub fn as_str(self) -> InternedString {
+        with_interner(|interner| unsafe {
+            InternedString {
+                string: ::std::mem::transmute::<&str, &str>(interner.get(self))
+            }
+        })
+    }
+
+    pub fn as_u32(self) -> u32 {
+        self.0
+    }
+}
+
+impl fmt::Debug for Symbol {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{}({})", self, self.0)
+    }
+}
+
+impl fmt::Display for Symbol {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&self.as_str(), f)
+    }
+}
+
+impl Encodable for Symbol {
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        s.emit_str(&self.as_str())
+    }
+}
+
+impl Decodable for Symbol {
+    fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> {
+        Ok(Symbol::intern(&d.read_str()?))
+    }
+}
+
+impl<'a> PartialEq<&'a str> for Symbol {
+    fn eq(&self, other: &&str) -> bool {
+        *self.as_str() == **other
+    }
+}
+
+#[derive(Default)]
+pub struct Interner {
+    names: HashMap<Box<str>, Symbol>,
+    strings: Vec<Box<str>>,
+}
+
+impl Interner {
+    pub fn new() -> Self {
+        Interner::default()
+    }
+
+    fn prefill(init: &[&str]) -> Self {
+        let mut this = Interner::new();
+        for &string in init {
+            this.intern(string);
+        }
+        this
+    }
+
+    pub fn intern(&mut self, string: &str) -> Symbol {
+        if let Some(&name) = self.names.get(string) {
+            return name;
+        }
+
+        let name = Symbol(self.strings.len() as u32);
+        let string = string.to_string().into_boxed_str();
+        self.strings.push(string.clone());
+        self.names.insert(string, name);
+        name
+    }
+
+    fn gensym(&mut self, string: &str) -> Symbol {
+        let gensym = Symbol(self.strings.len() as u32);
+        // leave out of `names` to avoid colliding
+        self.strings.push(string.to_string().into_boxed_str());
+        gensym
+    }
+
+    pub fn get(&self, name: Symbol) -> &str {
+        &self.strings[name.0 as usize]
+    }
+}
+
+// In this macro, there is the requirement that the name (the number) must be monotonically
+// increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
+// except starting from the next number instead of zero.
+macro_rules! declare_keywords {(
+    $( ($index: expr, $konst: ident, $string: expr) )*
+) => {
+    pub mod keywords {
+        use ast;
+        #[derive(Clone, Copy, PartialEq, Eq)]
+        pub struct Keyword {
+            ident: ast::Ident,
+        }
+        impl Keyword {
+            #[inline] pub fn ident(self) -> ast::Ident { self.ident }
+            #[inline] pub fn name(self) -> ast::Name { self.ident.name }
+        }
+        $(
+            #[allow(non_upper_case_globals)]
+            pub const $konst: Keyword = Keyword {
+                ident: ast::Ident::with_empty_ctxt(ast::Name($index))
+            };
+        )*
+    }
+
+    impl Interner {
+        fn fresh() -> Self {
+            Interner::prefill(&[$($string,)*])
+        }
+    }
+}}
+
+// NB: leaving holes in the ident table is bad! a different ident will get
+// interned with the id from the hole, but it will be between the min and max
+// of the reserved words, and thus tagged as "reserved".
+// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
+// this should be rarely necessary though if the keywords are kept in alphabetic order.
+declare_keywords! {
+    // Invalid identifier
+    (0,  Invalid,        "")
+
+    // Strict keywords used in the language.
+    (1,  As,             "as")
+    (2,  Box,            "box")
+    (3,  Break,          "break")
+    (4,  Const,          "const")
+    (5,  Continue,       "continue")
+    (6,  Crate,          "crate")
+    (7,  Else,           "else")
+    (8,  Enum,           "enum")
+    (9,  Extern,         "extern")
+    (10, False,          "false")
+    (11, Fn,             "fn")
+    (12, For,            "for")
+    (13, If,             "if")
+    (14, Impl,           "impl")
+    (15, In,             "in")
+    (16, Let,            "let")
+    (17, Loop,           "loop")
+    (18, Match,          "match")
+    (19, Mod,            "mod")
+    (20, Move,           "move")
+    (21, Mut,            "mut")
+    (22, Pub,            "pub")
+    (23, Ref,            "ref")
+    (24, Return,         "return")
+    (25, SelfValue,      "self")
+    (26, SelfType,       "Self")
+    (27, Static,         "static")
+    (28, Struct,         "struct")
+    (29, Super,          "super")
+    (30, Trait,          "trait")
+    (31, True,           "true")
+    (32, Type,           "type")
+    (33, Unsafe,         "unsafe")
+    (34, Use,            "use")
+    (35, Where,          "where")
+    (36, While,          "while")
+
+    // Keywords reserved for future use.
+    (37, Abstract,       "abstract")
+    (38, Alignof,        "alignof")
+    (39, Become,         "become")
+    (40, Do,             "do")
+    (41, Final,          "final")
+    (42, Macro,          "macro")
+    (43, Offsetof,       "offsetof")
+    (44, Override,       "override")
+    (45, Priv,           "priv")
+    (46, Proc,           "proc")
+    (47, Pure,           "pure")
+    (48, Sizeof,         "sizeof")
+    (49, Typeof,         "typeof")
+    (50, Unsized,        "unsized")
+    (51, Virtual,        "virtual")
+    (52, Yield,          "yield")
+
+    // Weak keywords have special meaning only in specific contexts.
+    (53, Default,        "default")
+    (54, StaticLifetime, "'static")
+    (55, Union,          "union")
+}
+
+// If an interner exists in TLS, return it. Otherwise, prepare a fresh one.
+fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
+    thread_local!(static INTERNER: RefCell<Interner> = {
+        RefCell::new(Interner::fresh())
+    });
+    INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
+}
+
+/// Represents a string stored in the thread-local interner. Because the
+/// interner lives for the life of the thread, this can be safely treated as an
+/// immortal string, as long as it never crosses between threads.
+///
+/// FIXME(pcwalton): You must be careful about what you do in the destructors
+/// of objects stored in TLS, because they may run after the interner is
+/// destroyed. In particular, they must not access string contents. This can
+/// be fixed in the future by just leaking all strings until thread death
+/// somehow.
+#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
+pub struct InternedString {
+    string: &'static str,
+}
+
+impl !Send for InternedString { }
+
+impl ::std::ops::Deref for InternedString {
+    type Target = str;
+    fn deref(&self) -> &str { self.string }
+}
+
+impl fmt::Debug for InternedString {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(self.string, f)
+    }
+}
+
+impl fmt::Display for InternedString {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(self.string, f)
+    }
+}
+
+impl Decodable for InternedString {
+    fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
+        Ok(Symbol::intern(&d.read_str()?).as_str())
+    }
+}
+
+impl Encodable for InternedString {
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        s.emit_str(self.string)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use ast::Name;
+
+    #[test]
+    fn interner_tests() {
+        let mut i: Interner = Interner::new();
+        // first one is zero:
+        assert_eq!(i.intern("dog"), Name(0));
+        // re-use gets the same entry:
+        assert_eq!(i.intern("dog"), Name(0));
+        // different string gets a different #:
+        assert_eq!(i.intern("cat"), Name(1));
+        assert_eq!(i.intern("cat"), Name(1));
+        // dog is still at zero
+        assert_eq!(i.intern("dog"), Name(0));
+        // gensym gets the next id (2)
+        assert_eq!(i.gensym("zebra"), Name(2));
+        // gensym of the same string gets a new number:
+        assert_eq!(i.gensym("zebra"), Name(3));
+        // gensym of *existing* string gets new number:
+        assert_eq!(i.gensym("dog"), Name(4));
+    }
+}
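Note: a short usage sketch of the new module, relying only on the `Symbol` API defined above:

    use symbol::Symbol;

    fn demo_symbols() {
        // Interning the same string twice yields the same symbol...
        let a = Symbol::intern("foo");
        assert_eq!(a, Symbol::intern("foo"));

        // ...while `gensym` always mints a fresh id, even for an existing string.
        let g = Symbol::gensym("foo");
        assert!(a != g);

        // `as_str()` hands back an `InternedString` that derefs to `str`,
        // and symbols also compare directly against string slices.
        assert_eq!(&*a.as_str(), "foo");
        assert!(a == "foo");
    }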
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 59a7e75d125..4de3baf7d14 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -34,21 +34,21 @@ use ext::expand::ExpansionConfig;
 use fold::Folder;
 use util::move_map::MoveMap;
 use fold;
-use parse::token::{intern, keywords, InternedString};
 use parse::{token, ParseSess};
 use print::pprust;
-use ast;
+use ast::{self, Ident};
 use ptr::P;
+use symbol::{self, Symbol, keywords};
 use util::small_vector::SmallVector;
 
 enum ShouldPanic {
     No,
-    Yes(Option<InternedString>),
+    Yes(Option<Symbol>),
 }
 
 struct Test {
     span: Span,
-    path: Vec<ast::Ident> ,
+    path: Vec<Ident> ,
     bench: bool,
     ignore: bool,
     should_panic: ShouldPanic
@@ -57,14 +57,14 @@ struct Test {
 struct TestCtxt<'a> {
     sess: &'a ParseSess,
     span_diagnostic: &'a errors::Handler,
-    path: Vec<ast::Ident>,
+    path: Vec<Ident>,
     ext_cx: ExtCtxt<'a>,
     testfns: Vec<Test>,
-    reexport_test_harness_main: Option<InternedString>,
+    reexport_test_harness_main: Option<Symbol>,
     is_test_crate: bool,
 
     // top-level re-export submodule, filled out after folding is finished
-    toplevel_reexport: Option<ast::Ident>,
+    toplevel_reexport: Option<Ident>,
 }
 
 // Traverse the crate, collecting all the test functions, eliding any
@@ -91,10 +91,10 @@ pub fn modify_for_testing(sess: &ParseSess,
 
 struct TestHarnessGenerator<'a> {
     cx: TestCtxt<'a>,
-    tests: Vec<ast::Ident>,
+    tests: Vec<Ident>,
 
     // submodule name, gensym'd identifier for re-exports
-    tested_submods: Vec<(ast::Ident, ast::Ident)>,
+    tested_submods: Vec<(Ident, Ident)>,
 }
 
 impl<'a> fold::Folder for TestHarnessGenerator<'a> {
@@ -191,8 +191,8 @@ impl fold::Folder for EntryPointCleaner {
             EntryPointType::MainAttr |
             EntryPointType::Start =>
                 folded.map(|ast::Item {id, ident, attrs, node, vis, span}| {
-                    let allow_str = InternedString::new("allow");
-                    let dead_code_str = InternedString::new("dead_code");
+                    let allow_str = Symbol::intern("allow");
+                    let dead_code_str = Symbol::intern("dead_code");
                     let word_vec = vec![attr::mk_list_word_item(dead_code_str)];
                     let allow_dead_code_item = attr::mk_list_item(allow_str, word_vec);
                     let allow_dead_code = attr::mk_attr_outer(attr::mk_attr_id(),
@@ -222,15 +222,18 @@ impl fold::Folder for EntryPointCleaner {
     fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac }
 }
 
-fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident>,
-                   tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P<ast::Item>, ast::Ident) {
-    let super_ = token::str_to_ident("super");
+fn mk_reexport_mod(cx: &mut TestCtxt,
+                   parent: ast::NodeId,
+                   tests: Vec<Ident>,
+                   tested_submods: Vec<(Ident, Ident)>)
+                   -> (P<ast::Item>, Ident) {
+    let super_ = Ident::from_str("super");
 
     // Generate imports with `#[allow(private_in_public)]` to work around issue #36768.
     let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list(
         DUMMY_SP,
-        InternedString::new("allow"),
-        vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, InternedString::new("private_in_public"))],
+        Symbol::intern("allow"),
+        vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, Symbol::intern("private_in_public"))],
     ));
     let items = tests.into_iter().map(|r| {
         cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public,
@@ -247,7 +250,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident
         items: items,
     };
 
-    let sym = token::gensym_ident("__test_reexports");
+    let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports"));
     let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
     cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
     let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
@@ -264,7 +267,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident
 
 fn generate_test_harness(sess: &ParseSess,
                          resolver: &mut Resolver,
-                         reexport_test_harness_main: Option<InternedString>,
+                         reexport_test_harness_main: Option<Symbol>,
                          krate: ast::Crate,
                          sd: &errors::Handler) -> ast::Crate {
     // Remove the entry points
@@ -286,7 +289,7 @@ fn generate_test_harness(sess: &ParseSess,
     cx.ext_cx.bt_push(ExpnInfo {
         call_site: DUMMY_SP,
         callee: NameAndSpan {
-            format: MacroAttribute(intern("test")),
+            format: MacroAttribute(Symbol::intern("test")),
             span: None,
             allow_internal_unstable: false,
         }
@@ -306,7 +309,7 @@ fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
     let info = ExpnInfo {
         call_site: DUMMY_SP,
         callee: NameAndSpan {
-            format: MacroAttribute(intern("test")),
+            format: MacroAttribute(Symbol::intern("test")),
             span: None,
             allow_internal_unstable: true,
         }
@@ -456,7 +459,7 @@ mod __test {
 */
 
 fn mk_std(cx: &TestCtxt) -> P<ast::Item> {
-    let id_test = token::str_to_ident("test");
+    let id_test = Ident::from_str("test");
     let (vi, vis, ident) = if cx.is_test_crate {
         (ast::ItemKind::Use(
             P(nospan(ast::ViewPathSimple(id_test,
@@ -487,16 +490,17 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
     let ecx = &cx.ext_cx;
 
     // test::test_main_static
-    let test_main_path = ecx.path(sp, vec![token::str_to_ident("test"),
-                                           token::str_to_ident("test_main_static")]);
+    let test_main_path =
+        ecx.path(sp, vec![Ident::from_str("test"), Ident::from_str("test_main_static")]);
+
     // test::test_main_static(...)
     let test_main_path_expr = ecx.expr_path(test_main_path);
-    let tests_ident_expr = ecx.expr_ident(sp, token::str_to_ident("TESTS"));
+    let tests_ident_expr = ecx.expr_ident(sp, Ident::from_str("TESTS"));
     let call_test_main = ecx.expr_call(sp, test_main_path_expr,
                                        vec![tests_ident_expr]);
     let call_test_main = ecx.stmt_expr(call_test_main);
     // #![main]
-    let main_meta = ecx.meta_word(sp, token::intern_and_get_ident("main"));
+    let main_meta = ecx.meta_word(sp, Symbol::intern("main"));
     let main_attr = ecx.attribute(sp, main_meta);
     // pub fn main() { ... }
     let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));
@@ -506,7 +510,7 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
                            dummy_spanned(ast::Constness::NotConst),
                            ::abi::Abi::Rust, ast::Generics::default(), main_body);
     let main = P(ast::Item {
-        ident: token::str_to_ident("main"),
+        ident: Ident::from_str("main"),
         attrs: vec![main_attr],
         id: ast::DUMMY_NODE_ID,
         node: main,
@@ -533,7 +537,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
         items: vec![import, mainfn, tests],
     };
     let item_ = ast::ItemKind::Mod(testmod);
-    let mod_ident = token::gensym_ident("__test");
+    let mod_ident = Ident::with_empty_ctxt(Symbol::gensym("__test"));
 
     let mut expander = cx.ext_cx.monotonic_expander();
     let item = expander.fold_item(P(ast::Item {
@@ -544,13 +548,13 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
         vis: ast::Visibility::Public,
         span: DUMMY_SP,
     })).pop().unwrap();
-    let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
+    let reexport = cx.reexport_test_harness_main.map(|s| {
         // building `use <ident> = __test::main`
-        let reexport_ident = token::str_to_ident(&s);
+        let reexport_ident = Ident::with_empty_ctxt(s);
 
         let use_path =
             nospan(ast::ViewPathSimple(reexport_ident,
-                                       path_node(vec![mod_ident, token::str_to_ident("main")])));
+                                       path_node(vec![mod_ident, Ident::from_str("main")])));
 
         expander.fold_item(P(ast::Item {
             id: ast::DUMMY_NODE_ID,
@@ -571,7 +575,7 @@ fn nospan<T>(t: T) -> codemap::Spanned<T> {
     codemap::Spanned { node: t, span: DUMMY_SP }
 }
 
-fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
+fn path_node(ids: Vec<Ident>) -> ast::Path {
     ast::Path {
         span: DUMMY_SP,
         global: false,
@@ -582,7 +586,7 @@ fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
     }
 }
 
-fn path_name_i(idents: &[ast::Ident]) -> String {
+fn path_name_i(idents: &[Ident]) -> String {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
     idents.iter().map(|i| i.to_string()).collect::<Vec<String>>().join("::")
 }
@@ -614,7 +618,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
 
 fn is_test_crate(krate: &ast::Crate) -> bool {
     match attr::find_crate_name(&krate.attrs) {
-        Some(ref s) if "test" == &s[..] => true,
+        Some(s) if "test" == &*s.as_str() => true,
         _ => false
     }
 }
@@ -660,7 +664,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {
 
     // path to the #[test] function: "foo::bar::baz"
     let path_string = path_name_i(&path[..]);
-    let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..]));
+    let name_expr = ecx.expr_str(span, Symbol::intern(&path_string));
 
     // self::test::StaticTestName($name_expr)
     let name_expr = ecx.expr_call(span,
@@ -673,10 +677,10 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {
     };
     let fail_expr = match test.should_panic {
         ShouldPanic::No => ecx.expr_path(should_panic_path("No")),
-        ShouldPanic::Yes(ref msg) => {
-            match *msg {
-                Some(ref msg) => {
-                    let msg = ecx.expr_str(span, msg.clone());
+        ShouldPanic::Yes(msg) => {
+            match msg {
+                Some(msg) => {
+                    let msg = ecx.expr_str(span, msg);
                     let path = should_panic_path("YesWithMessage");
                     ecx.expr_call(span, ecx.expr_path(path), vec![msg])
                 }
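Note: the test-harness changes are mostly mechanical renames of the old `parse::token` helpers; the rough correspondence used throughout this file (and in tokenstream.rs below), sketched as a standalone snippet with invented bindings:

    use ast::Ident;
    use symbol::Symbol;

    fn demo_ident_helpers() {
        // token::str_to_ident("main")        ->  Ident::from_str("main")
        let main_ident = Ident::from_str("main");
        // token::gensym_ident("__test")      ->  Ident::with_empty_ctxt(Symbol::gensym("__test"))
        let test_mod = Ident::with_empty_ctxt(Symbol::gensym("__test"));
        // token::intern_and_get_ident("foo") ->  Symbol::intern("foo"), with `.as_str()`
        // added where a string is actually needed.
        let name = Symbol::intern("foo");
        let _ = (main_ident, test_mod, name);
    }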
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 9ef6c07e489..0d5dcaf339f 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -34,6 +34,7 @@ use parse::lexer;
 use parse;
 use parse::token::{self, Token, Lit, Nonterminal};
 use print::pprust;
+use symbol::Symbol;
 
 use std::fmt;
 use std::iter::*;
@@ -173,10 +174,10 @@ impl TokenTree {
                 TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
                     open_span: sp,
-                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
+                    tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
                               TokenTree::Token(sp, token::Eq),
                               TokenTree::Token(sp, token::Literal(
-                                  token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
+                                  token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
                     close_span: sp,
                 }))
             }
@@ -295,7 +296,7 @@ impl TokenTree {
     pub fn maybe_str(&self) -> Option<ast::Lit> {
         match *self {
             TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
-                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                let l = LitKind::Str(Symbol::intern(&parse::str_lit(&s.as_str())),
                                      ast::StrStyle::Cooked);
                 Some(Spanned {
                     node: l,
@@ -303,7 +304,7 @@ impl TokenTree {
                 })
             }
             TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
-                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                let l = LitKind::Str(Symbol::intern(&parse::raw_str_lit(&s.as_str())),
                                      ast::StrStyle::Raw(n));
                 Some(Spanned {
                     node: l,
@@ -871,8 +872,9 @@ impl Index<usize> for InternalTS {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use syntax::ast::Ident;
     use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
-    use parse::token::{self, str_to_ident, Token};
+    use parse::token::{self, Token};
     use util::parser_testing::string_to_tts;
     use std::rc::Rc;
 
@@ -967,15 +969,17 @@ mod tests {
         let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()))
             .slice(2..3);
         let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5,8),
-                                                    token::Ident(str_to_ident("bar")))]);
+                                                    token::Ident(Ident::from_str("bar")))]);
         assert_eq!(test_res, test_eqs)
     }
 
     #[test]
     fn test_is_empty() {
         let test0 = TokenStream::from_tts(Vec::new());
-        let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
-                                                                Token::Ident(str_to_ident("a")))]);
+        let test1 = TokenStream::from_tts(
+            vec![TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a")))]
+        );
+
         let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
 
         assert_eq!(test0.is_empty(), true);
@@ -1035,20 +1039,20 @@ mod tests {
         assert_eq!(test0, None);
 
         let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
-                                                        token::Ident(str_to_ident("bar"))),
+                                                        token::Ident(Ident::from_str("bar"))),
                                        TokenTree::Token(sp(4, 6), token::ModSep),
                                        TokenTree::Token(sp(6, 9),
-                                                        token::Ident(str_to_ident("baz")))]);
+                                                        token::Ident(Ident::from_str("baz")))]);
         assert_eq!(test1, Some(test1_expected));
 
         let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
-                                                        token::Ident(str_to_ident("foo"))),
+                                                        token::Ident(Ident::from_str("foo"))),
                                        TokenTree::Token(sp(4, 5), token::Comma),
                                        TokenTree::Token(sp(5, 8),
-                                                        token::Ident(str_to_ident("bar"))),
+                                                        token::Ident(Ident::from_str("bar"))),
                                        TokenTree::Token(sp(8, 9), token::Comma),
                                        TokenTree::Token(sp(9, 12),
-                                                        token::Ident(str_to_ident("baz")))]);
+                                                        token::Ident(Ident::from_str("baz")))]);
         assert_eq!(test2, Some(test2_expected));
 
         assert_eq!(test3, None);
@@ -1069,7 +1073,7 @@ mod tests {
 
         assert_eq!(test0, None);
         assert_eq!(test1, None);
-        assert_eq!(test2, Some(str_to_ident("foo")));
+        assert_eq!(test2, Some(Ident::from_str("foo")));
         assert_eq!(test3, None);
         assert_eq!(test4, None);
     }
@@ -1079,9 +1083,9 @@ mod tests {
         let test0 = as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
         let test1 = as_paren_delimited_stream(string_to_tts("baz(foo,bar)".to_string()));
 
-        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
+        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("foo"))),
                              TokenTree::Token(sp(3, 4), token::Comma),
-                             TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("bar"))),
                              TokenTree::Token(sp(7, 8), token::Comma)];
         let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
                                                                Rc::new(Delimited {
@@ -1094,11 +1098,11 @@ mod tests {
         assert_eq!(test0, test0_stream);
 
 
-        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
+        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("foo"))),
                              TokenTree::Token(sp(7, 8), token::Comma),
-                             TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))];
+                             TokenTree::Token(sp(8, 11), token::Ident(Ident::from_str("bar")))];
 
-        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
+        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("baz"))),
                                TokenTree::Delimited(sp(3, 12),
                                                     Rc::new(Delimited {
                                                         delim: token::DelimToken::Paren,
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
deleted file mode 100644
index f56c6cedcd1..00000000000
--- a/src/libsyntax/util/interner.rs
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! An "interner" is a data structure that associates values with usize tags and
-//! allows bidirectional lookup; i.e. given a value, one can easily find the
-//! type, and vice versa.
-
-use ast::Name;
-
-use std::collections::HashMap;
-use std::rc::Rc;
-
-#[derive(Default)]
-pub struct Interner {
-    names: HashMap<Rc<str>, Name>,
-    strings: Vec<Rc<str>>,
-}
-
-/// When traits can extend traits, we should extend index<Name,T> to get []
-impl Interner {
-    pub fn new() -> Self {
-        Interner::default()
-    }
-
-    pub fn prefill(init: &[&str]) -> Self {
-        let mut this = Interner::new();
-        for &string in init {
-            this.intern(string);
-        }
-        this
-    }
-
-    pub fn intern(&mut self, string: &str) -> Name {
-        if let Some(&name) = self.names.get(string) {
-            return name;
-        }
-
-        let name = Name(self.strings.len() as u32);
-        let string = Rc::__from_str(string);
-        self.strings.push(string.clone());
-        self.names.insert(string, name);
-        name
-    }
-
-    pub fn gensym(&mut self, string: &str) -> Name {
-        let gensym = Name(self.strings.len() as u32);
-        // leave out of `names` to avoid colliding
-        self.strings.push(Rc::__from_str(string));
-        gensym
-    }
-
-    /// Create a gensym with the same name as an existing entry.
-    pub fn gensym_copy(&mut self, name: Name) -> Name {
-        let gensym = Name(self.strings.len() as u32);
-        // leave out of `names` to avoid colliding
-        let string = self.strings[name.0 as usize].clone();
-        self.strings.push(string);
-        gensym
-    }
-
-    pub fn get(&self, name: Name) -> Rc<str> {
-        self.strings[name.0 as usize].clone()
-    }
-
-    pub fn find(&self, string: &str) -> Option<Name> {
-        self.names.get(string).cloned()
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use ast::Name;
-
-    #[test]
-    fn interner_tests() {
-        let mut i: Interner = Interner::new();
-        // first one is zero:
-        assert_eq!(i.intern("dog"), Name(0));
-        // re-use gets the same entry:
-        assert_eq!(i.intern ("dog"), Name(0));
-        // different string gets a different #:
-        assert_eq!(i.intern("cat"), Name(1));
-        assert_eq!(i.intern("cat"), Name(1));
-        // dog is still at zero
-        assert_eq!(i.intern("dog"), Name(0));
-        // gensym gets 3
-        assert_eq!(i.gensym("zebra"), Name(2));
-        // gensym of same string gets new number :
-        assert_eq!(i.gensym("zebra"), Name(3));
-        // gensym of *existing* string gets new number:
-        assert_eq!(i.gensym("dog"), Name(4));
-        // gensym tests again with gensym_copy:
-        assert_eq!(i.gensym_copy(Name(2)), Name(5));
-        assert_eq!(&*i.get(Name(5)), "zebra");
-        assert_eq!(i.gensym_copy(Name(2)), Name(6));
-        assert_eq!(&*i.get(Name(6)), "zebra");
-        assert_eq!(&*i.get(Name(0)), "dog");
-        assert_eq!(&*i.get(Name(1)), "cat");
-        assert_eq!(&*i.get(Name(2)), "zebra");
-        assert_eq!(&*i.get(Name(3)), "zebra");
-        assert_eq!(&*i.get(Name(4)), "dog");
-    }
-}
diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs
index e0796c34e57..a6fff2d7074 100644
--- a/src/libsyntax/util/lev_distance.rs
+++ b/src/libsyntax/util/lev_distance.rs
@@ -8,9 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::Name;
 use std::cmp;
-use parse::token::InternedString;
+use symbol::Symbol;
 
 /// To find the Levenshtein distance between two strings
 pub fn lev_distance(a: &str, b: &str) -> usize {
@@ -48,14 +47,14 @@ pub fn lev_distance(a: &str, b: &str) -> usize {
 /// to one-third of the given word
 pub fn find_best_match_for_name<'a, T>(iter_names: T,
                                        lookup: &str,
-                                       dist: Option<usize>) -> Option<InternedString>
-    where T: Iterator<Item = &'a Name> {
+                                       dist: Option<usize>) -> Option<Symbol>
+    where T: Iterator<Item = &'a Symbol> {
     let max_dist = dist.map_or_else(|| cmp::max(lookup.len(), 3) / 3, |d| d);
     iter_names
-    .filter_map(|name| {
+    .filter_map(|&name| {
         let dist = lev_distance(lookup, &name.as_str());
         match dist <= max_dist {    // filter the unwanted cases
-            true => Some((name.as_str(), dist)),
+            true => Some((name, dist)),
             false => None,
         }
     })
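Note: `find_best_match_for_name` now both consumes and produces `Symbol`s, so suggestion code converts to a string only when the diagnostic is rendered. An illustrative caller (names invented):

    use symbol::Symbol;
    use util::lev_distance::find_best_match_for_name;

    fn demo_suggestion() {
        let candidates = vec![Symbol::intern("println"), Symbol::intern("print")];
        // `None` falls back to the default cutoff of roughly one third of
        // the lookup's length.
        if let Some(best) = find_best_match_for_name(candidates.iter(), "printn", None) {
            println!("did you mean `{}`?", best);
        }
    }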
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index df4eb1c9ed7..ce24fe1eb61 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -7,7 +7,8 @@
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
-use parse::token::{Token, BinOpToken, keywords};
+use parse::token::{Token, BinOpToken};
+use symbol::keywords;
 use ast::BinOpKind;
 
 /// Associative operator with precedence.
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 76d3f2a063c..e703dc6b419 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -8,11 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast;
+use ast::{self, Ident};
 use parse::{ParseSess,PResult,filemap_to_tts};
 use parse::{lexer, new_parser_from_source_str};
 use parse::parser::Parser;
-use parse::token;
 use ptr::P;
 use tokenstream;
 use std::iter::Peekable;
@@ -78,9 +77,9 @@ pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     })
 }
 
-/// Convert a vector of strings to a vector of ast::Ident's
-pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<ast::Ident> {
-    ids.iter().map(|u| token::str_to_ident(*u)).collect()
+/// Convert a vector of strings to a vector of Ident's
+pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<Ident> {
+    ids.iter().map(|u| Ident::from_str(*u)).collect()
 }
 
 /// Does the given string match the pattern? whitespace in the first string