Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs        |  61
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs   |  74
-rw-r--r--  src/libsyntax/parse/mod.rs         | 103
-rw-r--r--  src/libsyntax/parse/parser.rs      |  75
-rw-r--r--  src/libsyntax/parse/token.rs       | 274
5 files changed, 137 insertions(+), 450 deletions(-)
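
The patch below is mostly a mechanical migration off the old thread-local interner API in `parse::token` (`token::intern`, `token::intern_and_get_ident`, `str_to_ident`, `InternedString`) onto the new `symbol` module (`Symbol::intern`, `Ident::from_str`, `Name::as_str`). As a rough mental model, a `Symbol` is a small Copy index into an interner table, and `as_str()` resolves it back to text. The following is a self-contained sketch of that shape, not rustc's actual `symbol` module:

use std::collections::HashMap;

// Illustrative sketch only: a `Symbol` as a Copy index into an interner,
// mirroring the shape of the API this patch migrates to.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Symbol(u32);

#[derive(Default)]
struct Interner {
    names: HashMap<String, Symbol>,
    strings: Vec<String>,
}

impl Interner {
    fn intern(&mut self, string: &str) -> Symbol {
        if let Some(&sym) = self.names.get(string) {
            return sym;
        }
        let sym = Symbol(self.strings.len() as u32);
        self.strings.push(string.to_owned());
        self.names.insert(string.to_owned(), sym);
        sym
    }

    fn as_str(&self, sym: Symbol) -> &str {
        &self.strings[sym.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("fn");
    let b = interner.intern("fn");
    assert_eq!(a, b);                     // interning the same text twice yields the same Symbol
    assert_eq!(interner.as_str(a), "fn"); // a Symbol resolves back to its text
}

Because a symbol is a plain index that can be copied and compared by value, the later hunks can drop helpers such as `id_to_interned_str` and pass literal suffixes around as `Option<Symbol>`.
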
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 983c882eafc..ded676da3c6 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -11,12 +11,11 @@
 use attr;
 use ast;
 use syntax_pos::{mk_sp, Span};
-use codemap::{spanned, Spanned};
+use codemap::spanned;
 use parse::common::SeqSep;
 use parse::PResult;
 use parse::token;
 use parse::parser::{Parser, TokenType};
-use ptr::P;
 
 #[derive(PartialEq, Eq, Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -49,13 +48,9 @@ impl<'a> Parser<'a> {
                     just_parsed_doc_comment = false;
                 }
                 token::DocComment(s) => {
-                    let attr = ::attr::mk_sugared_doc_attr(
-                        attr::mk_attr_id(),
-                        self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)),
-                        self.span.lo,
-                        self.span.hi
-                    );
-                    if attr.node.style != ast::AttrStyle::Outer {
+                    let Span { lo, hi, .. } = self.span;
+                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
+                    if attr.style != ast::AttrStyle::Outer {
                         let mut err = self.fatal("expected outer doc comment");
                         err.note("inner doc comments like this (starting with \
                                   `//!` or `/*!`) can only appear before items");
@@ -145,14 +140,12 @@ impl<'a> Parser<'a> {
             style = ast::AttrStyle::Inner;
         }
 
-        Ok(Spanned {
+        Ok(ast::Attribute {
+            id: attr::mk_attr_id(),
+            style: style,
+            value: value,
+            is_sugared_doc: false,
             span: span,
-            node: ast::Attribute_ {
-                id: attr::mk_attr_id(),
-                style: style,
-                value: value,
-                is_sugared_doc: false,
-            },
         })
     }
 
@@ -172,15 +165,14 @@ impl<'a> Parser<'a> {
                     }
 
                     let attr = self.parse_attribute(true)?;
-                    assert!(attr.node.style == ast::AttrStyle::Inner);
+                    assert!(attr.style == ast::AttrStyle::Inner);
                     attrs.push(attr);
                 }
                 token::DocComment(s) => {
                     // we need to get the position of this token before we bump.
                     let Span { lo, hi, .. } = self.span;
-                    let str = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
-                    if attr.node.style == ast::AttrStyle::Inner {
+                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
+                    if attr.style == ast::AttrStyle::Inner {
                         attrs.push(attr);
                         self.bump();
                     } else {
@@ -213,7 +205,7 @@ impl<'a> Parser<'a> {
     ///
     /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
-    pub fn parse_meta_item(&mut self) -> PResult<'a, P<ast::MetaItem>> {
+    pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
@@ -229,24 +221,15 @@ impl<'a> Parser<'a> {
 
         let lo = self.span.lo;
         let ident = self.parse_ident()?;
-        let name = self.id_to_interned_str(ident);
-        match self.token {
-            token::Eq => {
-                self.bump();
-                let lit = self.parse_unsuffixed_lit()?;
-                let hi = self.prev_span.hi;
-                Ok(P(spanned(lo, hi, ast::MetaItemKind::NameValue(name, lit))))
-            }
-            token::OpenDelim(token::Paren) => {
-                let inner_items = self.parse_meta_seq()?;
-                let hi = self.prev_span.hi;
-                Ok(P(spanned(lo, hi, ast::MetaItemKind::List(name, inner_items))))
-            }
-            _ => {
-                let hi = self.prev_span.hi;
-                Ok(P(spanned(lo, hi, ast::MetaItemKind::Word(name))))
-            }
-        }
+        let node = if self.eat(&token::Eq) {
+            ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
+        } else if self.token == token::OpenDelim(token::Paren) {
+            ast::MetaItemKind::List(self.parse_meta_seq()?)
+        } else {
+            ast::MetaItemKind::Word
+        };
+        let hi = self.prev_span.hi;
+        Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
     }
 
     /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
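
Beyond the interner rename, the interesting change in attr.rs is structural: `ast::Attribute` used to be a `Spanned<Attribute_>`, so callers reached through `attr.node` for every field, and `MetaItem` was heap-allocated behind `P<>` with its name stored inside each `MetaItemKind` variant. After the patch, attributes carry their fields and span directly, and `MetaItem` has `name`, `node`, and `span` fields of its own. A minimal sketch of that flattening, using hypothetical stand-in types rather than the real AST definitions:

#[derive(Clone, Copy, Debug)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Copy, Debug, PartialEq)]
enum AttrStyle { Outer, Inner }

// Before: the payload lived in a separate struct wrapped by `Spanned`,
// so every caller wrote `attr.node.style`.
struct Spanned<T> { node: T, span: Span }
struct AttributeOld { style: AttrStyle, is_sugared_doc: bool }
type OldAttr = Spanned<AttributeOld>;

// After: the span is just another field, so callers write `attr.style`.
struct Attribute { style: AttrStyle, is_sugared_doc: bool, span: Span }

fn main() {
    let span = Span { lo: 0, hi: 15 };
    let old = OldAttr { node: AttributeOld { style: AttrStyle::Outer, is_sugared_doc: true }, span: span };
    let new = Attribute { style: AttrStyle::Outer, is_sugared_doc: true, span: span };
    assert_eq!(old.node.style, new.style);
    assert_eq!(old.node.is_sugared_doc, new.is_sugared_doc);
    assert_eq!((old.span.lo, old.span.hi), (new.span.lo, new.span.hi));
}
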
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index cf48c445c80..681dec0ab56 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -8,13 +8,14 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast;
+use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span};
 use codemap::CodeMap;
 use errors::{FatalError, Handler, DiagnosticBuilder};
 use ext::tt::transcribe::tt_next_token;
-use parse::token::{self, keywords, str_to_ident};
+use parse::token;
 use str::char_at;
+use symbol::{Symbol, keywords};
 use rustc_unicode::property::Pattern_White_Space;
 
 use std::borrow::Cow;
@@ -350,13 +351,13 @@ impl<'a> StringReader<'a> {
     /// single-byte delimiter).
     pub fn name_from(&self, start: BytePos) -> ast::Name {
         debug!("taking an ident from {:?} to {:?}", start, self.pos);
-        self.with_str_from(start, token::intern)
+        self.with_str_from(start, Symbol::intern)
     }
 
     /// As name_from, with an explicit endpoint.
     pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
         debug!("taking an ident from {:?} to {:?}", start, end);
-        self.with_str_from_to(start, end, token::intern)
+        self.with_str_from_to(start, end, Symbol::intern)
     }
 
     /// Calls `f` with a string slice of the source text spanning from `start`
@@ -492,7 +493,7 @@ impl<'a> StringReader<'a> {
             if string == "_" {
                 None
             } else {
-                Some(token::intern(string))
+                Some(Symbol::intern(string))
             }
         })
     }
@@ -540,7 +541,7 @@ impl<'a> StringReader<'a> {
                         self.with_str_from(start_bpos, |string| {
                             // comments with only more "/"s are not doc comments
                             let tok = if is_doc_comment(string) {
-                                token::DocComment(token::intern(string))
+                                token::DocComment(Symbol::intern(string))
                             } else {
                                 token::Comment
                             };
@@ -669,7 +670,7 @@ impl<'a> StringReader<'a> {
                 } else {
                     string.into()
                 };
-                token::DocComment(token::intern(&string[..]))
+                token::DocComment(Symbol::intern(&string[..]))
             } else {
                 token::Comment
             };
@@ -758,7 +759,7 @@ impl<'a> StringReader<'a> {
             self.err_span_(start_bpos,
                            self.pos,
                            "no valid digits found for number");
-            return token::Integer(token::intern("0"));
+            return token::Integer(Symbol::intern("0"));
         }
 
         // might be a float, but don't be greedy if this is actually an
@@ -1097,7 +1098,7 @@ impl<'a> StringReader<'a> {
                     token::Underscore
                 } else {
                     // FIXME: perform NFKC normalization here. (Issue #2253)
-                    token::Ident(str_to_ident(string))
+                    token::Ident(Ident::from_str(string))
                 }
             }));
         }
@@ -1277,13 +1278,13 @@ impl<'a> StringReader<'a> {
                     // expansion purposes. See #12512 for the gory details of why
                     // this is necessary.
                     let ident = self.with_str_from(start, |lifetime_name| {
-                        str_to_ident(&format!("'{}", lifetime_name))
+                        Ident::from_str(&format!("'{}", lifetime_name))
                     });
 
                     // Conjure up a "keyword checking ident" to make sure that
                     // the lifetime name is not a keyword.
                     let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
-                        str_to_ident(lifetime_name)
+                        Ident::from_str(lifetime_name)
                     });
                     let keyword_checking_token = &token::Ident(keyword_checking_ident);
                     let last_bpos = self.pos;
@@ -1310,7 +1311,7 @@ impl<'a> StringReader<'a> {
                 let id = if valid {
                     self.name_from(start)
                 } else {
-                    token::intern("0")
+                    Symbol::intern("0")
                 };
                 self.bump(); // advance ch past token
                 let suffix = self.scan_optional_raw_name();
@@ -1352,7 +1353,7 @@ impl<'a> StringReader<'a> {
                 let id = if valid {
                     self.name_from(start_bpos + BytePos(1))
                 } else {
-                    token::intern("??")
+                    Symbol::intern("??")
                 };
                 self.bump();
                 let suffix = self.scan_optional_raw_name();
@@ -1424,7 +1425,7 @@ impl<'a> StringReader<'a> {
                 let id = if valid {
                     self.name_from_to(content_start_bpos, content_end_bpos)
                 } else {
-                    token::intern("??")
+                    Symbol::intern("??")
                 };
                 let suffix = self.scan_optional_raw_name();
                 return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
@@ -1551,7 +1552,7 @@ impl<'a> StringReader<'a> {
         let id = if valid {
             self.name_from(start)
         } else {
-            token::intern("?")
+            Symbol::intern("?")
         };
         self.bump(); // advance ch past token
         return token::Byte(id);
@@ -1584,7 +1585,7 @@ impl<'a> StringReader<'a> {
         let id = if valid {
             self.name_from(start)
         } else {
-            token::intern("??")
+            Symbol::intern("??")
         };
         self.bump();
         return token::ByteStr(id);
@@ -1700,11 +1701,12 @@ fn ident_continue(c: Option<char>) -> bool {
 mod tests {
     use super::*;
 
+    use ast::Ident;
+    use symbol::Symbol;
     use syntax_pos::{BytePos, Span, NO_EXPANSION};
     use codemap::CodeMap;
     use errors;
     use parse::token;
-    use parse::token::str_to_ident;
     use std::io;
     use std::rc::Rc;
 
@@ -1732,7 +1734,7 @@ mod tests {
                                       &sh,
                                       "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                           .to_string());
-        let id = str_to_ident("fn");
+        let id = Ident::from_str("fn");
         assert_eq!(string_reader.next_token().tok, token::Comment);
         assert_eq!(string_reader.next_token().tok, token::Whitespace);
         let tok1 = string_reader.next_token();
@@ -1751,7 +1753,7 @@ mod tests {
         // read another token:
         let tok3 = string_reader.next_token();
         let tok4 = TokenAndSpan {
-            tok: token::Ident(str_to_ident("main")),
+            tok: token::Ident(Ident::from_str("main")),
             sp: Span {
                 lo: BytePos(24),
                 hi: BytePos(28),
@@ -1773,7 +1775,7 @@ mod tests {
 
     // make the identifier by looking up the string in the interner
     fn mk_ident(id: &str) -> token::Token {
-        token::Ident(str_to_ident(id))
+        token::Ident(Ident::from_str(id))
     }
 
     #[test]
@@ -1813,7 +1815,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
-                   token::Literal(token::Char(token::intern("a")), None));
+                   token::Literal(token::Char(Symbol::intern("a")), None));
     }
 
     #[test]
@@ -1821,7 +1823,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
-                   token::Literal(token::Char(token::intern(" ")), None));
+                   token::Literal(token::Char(Symbol::intern(" ")), None));
     }
 
     #[test]
@@ -1829,7 +1831,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
-                   token::Literal(token::Char(token::intern("\\n")), None));
+                   token::Literal(token::Char(Symbol::intern("\\n")), None));
     }
 
     #[test]
@@ -1837,7 +1839,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
-                   token::Lifetime(token::str_to_ident("'abc")));
+                   token::Lifetime(Ident::from_str("'abc")));
     }
 
     #[test]
@@ -1847,7 +1849,7 @@ mod tests {
         assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
                        .next_token()
                        .tok,
-                   token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None));
+                   token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
     }
 
     #[test]
@@ -1857,11 +1859,11 @@ mod tests {
         macro_rules! test {
             ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                 assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
-                           token::Literal(token::$tok_type(token::intern($tok_contents)),
-                                          Some(token::intern("suffix"))));
+                           token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
+                                          Some(Symbol::intern("suffix"))));
                 // with a whitespace separator:
                 assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
-                           token::Literal(token::$tok_type(token::intern($tok_contents)),
+                           token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
                                           None));
             }}
         }
@@ -1877,14 +1879,14 @@ mod tests {
         test!("1.0e10", Float, "1.0e10");
 
         assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
-                   token::Literal(token::Integer(token::intern("2")),
-                                  Some(token::intern("us"))));
+                   token::Literal(token::Integer(Symbol::intern("2")),
+                                  Some(Symbol::intern("us"))));
         assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
-                   token::Literal(token::StrRaw(token::intern("raw"), 3),
-                                  Some(token::intern("suffix"))));
+                   token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
+                                  Some(Symbol::intern("suffix"))));
         assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
-                   token::Literal(token::ByteStrRaw(token::intern("raw"), 3),
-                                  Some(token::intern("suffix"))));
+                   token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
+                                  Some(Symbol::intern("suffix"))));
     }
 
     #[test]
@@ -1904,7 +1906,7 @@ mod tests {
             _ => panic!("expected a comment!"),
         }
         assert_eq!(lexer.next_token().tok,
-                   token::Literal(token::Char(token::intern("a")), None));
+                   token::Literal(token::Char(Symbol::intern("a")), None));
     }
 
     #[test]
@@ -1917,6 +1919,6 @@ mod tests {
         assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7)));
         assert_eq!(lexer.next_token().tok, token::Whitespace);
         assert_eq!(lexer.next_token().tok,
-                   token::DocComment(token::intern("/// test")));
+                   token::DocComment(Symbol::intern("/// test")));
     }
 }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 12408c7d3c9..be340a5b5aa 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -16,12 +16,13 @@ use syntax_pos::{self, Span, FileMap};
 use errors::{Handler, ColorConfig, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
-use parse::token::InternedString;
 use ptr::P;
 use str::char_at;
+use symbol::Symbol;
 use tokenstream;
 
 use std::cell::RefCell;
+use std::collections::HashSet;
 use std::iter;
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
@@ -64,7 +65,7 @@ impl ParseSess {
         ParseSess {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
-            config: Vec::new(),
+            config: HashSet::new(),
             included_mod_stack: RefCell::new(vec![]),
             code_map: code_map
         }
@@ -116,7 +117,7 @@ pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a Pa
 }
 
 pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, P<ast::MetaItem>> {
+                                      -> PResult<'a, ast::MetaItem> {
     new_parser_from_source_str(sess, name, source).parse_meta_item()
 }
 
@@ -371,13 +372,18 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
         s[1..].chars().all(|c| '0' <= c && c <= '9')
 }
 
-fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
-                      sd: &Handler, sp: Span) -> ast::LitKind {
+fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, sd: &Handler, sp: Span)
+                      -> ast::LitKind {
     debug!("filtered_float_lit: {}, {:?}", data, suffix);
-    match suffix.as_ref().map(|s| &**s) {
-        Some("f32") => ast::LitKind::Float(data, ast::FloatTy::F32),
-        Some("f64") => ast::LitKind::Float(data, ast::FloatTy::F64),
-        Some(suf) => {
+    let suffix = match suffix {
+        Some(suffix) => suffix,
+        None => return ast::LitKind::FloatUnsuffixed(data),
+    };
+
+    match &*suffix.as_str() {
+        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
+        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
+        suf => {
             if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
                 // if it looks like a width, lets try to be helpful.
                 sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..]))
@@ -391,16 +397,13 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
 
             ast::LitKind::FloatUnsuffixed(data)
         }
-        None => ast::LitKind::FloatUnsuffixed(data)
     }
 }
-pub fn float_lit(s: &str, suffix: Option<InternedString>,
-                 sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn float_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
     debug!("float_lit: {:?}, {:?}", s, suffix);
     // FIXME #2252: bounds checking float literals is deferred until trans
     let s = s.chars().filter(|&c| c != '_').collect::<String>();
-    let data = token::intern_and_get_ident(&s);
-    filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp)
+    filtered_float_lit(Symbol::intern(&s), suffix, sd, sp)
 }
 
 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
@@ -495,11 +498,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     Rc::new(res)
 }
 
-pub fn integer_lit(s: &str,
-                   suffix: Option<InternedString>,
-                   sd: &Handler,
-                   sp: Span)
-                   -> ast::LitKind {
+pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
     // s can only be ascii, byte indexing is fine
 
     let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
@@ -521,16 +520,15 @@ pub fn integer_lit(s: &str,
     }
 
     // 1f64 and 2f32 etc. are valid float literals.
-    if let Some(ref suf) = suffix {
-        if looks_like_width_suffix(&['f'], suf) {
+    if let Some(suf) = suffix {
+        if looks_like_width_suffix(&['f'], &suf.as_str()) {
             match base {
                 16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
                 8 => sd.span_err(sp, "octal float literal is not supported"),
                 2 => sd.span_err(sp, "binary float literal is not supported"),
                 _ => ()
             }
-            let ident = token::intern_and_get_ident(&s);
-            return filtered_float_lit(ident, Some(&suf), sd, sp)
+            return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp)
         }
     }
 
@@ -538,9 +536,9 @@ pub fn integer_lit(s: &str,
         s = &s[2..];
     }
 
-    if let Some(ref suf) = suffix {
-        if suf.is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
-        ty = match &**suf {
+    if let Some(suf) = suffix {
+        if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
+        ty = match &*suf.as_str() {
             "isize" => ast::LitIntType::Signed(ast::IntTy::Is),
             "i8"  => ast::LitIntType::Signed(ast::IntTy::I8),
             "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
@@ -551,7 +549,7 @@ pub fn integer_lit(s: &str,
             "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
             "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
             "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
-            _ => {
+            suf => {
                 // i<digits> and u<digits> look like widths, so lets
                 // give an error message along those lines
                 if looks_like_width_suffix(&['i', 'u'], suf) {
@@ -599,12 +597,11 @@ mod tests {
     use std::rc::Rc;
     use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
-    use ast::{self, PatKind};
+    use ast::{self, Ident, PatKind};
     use abi::Abi;
     use attr::first_attr_value_str_by_name;
     use parse;
     use parse::parser::Parser;
-    use parse::token::{str_to_ident};
     use print::pprust::item_to_string;
     use ptr::P;
     use tokenstream::{self, TokenTree};
@@ -626,7 +623,7 @@ mod tests {
                         global: false,
                         segments: vec![
                             ast::PathSegment {
-                                identifier: str_to_ident("a"),
+                                identifier: Ident::from_str("a"),
                                 parameters: ast::PathParameters::none(),
                             }
                         ],
@@ -645,11 +642,11 @@ mod tests {
                             global: true,
                             segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("a"),
+                                    identifier: Ident::from_str("a"),
                                     parameters: ast::PathParameters::none(),
                                 },
                                 ast::PathSegment {
-                                    identifier: str_to_ident("b"),
+                                    identifier: Ident::from_str("b"),
                                     parameters: ast::PathParameters::none(),
                                 }
                             ]
@@ -678,8 +675,8 @@ mod tests {
                 Some(&TokenTree::Token(_, token::Ident(name_zip))),
                 Some(&TokenTree::Delimited(_, ref macro_delimed)),
             )
-            if name_macro_rules.name.as_str() == "macro_rules"
-            && name_zip.name.as_str() == "zip" => {
+            if name_macro_rules.name == "macro_rules"
+            && name_zip.name == "zip" => {
                 let tts = &macro_delimed.tts[..];
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
@@ -696,8 +693,7 @@ mod tests {
                                 Some(&TokenTree::Token(_, token::Dollar)),
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
-                            if first_delimed.delim == token::Paren
-                            && ident.name.as_str() == "a" => {},
+                            if first_delimed.delim == token::Paren && ident.name == "a" => {},
                             _ => panic!("value 3: {:?}", **first_delimed),
                         }
                         let tts = &second_delimed.tts[..];
@@ -708,7 +704,7 @@ mod tests {
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if second_delimed.delim == token::Paren
-                            && ident.name.as_str() == "a" => {},
+                            && ident.name == "a" => {},
                             _ => panic!("value 4: {:?}", **second_delimed),
                         }
                     },
@@ -724,17 +720,17 @@ mod tests {
         let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
 
         let expected = vec![
-            TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
-            TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
+            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
+            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
                 Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(8, 9), token::Colon),
-                        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
+                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
                     ],
                     close_span: sp(13, 14),
                 })),
@@ -744,7 +740,7 @@ mod tests {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
@@ -765,7 +761,7 @@ mod tests {
                             global: false,
                             segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("d"),
+                                    identifier: Ident::from_str("d"),
                                     parameters: ast::PathParameters::none(),
                                 }
                             ],
@@ -788,7 +784,7 @@ mod tests {
                                global:false,
                                segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("b"),
+                                    identifier: Ident::from_str("b"),
                                     parameters: ast::PathParameters::none(),
                                 }
                                ],
@@ -812,7 +808,7 @@ mod tests {
                 id: ast::DUMMY_NODE_ID,
                 node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
                                     Spanned{ span:sp(0, 1),
-                                             node: str_to_ident("b")
+                                             node: Ident::from_str("b")
                     },
                                     None),
                 span: sp(0,1)}));
@@ -824,7 +820,7 @@ mod tests {
         // this test depends on the intern order of "fn" and "i32"
         assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
                   Some(
-                      P(ast::Item{ident:str_to_ident("a"),
+                      P(ast::Item{ident:Ident::from_str("a"),
                             attrs:Vec::new(),
                             id: ast::DUMMY_NODE_ID,
                             node: ast::ItemKind::Fn(P(ast::FnDecl {
@@ -835,8 +831,7 @@ mod tests {
                                         global:false,
                                         segments: vec![
                                             ast::PathSegment {
-                                                identifier:
-                                                    str_to_ident("i32"),
+                                                identifier: Ident::from_str("i32"),
                                                 parameters: ast::PathParameters::none(),
                                             }
                                         ],
@@ -849,7 +844,7 @@ mod tests {
                                             ast::BindingMode::ByValue(ast::Mutability::Immutable),
                                                 Spanned{
                                                     span: sp(6,7),
-                                                    node: str_to_ident("b")},
+                                                    node: Ident::from_str("b")},
                                                 None
                                                     ),
                                             span: sp(6,7)
@@ -884,9 +879,7 @@ mod tests {
                                                         global:false,
                                                         segments: vec![
                                                             ast::PathSegment {
-                                                                identifier:
-                                                                str_to_ident(
-                                                                    "b"),
+                                                                identifier: Ident::from_str("b"),
                                                                 parameters:
                                                                 ast::PathParameters::none(),
                                                             }
@@ -998,12 +991,12 @@ mod tests {
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(&doc[..], "/// doc comment");
+        assert_eq!(doc, "/// doc comment");
 
         let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
-        let docs = item.attrs.iter().filter(|a| &*a.name() == "doc")
+        let docs = item.attrs.iter().filter(|a| a.name() == "doc")
                     .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
         assert_eq!(&docs[..], b);
@@ -1011,7 +1004,7 @@ mod tests {
         let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(&doc[..], "/** doc comment\n *  with CRLF */");
+        assert_eq!(doc, "/** doc comment\n *  with CRLF */");
     }
 
     #[test]
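
In mod.rs, the literal helpers `float_lit`, `filtered_float_lit`, and `integer_lit` now take the suffix as `Option<Symbol>` and only call `as_str()` at the point where they actually inspect the text, instead of threading `InternedString` and `Option<&str>` through every signature. The classification logic itself is unchanged; here is a standalone sketch of the new early-return shape, using plain `&str` for the suffix since `Symbol` lives inside rustc:

#[derive(Debug, PartialEq)]
enum FloatTy { F32, F64 }

#[derive(Debug, PartialEq)]
enum LitKind {
    Float(String, FloatTy),
    FloatUnsuffixed(String),
}

// Sketch of the control flow in the rewritten `filtered_float_lit`: return early
// when there is no suffix, then match on the suffix text. (The real function
// emits a diagnostic for a bad suffix and falls back to FloatUnsuffixed; this
// sketch just reports an error string.)
fn filtered_float_lit(data: &str, suffix: Option<&str>) -> Result<LitKind, String> {
    let suffix = match suffix {
        Some(suffix) => suffix,
        None => return Ok(LitKind::FloatUnsuffixed(data.to_owned())),
    };
    match suffix {
        "f32" => Ok(LitKind::Float(data.to_owned(), FloatTy::F32)),
        "f64" => Ok(LitKind::Float(data.to_owned(), FloatTy::F64)),
        suf => Err(format!("invalid suffix `{}` for float literal", suf)),
    }
}

fn main() {
    assert_eq!(filtered_float_lit("1.0", Some("f32")),
               Ok(LitKind::Float("1.0".to_owned(), FloatTy::F32)));
    assert_eq!(filtered_float_lit("1.0", None),
               Ok(LitKind::FloatUnsuffixed("1.0".to_owned())));
    assert!(filtered_float_lit("1.0", Some("f16")).is_err());
}
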
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 2e38ca82d5d..4997e464c2b 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -48,13 +48,14 @@ use parse::classify;
 use parse::common::SeqSep;
 use parse::lexer::{Reader, TokenAndSpan};
 use parse::obsolete::ObsoleteSyntax;
-use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString};
+use parse::token::{self, MatchNt, SubstNt};
 use parse::{new_sub_parser_from_file, ParseSess};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
 use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
+use symbol::{Symbol, keywords};
 use util::ThinVec;
 
 use std::collections::HashSet;
@@ -998,10 +999,6 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }
 
-    pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
-        id.name.as_str()
-    }
-
     /// Is the current token one of the keywords that signals a bare function
     /// type?
     pub fn token_is_bare_fn_keyword(&mut self) -> bool {
@@ -1523,34 +1520,28 @@ impl<'a> Parser<'a> {
                     // float literals, so all the handling is done
                     // internally.
                     token::Integer(s) => {
-                        (false, parse::integer_lit(&s.as_str(),
-                                                   suf.as_ref().map(|s| s.as_str()),
-                                                   &self.sess.span_diagnostic,
-                                                   self.span))
+                        let diag = &self.sess.span_diagnostic;
+                        (false, parse::integer_lit(&s.as_str(), suf, diag, self.span))
                     }
                     token::Float(s) => {
-                        (false, parse::float_lit(&s.as_str(),
-                                                 suf.as_ref().map(|s| s.as_str()),
-                                                  &self.sess.span_diagnostic,
-                                                 self.span))
+                        let diag = &self.sess.span_diagnostic;
+                        (false, parse::float_lit(&s.as_str(), suf, diag, self.span))
                     }
 
                     token::Str_(s) => {
-                        (true,
-                         LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
-                                      ast::StrStyle::Cooked))
+                        let s = Symbol::intern(&parse::str_lit(&s.as_str()));
+                        (true, LitKind::Str(s, ast::StrStyle::Cooked))
                     }
                     token::StrRaw(s, n) => {
-                        (true,
-                         LitKind::Str(
-                            token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
-                            ast::StrStyle::Raw(n)))
+                        let s = Symbol::intern(&parse::raw_str_lit(&s.as_str()));
+                        (true, LitKind::Str(s, ast::StrStyle::Raw(n)))
+                    }
+                    token::ByteStr(i) => {
+                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str())))
+                    }
+                    token::ByteStrRaw(i, _) => {
+                        (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))
                     }
-                    token::ByteStr(i) =>
-                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str()))),
-                    token::ByteStrRaw(i, _) =>
-                        (true,
-                         LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))),
                 };
 
                 if suffix_illegal {
@@ -2544,7 +2535,7 @@ impl<'a> Parser<'a> {
                     let prev_span = self.prev_span;
                     let fstr = n.as_str();
                     let mut err = self.diagnostic().struct_span_err(prev_span,
-                        &format!("unexpected token: `{}`", n.as_str()));
+                        &format!("unexpected token: `{}`", n));
                     if fstr.chars().all(|x| "0123456789.".contains(x)) {
                         let float = match fstr.parse::<f64>().ok() {
                             Some(f) => f,
@@ -2627,7 +2618,7 @@ impl<'a> Parser<'a> {
                                       })));
                 } else if self.token.is_keyword(keywords::Crate) {
                     let ident = match self.token {
-                        token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id },
+                        token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id },
                         _ => unreachable!(),
                     };
                     self.bump();
@@ -3751,9 +3742,7 @@ impl<'a> Parser<'a> {
     /// Emit an expected item after attributes error.
     fn expected_item_err(&self, attrs: &[Attribute]) {
         let message = match attrs.last() {
-            Some(&Attribute { node: ast::Attribute_ { is_sugared_doc: true, .. }, .. }) => {
-                "expected item after doc comment"
-            }
+            Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
             _ => "expected item after attributes",
         };
 
@@ -4837,7 +4826,7 @@ impl<'a> Parser<'a> {
             Visibility::Inherited => (),
             _ => {
                 let is_macro_rules: bool = match self.token {
-                    token::Ident(sid) => sid.name == intern("macro_rules"),
+                    token::Ident(sid) => sid.name == Symbol::intern("macro_rules"),
                     _ => false,
                 };
                 if is_macro_rules {
@@ -5304,17 +5293,16 @@ impl<'a> Parser<'a> {
 
     fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) -> Restrictions {
         if let Some(path) = ::attr::first_attr_value_str_by_name(attrs, "path") {
-            self.directory.push(&*path);
+            self.directory.push(&*path.as_str());
             self.restrictions - Restrictions::NO_NONINLINE_MOD
         } else {
-            let default_path = self.id_to_interned_str(id);
-            self.directory.push(&*default_path);
+            self.directory.push(&*id.name.as_str());
             self.restrictions
         }
     }
 
     pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
-        ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d))
+        ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str()))
     }
 
     /// Returns either a path to a module, or .
@@ -6128,26 +6116,17 @@ impl<'a> Parser<'a> {
         })
     }
 
-    pub fn parse_optional_str(&mut self)
-                              -> Option<(InternedString,
-                                         ast::StrStyle,
-                                         Option<ast::Name>)> {
+    pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
         let ret = match self.token {
-            token::Literal(token::Str_(s), suf) => {
-                let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                (s, ast::StrStyle::Cooked, suf)
-            }
-            token::Literal(token::StrRaw(s, n), suf) => {
-                let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                (s, ast::StrStyle::Raw(n), suf)
-            }
+            token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
+            token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
             _ => return None
         };
         self.bump();
         Some(ret)
     }
 
-    pub fn parse_str(&mut self) -> PResult<'a, (InternedString, StrStyle)> {
+    pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
         match self.parse_optional_str() {
             Some((s, style, suf)) => {
                 let sp = self.prev_span;
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 0198ee073d2..8ac39dd462e 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -16,13 +16,10 @@ pub use self::Token::*;
 
 use ast::{self};
 use ptr::P;
-use util::interner::Interner;
+use symbol::keywords;
 use tokenstream;
 
-use serialize::{Decodable, Decoder, Encodable, Encoder};
-use std::cell::RefCell;
 use std::fmt;
-use std::ops::Deref;
 use std::rc::Rc;
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
@@ -301,7 +298,7 @@ pub enum Nonterminal {
     NtTy(P<ast::Ty>),
     NtIdent(ast::SpannedIdent),
     /// Stuff inside brackets for attributes
-    NtMeta(P<ast::MetaItem>),
+    NtMeta(ast::MetaItem),
     NtPath(ast::Path),
     NtTT(tokenstream::TokenTree),
     // These are not exposed to macros, but are used by quasiquote.
@@ -335,270 +332,3 @@ impl fmt::Debug for Nonterminal {
         }
     }
 }
-
-// In this macro, there is the requirement that the name (the number) must be monotonically
-// increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
-// except starting from the next number instead of zero.
-macro_rules! declare_keywords {(
-    $( ($index: expr, $konst: ident, $string: expr) )*
-) => {
-    pub mod keywords {
-        use ast;
-        #[derive(Clone, Copy, PartialEq, Eq)]
-        pub struct Keyword {
-            ident: ast::Ident,
-        }
-        impl Keyword {
-            #[inline] pub fn ident(self) -> ast::Ident { self.ident }
-            #[inline] pub fn name(self) -> ast::Name { self.ident.name }
-        }
-        $(
-            #[allow(non_upper_case_globals)]
-            pub const $konst: Keyword = Keyword {
-                ident: ast::Ident::with_empty_ctxt(ast::Name($index))
-            };
-        )*
-    }
-
-    fn mk_fresh_ident_interner() -> IdentInterner {
-        Interner::prefill(&[$($string,)*])
-    }
-}}
-
-// NB: leaving holes in the ident table is bad! a different ident will get
-// interned with the id from the hole, but it will be between the min and max
-// of the reserved words, and thus tagged as "reserved".
-// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
-// this should be rarely necessary though if the keywords are kept in alphabetic order.
-declare_keywords! {
-    // Invalid identifier
-    (0,  Invalid,        "")
-
-    // Strict keywords used in the language.
-    (1,  As,             "as")
-    (2,  Box,            "box")
-    (3,  Break,          "break")
-    (4,  Const,          "const")
-    (5,  Continue,       "continue")
-    (6,  Crate,          "crate")
-    (7,  Else,           "else")
-    (8,  Enum,           "enum")
-    (9,  Extern,         "extern")
-    (10, False,          "false")
-    (11, Fn,             "fn")
-    (12, For,            "for")
-    (13, If,             "if")
-    (14, Impl,           "impl")
-    (15, In,             "in")
-    (16, Let,            "let")
-    (17, Loop,           "loop")
-    (18, Match,          "match")
-    (19, Mod,            "mod")
-    (20, Move,           "move")
-    (21, Mut,            "mut")
-    (22, Pub,            "pub")
-    (23, Ref,            "ref")
-    (24, Return,         "return")
-    (25, SelfValue,      "self")
-    (26, SelfType,       "Self")
-    (27, Static,         "static")
-    (28, Struct,         "struct")
-    (29, Super,          "super")
-    (30, Trait,          "trait")
-    (31, True,           "true")
-    (32, Type,           "type")
-    (33, Unsafe,         "unsafe")
-    (34, Use,            "use")
-    (35, Where,          "where")
-    (36, While,          "while")
-
-    // Keywords reserved for future use.
-    (37, Abstract,       "abstract")
-    (38, Alignof,        "alignof")
-    (39, Become,         "become")
-    (40, Do,             "do")
-    (41, Final,          "final")
-    (42, Macro,          "macro")
-    (43, Offsetof,       "offsetof")
-    (44, Override,       "override")
-    (45, Priv,           "priv")
-    (46, Proc,           "proc")
-    (47, Pure,           "pure")
-    (48, Sizeof,         "sizeof")
-    (49, Typeof,         "typeof")
-    (50, Unsized,        "unsized")
-    (51, Virtual,        "virtual")
-    (52, Yield,          "yield")
-
-    // Weak keywords, have special meaning only in specific contexts.
-    (53, Default,        "default")
-    (54, StaticLifetime, "'static")
-    (55, Union,          "union")
-}
-
-// looks like we can get rid of this completely...
-pub type IdentInterner = Interner;
-
-// if an interner exists in TLS, return it. Otherwise, prepare a
-// fresh one.
-// FIXME(eddyb) #8726 This should probably use a thread-local reference.
-pub fn with_ident_interner<T, F: FnOnce(&mut IdentInterner) -> T>(f: F) -> T {
-    thread_local!(static KEY: RefCell<IdentInterner> = {
-        RefCell::new(mk_fresh_ident_interner())
-    });
-    KEY.with(|interner| f(&mut *interner.borrow_mut()))
-}
-
-/// Reset the ident interner to its initial state.
-pub fn reset_ident_interner() {
-    with_ident_interner(|interner| *interner = mk_fresh_ident_interner());
-}
-
-pub fn clear_ident_interner() {
-    with_ident_interner(|interner| *interner = IdentInterner::new());
-}
-
-/// Represents a string stored in the thread-local interner. Because the
-/// interner lives for the life of the thread, this can be safely treated as an
-/// immortal string, as long as it never crosses between threads.
-///
-/// FIXME(pcwalton): You must be careful about what you do in the destructors
-/// of objects stored in TLS, because they may run after the interner is
-/// destroyed. In particular, they must not access string contents. This can
-/// be fixed in the future by just leaking all strings until thread death
-/// somehow.
-#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
-pub struct InternedString {
-    string: Rc<str>,
-}
-
-impl InternedString {
-    #[inline]
-    pub fn new(string: &'static str) -> InternedString {
-        InternedString {
-            string: Rc::__from_str(string),
-        }
-    }
-
-    #[inline]
-    pub fn new_from_name(name: ast::Name) -> InternedString {
-        with_ident_interner(|interner| InternedString { string: interner.get(name) })
-    }
-}
-
-impl Deref for InternedString {
-    type Target = str;
-
-    fn deref(&self) -> &str { &self.string }
-}
-
-impl fmt::Debug for InternedString {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Debug::fmt(&self.string, f)
-    }
-}
-
-impl fmt::Display for InternedString {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.string, f)
-    }
-}
-
-impl<'a> PartialEq<&'a str> for InternedString {
-    #[inline(always)]
-    fn eq(&self, other: & &'a str) -> bool {
-        PartialEq::eq(&self.string[..], *other)
-    }
-    #[inline(always)]
-    fn ne(&self, other: & &'a str) -> bool {
-        PartialEq::ne(&self.string[..], *other)
-    }
-}
-
-impl<'a> PartialEq<InternedString> for &'a str {
-    #[inline(always)]
-    fn eq(&self, other: &InternedString) -> bool {
-        PartialEq::eq(*self, &other.string[..])
-    }
-    #[inline(always)]
-    fn ne(&self, other: &InternedString) -> bool {
-        PartialEq::ne(*self, &other.string[..])
-    }
-}
-
-impl PartialEq<str> for InternedString {
-    #[inline(always)]
-    fn eq(&self, other: &str) -> bool {
-        PartialEq::eq(&self.string[..], other)
-    }
-    #[inline(always)]
-    fn ne(&self, other: &str) -> bool {
-        PartialEq::ne(&self.string[..], other)
-    }
-}
-
-impl PartialEq<InternedString> for str {
-    #[inline(always)]
-    fn eq(&self, other: &InternedString) -> bool {
-        PartialEq::eq(self, &other.string[..])
-    }
-    #[inline(always)]
-    fn ne(&self, other: &InternedString) -> bool {
-        PartialEq::ne(self, &other.string[..])
-    }
-}
-
-impl Decodable for InternedString {
-    fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
-        Ok(intern(&d.read_str()?).as_str())
-    }
-}
-
-impl Encodable for InternedString {
-    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        s.emit_str(&self.string)
-    }
-}
-
-/// Interns and returns the string contents of an identifier, using the
-/// thread-local interner.
-#[inline]
-pub fn intern_and_get_ident(s: &str) -> InternedString {
-    intern(s).as_str()
-}
-
-/// Maps a string to its interned representation.
-#[inline]
-pub fn intern(s: &str) -> ast::Name {
-    with_ident_interner(|interner| interner.intern(s))
-}
-
-/// gensym's a new usize, using the current interner.
-#[inline]
-pub fn gensym(s: &str) -> ast::Name {
-    with_ident_interner(|interner| interner.gensym(s))
-}
-
-/// Maps a string to an identifier with an empty syntax context.
-#[inline]
-pub fn str_to_ident(s: &str) -> ast::Ident {
-    ast::Ident::with_empty_ctxt(intern(s))
-}
-
-/// Maps a string to a gensym'ed identifier.
-#[inline]
-pub fn gensym_ident(s: &str) -> ast::Ident {
-    ast::Ident::with_empty_ctxt(gensym(s))
-}
-
-// create a fresh name that maps to the same string as the old one.
-// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
-// that is, that the new name and the old one are connected to ptr_eq strings.
-pub fn fresh_name(src: ast::Ident) -> ast::Name {
-    with_ident_interner(|interner| interner.gensym_copy(src.name))
-    // following: debug version. Could work in final except that it's incompatible with
-    // good error messages and uses of struct names in ambiguous could-be-binding
-    // locations. Also definitely destroys the guarantee given above about ptr_eq.
-    /*let num = rand::thread_rng().gen_uint_range(0,0xffff);
-    gensym(format!("{}_{}",ident_to_string(src),num))*/
-}