author     Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2018-03-08 14:27:23 +0300
committer  Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2018-03-17 22:08:07 +0300
commit     5d06c890fececc6f6779cd65ca83cef4647b8fdd
tree       7ffd581b7c7b9ac7294ecb6f28994b1afe753659 /src/libsyntax
parent     61b6bf54fdf56195baf9a8ee7383551b0d468c81
syntax: Make `_` an identifier
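
Remove the dedicated `token::Underscore` variant: the lexer now emits `_` as an
ordinary `token::Ident` whose name is the `Underscore` keyword, and the parser,
macro matcher, quoting code and pretty-printer are updated to go through the
keyword machinery instead of matching on the old variant. Places that used
`token::Underscore` as a cheap placeholder for `mem::replace` and `Default` now
use `token::Whitespace`.

A minimal sketch (not part of the patch) of how code inside libsyntax can
detect `_` after this change, assuming only the `Token` and `keywords` items
that appear in the diff below; `token_is_underscore` is a hypothetical helper:

    // Hypothetical helper; `_` no longer has its own token variant.
    fn token_is_underscore(tok: &token::Token) -> bool {
        match *tok {
            token::Ident(id) => id.name == keywords::Underscore.name(),
            _ => false,
        }
    }

Parser code typically reaches for the existing keyword helpers instead, e.g.
`self.eat_keyword(keywords::Underscore)` where it previously ate
`&token::Underscore`.
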
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/diagnostics/plugin.rs   |  4
-rw-r--r--  src/libsyntax/ext/quote.rs            |  1
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs  |  3
-rw-r--r--  src/libsyntax/feature_gate.rs         |  2
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs      | 14
-rw-r--r--  src/libsyntax/parse/parser.rs         | 36
-rw-r--r--  src/libsyntax/parse/token.rs          |  9
-rw-r--r--  src/libsyntax/print/pprust.rs         |  1
8 files changed, 28 insertions, 42 deletions
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index e8c2d325bd6..6c0fe525f55 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -19,7 +19,7 @@ use ext::base::{ExtCtxt, MacEager, MacResult};
 use ext::build::AstBuilder;
 use parse::token;
 use ptr::P;
-use symbol::Symbol;
+use symbol::{keywords, Symbol};
 use tokenstream::{TokenTree};
 use util::small_vector::SmallVector;
 
@@ -192,7 +192,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
             (descriptions.len(), ecx.expr_vec(span, descriptions))
         });
 
-    let static_ = ecx.lifetime(span, Ident::from_str("'static"));
+    let static_ = ecx.lifetime(span, keywords::StaticLifetime.ident());
     let ty_str = ecx.ty_rptr(
         span,
         ecx.ty_ident(span, ecx.ident_of("str")),
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 7a024dbad88..d6642b7b6c2 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -709,7 +709,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
         token::Pound        => "Pound",
         token::Dollar       => "Dollar",
         token::Question     => "Question",
-        token::Underscore   => "Underscore",
         token::Eof          => "Eof",
 
         token::Whitespace | token::Comment | token::Shebang(_) => {
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 0621f728e2a..beefdb3a6ea 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -765,8 +765,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
             Token::DotDotDot |                  // range pattern (future compat)
             Token::ModSep |                     // path
             Token::Lt |                         // path (UFCS constant)
-            Token::BinOp(token::Shl) |          // path (double UFCS)
-            Token::Underscore => true,          // placeholder
+            Token::BinOp(token::Shl) => true,   // path (double UFCS)
             Token::Interpolated(ref nt) => may_be_ident(&nt.0),
             _ => false,
         },
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index f42cb8a2583..0098f2ae89b 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -1790,7 +1790,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
     }
 
     fn visit_lifetime(&mut self, lt: &'a ast::Lifetime) {
-        if lt.ident.name == "'_" {
+        if lt.ident.name == keywords::UnderscoreLifetime.name() {
             gate_feature_post!(&self, underscore_lifetimes, lt.span,
                                "underscore lifetimes are unstable");
         }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 815ba49a60a..9d1bfba7b94 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -34,7 +34,7 @@ pub struct TokenAndSpan {
 
 impl Default for TokenAndSpan {
     fn default() -> Self {
-        TokenAndSpan { tok: token::Underscore, sp: syntax_pos::DUMMY_SP }
+        TokenAndSpan { tok: token::Whitespace, sp: syntax_pos::DUMMY_SP }
     }
 }
 
@@ -126,7 +126,7 @@ impl<'a> StringReader<'a> {
     pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
         assert!(self.fatal_errs.is_empty());
         let ret_val = TokenAndSpan {
-            tok: replace(&mut self.peek_tok, token::Underscore),
+            tok: replace(&mut self.peek_tok, token::Whitespace),
             sp: self.peek_span,
         };
         self.advance_token()?;
@@ -1133,14 +1133,8 @@ impl<'a> StringReader<'a> {
                 self.bump();
             }
 
-            return Ok(self.with_str_from(start, |string| {
-                if string == "_" {
-                    token::Underscore
-                } else {
-                    // FIXME: perform NFKC normalization here. (Issue #2253)
-                    token::Ident(self.mk_ident(string))
-                }
-            }));
+            // FIXME: perform NFKC normalization here. (Issue #2253)
+            return Ok(self.with_str_from(start, |string| token::Ident(self.mk_ident(string))));
         }
 
         if is_dec_digit(c) {
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index a3a6489fe8b..aa2a6f1cb47 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -549,7 +549,7 @@ impl<'a> Parser<'a> {
                -> Self {
         let mut parser = Parser {
             sess,
-            token: token::Underscore,
+            token: token::Whitespace,
             span: syntax_pos::DUMMY_SP,
             prev_span: syntax_pos::DUMMY_SP,
             meta_var_span: None,
@@ -800,11 +800,7 @@ impl<'a> Parser<'a> {
                 Err(if self.prev_token_kind == PrevTokenKind::DocComment {
                         self.span_fatal_err(self.prev_span, Error::UselessDocComment)
                     } else {
-                        let mut err = self.expected_ident_found();
-                        if self.token == token::Underscore {
-                            err.note("`_` is a wildcard pattern, not an identifier");
-                        }
-                        err
+                        self.expected_ident_found()
                     })
             }
         }
@@ -1602,7 +1598,7 @@ impl<'a> Parser<'a> {
             let e = self.parse_expr()?;
             self.expect(&token::CloseDelim(token::Paren))?;
             TyKind::Typeof(e)
-        } else if self.eat(&token::Underscore) {
+        } else if self.eat_keyword(keywords::Underscore) {
             // A type to be inferred `_`
             TyKind::Infer
         } else if self.token_is_bare_fn_keyword() {
@@ -1796,7 +1792,7 @@ impl<'a> Parser<'a> {
             _ => 0,
         };
 
-        self.look_ahead(offset, |t| t.is_ident() || t == &token::Underscore) &&
+        self.look_ahead(offset, |t| t.is_ident()) &&
         self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
@@ -2782,7 +2778,7 @@ impl<'a> Parser<'a> {
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
-                let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span);
+                let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
                 self.bump();
                 TokenTree::Token(span, token)
             }
@@ -3815,11 +3811,6 @@ impl<'a> Parser<'a> {
         let lo = self.span;
         let pat;
         match self.token {
-            token::Underscore => {
-                // Parse _
-                self.bump();
-                pat = PatKind::Wild;
-            }
             token::BinOp(token::And) | token::AndAnd => {
                 // Parse &pat / &mut pat
                 self.expect_and()?;
@@ -3849,8 +3840,11 @@ impl<'a> Parser<'a> {
                 self.expect(&token::CloseDelim(token::Bracket))?;
                 pat = PatKind::Slice(before, slice, after);
             }
-            // At this point, token != _, &, &&, (, [
-            _ => if self.eat_keyword(keywords::Mut) {
+            // At this point, token != &, &&, (, [
+            _ => if self.eat_keyword(keywords::Underscore) {
+                // Parse _
+                pat = PatKind::Wild;
+            } else if self.eat_keyword(keywords::Mut) {
                 // Parse mut ident @ pat / mut ref ident @ pat
                 let mutref_span = self.prev_span.to(self.span);
                 let binding_mode = if self.eat_keyword(keywords::Ref) {
@@ -7065,10 +7059,12 @@ impl<'a> Parser<'a> {
 
     fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
         if self.eat_keyword(keywords::As) {
-            if self.eat(&token::Underscore) {
-                Ok(Some(Ident::with_empty_ctxt(Symbol::gensym("_"))))
-            } else {
-                self.parse_ident().map(Some)
+            match self.token {
+                token::Ident(ident) if ident.name == keywords::Underscore.name() => {
+                    self.bump(); // `_`
+                    Ok(Some(Ident { name: ident.name.gensymed(), ..ident }))
+                }
+                _ => self.parse_ident().map(Some),
             }
         } else {
             Ok(None)
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 097a2eb89fd..5c051e9b358 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -122,6 +122,7 @@ fn ident_can_begin_type(ident: ast::Ident) -> bool {
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
     [
+        keywords::Underscore.name(),
         keywords::For.name(),
         keywords::Impl.name(),
         keywords::Fn.name(),
@@ -175,7 +176,6 @@ pub enum Token {
 
     /* Name components */
     Ident(ast::Ident),
-    Underscore,
     Lifetime(ast::Ident),
 
     // The `LazyTokenStream` is a pure function of the `Nonterminal`,
@@ -242,7 +242,6 @@ impl Token {
             Ident(ident)                => ident_can_begin_type(ident), // type name or keyword
             OpenDelim(Paren)            | // tuple
             OpenDelim(Bracket)          | // array
-            Underscore                  | // placeholder
             Not                         | // never
             BinOp(Star)                 | // raw pointer
             BinOp(And)                  | // reference
@@ -371,7 +370,7 @@ impl Token {
     // unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special_ident(&self) -> bool {
         match self.ident() {
-            Some(id) => id.name <= keywords::DollarCrate.name(),
+            Some(id) => id.name <= keywords::Underscore.name(),
             _ => false,
         }
     }
@@ -441,7 +440,7 @@ impl Token {
 
             Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | DotEq |
             DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
-            Question | OpenDelim(..) | CloseDelim(..) | Underscore => return None,
+            Question | OpenDelim(..) | CloseDelim(..) => return None,
 
             Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
             Whitespace | Comment | Shebang(..) | Eof => return None,
@@ -573,7 +572,7 @@ impl fmt::Debug for Nonterminal {
 pub fn is_op(tok: &Token) -> bool {
     match *tok {
         OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
-        Ident(..) | Underscore | Lifetime(..) | Interpolated(..) |
+        Ident(..) | Lifetime(..) | Interpolated(..) |
         Whitespace | Comment | Shebang(..) | Eof => false,
         _ => true,
     }
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 1cf2b7a44bc..36698a86374 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -252,7 +252,6 @@ pub fn token_to_string(tok: &Token) -> String {
         /* Name components */
         token::Ident(s)             => s.to_string(),
         token::Lifetime(s)          => s.to_string(),
-        token::Underscore           => "_".to_string(),
 
         /* Other */
         token::DocComment(s)        => s.to_string(),
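
With the `Underscore` arm gone, the generic `Ident` arm above also covers `_`
when printing tokens. A minimal sketch (not part of the patch), assuming
`keywords::Underscore.ident()` yields the `_` identifier in the same way the
other keyword accessors in this diff do:

    // `_` round-trips through the ordinary identifier path now.
    let underscore = token::Ident(keywords::Underscore.ident());
    assert_eq!(pprust::token_to_string(&underscore), "_");
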