about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author	Vadim Petrochenkov <vadim.petrochenkov@gmail.com>	2019-06-08 20:20:00 +0300
committer	Vadim Petrochenkov <vadim.petrochenkov@gmail.com>	2019-06-08 20:36:20 +0300
commit	ffe23475cba4b933475715ff72ca0be6aea0a398 (patch)
tree	8e997af213f2bd840371b705b25f0ccf6e204c44 /src
parent	5c45343f11fbf93cf4e15568aee3ff3f2f287466 (diff)
download	rust-ffe23475cba4b933475715ff72ca0be6aea0a398.tar.gz
	rust-ffe23475cba4b933475715ff72ca0be6aea0a398.zip
syntax: Keep full `Token`s for `macro_rules` separators
Diffstat (limited to 'src')
-rw-r--r--	src/libsyntax/ext/tt/macro_parser.rs	| 2
-rw-r--r--	src/libsyntax/ext/tt/macro_rules.rs	| 30
-rw-r--r--	src/libsyntax/ext/tt/quoted.rs	| 14
-rw-r--r--	src/libsyntax/ext/tt/transcribe.rs	| 19
-rw-r--r--	src/test/ui/macros/macro-input-future-proofing.stderr	| 4
5 files changed, 32 insertions, 37 deletions
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index f98e1433356..1c7fdf995e1 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> {
     seq_op: Option<quoted::KleeneOp>,
 
     /// The separator if we are in a repetition.
-    sep: Option<TokenKind>,
+    sep: Option<Token>,
 
     /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
     /// before we enter the sequence.
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 4998129fdee..b7fbfd60ed7 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -17,7 +17,7 @@ use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
 
 use errors::FatalError;
-use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
+use syntax_pos::{Span, symbol::Ident};
 use log::debug;
 
 use rustc_data_structures::fx::{FxHashMap};
@@ -266,17 +266,19 @@ pub fn compile(
     let argument_gram = vec![
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::token(token::FatArrow, def.span),
+                quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, ast::Ident::from_str("tt")),
             ],
-            separator: Some(if body.legacy { token::Semi } else { token::Comma }),
+            separator: Some(Token::new(
+                if body.legacy { token::Semi } else { token::Comma }, def.span
+            )),
             op: quoted::KleeneOp::OneOrMore,
             num_captures: 2,
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
+            tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -608,9 +610,8 @@ impl FirstSets {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
 
-                        if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                        subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
+                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                            first.add_one_maybe(TokenTree::Token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -658,9 +659,8 @@ impl FirstSets {
                             // If the sequence contents can be empty, then the first
                             // token could be the separator token itself.
 
-                            if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                            subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
+                            if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                                first.add_one_maybe(TokenTree::Token(sep.clone()));
                             }
 
                             assert!(first.maybe_empty);
@@ -851,7 +851,7 @@ fn check_matcher_core(sess: &ParseSess,
                 // against SUFFIX
                 continue 'each_token;
             }
-            TokenTree::Sequence(sp, ref seq_rep) => {
+            TokenTree::Sequence(_, ref seq_rep) => {
                 suffix_first = build_suffix_first();
                 // The trick here: when we check the interior, we want
                 // to include the separator (if any) as a potential
@@ -864,9 +864,9 @@ fn check_matcher_core(sess: &ParseSess,
                 // work of cloning it? But then again, this way I may
                 // get a "tighter" span?
                 let mut new;
-                let my_suffix = if let Some(ref u) = seq_rep.separator {
+                let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
+                    new.add_one_maybe(TokenTree::Token(sep.clone()));
                     &new
                 } else {
                     &suffix_first
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index ec7d7f705d8..8f8aa586070 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -59,7 +59,7 @@ pub struct SequenceRepetition {
     /// The sequence of token trees
     pub tts: Vec<TokenTree>,
     /// The optional separator
-    pub separator: Option<TokenKind>,
+    pub separator: Option<Token>,
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `Match`s that appear in the sequence (and subsequences)
@@ -424,7 +424,7 @@ fn parse_sep_and_kleene_op<I>(
     attrs: &[ast::Attribute],
     edition: Edition,
     macro_node_id: NodeId,
-) -> (Option<TokenKind>, KleeneOp)
+) -> (Option<Token>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -449,7 +449,7 @@ fn parse_sep_and_kleene_op_2015<I>(
     _features: &Features,
     _attrs: &[ast::Attribute],
     macro_node_id: NodeId,
-) -> (Option<TokenKind>, KleeneOp)
+) -> (Option<Token>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -502,7 +502,7 @@ where
                              a hard error in an upcoming edition",
                         );
 
-                        return (Some(token::Question), op);
+                        return (Some(Token::new(token::Question, op1_span)), op);
                     }
 
                     // #2 is a random token (this is an error) :(
@@ -541,7 +541,7 @@ where
             }
 
             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(token.kind), op),
+            Ok(Ok((op, _))) => return (Some(token), op),
 
             // #2 is a random token :(
             Ok(Err(token)) => token.span,
@@ -567,7 +567,7 @@ fn parse_sep_and_kleene_op_2018<I>(
     sess: &ParseSess,
     _features: &Features,
     _attrs: &[ast::Attribute],
-) -> (Option<TokenKind>, KleeneOp)
+) -> (Option<Token>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -596,7 +596,7 @@ where
             }
 
             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(token.kind), op),
+            Ok(Ok((op, _))) => return (Some(token), op),
 
             // #2 is a random token :(
             Ok(Err(token)) => token.span,
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 90a9cc8f34d..3408cefdf42 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -4,11 +4,10 @@ use crate::ext::expand::Marker;
 use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
 use crate::ext::tt::quoted;
 use crate::mut_visit::noop_visit_tt;
-use crate::parse::token::{self, NtTT, TokenKind};
+use crate::parse::token::{self, NtTT, Token, TokenKind};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use smallvec::{smallvec, SmallVec};
-use syntax_pos::DUMMY_SP;
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -18,7 +17,7 @@ use std::rc::Rc;
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame {
     Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
-    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<TokenKind> },
+    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
 }
 
 impl Frame {
@@ -109,17 +108,13 @@ pub fn transcribe(
         else {
             // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
             // go back to the beginning of the sequence.
-            if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
-                let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
+            if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
+                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
                 *repeat_idx += 1;
-                if *repeat_idx < repeat_len {
+                if repeat_idx < repeat_len {
                     *idx = 0;
-                    if let Some(sep) = sep.clone() {
-                        let prev_span = match result.last() {
-                            Some((tt, _)) => tt.span(),
-                            None => DUMMY_SP,
-                        };
-                        result.push(TokenTree::token(sep, prev_span).into());
+                    if let Some(sep) = sep {
+                        result.push(TokenTree::Token(sep.clone()).into());
                     }
                     continue;
                 }
diff --git a/src/test/ui/macros/macro-input-future-proofing.stderr b/src/test/ui/macros/macro-input-future-proofing.stderr
index a35f6283afb..542486927df 100644
--- a/src/test/ui/macros/macro-input-future-proofing.stderr
+++ b/src/test/ui/macros/macro-input-future-proofing.stderr
@@ -55,10 +55,10 @@ LL |     ($($a:ty, $b:ty)* -) => ();
    = note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`
 
 error: `$ty:ty` is followed by `-`, which is not allowed for `ty` fragments
-  --> $DIR/macro-input-future-proofing.rs:18:7
+  --> $DIR/macro-input-future-proofing.rs:18:15
    |
 LL |     ($($ty:ty)-+) => ();
-   |       ^^^^^^^^ not allowed after `ty` fragments
+   |               ^ not allowed after `ty` fragments
    |
    = note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`