about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
author	mark <markm@cs.wisc.edu>	2018-06-15 21:49:00 -0500
committer	mark <markm@cs.wisc.edu>	2018-07-23 21:54:43 -0500
commit	8eb4941e30d2a40bc03840dd0d99beb5aaf8159d (patch)
tree	c2b4f35f76ad4697f92741e5b6fa6ce5cf4d6bce /src/libsyntax
parent	2a7ae04a6872edd8a1bffa620fde53a2eb2964e1 (diff)
download	rust-8eb4941e30d2a40bc03840dd0d99beb5aaf8159d.tar.gz
	rust-8eb4941e30d2a40bc03840dd0d99beb5aaf8159d.zip
Implement 2015 vs 2018 `?` kleene op + test
Diffstat (limited to 'src/libsyntax')
-rw-r--r--	src/libsyntax/ext/expand.rs	| 18
-rw-r--r--	src/libsyntax/ext/tt/macro_rules.rs	| 462
-rw-r--r--	src/libsyntax/ext/tt/quoted.rs	| 263
3 files changed, 486 insertions, 257 deletions
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index dc461d0a15d..b84046d1050 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -44,8 +44,10 @@ macro_rules! ast_fragments {
     (
         $($Kind:ident($AstTy:ty) {
             $kind_name:expr;
-            $(one fn $fold_ast:ident; fn $visit_ast:ident;)?
-            $(many fn $fold_ast_elt:ident; fn $visit_ast_elt:ident;)?
+            // FIXME: HACK: this should be `$(one ...)?` and `$(many ...)?` but `?` macro
+            // repetition was removed from 2015 edition in #51587 because of ambiguities.
+            $(one fn $fold_ast:ident; fn $visit_ast:ident;)*
+            $(many fn $fold_ast_elt:ident; fn $visit_ast_elt:ident;)*
             fn $make_ast:ident;
         })*
     ) => {
@@ -100,11 +102,11 @@ macro_rules! ast_fragments {
                     AstFragment::OptExpr(expr) =>
                         AstFragment::OptExpr(expr.and_then(|expr| folder.fold_opt_expr(expr))),
                     $($(AstFragment::$Kind(ast) =>
-                        AstFragment::$Kind(folder.$fold_ast(ast)),)?)*
+                        AstFragment::$Kind(folder.$fold_ast(ast)),)*)*
                     $($(AstFragment::$Kind(ast) =>
                         AstFragment::$Kind(ast.into_iter()
                                               .flat_map(|ast| folder.$fold_ast_elt(ast))
-                                              .collect()),)?)*
+                                              .collect()),)*)*
                 }
             }
 
@@ -112,10 +114,10 @@ macro_rules! ast_fragments {
                 match *self {
                     AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr),
                     AstFragment::OptExpr(None) => {}
-                    $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)?)*
+                    $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)*)*
                     $($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] {
                         visitor.$visit_ast_elt(ast_elt);
-                    })?)*
+                    })*)*
                 }
             }
         }
@@ -126,10 +128,10 @@ macro_rules! ast_fragments {
             }
             $($(fn $fold_ast(&mut self, ast: $AstTy) -> $AstTy {
                 self.expand_fragment(AstFragment::$Kind(ast)).$make_ast()
-            })?)*
+            })*)*
             $($(fn $fold_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy {
                 self.expand_fragment(AstFragment::$Kind(SmallVector::one(ast_elt))).$make_ast()
-            })?)*
+            })*)*
         }
 
         impl<'a> MacResult for ::ext::tt::macro_rules::ParserAnyMacro<'a> {
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 9ebead1062e..05e59d6b47c 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -8,28 +8,33 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use {ast, attr};
-use syntax_pos::{Span, DUMMY_SP};
 use edition::Edition;
-use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
-use ext::base::{NormalTT, TTMacroExpander};
-use ext::expand::{AstFragment, AstFragmentKind};
-use ext::tt::macro_parser::{Success, Error, Failure};
-use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
-use ext::tt::macro_parser::{parse, parse_failure_msg};
-use ext::tt::quoted;
-use ext::tt::transcribe::transcribe;
+use ext::{
+    base::{DummyResult, ExtCtxt, MacResult, NormalTT, SyntaxExtension, TTMacroExpander},
+    expand::{AstFragment, AstFragmentKind},
+    tt::{
+        macro_parser::{
+            parse, parse_failure_msg, Error, Failure, MatchedNonterminal, MatchedSeq, Success,
+        },
+        quoted,
+        transcribe::transcribe,
+    },
+};
 use feature_gate::{self, emit_feature_err, Features, GateIssue};
-use parse::{Directory, ParseSess};
-use parse::parser::Parser;
-use parse::token::{self, NtTT};
-use parse::token::Token::*;
+use parse::{
+    parser::Parser,
+    token::{self, NtTT, Token::*},
+    Directory, ParseSess,
+};
 use symbol::Symbol;
+use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree};
+use {ast, attr};
 
-use std::borrow::Cow;
-use std::collections::HashMap;
-use std::collections::hash_map::Entry;
+use std::{
+    borrow::Cow,
+    collections::{hash_map::Entry, HashMap},
+};
 
 use rustc_data_structures::sync::Lrc;
 
@@ -39,12 +44,16 @@ pub struct ParserAnyMacro<'a> {
     /// Span of the expansion site of the macro this parser is for
     site_span: Span,
     /// The ident of the macro we're parsing
-    macro_ident: ast::Ident
+    macro_ident: ast::Ident,
 }
 
 impl<'a> ParserAnyMacro<'a> {
     pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
-        let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self;
+        let ParserAnyMacro {
+            site_span,
+            macro_ident,
+            ref mut parser,
+        } = *self;
         let fragment = panictry!(parser.parse_ast_fragment(kind, true));
 
         // We allow semicolons at the end of expressions -- e.g. the semicolon in
@@ -77,17 +86,16 @@ impl TTMacroExpander for MacroRulesMacroExpander {
         if !self.valid {
             return DummyResult::any(sp);
         }
-        generic_extension(cx,
-                          sp,
-                          self.name,
-                          input,
-                          &self.lhses,
-                          &self.rhses)
+        generic_extension(cx, sp, self.name, input, &self.lhses, &self.rhses)
     }
 }
 
 fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
-    let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
+    let sp = sp
+        .macro_backtrace()
+        .last()
+        .map(|trace| trace.call_site)
+        .unwrap_or(sp);
     let values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
     values.push(message);
 }
@@ -108,10 +116,11 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
     let mut best_fail_spot = DUMMY_SP;
     let mut best_fail_tok = None;
 
-    for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
+    for (i, lhs) in lhses.iter().enumerate() {
+        // try each arm's matchers
         let lhs_tt = match *lhs {
             quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
-            _ => cx.span_bug(sp, "malformed macro lhs")
+            _ => cx.span_bug(sp, "malformed macro lhs"),
         };
 
         match TokenTree::parse(cx, lhs_tt, arg.clone()) {
@@ -147,7 +156,11 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                     ownership: cx.current_expansion.directory_ownership,
                 };
                 let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false);
-                p.root_module_name = cx.current_expansion.module.mod_path.last()
+                p.root_module_name = cx
+                    .current_expansion
+                    .module
+                    .mod_path
+                    .last()
                     .map(|id| id.as_str().to_string());
 
                 p.process_potential_macro_variable();
@@ -160,16 +173,14 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                     // so we can print a useful error message if the parse of the expanded
                     // macro leaves unparsed tokens.
                     site_span: sp,
-                    macro_ident: name
-                })
+                    macro_ident: name,
+                });
             }
             Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() {
                 best_fail_spot = sp;
                 best_fail_tok = Some(tok);
             },
-            Error(err_sp, ref msg) => {
-                cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
-            }
+            Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]),
         }
     }
 
@@ -185,8 +196,12 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
 // Holy self-referential!
 
 /// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: Edition)
-               -> SyntaxExtension {
+pub fn compile(
+    sess: &ParseSess,
+    features: &Features,
+    def: &ast::Item,
+    edition: Edition,
+) -> SyntaxExtension {
     let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
     let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
 
@@ -202,33 +217,47 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
     let argument_gram = vec![
-        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
-            tts: vec![
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
-            ],
-            separator: Some(if body.legacy { token::Semi } else { token::Comma }),
-            op: quoted::KleeneOp::OneOrMore,
-            num_captures: 2,
-        })),
+        quoted::TokenTree::Sequence(
+            DUMMY_SP,
+            Lrc::new(quoted::SequenceRepetition {
+                tts: vec![
+                    quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
+                    quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                    quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
+                ],
+                separator: Some(if body.legacy {
+                    token::Semi
+                } else {
+                    token::Comma
+                }),
+                op: quoted::KleeneOp::OneOrMore,
+                num_captures: 2,
+            }),
+        ),
         // to phase into semicolon-termination instead of semicolon-separation
-        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
-            separator: None,
-            op: quoted::KleeneOp::ZeroOrMore,
-            num_captures: 0
-        })),
+        quoted::TokenTree::Sequence(
+            DUMMY_SP,
+            Lrc::new(quoted::SequenceRepetition {
+                tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+                separator: None,
+                op: quoted::KleeneOp::ZeroOrMore,
+                num_captures: 0,
+            }),
+        ),
     ];
 
     let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
         Success(m) => m,
         Failure(sp, tok) => {
             let s = parse_failure_msg(tok);
-            sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
+            sess.span_diagnostic
+                .span_fatal(sp.substitute_dummy(def.span), &s)
+                .raise();
         }
         Error(sp, s) => {
-            sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
+            sess.span_diagnostic
+                .span_fatal(sp.substitute_dummy(def.span), &s)
+                .raise();
         }
     };
 
@@ -236,35 +265,57 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
 
     // Extract the arguments:
     let lhses = match *argument_map[&lhs_nm] {
-        MatchedSeq(ref s, _) => {
-            s.iter().map(|m| {
+        MatchedSeq(ref s, _) => s
+            .iter()
+            .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        let tt = quoted::parse(tt.clone().into(), true, sess, features, &def.attrs)
-                            .pop().unwrap();
+                        let tt = quoted::parse(
+                            tt.clone().into(),
+                            true,
+                            sess,
+                            features,
+                            &def.attrs,
+                            edition,
+                        ).pop()
+                            .unwrap();
                         valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt);
                         return tt;
                     }
                 }
-                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            }).collect::<Vec<quoted::TokenTree>>()
-        }
-        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+                sess.span_diagnostic
+                    .span_bug(def.span, "wrong-structured lhs")
+            })
+            .collect::<Vec<quoted::TokenTree>>(),
+        _ => sess
+            .span_diagnostic
+            .span_bug(def.span, "wrong-structured lhs"),
     };
 
     let rhses = match *argument_map[&rhs_nm] {
-        MatchedSeq(ref s, _) => {
-            s.iter().map(|m| {
+        MatchedSeq(ref s, _) => s
+            .iter()
+            .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        return quoted::parse(tt.clone().into(), false, sess, features, &def.attrs)
-                            .pop().unwrap();
+                        return quoted::parse(
+                            tt.clone().into(),
+                            false,
+                            sess,
+                            features,
+                            &def.attrs,
+                            edition,
+                        ).pop()
+                            .unwrap();
                     }
                 }
-                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            }).collect::<Vec<quoted::TokenTree>>()
-        }
-        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
+                sess.span_diagnostic
+                    .span_bug(def.span, "wrong-structured lhs")
+            })
+            .collect::<Vec<quoted::TokenTree>>(),
+        _ => sess
+            .span_diagnostic
+            .span_bug(def.span, "wrong-structured rhs"),
     };
 
     for rhs in &rhses {
@@ -293,14 +344,14 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
             }
         }
 
-        let unstable_feature = attr::find_stability(&sess.span_diagnostic,
-                                                    &def.attrs, def.span).and_then(|stability| {
-            if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
-                Some((stability.feature, issue))
-            } else {
-                None
-            }
-        });
+        let unstable_feature = attr::find_stability(&sess.span_diagnostic, &def.attrs, def.span)
+            .and_then(|stability| {
+                if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
+                    Some((stability.feature, issue))
+                } else {
+                    None
+                }
+            });
 
         NormalTT {
             expander,
@@ -323,10 +374,12 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
     }
 }
 
-fn check_lhs_nt_follows(sess: &ParseSess,
-                        features: &Features,
-                        attrs: &[ast::Attribute],
-                        lhs: &quoted::TokenTree) -> bool {
+fn check_lhs_nt_follows(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    lhs: &quoted::TokenTree,
+) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
     if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
@@ -351,15 +404,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                 return false;
             },
             TokenTree::Sequence(span, ref seq) => {
-                if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
-                    match *seq_tt {
-                        TokenTree::MetaVarDecl(_, _, id) => id.name == "vis",
-                        TokenTree::Sequence(_, ref sub_seq) =>
-                            sub_seq.op == quoted::KleeneOp::ZeroOrMore,
-                        _ => false,
+                if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| match *seq_tt {
+                    TokenTree::MetaVarDecl(_, _, id) => id.name == "vis",
+                    TokenTree::Sequence(_, ref sub_seq) => {
+                        sub_seq.op == quoted::KleeneOp::ZeroOrMore
                     }
+                    _ => false,
                 }) {
-                    sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+                    sess.span_diagnostic
+                        .span_err(span, "repetition matches empty token tree");
                     return false;
                 }
                 if !check_lhs_no_empty_seq(sess, &seq.tts) {
@@ -375,15 +428,19 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
 fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
     match *rhs {
         quoted::TokenTree::Delimited(..) => return true,
-        _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited")
+        _ => sess
+            .span_diagnostic
+            .span_err(rhs.span(), "macro rhs must be delimited"),
     }
     false
 }
 
-fn check_matcher(sess: &ParseSess,
-                 features: &Features,
-                 attrs: &[ast::Attribute],
-                 matcher: &[quoted::TokenTree]) -> bool {
+fn check_matcher(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    matcher: &[quoted::TokenTree],
+) -> bool {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
     let err = sess.span_diagnostic.err_count();
@@ -417,7 +474,9 @@ impl FirstSets {
     fn new(tts: &[quoted::TokenTree]) -> FirstSets {
         use self::quoted::TokenTree;
 
-        let mut sets = FirstSets { first: HashMap::new() };
+        let mut sets = FirstSets {
+            first: HashMap::new(),
+        };
         build_recur(&mut sets, tts);
         return sets;
 
@@ -456,8 +515,9 @@ impl FirstSets {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
 
-                        if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                        subfirst.maybe_empty) {
+                        if let (Some(ref sep), true) =
+                            (seq_rep.separator.clone(), subfirst.maybe_empty)
+                        {
                             first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                         }
 
@@ -465,7 +525,10 @@ impl FirstSets {
                         if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                             // If sequence is potentially empty, then
                             // union them (preserving first emptiness).
-                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
+                            first.add_all(&TokenSet {
+                                maybe_empty: true,
+                                ..subfirst
+                            });
                         } else {
                             // Otherwise, sequence guaranteed
                             // non-empty; replace first.
@@ -499,19 +562,18 @@ impl FirstSets {
                 TokenTree::Sequence(sp, ref seq_rep) => {
                     match self.first.get(&sp) {
                         Some(&Some(ref subfirst)) => {
-
                             // If the sequence contents can be empty, then the first
                             // token could be the separator token itself.
 
-                            if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                            subfirst.maybe_empty) {
+                            if let (Some(ref sep), true) =
+                                (seq_rep.separator.clone(), subfirst.maybe_empty)
+                            {
                                 first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                             }
 
                             assert!(first.maybe_empty);
                             first.add_all(subfirst);
-                            if subfirst.maybe_empty ||
-                               seq_rep.op == quoted::KleeneOp::ZeroOrMore {
+                            if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                                 // continue scanning for more first
                                 // tokens, but also make sure we
                                 // restore empty-tracking state
@@ -559,12 +621,20 @@ struct TokenSet {
 
 impl TokenSet {
     // Returns a set for the empty sequence.
-    fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } }
+    fn empty() -> Self {
+        TokenSet {
+            tokens: Vec::new(),
+            maybe_empty: true,
+        }
+    }
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
     fn singleton(tok: quoted::TokenTree) -> Self {
-        TokenSet { tokens: vec![tok], maybe_empty: false }
+        TokenSet {
+            tokens: vec![tok],
+            maybe_empty: false,
+        }
     }
 
     // Changes self to be the set `{ tok }`.
@@ -628,12 +698,14 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(sess: &ParseSess,
-                      features: &Features,
-                      attrs: &[ast::Attribute],
-                      first_sets: &FirstSets,
-                      matcher: &[quoted::TokenTree],
-                      follow: &TokenSet) -> TokenSet {
+fn check_matcher_core(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    first_sets: &FirstSets,
+    matcher: &[quoted::TokenTree],
+    follow: &TokenSet,
+) -> TokenSet {
     use self::quoted::TokenTree;
 
     let mut last = TokenSet::empty();
@@ -643,11 +715,13 @@ fn check_matcher_core(sess: &ParseSess,
     // then ensure T can also be followed by any element of FOLLOW.
     'each_token: for i in 0..matcher.len() {
         let token = &matcher[i];
-        let suffix = &matcher[i+1..];
+        let suffix = &matcher[i + 1..];
 
         let build_suffix_first = || {
             let mut s = first_sets.first(suffix);
-            if s.maybe_empty { s.add_all(follow); }
+            if s.maybe_empty {
+                s.add_all(follow);
+            }
             s
         };
 
@@ -663,9 +737,12 @@ fn check_matcher_core(sess: &ParseSess,
                 let can_be_followed_by_any;
                 if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) {
                     let msg = format!("invalid fragment specifier `{}`", bad_frag);
-                    sess.span_diagnostic.struct_span_err(token.span(), &msg)
-                        .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \
-                              `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`")
+                    sess.span_diagnostic
+                        .struct_span_err(token.span(), &msg)
+                        .help(
+                            "valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \
+                             `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`",
+                        )
                         .emit();
                     // (This eliminates false positives and duplicates
                     // from error messages.)
@@ -719,12 +796,8 @@ fn check_matcher_core(sess: &ParseSess,
                 // At this point, `suffix_first` is built, and
                 // `my_suffix` is some TokenSet that we can use
                 // for checking the interior of `seq_rep`.
-                let next = check_matcher_core(sess,
-                                              features,
-                                              attrs,
-                                              first_sets,
-                                              &seq_rep.tts,
-                                              my_suffix);
+                let next =
+                    check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix);
                 if next.maybe_empty {
                     last.add_all(&next);
                 } else {
@@ -746,16 +819,17 @@ fn check_matcher_core(sess: &ParseSess,
                 for next_token in &suffix_first.tokens {
                     match is_in_follow(next_token, &frag_spec.as_str()) {
                         Err((msg, help)) => {
-                            sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
-                                .help(help).emit();
+                            sess.span_diagnostic
+                                .struct_span_err(next_token.span(), &msg)
+                                .help(help)
+                                .emit();
                             // don't bother reporting every source of
                             // conflict for a particular element of `last`.
                             continue 'each_last;
                         }
                         Ok(true) => {}
                         Ok(false) => {
-                            let may_be = if last.tokens.len() == 1 &&
-                                suffix_first.tokens.len() == 1
+                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
                             {
                                 "is"
                             } else {
@@ -764,12 +838,14 @@ fn check_matcher_core(sess: &ParseSess,
 
                             sess.span_diagnostic.span_err(
                                 next_token.span(),
-                                &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
-                                          is not allowed for `{frag}` fragments",
-                                         name=name,
-                                         frag=frag_spec,
-                                         next=quoted_tt_to_string(next_token),
-                                         may_be=may_be)
+                                &format!(
+                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
+                                     is not allowed for `{frag}` fragments",
+                                    name = name,
+                                    frag = frag_spec,
+                                    next = quoted_tt_to_string(next_token),
+                                    may_be = may_be
+                                ),
                             );
                         }
                     }
@@ -834,16 +910,16 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
                 // since items *must* be followed by either a `;` or a `}`, we can
                 // accept anything after them
                 Ok(true)
-            },
+            }
             "block" => {
                 // anything can follow block, the braces provide an easy boundary to
                 // maintain
                 Ok(true)
-            },
-            "stmt" | "expr"  => match *tok {
+            }
+            "stmt" | "expr" => match *tok {
                 TokenTree::Token(_, ref tok) => match *tok {
                     FatArrow | Comma | Semi => Ok(true),
-                    _ => Ok(false)
+                    _ => Ok(false),
                 },
                 _ => Ok(false),
             },
@@ -851,16 +927,23 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
                 TokenTree::Token(_, ref tok) => match *tok {
                     FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
                     Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true),
-                    _ => Ok(false)
+                    _ => Ok(false),
                 },
                 _ => Ok(false),
             },
             "path" | "ty" => match *tok {
                 TokenTree::Token(_, ref tok) => match *tok {
-                    OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
-                    Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
+                    OpenDelim(token::DelimToken::Brace)
+                    | OpenDelim(token::DelimToken::Bracket)
+                    | Comma
+                    | FatArrow
+                    | Colon
+                    | Eq
+                    | Gt
+                    | Semi
+                    | BinOp(token::Or) => Ok(true),
                     Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true),
-                    _ => Ok(false)
+                    _ => Ok(false),
                 },
                 TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
                 _ => Ok(false),
@@ -868,43 +951,49 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
             "ident" | "lifetime" => {
                 // being a single token, idents and lifetimes are harmless
                 Ok(true)
-            },
+            }
             "literal" => {
                 // literals may be of a single token, or two tokens (negative numbers)
                 Ok(true)
-            },
+            }
             "meta" | "tt" => {
                 // being either a single token or a delimited sequence, tt is
                 // harmless
                 Ok(true)
-            },
+            }
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
                 match *tok {
                     TokenTree::Token(_, ref tok) => match *tok {
                         Comma => Ok(true),
                         Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true),
-                        ref tok => Ok(tok.can_begin_type())
+                        ref tok => Ok(tok.can_begin_type()),
                     },
-                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident"
-                                                       || frag.name == "ty"
-                                                       || frag.name == "path" => Ok(true),
-                    _ => Ok(false)
+                    TokenTree::MetaVarDecl(_, _, frag)
+                        if frag.name == "ident" || frag.name == "ty" || frag.name == "path" =>
+                    {
+                        Ok(true)
+                    }
+                    _ => Ok(false),
                 }
-            },
+            }
             "" => Ok(true), // keywords::Invalid
-            _ => Err((format!("invalid fragment specifier `{}`", frag),
-                     "valid fragment specifiers are `ident`, `block`, \
-                      `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \
-                      `literal`, `item` and `vis`"))
+            _ => Err((
+                format!("invalid fragment specifier `{}`", frag),
+                "valid fragment specifiers are `ident`, `block`, \
+                 `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \
+                 `literal`, `item` and `vis`",
+            )),
         }
     }
 }
 
-fn has_legal_fragment_specifier(sess: &ParseSess,
-                                features: &Features,
-                                attrs: &[ast::Attribute],
-                                tok: &quoted::TokenTree) -> Result<(), String> {
+fn has_legal_fragment_specifier(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    tok: &quoted::TokenTree,
+) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
     if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
         let frag_name = frag_spec.as_str();
@@ -916,38 +1005,45 @@ fn has_legal_fragment_specifier(sess: &ParseSess,
     Ok(())
 }
 
-fn is_legal_fragment_specifier(sess: &ParseSess,
-                               features: &Features,
-                               attrs: &[ast::Attribute],
-                               frag_name: &str,
-                               frag_span: Span) -> bool {
+fn is_legal_fragment_specifier(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    frag_name: &str,
+    frag_span: Span,
+) -> bool {
     match frag_name {
-        "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" |
-        "path" | "ty" | "ident" | "meta" | "tt" | "" => true,
+        "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" | "path" | "ty" | "ident"
+        | "meta" | "tt" | "" => true,
         "literal" => {
-            if !features.macro_literal_matcher &&
-               !attr::contains_name(attrs, "allow_internal_unstable") {
+            if !features.macro_literal_matcher
+                && !attr::contains_name(attrs, "allow_internal_unstable")
+            {
                 let explain = feature_gate::EXPLAIN_LITERAL_MATCHER;
-                emit_feature_err(sess,
-                                 "macro_literal_matcher",
-                                 frag_span,
-                                 GateIssue::Language,
-                                 explain);
+                emit_feature_err(
+                    sess,
+                    "macro_literal_matcher",
+                    frag_span,
+                    GateIssue::Language,
+                    explain,
+                );
             }
             true
-        },
+        }
         "vis" => {
-            if !features.macro_vis_matcher &&
-               !attr::contains_name(attrs, "allow_internal_unstable") {
+            if !features.macro_vis_matcher && !attr::contains_name(attrs, "allow_internal_unstable")
+            {
                 let explain = feature_gate::EXPLAIN_VIS_MATCHER;
-                emit_feature_err(sess,
-                                 "macro_vis_matcher",
-                                 frag_span,
-                                 GateIssue::Language,
-                                 explain);
+                emit_feature_err(
+                    sess,
+                    "macro_vis_matcher",
+                    frag_span,
+                    GateIssue::Language,
+                    explain,
+                );
             }
             true
-        },
+        }
         _ => false,
     }
 }
@@ -957,7 +1053,9 @@ fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
         quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
-        _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
-                     in follow set checker"),
+        _ => panic!(
+            "unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+             in follow set checker"
+        ),
     }
 }
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index d21ffabb62e..1bca6dd0653 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -8,17 +8,17 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use {ast, attr};
 use ext::tt::macro_parser;
 use feature_gate::{self, emit_feature_err, Features, GateIssue};
 use parse::{token, ParseSess};
 use print::pprust;
 use symbol::keywords;
-use syntax_pos::{BytePos, Span};
+use syntax_pos::{edition::Edition, BytePos, Span, DUMMY_SP};
 use tokenstream;
+use {ast, attr};
 
-use std::iter::Peekable;
 use rustc_data_structures::sync::Lrc;
+use std::iter::Peekable;
 
 /// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
 /// that the delimiter itself might be `NoDelim`.
@@ -174,6 +174,7 @@ impl TokenTree {
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`, `attrs`: language feature flags and attributes so that we know whether to use
 ///   unstable features or not.
+/// - `edition`: which edition are we in.
 ///
 /// # Returns
 ///
@@ -184,6 +185,7 @@ pub fn parse(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
+    edition: Edition,
 ) -> Vec<TokenTree> {
     // Will contain the final collection of `self::TokenTree`
     let mut result = Vec::new();
@@ -194,7 +196,15 @@ pub fn parse(
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, features, attrs);
+        let tree = parse_tree(
+            tree,
+            &mut trees,
+            expect_matchers,
+            sess,
+            features,
+            attrs,
+            edition,
+        );
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
@@ -207,11 +217,13 @@ pub fn parse(
                             }
                             _ => end_sp,
                         },
-                        tree => tree.as_ref()
+                        tree => tree
+                            .as_ref()
                             .map(tokenstream::TokenTree::span)
                             .unwrap_or(span),
                     },
-                    tree => tree.as_ref()
+                    tree => tree
+                        .as_ref()
                         .map(tokenstream::TokenTree::span)
                         .unwrap_or(start_sp),
                 };
@@ -252,6 +264,7 @@ fn parse_tree<I>(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
+    edition: Edition,
 ) -> TokenTree
 where
     I: Iterator<Item = tokenstream::TokenTree>,
@@ -270,9 +283,17 @@ where
                     sess.span_diagnostic.span_err(span, &msg);
                 }
                 // Parse the contents of the sequence itself
-                let sequence = parse(delimited.tts.into(), expect_matchers, sess, features, attrs);
+                let sequence = parse(
+                    delimited.tts.into(),
+                    expect_matchers,
+                    sess,
+                    features,
+                    attrs,
+                    edition,
+                );
                 // Get the Kleene operator and optional separator
-                let (separator, op) = parse_sep_and_kleene_op(trees, span, sess, features, attrs);
+                let (separator, op) =
+                    parse_sep_and_kleene_op(trees, span, sess, features, attrs, edition);
                 // Count the number of captured "names" (i.e. named metavars)
                 let name_captures = macro_parser::count_names(&sequence);
                 TokenTree::Sequence(
@@ -322,7 +343,14 @@ where
             span,
             Lrc::new(Delimited {
                 delim: delimited.delim,
-                tts: parse(delimited.tts.into(), expect_matchers, sess, features, attrs),
+                tts: parse(
+                    delimited.tts.into(),
+                    expect_matchers,
+                    sess,
+                    features,
+                    attrs,
+                    edition,
+                ),
             }),
         ),
     }
@@ -341,22 +369,23 @@ fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
 
 /// Parse the next token tree of the input looking for a KleeneOp. Returns
 ///
-/// - Ok(Ok(op)) if the next token tree is a KleeneOp
+/// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
 fn parse_kleene_op<I>(
     input: &mut I,
     span: Span,
-) -> Result<Result<KleeneOp, (token::Token, Span)>, Span>
+) -> Result<Result<(KleeneOp, Span), (token::Token, Span)>, Span>
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
     match input.next() {
         Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
-            Some(op) => Ok(Ok(op)),
+            Some(op) => Ok(Ok((op, span))),
             None => Ok(Err((tok, span))),
         },
-        tree => Err(tree.as_ref()
+        tree => Err(tree
+            .as_ref()
             .map(tokenstream::TokenTree::span)
             .unwrap_or(span)),
     }
@@ -374,12 +403,34 @@ where
 /// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
+///
+/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. In 2018,
+/// `?` is a Kleene op and not a separator.
 fn parse_sep_and_kleene_op<I>(
     input: &mut Peekable<I>,
     span: Span,
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
+    edition: Edition,
+) -> (Option<token::Token>, KleeneOp)
+where
+    I: Iterator<Item = tokenstream::TokenTree>,
+{
+    match edition {
+        Edition::Edition2015 => parse_sep_and_kleene_op_2015(input, span, sess, features, attrs),
+        Edition::Edition2018 => parse_sep_and_kleene_op_2018(input, span, sess, features, attrs),
+        _ => unimplemented!(),
+    }
+}
+
+// `?` is a separator (with a migration warning) and never a KleeneOp.
+fn parse_sep_and_kleene_op_2015<I>(
+    input: &mut Peekable<I>,
+    span: Span,
+    sess: &ParseSess,
+    _features: &Features,
+    _attrs: &[ast::Attribute],
 ) -> (Option<token::Token>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
@@ -388,14 +439,14 @@ where
     let span = match parse_kleene_op(input, span) {
         // #1 is a `+` or `*` KleeneOp
         //
-        // `?` is ambiguous: it could be a separator or a Kleene::ZeroOrOne, so we need to look
-        // ahead one more token to be sure.
-        Ok(Ok(op)) if op != KleeneOp::ZeroOrOne => return (None, op),
-
-        // #1 is `?` token, but it could be a Kleene::ZeroOrOne without a separator or it could
-        // be a `?` separator followed by any Kleene operator. We need to look ahead 1 token to
-        // find out which.
-        Ok(Ok(op)) => {
+        // `?` is ambiguous: it could be a separator (warning) or a Kleene::ZeroOrOne (error), so
+        // we need to look ahead one more token to be sure.
+        Ok(Ok((op, _))) if op != KleeneOp::ZeroOrOne => return (None, op),
+
+        // #1 is `?` token, but it could be a Kleene::ZeroOrOne (error in 2015) without a separator
+        // or it could be a `?` separator followed by any Kleene operator. We need to look ahead 1
+        // token to find out which.
+        Ok(Ok((op, op1_span))) => {
             assert_eq!(op, KleeneOp::ZeroOrOne);
 
             // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
@@ -406,71 +457,147 @@ where
             };
 
             if is_1_sep {
-                // #1 is a separator and #2 should be a KleepeOp::*
+                // #1 is a separator and #2 should be a KleeneOp.
                 // (N.B. We need to advance the input iterator.)
                 match parse_kleene_op(input, span) {
-                    // #2 is a KleeneOp (this is the only valid option) :)
-                    Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
-                        if !features.macro_at_most_once_rep
-                            && !attr::contains_name(attrs, "allow_internal_unstable")
-                        {
-                            let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
-                            emit_feature_err(
-                                sess,
-                                "macro_at_most_once_rep",
-                                span,
-                                GateIssue::Language,
-                                explain,
-                            );
-                        }
+                    // #2 is `?`, which is not allowed as a Kleene op in 2015 edition.
+                    Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
+                        sess.span_diagnostic
+                            .struct_span_err(op2_span, "expected `*` or `+`")
+                            .note("`?` is not a macro repetition operator")
+                            .emit();
+
+                        // Return a dummy
+                        return (None, KleeneOp::ZeroOrMore);
+                    }
+
+                    // #2 is a Kleene op, which is the only valid option
+                    Ok(Ok((op, _))) => {
+                        // Warn that `?` as a separator will be deprecated
+                        sess.span_diagnostic.span_warn(
+                            op1_span,
+                            "using `?` as a separator is deprecated and will be \
+                             a hard error in an upcoming edition",
+                        );
+
                         return (Some(token::Question), op);
                     }
-                    Ok(Ok(op)) => return (Some(token::Question), op),
 
                     // #2 is a random token (this is an error) :(
-                    Ok(Err((_, span))) => span,
+                    Ok(Err((_, _))) => op1_span,
 
                     // #2 is not even a token at all :(
-                    Err(span) => span,
+                    Err(_) => op1_span,
                 }
             } else {
-                if !features.macro_at_most_once_rep
-                    && !attr::contains_name(attrs, "allow_internal_unstable")
-                {
-                    let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
-                    emit_feature_err(
-                        sess,
-                        "macro_at_most_once_rep",
-                        span,
-                        GateIssue::Language,
-                        explain,
-                    );
-                }
+                // `?` is not allowed as a Kleene op in 2015
+                sess.span_diagnostic
+                    .struct_span_err(op1_span, "expected `*` or `+`")
+                    .note("`?` is not a macro repetition operator")
+                    .emit();
+
+                // Return a dummy
+                return (None, KleeneOp::ZeroOrMore);
+            }
+        }
+
+        // #1 is a separator followed by #2, a KleeneOp
+        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+            // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition.
+            Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
+                sess.span_diagnostic
+                    .struct_span_err(op2_span, "expected `*` or `+`")
+                    .note("`?` is not a macro repetition operator")
+                    .emit();
+
+                // Return a dummy
+                return (None, KleeneOp::ZeroOrMore);
+            }
+
+            // #2 is a KleeneOp :D
+            Ok(Ok((op, _))) => return (Some(tok), op),
+
+            // #2 is a random token :(
+            Ok(Err((_, span))) => span,
+
+            // #2 is not a token at all :(
+            Err(span) => span,
+        },
+
+        // #1 is not a token
+        Err(span) => span,
+    };
+
+    sess.span_diagnostic.span_err(span, "expected `*` or `+`");
+
+    // Return a dummy
+    (None, KleeneOp::ZeroOrMore)
+}
+
+// `?` is a Kleene op, not a separator
+fn parse_sep_and_kleene_op_2018<I>(
+    input: &mut Peekable<I>,
+    span: Span,
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+) -> (Option<token::Token>, KleeneOp)
+where
+    I: Iterator<Item = tokenstream::TokenTree>,
+{
+    // We basically look at two token trees here, denoted as #1 and #2 below
+    let span = match parse_kleene_op(input, span) {
+        // #1 is a `?` (needs feature gate)
+        Ok(Ok((op, op1_span))) if op == KleeneOp::ZeroOrOne => {
+            if !features.macro_at_most_once_rep
+                && !attr::contains_name(attrs, "allow_internal_unstable")
+            {
+                let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
+                emit_feature_err(
+                    sess,
+                    "macro_at_most_once_rep",
+                    op1_span,
+                    GateIssue::Language,
+                    explain,
+                );
 
-                // #2 is a random tree and #1 is KleeneOp::ZeroOrOne
+                op1_span
+            } else {
                 return (None, op);
             }
         }
 
+        // #1 is a `+` or `*` KleeneOp
+        Ok(Ok((op, _))) => return (None, op),
+
         // #1 is a separator followed by #2, a KleeneOp
         Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
-            // #2 is a KleeneOp :D
-            Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
+            // #2 is the `?` Kleene op, which does not take a separator (error)
+            Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
+                // Error!
+
                 if !features.macro_at_most_once_rep
                     && !attr::contains_name(attrs, "allow_internal_unstable")
                 {
-                    let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
-                    emit_feature_err(
-                        sess,
-                        "macro_at_most_once_rep",
+                    // FIXME: when `?` as a Kleene op is stabilized, we only need the "does not
+                    // take a macro separator" error (i.e. the `else` case).
+                    sess.span_diagnostic
+                        .struct_span_err(op2_span, "expected `*` or `+`")
+                        .note("`?` is not a macro repetition operator")
+                        .emit();
+                } else {
+                    sess.span_diagnostic.span_err(
                         span,
-                        GateIssue::Language,
-                        explain,
+                        "the `?` macro repetition operator does not take a separator",
                     );
                 }
-                return (Some(tok), op);
+
+                // Return a dummy
+                return (None, KleeneOp::ZeroOrMore);
             }
-            Ok(Ok(op)) => return (Some(tok), op),
+
+            // #2 is a KleeneOp :D
+            Ok(Ok((op, _))) => return (Some(tok), op),
 
             // #2 is a random token :(
             Ok(Err((_, span))) => span,
@@ -483,13 +610,15 @@ where
         Err(span) => span,
     };
 
-    if !features.macro_at_most_once_rep
-        && !attr::contains_name(attrs, "allow_internal_unstable")
-    {
+    // If we ever get to this point, we have experienced an "unexpected token" error
+
+    if !features.macro_at_most_once_rep && !attr::contains_name(attrs, "allow_internal_unstable") {
+        sess.span_diagnostic.span_err(span, "expected `*` or `+`");
+    } else {
         sess.span_diagnostic
             .span_err(span, "expected one of: `*`, `+`, or `?`");
-    } else {
-        sess.span_diagnostic.span_err(span, "expected `*` or `+`");
     }
+
+    // Return a dummy
     (None, KleeneOp::ZeroOrMore)
 }