Diffstat (limited to 'compiler/rustc_expand')
-rw-r--r--  compiler/rustc_expand/src/base.rs             |  99
-rw-r--r--  compiler/rustc_expand/src/expand.rs           |  82
-rw-r--r--  compiler/rustc_expand/src/mbe/diagnostics.rs  |  18
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_check.rs  |  62
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs  | 132
5 files changed, 208 insertions, 185 deletions
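
The common thread across these five files is replacing boolean/`Option` "an error happened" flags with `ErrorGuaranteed`, the token that can only be obtained by actually emitting a diagnostic. A minimal standalone sketch of that pattern follows; `ErrorGuaranteed`, `DiagCtxt`, and `DummyResult` here are simplified stand-ins, not the real rustc types.

#[derive(Copy, Clone)]
struct ErrorGuaranteed(()); // can only be minted by emitting an error

struct DiagCtxt;

impl DiagCtxt {
    fn span_err(&self, msg: &str) -> ErrorGuaranteed {
        eprintln!("error: {msg}");
        ErrorGuaranteed(()) // emitting is the only way to obtain the token
    }
}

struct DummyResult {
    // Before this change: `is_error: bool`. After: proof an error was emitted.
    guar: Option<ErrorGuaranteed>,
}

impl DummyResult {
    fn any(guar: ErrorGuaranteed) -> DummyResult {
        DummyResult { guar: Some(guar) }
    }
    fn any_valid() -> DummyResult {
        DummyResult { guar: None }
    }
    fn is_error(&self) -> bool {
        self.guar.is_some()
    }
}

fn main() {
    let dcx = DiagCtxt;
    let guar = dcx.span_err("something went wrong");
    // An error dummy cannot be built without a prior diagnostic.
    let err_result = DummyResult::any(guar);
    let ok_result = DummyResult::any_valid();
    println!("{} {}", err_result.is_error(), ok_result.is_error());
}
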
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index 762d6745341..7ece46523db 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -532,7 +532,7 @@ impl MacResult for MacEager {
 /// after hitting errors.
 #[derive(Copy, Clone)]
 pub struct DummyResult {
-    is_error: bool,
+    guar: Option<ErrorGuaranteed>,
     span: Span,
 }
 
@@ -541,20 +541,24 @@ impl DummyResult {
     ///
     /// Use this as a return value after hitting any errors and
     /// calling `span_err`.
-    pub fn any(span: Span) -> Box<dyn MacResult + 'static> {
-        Box::new(DummyResult { is_error: true, span })
+    pub fn any(span: Span, guar: ErrorGuaranteed) -> Box<dyn MacResult + 'static> {
+        Box::new(DummyResult { guar: Some(guar), span })
     }
 
     /// Same as `any`, but must be a valid fragment, not error.
     pub fn any_valid(span: Span) -> Box<dyn MacResult + 'static> {
-        Box::new(DummyResult { is_error: false, span })
+        Box::new(DummyResult { guar: None, span })
     }
 
     /// A plain dummy expression.
-    pub fn raw_expr(sp: Span, is_error: bool) -> P<ast::Expr> {
+    pub fn raw_expr(sp: Span, guar: Option<ErrorGuaranteed>) -> P<ast::Expr> {
         P(ast::Expr {
             id: ast::DUMMY_NODE_ID,
-            kind: if is_error { ast::ExprKind::Err } else { ast::ExprKind::Tup(ThinVec::new()) },
+            kind: if let Some(guar) = guar {
+                ast::ExprKind::Err(guar)
+            } else {
+                ast::ExprKind::Tup(ThinVec::new())
+            },
             span: sp,
             attrs: ast::AttrVec::new(),
             tokens: None,
@@ -582,7 +586,7 @@ impl DummyResult {
 
 impl MacResult for DummyResult {
     fn make_expr(self: Box<DummyResult>) -> Option<P<ast::Expr>> {
-        Some(DummyResult::raw_expr(self.span, self.is_error))
+        Some(DummyResult::raw_expr(self.span, self.guar))
     }
 
     fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
@@ -608,7 +612,7 @@ impl MacResult for DummyResult {
     fn make_stmts(self: Box<DummyResult>) -> Option<SmallVec<[ast::Stmt; 1]>> {
         Some(smallvec![ast::Stmt {
             id: ast::DUMMY_NODE_ID,
-            kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)),
+            kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.guar)),
             span: self.span,
         }])
     }
@@ -884,17 +888,19 @@ impl SyntaxExtension {
         }
     }
 
+    /// A dummy bang macro `foo!()`.
     pub fn dummy_bang(edition: Edition) -> SyntaxExtension {
         fn expander<'cx>(
-            _: &'cx mut ExtCtxt<'_>,
+            cx: &'cx mut ExtCtxt<'_>,
             span: Span,
             _: TokenStream,
         ) -> Box<dyn MacResult + 'cx> {
-            DummyResult::any(span)
+            DummyResult::any(span, cx.dcx().span_delayed_bug(span, "expanded a dummy bang macro"))
         }
         SyntaxExtension::default(SyntaxExtensionKind::LegacyBang(Box::new(expander)), edition)
     }
 
+    /// A dummy derive macro `#[derive(Foo)]`.
     pub fn dummy_derive(edition: Edition) -> SyntaxExtension {
         fn expander(
             _: &mut ExtCtxt<'_>,
@@ -1066,7 +1072,7 @@ pub struct ExtCtxt<'a> {
     pub sess: &'a Session,
     pub ecfg: expand::ExpansionConfig<'a>,
     pub num_standard_library_imports: usize,
-    pub reduced_recursion_limit: Option<Limit>,
+    pub reduced_recursion_limit: Option<(Limit, ErrorGuaranteed)>,
     pub root_path: PathBuf,
     pub resolver: &'a mut dyn ResolverExpand,
     pub current_expansion: ExpansionData,
@@ -1244,7 +1250,7 @@ pub fn resolve_path(
 /// Extracts a string literal from the macro expanded version of `expr`,
 /// returning a diagnostic error of `err_msg` if `expr` is not a string literal.
 /// The returned bool indicates whether an applicable suggestion has already been
-/// added to the diagnostic to avoid emitting multiple suggestions. `Err(None)`
+/// added to the diagnostic to avoid emitting multiple suggestions. `Err(Err(ErrorGuaranteed))`
 /// indicates that an ast error was encountered.
 // FIXME(Nilstrieb) Make this function setup translatable
 #[allow(rustc::untranslatable_diagnostic)]
@@ -1252,7 +1258,10 @@ pub fn expr_to_spanned_string<'a>(
     cx: &'a mut ExtCtxt<'_>,
     expr: P<ast::Expr>,
     err_msg: &'static str,
-) -> Result<(Symbol, ast::StrStyle, Span), Option<(DiagnosticBuilder<'a>, bool)>> {
+) -> Result<
+    (Symbol, ast::StrStyle, Span),
+    Result<(DiagnosticBuilder<'a>, bool /* has_suggestions */), ErrorGuaranteed>,
+> {
     // Perform eager expansion on the expression.
     // We want to be able to handle e.g., `concat!("foo", "bar")`.
     let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
@@ -1269,38 +1278,33 @@ pub fn expr_to_spanned_string<'a>(
                     "",
                     Applicability::MaybeIncorrect,
                 );
-                Some((err, true))
-            }
-            Ok(ast::LitKind::Err(_)) => None,
-            Err(err) => {
-                report_lit_error(&cx.sess.parse_sess, err, token_lit, expr.span);
-                None
+                Ok((err, true))
             }
-            _ => Some((cx.dcx().struct_span_err(expr.span, err_msg), false)),
+            Ok(ast::LitKind::Err(guar)) => Err(guar),
+            Err(err) => Err(report_lit_error(&cx.sess.parse_sess, err, token_lit, expr.span)),
+            _ => Ok((cx.dcx().struct_span_err(expr.span, err_msg), false)),
         },
-        ast::ExprKind::Err => None,
+        ast::ExprKind::Err(guar) => Err(guar),
         ast::ExprKind::Dummy => {
             cx.dcx().span_bug(expr.span, "tried to get a string literal from `ExprKind::Dummy`")
         }
-        _ => Some((cx.dcx().struct_span_err(expr.span, err_msg), false)),
+        _ => Ok((cx.dcx().struct_span_err(expr.span, err_msg), false)),
     })
 }
 
 /// Extracts a string literal from the macro expanded version of `expr`,
 /// emitting `err_msg` if `expr` is not a string literal. This does not stop
-/// compilation on error, merely emits a non-fatal error and returns `None`.
+/// compilation on error, merely emits a non-fatal error and returns `Err`.
 pub fn expr_to_string(
     cx: &mut ExtCtxt<'_>,
     expr: P<ast::Expr>,
     err_msg: &'static str,
-) -> Option<(Symbol, ast::StrStyle)> {
+) -> Result<(Symbol, ast::StrStyle), ErrorGuaranteed> {
     expr_to_spanned_string(cx, expr, err_msg)
-        .map_err(|err| {
-            err.map(|(err, _)| {
-                err.emit();
-            })
+        .map_err(|err| match err {
+            Ok((err, _)) => err.emit(),
+            Err(guar) => guar,
         })
-        .ok()
         .map(|(symbol, style, _)| (symbol, style))
 }
 
@@ -1314,32 +1318,30 @@ pub fn check_zero_tts(cx: &ExtCtxt<'_>, span: Span, tts: TokenStream, name: &str
     }
 }
 
-/// Parse an expression. On error, emit it, advancing to `Eof`, and return `None`.
-pub fn parse_expr(p: &mut parser::Parser<'_>) -> Option<P<ast::Expr>> {
-    match p.parse_expr() {
-        Ok(e) => return Some(e),
-        Err(err) => {
-            err.emit();
-        }
-    }
+/// Parse an expression. On error, emit it, advancing to `Eof`, and return `Err`.
+pub fn parse_expr(p: &mut parser::Parser<'_>) -> Result<P<ast::Expr>, ErrorGuaranteed> {
+    let guar = match p.parse_expr() {
+        Ok(expr) => return Ok(expr),
+        Err(err) => err.emit(),
+    };
     while p.token != token::Eof {
         p.bump();
     }
-    None
+    Err(guar)
 }
 
 /// Interpreting `tts` as a comma-separated sequence of expressions,
-/// expect exactly one string literal, or emit an error and return `None`.
+/// expect exactly one string literal, or emit an error and return `Err`.
 pub fn get_single_str_from_tts(
     cx: &mut ExtCtxt<'_>,
     span: Span,
     tts: TokenStream,
     name: &str,
-) -> Option<Symbol> {
+) -> Result<Symbol, ErrorGuaranteed> {
     let mut p = cx.new_parser_from_tts(tts);
     if p.token == token::Eof {
-        cx.dcx().emit_err(errors::OnlyOneArgument { span, name });
-        return None;
+        let guar = cx.dcx().emit_err(errors::OnlyOneArgument { span, name });
+        return Err(guar);
     }
     let ret = parse_expr(&mut p)?;
     let _ = p.eat(&token::Comma);
@@ -1351,8 +1353,11 @@ pub fn get_single_str_from_tts(
 }
 
 /// Extracts comma-separated expressions from `tts`.
-/// On error, emit it, and return `None`.
-pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>, tts: TokenStream) -> Option<Vec<P<ast::Expr>>> {
+/// On error, emit it, and return `Err`.
+pub fn get_exprs_from_tts(
+    cx: &mut ExtCtxt<'_>,
+    tts: TokenStream,
+) -> Result<Vec<P<ast::Expr>>, ErrorGuaranteed> {
     let mut p = cx.new_parser_from_tts(tts);
     let mut es = Vec::new();
     while p.token != token::Eof {
@@ -1367,11 +1372,11 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>, tts: TokenStream) -> Option<Vec<
             continue;
         }
         if p.token != token::Eof {
-            cx.dcx().emit_err(errors::ExpectedCommaInList { span: p.token.span });
-            return None;
+            let guar = cx.dcx().emit_err(errors::ExpectedCommaInList { span: p.token.span });
+            return Err(guar);
         }
     }
-    Some(es)
+    Ok(es)
 }
 
 pub fn parse_macro_name_and_helper_attrs(
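
The base.rs helpers above (`parse_expr`, `expr_to_string`, `get_single_str_from_tts`, `get_exprs_from_tts`) move from `Option<_>` to `Result<_, ErrorGuaranteed>`. A hedged sketch of what this buys callers, using a mock `Parser` and `ErrorGuaranteed` rather than the real rustc_parse API: the `?` operator now forwards the proof that a diagnostic was already emitted instead of collapsing failure into `None`.

#[derive(Copy, Clone)]
struct ErrorGuaranteed(());

struct Parser {
    tokens: Vec<String>,
    pos: usize,
}

impl Parser {
    fn parse_expr(&mut self) -> Result<String, ErrorGuaranteed> {
        match self.tokens.get(self.pos) {
            Some(tok) => {
                let tok = tok.clone();
                self.pos += 1;
                Ok(tok)
            }
            None => {
                eprintln!("error: expected expression");
                Err(ErrorGuaranteed(())) // emit, then return the token
            }
        }
    }
}

// Mirrors the shape of the new `get_exprs_from_tts`: the first failure
// short-circuits and the guarantee travels with it.
fn get_exprs(p: &mut Parser, n: usize) -> Result<Vec<String>, ErrorGuaranteed> {
    let mut es = Vec::new();
    for _ in 0..n {
        es.push(p.parse_expr()?);
    }
    Ok(es)
}

fn main() {
    let mut p = Parser { tokens: vec!["a".into(), "b".into()], pos: 0 };
    match get_exprs(&mut p, 3) {
        Ok(es) => println!("parsed: {es:?}"),
        Err(_guar) => println!("an error was already reported"),
    }
}
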
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 2752d3ebd68..9bc7b4bdd1e 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -34,7 +34,7 @@ use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_session::parse::feature_err;
 use rustc_session::{Limit, Session};
 use rustc_span::symbol::{sym, Ident};
-use rustc_span::{FileName, LocalExpnId, Span};
+use rustc_span::{ErrorGuaranteed, FileName, LocalExpnId, Span};
 
 use smallvec::SmallVec;
 use std::ops::Deref;
@@ -232,8 +232,8 @@ pub enum SupportsMacroExpansion {
 }
 
 impl AstFragmentKind {
-    pub(crate) fn dummy(self, span: Span) -> AstFragment {
-        self.make_from(DummyResult::any(span)).expect("couldn't create a dummy AST fragment")
+    pub(crate) fn dummy(self, span: Span, guar: ErrorGuaranteed) -> AstFragment {
+        self.make_from(DummyResult::any(span, guar)).expect("couldn't create a dummy AST fragment")
     }
 
     pub fn supports_macro_expansion(self) -> SupportsMacroExpansion {
@@ -604,14 +604,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         (fragment, invocations)
     }
 
-    fn error_recursion_limit_reached(&mut self) {
+    fn error_recursion_limit_reached(&mut self) -> ErrorGuaranteed {
         let expn_data = self.cx.current_expansion.id.expn_data();
         let suggested_limit = match self.cx.ecfg.recursion_limit {
             Limit(0) => Limit(2),
             limit => limit * 2,
         };
 
-        self.cx.dcx().emit_err(RecursionLimitReached {
+        let guar = self.cx.dcx().emit_err(RecursionLimitReached {
             span: expn_data.call_site,
             descr: expn_data.kind.descr(),
             suggested_limit,
@@ -619,14 +619,21 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         });
 
         self.cx.trace_macros_diag();
+        guar
     }
 
     /// A macro's expansion does not fit in this fragment kind.
     /// For example, a non-type macro in a type position.
-    fn error_wrong_fragment_kind(&mut self, kind: AstFragmentKind, mac: &ast::MacCall, span: Span) {
-        self.cx.dcx().emit_err(WrongFragmentKind { span, kind: kind.name(), name: &mac.path });
-
+    fn error_wrong_fragment_kind(
+        &mut self,
+        kind: AstFragmentKind,
+        mac: &ast::MacCall,
+        span: Span,
+    ) -> ErrorGuaranteed {
+        let guar =
+            self.cx.dcx().emit_err(WrongFragmentKind { span, kind: kind.name(), name: &mac.path });
         self.cx.trace_macros_diag();
+        guar
     }
 
     fn expand_invoc(
@@ -634,36 +641,41 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         invoc: Invocation,
         ext: &SyntaxExtensionKind,
     ) -> ExpandResult<AstFragment, Invocation> {
-        let recursion_limit =
-            self.cx.reduced_recursion_limit.unwrap_or(self.cx.ecfg.recursion_limit);
+        let recursion_limit = match self.cx.reduced_recursion_limit {
+            Some((limit, _)) => limit,
+            None => self.cx.ecfg.recursion_limit,
+        };
+
         if !recursion_limit.value_within_limit(self.cx.current_expansion.depth) {
-            if self.cx.reduced_recursion_limit.is_none() {
-                self.error_recursion_limit_reached();
-            }
+            let guar = match self.cx.reduced_recursion_limit {
+                Some((_, guar)) => guar,
+                None => self.error_recursion_limit_reached(),
+            };
 
             // Reduce the recursion limit by half each time it triggers.
-            self.cx.reduced_recursion_limit = Some(recursion_limit / 2);
+            self.cx.reduced_recursion_limit = Some((recursion_limit / 2, guar));
 
-            return ExpandResult::Ready(invoc.fragment_kind.dummy(invoc.span()));
+            return ExpandResult::Ready(invoc.fragment_kind.dummy(invoc.span(), guar));
         }
 
         let (fragment_kind, span) = (invoc.fragment_kind, invoc.span());
         ExpandResult::Ready(match invoc.kind {
             InvocationKind::Bang { mac, .. } => match ext {
                 SyntaxExtensionKind::Bang(expander) => {
-                    let Ok(tok_result) = expander.expand(self.cx, span, mac.args.tokens.clone())
-                    else {
-                        return ExpandResult::Ready(fragment_kind.dummy(span));
-                    };
-                    self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
+                    match expander.expand(self.cx, span, mac.args.tokens.clone()) {
+                        Ok(tok_result) => {
+                            self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
+                        }
+                        Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)),
+                    }
                 }
                 SyntaxExtensionKind::LegacyBang(expander) => {
                     let tok_result = expander.expand(self.cx, span, mac.args.tokens.clone());
                     let result = if let Some(result) = fragment_kind.make_from(tok_result) {
                         result
                     } else {
-                        self.error_wrong_fragment_kind(fragment_kind, &mac, span);
-                        fragment_kind.dummy(span)
+                        let guar = self.error_wrong_fragment_kind(fragment_kind, &mac, span);
+                        fragment_kind.dummy(span, guar)
                     };
                     result
                 }
@@ -705,11 +717,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                         self.cx.dcx().emit_err(UnsupportedKeyValue { span });
                     }
                     let inner_tokens = attr_item.args.inner_tokens();
-                    let Ok(tok_result) = expander.expand(self.cx, span, inner_tokens, tokens)
-                    else {
-                        return ExpandResult::Ready(fragment_kind.dummy(span));
-                    };
-                    self.parse_ast_fragment(tok_result, fragment_kind, &attr_item.path, span)
+                    match expander.expand(self.cx, span, inner_tokens, tokens) {
+                        Ok(tok_result) => self.parse_ast_fragment(
+                            tok_result,
+                            fragment_kind,
+                            &attr_item.path,
+                            span,
+                        ),
+                        Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)),
+                    }
                 }
                 SyntaxExtensionKind::LegacyAttr(expander) => {
                     match validate_attr::parse_meta(&self.cx.sess.parse_sess, &attr) {
@@ -729,15 +745,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                                 AstFragmentKind::Expr | AstFragmentKind::MethodReceiverExpr
                             ) && items.is_empty()
                             {
-                                self.cx.dcx().emit_err(RemoveExprNotSupported { span });
-                                fragment_kind.dummy(span)
+                                let guar = self.cx.dcx().emit_err(RemoveExprNotSupported { span });
+                                fragment_kind.dummy(span, guar)
                             } else {
                                 fragment_kind.expect_from_annotatables(items)
                             }
                         }
                         Err(err) => {
-                            err.emit();
-                            fragment_kind.dummy(span)
+                            let guar = err.emit();
+                            fragment_kind.dummy(span, guar)
                         }
                     }
                 }
@@ -857,9 +873,9 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     err.span(span);
                 }
                 annotate_err_with_kind(&mut err, kind, span);
-                err.emit();
+                let guar = err.emit();
                 self.cx.trace_macros_diag();
-                kind.dummy(span)
+                kind.dummy(span, guar)
             }
         }
     }
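
In expand.rs, `reduced_recursion_limit` now caches the `ErrorGuaranteed` from the first recursion-limit error alongside the halved limit, so later overflows return dummy fragments without emitting duplicate diagnostics. A simplified, self-contained sketch of that bookkeeping; the `Expander` type and plain numeric limit are illustrative, not the real `ExtCtxt`/`Limit`.

#[derive(Copy, Clone)]
struct ErrorGuaranteed(());

struct Expander {
    recursion_limit: usize,
    reduced_recursion_limit: Option<(usize, ErrorGuaranteed)>,
}

impl Expander {
    fn check_depth(&mut self, depth: usize) -> Result<(), ErrorGuaranteed> {
        let limit = match self.reduced_recursion_limit {
            Some((limit, _)) => limit,
            None => self.recursion_limit,
        };
        if depth <= limit {
            return Ok(());
        }
        let guar = match self.reduced_recursion_limit {
            Some((_, guar)) => guar, // already reported once
            None => {
                eprintln!("error: recursion limit reached");
                ErrorGuaranteed(())
            }
        };
        // Halve the limit so nested expansions bail out quickly.
        self.reduced_recursion_limit = Some((limit / 2, guar));
        Err(guar)
    }
}

fn main() {
    let mut ex = Expander { recursion_limit: 4, reduced_recursion_limit: None };
    for depth in [3, 5, 6] {
        match ex.check_depth(depth) {
            Ok(()) => println!("depth {depth}: ok"),
            Err(_) => println!("depth {depth}: expansion replaced by a dummy fragment"),
        }
    }
}
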
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index 25af974d326..5629c5ef5fa 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -11,7 +11,7 @@ use rustc_errors::{Applicability, DiagCtxt, DiagnosticBuilder, DiagnosticMessage
 use rustc_parse::parser::{Parser, Recovery};
 use rustc_span::source_map::SourceMap;
 use rustc_span::symbol::Ident;
-use rustc_span::Span;
+use rustc_span::{ErrorGuaranteed, Span};
 use std::borrow::Cow;
 
 use super::macro_rules::{parser_from_cx, NoopTracker};
@@ -47,7 +47,7 @@ pub(super) fn failed_to_match_macro<'cx>(
 
     let Some(BestFailure { token, msg: label, remaining_matcher, .. }) = tracker.best_failure
     else {
-        return DummyResult::any(sp);
+        return DummyResult::any(sp, cx.dcx().span_delayed_bug(sp, "failed to match a macro"));
     };
 
     let span = token.span.substitute_dummy(sp);
@@ -106,9 +106,9 @@ pub(super) fn failed_to_match_macro<'cx>(
             }
         }
     }
-    err.emit();
+    let guar = err.emit();
     cx.trace_macros_diag();
-    DummyResult::any(sp)
+    DummyResult::any(sp, guar)
 }
 
 /// The tracker used for the slow error path that collects useful info for diagnostics.
@@ -180,10 +180,10 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
             }
             Error(err_sp, msg) => {
                 let span = err_sp.substitute_dummy(self.root_span);
-                self.cx.dcx().span_err(span, msg.clone());
-                self.result = Some(DummyResult::any(span));
+                let guar = self.cx.dcx().span_err(span, msg.clone());
+                self.result = Some(DummyResult::any(span, guar));
             }
-            ErrorReported(_) => self.result = Some(DummyResult::any(self.root_span)),
+            ErrorReported(guar) => self.result = Some(DummyResult::any(self.root_span, *guar)),
         }
     }
 
@@ -224,7 +224,7 @@ pub(super) fn emit_frag_parse_err(
     site_span: Span,
     arm_span: Span,
     kind: AstFragmentKind,
-) {
+) -> ErrorGuaranteed {
     // FIXME(davidtwco): avoid depending on the error message text
     if parser.token == token::Eof
         && let DiagnosticMessage::Str(message) = &e.messages[0].0
@@ -282,7 +282,7 @@ pub(super) fn emit_frag_parse_err(
         },
         _ => annotate_err_with_kind(&mut e, kind, site_span),
     };
-    e.emit();
+    e.emit()
 }
 
 pub(crate) fn annotate_err_with_kind(
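
The diagnostics.rs changes rely on `emit()` returning the `ErrorGuaranteed` it produces, so a helper such as `emit_frag_parse_err` can simply end with `e.emit()` and hand the proof back, while paths that expect an error to have been reported elsewhere (the missing `BestFailure` case) fall back to a delayed bug. A rough sketch with mock diagnostic types; only the shapes visible in this diff are assumed, not the real rustc_errors API.

#[derive(Copy, Clone)]
struct ErrorGuaranteed(());

struct Diag {
    msg: String,
}

impl Diag {
    fn emit(self) -> ErrorGuaranteed {
        eprintln!("error: {}", self.msg);
        ErrorGuaranteed(())
    }
}

struct DiagCtxt;

impl DiagCtxt {
    fn struct_err(&self, msg: &str) -> Diag {
        Diag { msg: msg.to_string() }
    }
    // Stands in for `span_delayed_bug`: in the compiler it raises an ICE later
    // only if no other error was emitted; here it just returns the token.
    fn delayed_bug(&self, _note: &str) -> ErrorGuaranteed {
        ErrorGuaranteed(())
    }
}

// Mirrors the new shape of `emit_frag_parse_err`: build, annotate, emit, return.
fn emit_frag_parse_err(dcx: &DiagCtxt, kind: &str) -> ErrorGuaranteed {
    let e = dcx.struct_err(&format!("macro expansion does not parse as {kind}"));
    e.emit()
}

fn main() {
    let dcx = DiagCtxt;
    let _guar = emit_frag_parse_err(&dcx, "an expression");
    let _fallback = dcx.delayed_bug("failed to match a macro");
}
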
diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs
index ffc8f782fd3..8174cb03d33 100644
--- a/compiler/rustc_expand/src/mbe/macro_check.rs
+++ b/compiler/rustc_expand/src/mbe/macro_check.rs
@@ -114,7 +114,7 @@ use rustc_errors::{DiagnosticMessage, MultiSpan};
 use rustc_session::lint::builtin::{META_VARIABLE_MISUSE, MISSING_FRAGMENT_SPECIFIER};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::kw;
-use rustc_span::{symbol::MacroRulesNormalizedIdent, Span};
+use rustc_span::{symbol::MacroRulesNormalizedIdent, ErrorGuaranteed, Span};
 
 use smallvec::SmallVec;
 
@@ -203,17 +203,17 @@ pub(super) fn check_meta_variables(
     span: Span,
     lhses: &[TokenTree],
     rhses: &[TokenTree],
-) -> bool {
+) -> Result<(), ErrorGuaranteed> {
     if lhses.len() != rhses.len() {
         sess.dcx.span_bug(span, "length mismatch between LHSes and RHSes")
     }
-    let mut valid = true;
+    let mut guar = None;
     for (lhs, rhs) in iter::zip(lhses, rhses) {
         let mut binders = Binders::default();
-        check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut valid);
-        check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut valid);
+        check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut guar);
+        check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut guar);
     }
-    valid
+    guar.map_or(Ok(()), Err)
 }
 
 /// Checks `lhs` as part of the LHS of a macro definition, extends `binders` with new binders, and
@@ -226,7 +226,7 @@ pub(super) fn check_meta_variables(
 /// - `macros` is the stack of possible outer macros
 /// - `binders` contains the binders of the LHS
 /// - `ops` is the stack of Kleene operators from the LHS
-/// - `valid` is set in case of errors
+/// - `guar` is set in case of errors
 fn check_binders(
     sess: &ParseSess,
     node_id: NodeId,
@@ -234,7 +234,7 @@ fn check_binders(
     macros: &Stack<'_, MacroState<'_>>,
     binders: &mut Binders,
     ops: &Stack<'_, KleeneToken>,
-    valid: &mut bool,
+    guar: &mut Option<ErrorGuaranteed>,
 ) {
     match *lhs {
         TokenTree::Token(..) => {}
@@ -258,7 +258,7 @@ fn check_binders(
                 binders.insert(name, BinderInfo { span, ops: ops.into() });
             } else {
                 // 3. The meta-variable is bound: This is an occurrence.
-                check_occurrences(sess, node_id, lhs, macros, binders, ops, valid);
+                check_occurrences(sess, node_id, lhs, macros, binders, ops, guar);
             }
         }
         // Similarly, this can only happen when checking a toplevel macro.
@@ -281,8 +281,10 @@ fn check_binders(
             if let Some(prev_info) = get_binder_info(macros, binders, name) {
                 // Duplicate binders at the top-level macro definition are errors. The lint is only
                 // for nested macro definitions.
-                sess.dcx.emit_err(errors::DuplicateMatcherBinding { span, prev: prev_info.span });
-                *valid = false;
+                *guar = Some(
+                    sess.dcx
+                        .emit_err(errors::DuplicateMatcherBinding { span, prev: prev_info.span }),
+                );
             } else {
                 binders.insert(name, BinderInfo { span, ops: ops.into() });
             }
@@ -291,13 +293,13 @@ fn check_binders(
         TokenTree::MetaVarExpr(..) => {}
         TokenTree::Delimited(.., ref del) => {
             for tt in &del.tts {
-                check_binders(sess, node_id, tt, macros, binders, ops, valid);
+                check_binders(sess, node_id, tt, macros, binders, ops, guar);
             }
         }
         TokenTree::Sequence(_, ref seq) => {
             let ops = ops.push(seq.kleene);
             for tt in &seq.tts {
-                check_binders(sess, node_id, tt, macros, binders, &ops, valid);
+                check_binders(sess, node_id, tt, macros, binders, &ops, guar);
             }
         }
     }
@@ -327,7 +329,7 @@ fn get_binder_info<'a>(
 /// - `macros` is the stack of possible outer macros
 /// - `binders` contains the binders of the associated LHS
 /// - `ops` is the stack of Kleene operators from the RHS
-/// - `valid` is set in case of errors
+/// - `guar` is set in case of errors
 fn check_occurrences(
     sess: &ParseSess,
     node_id: NodeId,
@@ -335,7 +337,7 @@ fn check_occurrences(
     macros: &Stack<'_, MacroState<'_>>,
     binders: &Binders,
     ops: &Stack<'_, KleeneToken>,
-    valid: &mut bool,
+    guar: &mut Option<ErrorGuaranteed>,
 ) {
     match *rhs {
         TokenTree::Token(..) => {}
@@ -353,11 +355,11 @@ fn check_occurrences(
             check_ops_is_prefix(sess, node_id, macros, binders, ops, dl.entire(), name);
         }
         TokenTree::Delimited(.., ref del) => {
-            check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
+            check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, guar);
         }
         TokenTree::Sequence(_, ref seq) => {
             let ops = ops.push(seq.kleene);
-            check_nested_occurrences(sess, node_id, &seq.tts, macros, binders, &ops, valid);
+            check_nested_occurrences(sess, node_id, &seq.tts, macros, binders, &ops, guar);
         }
     }
 }
@@ -392,7 +394,7 @@ enum NestedMacroState {
 /// - `macros` is the stack of possible outer macros
 /// - `binders` contains the binders of the associated LHS
 /// - `ops` is the stack of Kleene operators from the RHS
-/// - `valid` is set in case of errors
+/// - `guar` is set in case of errors
 fn check_nested_occurrences(
     sess: &ParseSess,
     node_id: NodeId,
@@ -400,7 +402,7 @@ fn check_nested_occurrences(
     macros: &Stack<'_, MacroState<'_>>,
     binders: &Binders,
     ops: &Stack<'_, KleeneToken>,
-    valid: &mut bool,
+    guar: &mut Option<ErrorGuaranteed>,
 ) {
     let mut state = NestedMacroState::Empty;
     let nested_macros = macros.push(MacroState { binders, ops: ops.into() });
@@ -432,7 +434,7 @@ fn check_nested_occurrences(
             (NestedMacroState::MacroRulesNot, &TokenTree::MetaVar(..)) => {
                 state = NestedMacroState::MacroRulesNotName;
                 // We check that the meta-variable is correctly used.
-                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
+                check_occurrences(sess, node_id, tt, macros, binders, ops, guar);
             }
             (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(.., del))
             | (NestedMacroState::MacroName, TokenTree::Delimited(.., del))
@@ -441,7 +443,7 @@ fn check_nested_occurrences(
                 let macro_rules = state == NestedMacroState::MacroRulesNotName;
                 state = NestedMacroState::Empty;
                 let rest =
-                    check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, valid);
+                    check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, guar);
                 // If we did not check the whole macro definition, then check the rest as if outside
                 // the macro definition.
                 check_nested_occurrences(
@@ -451,7 +453,7 @@ fn check_nested_occurrences(
                     macros,
                     binders,
                     ops,
-                    valid,
+                    guar,
                 );
             }
             (
@@ -463,7 +465,7 @@ fn check_nested_occurrences(
             (NestedMacroState::Macro, &TokenTree::MetaVar(..)) => {
                 state = NestedMacroState::MacroName;
                 // We check that the meta-variable is correctly used.
-                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
+                check_occurrences(sess, node_id, tt, macros, binders, ops, guar);
             }
             (NestedMacroState::MacroName, TokenTree::Delimited(.., del))
                 if del.delim == Delimiter::Parenthesis =>
@@ -477,7 +479,7 @@ fn check_nested_occurrences(
                     &nested_macros,
                     &mut nested_binders,
                     &Stack::Empty,
-                    valid,
+                    guar,
                 );
             }
             (NestedMacroState::MacroNameParen, TokenTree::Delimited(.., del))
@@ -491,12 +493,12 @@ fn check_nested_occurrences(
                     &nested_macros,
                     &nested_binders,
                     &Stack::Empty,
-                    valid,
+                    guar,
                 );
             }
             (_, tt) => {
                 state = NestedMacroState::Empty;
-                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
+                check_occurrences(sess, node_id, tt, macros, binders, ops, guar);
             }
         }
     }
@@ -515,14 +517,14 @@ fn check_nested_occurrences(
 /// - `macro_rules` specifies whether the macro is `macro_rules`
 /// - `tts` is checked as a list of (LHS) => {RHS}
 /// - `macros` is the stack of outer macros
-/// - `valid` is set in case of errors
+/// - `guar` is set in case of errors
 fn check_nested_macro(
     sess: &ParseSess,
     node_id: NodeId,
     macro_rules: bool,
     tts: &[TokenTree],
     macros: &Stack<'_, MacroState<'_>>,
-    valid: &mut bool,
+    guar: &mut Option<ErrorGuaranteed>,
 ) -> usize {
     let n = tts.len();
     let mut i = 0;
@@ -539,8 +541,8 @@ fn check_nested_macro(
         let lhs = &tts[i];
         let rhs = &tts[i + 2];
         let mut binders = Binders::default();
-        check_binders(sess, node_id, lhs, macros, &mut binders, &Stack::Empty, valid);
-        check_occurrences(sess, node_id, rhs, macros, &binders, &Stack::Empty, valid);
+        check_binders(sess, node_id, lhs, macros, &mut binders, &Stack::Empty, guar);
+        check_occurrences(sess, node_id, rhs, macros, &binders, &Stack::Empty, guar);
         // Since the last semicolon is optional for `macro_rules` macros and decl_macro are not terminated,
         // we increment our checked position by how many token trees we already checked (the 3
         // above) before checking for the separator.
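
macro_check.rs swaps the `&mut bool` validity flag for `&mut Option<ErrorGuaranteed>`, and the entry point converts the accumulator with `guar.map_or(Ok(()), Err)`. A small illustrative sketch of that accumulation pattern; the string-based rule representation is hypothetical.

#[derive(Copy, Clone)]
struct ErrorGuaranteed(());

fn emit_err(msg: &str) -> ErrorGuaranteed {
    eprintln!("error: {msg}");
    ErrorGuaranteed(())
}

fn check_rule(rule: &str, guar: &mut Option<ErrorGuaranteed>) {
    if rule.contains("duplicate") {
        // Record the guarantee but keep checking the remaining rules,
        // so every problem is reported in a single pass.
        *guar = Some(emit_err(&format!("duplicate matcher binding in `{rule}`")));
    }
}

fn check_meta_variables(rules: &[&str]) -> Result<(), ErrorGuaranteed> {
    let mut guar = None;
    for rule in rules {
        check_rule(rule, &mut guar);
    }
    guar.map_or(Ok(()), Err)
}

fn main() {
    match check_meta_variables(&["ok rule", "duplicate $x", "another ok rule"]) {
        Ok(()) => println!("all rules valid"),
        Err(_) => println!("macro definition rejected"),
    }
}
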
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index bf99e9e6d5c..c11d538048a 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -66,8 +66,10 @@ impl<'a> ParserAnyMacro<'a> {
         let fragment = match parse_ast_fragment(parser, kind) {
             Ok(f) => f,
             Err(err) => {
-                diagnostics::emit_frag_parse_err(err, parser, snapshot, site_span, arm_span, kind);
-                return kind.dummy(site_span);
+                let guar = diagnostics::emit_frag_parse_err(
+                    err, parser, snapshot, site_span, arm_span, kind,
+                );
+                return kind.dummy(site_span, guar);
             }
         };
 
@@ -101,7 +103,6 @@ struct MacroRulesMacroExpander {
     transparency: Transparency,
     lhses: Vec<Vec<MatcherLoc>>,
     rhses: Vec<mbe::TokenTree>,
-    valid: bool,
 }
 
 impl TTMacroExpander for MacroRulesMacroExpander {
@@ -111,9 +112,6 @@ impl TTMacroExpander for MacroRulesMacroExpander {
         sp: Span,
         input: TokenStream,
     ) -> Box<dyn MacResult + 'cx> {
-        if !self.valid {
-            return DummyResult::any(sp);
-        }
         expand_macro(
             cx,
             sp,
@@ -128,12 +126,17 @@ impl TTMacroExpander for MacroRulesMacroExpander {
     }
 }
 
-fn macro_rules_dummy_expander<'cx>(
-    _: &'cx mut ExtCtxt<'_>,
-    span: Span,
-    _: TokenStream,
-) -> Box<dyn MacResult + 'cx> {
-    DummyResult::any(span)
+struct DummyExpander(ErrorGuaranteed);
+
+impl TTMacroExpander for DummyExpander {
+    fn expand<'cx>(
+        &self,
+        _: &'cx mut ExtCtxt<'_>,
+        span: Span,
+        _: TokenStream,
+    ) -> Box<dyn MacResult + 'cx> {
+        DummyResult::any(span, self.0)
+    }
 }
 
 fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
@@ -217,8 +220,8 @@ fn expand_macro<'cx>(
             let tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) {
                 Ok(tts) => tts,
                 Err(err) => {
-                    err.emit();
-                    return DummyResult::any(arm_span);
+                    let guar = err.emit();
+                    return DummyResult::any(arm_span, guar);
                 }
             };
 
@@ -249,9 +252,9 @@ fn expand_macro<'cx>(
                 is_local,
             })
         }
-        Err(CanRetry::No(_)) => {
+        Err(CanRetry::No(guar)) => {
             debug!("Will not retry matching as an error was emitted already");
-            DummyResult::any(sp)
+            DummyResult::any(sp, guar)
         }
         Err(CanRetry::Yes) => {
             // Retry and emit a better error.
@@ -371,7 +374,7 @@ pub fn compile_declarative_macro(
             def.id != DUMMY_NODE_ID,
         )
     };
-    let dummy_syn_ext = || (mk_syn_ext(Box::new(macro_rules_dummy_expander)), Vec::new());
+    let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new());
 
     let dcx = &sess.parse_sess.dcx;
     let lhs_nm = Ident::new(sym::lhs, def.span);
@@ -456,19 +459,20 @@ pub fn compile_declarative_macro(
                 let mut err = sess.dcx().struct_span_err(sp, s);
                 err.span_label(sp, msg);
                 annotate_doc_comment(sess.dcx(), &mut err, sess.source_map(), sp);
-                err.emit();
-                return dummy_syn_ext();
+                let guar = err.emit();
+                return dummy_syn_ext(guar);
             }
             Error(sp, msg) => {
-                sess.dcx().span_err(sp.substitute_dummy(def.span), msg);
-                return dummy_syn_ext();
+                let guar = sess.dcx().span_err(sp.substitute_dummy(def.span), msg);
+                return dummy_syn_ext(guar);
             }
-            ErrorReported(_) => {
-                return dummy_syn_ext();
+            ErrorReported(guar) => {
+                return dummy_syn_ext(guar);
             }
         };
 
-    let mut valid = true;
+    let mut guar = None;
+    let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());
 
     // Extract the arguments:
     let lhses = match &argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
@@ -488,7 +492,7 @@ pub fn compile_declarative_macro(
                     .unwrap();
                     // We don't handle errors here, the driver will abort
                     // after parsing/expansion. we can report every error in every macro this way.
-                    valid &= check_lhs_nt_follows(sess, def, &tt).is_ok();
+                    check_emission(check_lhs_nt_follows(sess, def, &tt));
                     return tt;
                 }
                 sess.dcx().span_bug(def.span, "wrong-structured lhs")
@@ -520,15 +524,21 @@ pub fn compile_declarative_macro(
     };
 
     for rhs in &rhses {
-        valid &= check_rhs(sess, rhs);
+        check_emission(check_rhs(sess, rhs));
     }
 
     // don't abort iteration early, so that errors for multiple lhses can be reported
     for lhs in &lhses {
-        valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
+        check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(lhs)));
     }
 
-    valid &= macro_check::check_meta_variables(&sess.parse_sess, def.id, def.span, &lhses, &rhses);
+    check_emission(macro_check::check_meta_variables(
+        &sess.parse_sess,
+        def.id,
+        def.span,
+        &lhses,
+        &rhses,
+    ));
 
     let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
     match transparency_error {
@@ -541,11 +551,15 @@ pub fn compile_declarative_macro(
         None => {}
     }
 
+    if let Some(guar) = guar {
+        // To avoid warning noise, only consider the rules of this
+        // macro for the lint, if all rules are valid.
+        return dummy_syn_ext(guar);
+    }
+
     // Compute the spans of the macro rules for unused rule linting.
-    // To avoid warning noise, only consider the rules of this
-    // macro for the lint, if all rules are valid.
     // Also, we are only interested in non-foreign macros.
-    let rule_spans = if valid && def.id != DUMMY_NODE_ID {
+    let rule_spans = if def.id != DUMMY_NODE_ID {
         lhses
             .iter()
             .zip(rhses.iter())
@@ -562,23 +576,19 @@ pub fn compile_declarative_macro(
     };
 
     // Convert the lhses into `MatcherLoc` form, which is better for doing the
-    // actual matching. Unless the matcher is invalid.
-    let lhses = if valid {
-        lhses
-            .iter()
-            .map(|lhs| {
-                // Ignore the delimiters around the matcher.
-                match lhs {
-                    mbe::TokenTree::Delimited(.., delimited) => {
-                        mbe::macro_parser::compute_locs(&delimited.tts)
-                    }
-                    _ => sess.dcx().span_bug(def.span, "malformed macro lhs"),
+    // actual matching.
+    let lhses = lhses
+        .iter()
+        .map(|lhs| {
+            // Ignore the delimiters around the matcher.
+            match lhs {
+                mbe::TokenTree::Delimited(.., delimited) => {
+                    mbe::macro_parser::compute_locs(&delimited.tts)
                 }
-            })
-            .collect()
-    } else {
-        vec![]
-    };
+                _ => sess.dcx().span_bug(def.span, "malformed macro lhs"),
+            }
+        })
+        .collect();
 
     let expander = Box::new(MacroRulesMacroExpander {
         name: def.ident,
@@ -587,7 +597,6 @@ pub fn compile_declarative_macro(
         transparency,
         lhses,
         rhses,
-        valid,
     });
     (mk_syn_ext(expander), rule_spans)
 }
@@ -640,7 +649,7 @@ fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
 
 /// Checks that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
-fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> bool {
+fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
     use mbe::TokenTree;
     for tt in tts {
         match tt {
@@ -648,35 +657,26 @@ fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> bool {
             | TokenTree::MetaVar(..)
             | TokenTree::MetaVarDecl(..)
             | TokenTree::MetaVarExpr(..) => (),
-            TokenTree::Delimited(.., del) => {
-                if !check_lhs_no_empty_seq(sess, &del.tts) {
-                    return false;
-                }
-            }
+            TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
             TokenTree::Sequence(span, seq) => {
                 if is_empty_token_tree(sess, seq) {
                     let sp = span.entire();
-                    sess.dcx().span_err(sp, "repetition matches empty token tree");
-                    return false;
-                }
-                if !check_lhs_no_empty_seq(sess, &seq.tts) {
-                    return false;
+                    let guar = sess.dcx().span_err(sp, "repetition matches empty token tree");
+                    return Err(guar);
                 }
+                check_lhs_no_empty_seq(sess, &seq.tts)?
             }
         }
     }
 
-    true
+    Ok(())
 }
 
-fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> bool {
+fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
     match *rhs {
-        mbe::TokenTree::Delimited(..) => return true,
-        _ => {
-            sess.dcx().span_err(rhs.span(), "macro rhs must be delimited");
-        }
+        mbe::TokenTree::Delimited(..) => Ok(()),
+        _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
     }
-    false
 }
 
 fn check_matcher(