Diffstat (limited to 'compiler/rustc_expand/src')
-rw-r--r--  compiler/rustc_expand/src/build.rs               11
-rw-r--r--  compiler/rustc_expand/src/config.rs              12
-rw-r--r--  compiler/rustc_expand/src/expand.rs               4
-rw-r--r--  compiler/rustc_expand/src/mbe/diagnostics.rs      2
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_check.rs      4
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs    10
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs     17
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs          44
-rw-r--r--  compiler/rustc_expand/src/parse/tests.rs         21
-rw-r--r--  compiler/rustc_expand/src/proc_macro.rs           4
-rw-r--r--  compiler/rustc_expand/src/proc_macro_server.rs    6
-rw-r--r--  compiler/rustc_expand/src/tests.rs               69
12 files changed, 121 insertions, 83 deletions
diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs
index 264f30fb10a..7de46994434 100644
--- a/compiler/rustc_expand/src/build.rs
+++ b/compiler/rustc_expand/src/build.rs
@@ -643,7 +643,16 @@ impl<'a> ExtCtxt<'a> {
             span,
             name,
             AttrVec::new(),
-            ast::ItemKind::Const(ast::ConstItem { defaultness, ty, expr: Some(expr) }.into()),
+            ast::ItemKind::Const(
+                ast::ConstItem {
+                    defaultness,
+                    // FIXME(generic_const_items): Pass the generics as a parameter.
+                    generics: ast::Generics::default(),
+                    ty,
+                    expr: Some(expr),
+                }
+                .into(),
+            ),
         )
     }
 
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 4ec5ac22e90..8658cea137a 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -365,11 +365,11 @@ impl<'a> StripUnconfigured<'a> {
 
         // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
         // for `attr` when we expand it to `#[attr]`
-        let mut orig_trees = orig_tokens.into_trees();
+        let mut orig_trees = orig_tokens.trees();
         let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _) =
-            orig_trees.next().unwrap()
+            orig_trees.next().unwrap().clone()
         else {
-            panic!("Bad tokens for attribute {:?}", attr);
+            panic!("Bad tokens for attribute {attr:?}");
         };
         let pound_span = pound_token.span;
 
@@ -377,9 +377,9 @@ impl<'a> StripUnconfigured<'a> {
         if attr.style == AttrStyle::Inner {
             // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
             let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) =
-                orig_trees.next().unwrap()
+                orig_trees.next().unwrap().clone()
             else {
-                panic!("Bad tokens for attribute {:?}", attr);
+                panic!("Bad tokens for attribute {attr:?}");
             };
             trees.push(AttrTokenTree::Token(bang_token, Spacing::Alone));
         }
@@ -390,7 +390,7 @@ impl<'a> StripUnconfigured<'a> {
             Delimiter::Bracket,
             item.tokens
                 .as_ref()
-                .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
+                .unwrap_or_else(|| panic!("Missing tokens for {item:?}"))
                 .to_attr_token_stream(),
         );
         trees.push(bracket_group);
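
Note: most hunks in config.rs, expand.rs, diagnostics.rs, macro_check.rs and the proc-macro files only switch format!/panic! calls from positional arguments to inlined format arguments (identifier capture, stable since Rust 1.58). A standalone sketch of the equivalence, with a made-up attr value standing in for rustc's Attribute type:

fn main() {
    let attr = ("cfg_attr", "pred");

    // Positional argument (the old style in these hunks).
    let old = format!("Bad tokens for attribute {:?}", attr);

    // Inlined format argument capturing the identifier directly (the new style).
    let new = format!("Bad tokens for attribute {attr:?}");

    assert_eq!(old, new);
}
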
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 3b6db9fd39c..165c6d47c8a 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -803,7 +803,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             &self.cx.sess.parse_sess,
             sym::proc_macro_hygiene,
             span,
-            format!("custom attributes cannot be applied to {}", kind),
+            format!("custom attributes cannot be applied to {kind}"),
         )
         .emit();
     }
@@ -1707,7 +1707,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
                         &UNUSED_ATTRIBUTES,
                         attr.span,
                         self.cx.current_expansion.lint_node_id,
-                        format!("unused attribute `{}`", attr_name),
+                        format!("unused attribute `{attr_name}`"),
                         BuiltinLintDiagnostics::UnusedBuiltinAttribute {
                             attr_name,
                             macro_name: pprust::path_to_string(&call.path),
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index 03de33dc854..e060375646c 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -257,7 +257,7 @@ pub(super) fn emit_frag_parse_err(
                         e.span_suggestion_verbose(
                             site_span,
                             "surround the macro invocation with `{}` to interpret the expansion as a statement",
-                            format!("{{ {}; }}", snippet),
+                            format!("{{ {snippet}; }}"),
                             Applicability::MaybeIncorrect,
                         );
                     }
diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs
index 34f998274e9..95f5bb2d2e2 100644
--- a/compiler/rustc_expand/src/mbe/macro_check.rs
+++ b/compiler/rustc_expand/src/mbe/macro_check.rs
@@ -593,7 +593,7 @@ fn check_ops_is_prefix(
             return;
         }
     }
-    buffer_lint(sess, span.into(), node_id, format!("unknown macro variable `{}`", name));
+    buffer_lint(sess, span.into(), node_id, format!("unknown macro variable `{name}`"));
 }
 
 /// Returns whether `binder_ops` is a prefix of `occurrence_ops`.
@@ -626,7 +626,7 @@ fn ops_is_prefix(
         if i >= occurrence_ops.len() {
             let mut span = MultiSpan::from_span(span);
             span.push_span_label(binder.span, "expected repetition");
-            let message = format!("variable '{}' is still repeating at this depth", name);
+            let message = format!("variable '{name}' is still repeating at this depth");
             buffer_lint(sess, span, node_id, message);
             return;
         }
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index f0e67cfd50e..05c0cd952b8 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -156,7 +156,7 @@ impl Display for MatcherLoc {
             MatcherLoc::MetaVarDecl { bind, kind, .. } => {
                 write!(f, "meta-variable `${bind}")?;
                 if let Some(kind) = kind {
-                    write!(f, ":{}", kind)?;
+                    write!(f, ":{kind}")?;
                 }
                 write!(f, "`")?;
                 Ok(())
@@ -723,7 +723,7 @@ impl TtParser {
             .iter()
             .map(|mp| match &matcher[mp.idx] {
                 MatcherLoc::MetaVarDecl { bind, kind: Some(kind), .. } => {
-                    format!("{} ('{}')", kind, bind)
+                    format!("{kind} ('{bind}')")
                 }
                 _ => unreachable!(),
             })
@@ -736,8 +736,8 @@ impl TtParser {
                 "local ambiguity when calling macro `{}`: multiple parsing options: {}",
                 self.macro_name,
                 match self.next_mps.len() {
-                    0 => format!("built-in NTs {}.", nts),
-                    n => format!("built-in NTs {} or {n} other option{s}.", nts, s = pluralize!(n)),
+                    0 => format!("built-in NTs {nts}."),
+                    n => format!("built-in NTs {nts} or {n} other option{s}.", s = pluralize!(n)),
                 }
             ),
         )
@@ -757,7 +757,7 @@ impl TtParser {
                     match ret_val.entry(MacroRulesNormalizedIdent::new(bind)) {
                         Vacant(spot) => spot.insert(res.next().unwrap()),
                         Occupied(..) => {
-                            return Error(span, format!("duplicated bind name: {}", bind));
+                            return Error(span, format!("duplicated bind name: {bind}"));
                         }
                     };
                 } else {
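
Note: the ambiguity message above mixes the two format styles on purpose: nts and n are plain identifiers and can be captured inline, but pluralize!(n) is an expression and must still be bound as a named argument. A minimal sketch of that constraint, with a plain function standing in for rustc's pluralize! macro:

fn pluralize(n: usize) -> &'static str {
    if n == 1 { "" } else { "s" }
}

fn main() {
    let nts = "ident ('x')";
    let n = 2;
    // `{nts}` and `{n}` capture identifiers; `{s}` must be bound explicitly
    // because an arbitrary expression cannot be inlined into the braces.
    let msg = format!("built-in NTs {nts} or {n} other option{s}.", s = pluralize(n));
    assert_eq!(msg, "built-in NTs ident ('x') or 2 other options.");
}
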
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 102bae2a744..1aa0b1fc053 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -500,7 +500,7 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedTokenTree(tt) = m {
                     let tt = mbe::quoted::parse(
-                        TokenStream::new(vec![tt.clone()]),
+                        &TokenStream::new(vec![tt.clone()]),
                         true,
                         &sess.parse_sess,
                         def.id,
@@ -524,7 +524,7 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedTokenTree(tt) = m {
                     return mbe::quoted::parse(
-                        TokenStream::new(vec![tt.clone()]),
+                        &TokenStream::new(vec![tt.clone()]),
                         false,
                         &sess.parse_sess,
                         def.id,
@@ -554,7 +554,7 @@ pub fn compile_declarative_macro(
     let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
     match transparency_error {
         Some(TransparencyError::UnknownTransparency(value, span)) => {
-            diag.span_err(span, format!("unknown macro transparency: `{}`", value));
+            diag.span_err(span, format!("unknown macro transparency: `{value}`"));
         }
         Some(TransparencyError::MultipleTransparencyAttrs(old_span, new_span)) => {
             diag.span_err(vec![old_span, new_span], "multiple macro transparency attributes");
@@ -1197,7 +1197,7 @@ fn check_matcher_core<'tt>(
                                     may_be = may_be
                                 ),
                             );
-                            err.span_label(sp, format!("not allowed after `{}` fragments", kind));
+                            err.span_label(sp, format!("not allowed after `{kind}` fragments"));
 
                             if kind == NonterminalKind::PatWithOr
                                 && sess.edition.at_least_rust_2021()
@@ -1221,8 +1221,7 @@ fn check_matcher_core<'tt>(
                                 &[] => {}
                                 &[t] => {
                                     err.note(format!(
-                                        "only {} is allowed after `{}` fragments",
-                                        t, kind,
+                                        "only {t} is allowed after `{kind}` fragments",
                                     ));
                                 }
                                 ts => {
@@ -1407,9 +1406,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
 fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
     match tt {
         mbe::TokenTree::Token(token) => pprust::token_to_string(&token).into(),
-        mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
-        mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
-        mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),
+        mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
+        mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${name}:{kind}"),
+        mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${name}:"),
         _ => panic!(
             "{}",
             "unexpected mbe::TokenTree::{Sequence or Delimited} \
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 40bfa3715be..6546199f5e6 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -36,7 +36,7 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
 ///
 /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
 pub(super) fn parse(
-    input: tokenstream::TokenStream,
+    input: &tokenstream::TokenStream,
     parsing_patterns: bool,
     sess: &ParseSess,
     node_id: NodeId,
@@ -48,7 +48,7 @@ pub(super) fn parse(
 
     // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
     // additional trees if need be.
-    let mut trees = input.into_trees();
+    let mut trees = input.trees();
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
@@ -56,7 +56,7 @@ pub(super) fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => {
+                    Some(&tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => {
                         match trees.next() {
                             Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                 Some((frag, _)) => {
@@ -96,10 +96,10 @@ pub(super) fn parse(
                                 }
                                 _ => token.span,
                             },
-                            tree => tree.as_ref().map_or(span, tokenstream::TokenTree::span),
+                            tree => tree.map_or(span, tokenstream::TokenTree::span),
                         }
                     }
-                    tree => tree.as_ref().map_or(start_sp, tokenstream::TokenTree::span),
+                    tree => tree.map_or(start_sp, tokenstream::TokenTree::span),
                 };
 
                 result.push(TokenTree::MetaVarDecl(span, ident, None));
@@ -134,9 +134,9 @@ fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &ParseSess,
 /// - `parsing_patterns`: same as [parse].
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`: language features so we can do feature gating.
-fn parse_tree(
-    tree: tokenstream::TokenTree,
-    outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
+fn parse_tree<'a>(
+    tree: &'a tokenstream::TokenTree,
+    outer_trees: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
     parsing_patterns: bool,
     sess: &ParseSess,
     node_id: NodeId,
@@ -146,13 +146,13 @@ fn parse_tree(
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => {
+        &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => {
             // FIXME: Handle `Invisible`-delimited groups in a more systematic way
             // during parsing.
             let mut next = outer_trees.next();
-            let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
+            let mut trees: Box<dyn Iterator<Item = &tokenstream::TokenTree>>;
             if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next {
-                trees = Box::new(tts.into_trees());
+                trees = Box::new(tts.trees());
                 next = trees.next();
             } else {
                 trees = Box::new(outer_trees);
@@ -160,7 +160,7 @@ fn parse_tree(
 
             match next {
                 // `tree` is followed by a delimited set of token trees.
-                Some(tokenstream::TokenTree::Delimited(delim_span, delim, tts)) => {
+                Some(&tokenstream::TokenTree::Delimited(delim_span, delim, ref tts)) => {
                     if parsing_patterns {
                         if delim != Delimiter::Parenthesis {
                             span_dollar_dollar_or_metavar_in_the_lhs_err(
@@ -194,7 +194,7 @@ fn parse_tree(
                             Delimiter::Parenthesis => {}
                             _ => {
                                 let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
-                                let msg = format!("expected `(` or `{{`, found `{}`", tok);
+                                let msg = format!("expected `(` or `{{`, found `{tok}`");
                                 sess.span_diagnostic.span_err(delim_span.entire(), msg);
                             }
                         }
@@ -228,7 +228,7 @@ fn parse_tree(
                 }
 
                 // `tree` is followed by another `$`. This is an escaped `$`.
-                Some(tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => {
+                Some(&tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => {
                     if parsing_patterns {
                         span_dollar_dollar_or_metavar_in_the_lhs_err(
                             sess,
@@ -256,11 +256,11 @@ fn parse_tree(
         }
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
-        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
+        &tokenstream::TokenTree::Delimited(span, delim, ref tts) => TokenTree::Delimited(
             span,
             Delimited {
                 delim,
@@ -286,16 +286,16 @@ fn kleene_op(token: &Token) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op(
-    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
+fn parse_kleene_op<'a>(
+    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
     span: Span,
 ) -> Result<Result<(KleeneOp, Span), Token>, Span> {
     match input.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) {
             Some(op) => Ok(Ok((op, token.span))),
-            None => Ok(Err(token)),
+            None => Ok(Err(token.clone())),
         },
-        tree => Err(tree.as_ref().map_or(span, tokenstream::TokenTree::span)),
+        tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
     }
 }
 
@@ -311,8 +311,8 @@ fn parse_kleene_op(
 /// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
-fn parse_sep_and_kleene_op(
-    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
+fn parse_sep_and_kleene_op<'a>(
+    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
     span: Span,
     sess: &ParseSess,
 ) -> (Option<Token>, KleeneToken) {
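
Note: the quoted.rs changes move the matcher parser from consuming the TokenStream (into_trees()) to borrowing it (trees()), which is why the helpers gain a lifetime parameter and clone only the tokens they keep. A minimal sketch of the same pattern on a plain type, independent of rustc's token stream API:

#[derive(Clone, Debug, PartialEq)]
struct Tree(String);

// By-reference iteration: the caller keeps ownership of the input, and the
// item type `&'a Tree` forces an explicit lifetime on the helper.
fn parse_one<'a>(trees: &mut impl Iterator<Item = &'a Tree>) -> Option<Tree> {
    // Clone only the tree we decide to keep, instead of consuming the stream.
    trees.next().cloned()
}

fn main() {
    let input = vec![Tree("$".into()), Tree("ident".into())];
    let mut iter = input.iter();
    let first = parse_one(&mut iter).unwrap();
    assert_eq!(first, Tree("$".into()));
    // `input` is still usable here because it was only borrowed.
    assert_eq!(input.len(), 2);
}
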
diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index 8b37728b60f..bdc20882a9d 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -1,4 +1,6 @@
-use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
+use crate::tests::{
+    matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error,
+};
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token};
@@ -51,11 +53,15 @@ fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
     with_error_checking_parse(source_str, &sess(), |p| p.parse_item(ForceCollect::No))
 }
 
-#[should_panic]
 #[test]
 fn bad_path_expr_1() {
+    // This should trigger error: expected identifier, found keyword `return`
     create_default_session_globals_then(|| {
-        string_to_expr("::abc::def::return".to_string());
+        with_expected_parse_error(
+            "::abc::def::return",
+            "expected identifier, found keyword `return`",
+            |p| p.parse_expr(),
+        );
     })
 }
 
@@ -63,9 +69,8 @@ fn bad_path_expr_1() {
 #[test]
 fn string_to_tts_macro() {
     create_default_session_globals_then(|| {
-        let tts: Vec<_> =
-            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).into_trees().collect();
-        let tts: &[TokenTree] = &tts[..];
+        let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
+        let tts = &stream.trees().collect::<Vec<_>>()[..];
 
         match tts {
             [
@@ -294,9 +299,7 @@ fn ttdelim_span() {
         .unwrap();
 
         let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
-        let tts: Vec<_> = mac.args.tokens.clone().into_trees().collect();
-
-        let span = tts.iter().rev().next().unwrap().span();
+        let span = mac.args.tokens.trees().last().unwrap().span();
 
         match sess.source_map().span_to_snippet(span) {
             Ok(s) => assert_eq!(&s[..], "{ body }"),
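
Note: the ttdelim_span change above swaps a collect-then-reverse sequence for a borrowing iterator plus Iterator::last. The same simplification sketched on ordinary data:

fn main() {
    let tokens = vec!["body", "=>", "{ body }"];

    // Old shape: clone/collect into a Vec, then walk it backwards.
    let collected: Vec<_> = tokens.clone().into_iter().collect();
    let last_old = *collected.iter().rev().next().unwrap();

    // New shape: take the final item straight from the borrowing iterator.
    let last_new = *tokens.iter().last().unwrap();

    assert_eq!(last_old, "{ body }");
    assert_eq!(last_old, last_new);
}
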
diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index 41b24407fa0..c617cd76e3c 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -95,7 +95,7 @@ impl base::AttrProcMacro for AttrProcMacro {
             |e| {
                 let mut err = ecx.struct_span_err(span, "custom attribute panicked");
                 if let Some(s) = e.as_str() {
-                    err.help(format!("message: {}", s));
+                    err.help(format!("message: {s}"));
                 }
                 err.emit()
             },
@@ -148,7 +148,7 @@ impl MultiItemModifier for DeriveProcMacro {
                 Err(e) => {
                     let mut err = ecx.struct_span_err(span, "proc-macro derive panicked");
                     if let Some(s) = e.as_str() {
-                        err.help(format!("message: {}", s));
+                        err.help(format!("message: {s}"));
                     }
                     err.emit();
                     return ExpandResult::Ready(vec![]);
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index ecd2315112a..2dc9b51a37e 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -94,10 +94,10 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
         // Estimate the capacity as `stream.len()` rounded up to the next power
         // of two to limit the number of required reallocations.
         let mut trees = Vec::with_capacity(stream.len().next_power_of_two());
-        let mut cursor = stream.into_trees();
+        let mut cursor = stream.trees();
 
         while let Some(tree) = cursor.next() {
-            let (Token { kind, span }, joint) = match tree {
+            let (Token { kind, span }, joint) = match tree.clone() {
                 tokenstream::TokenTree::Delimited(span, delim, tts) => {
                     let delimiter = pm::Delimiter::from_internal(delim);
                     trees.push(TokenTree::Group(Group {
@@ -622,7 +622,7 @@ impl server::SourceFile for Rustc<'_, '_> {
 impl server::Span for Rustc<'_, '_> {
     fn debug(&mut self, span: Self::Span) -> String {
         if self.ecx.ecfg.span_debug {
-            format!("{:?}", span)
+            format!("{span:?}")
         } else {
             format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
         }
diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs
index 6490e52955d..30fa5fea407 100644
--- a/compiler/rustc_expand/src/tests.rs
+++ b/compiler/rustc_expand/src/tests.rs
@@ -22,6 +22,33 @@ fn string_to_parser(ps: &ParseSess, source_str: String) -> Parser<'_> {
     new_parser_from_source_str(ps, PathBuf::from("bogofile").into(), source_str)
 }
 
+fn create_test_handler() -> (Handler, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
+    let output = Arc::new(Mutex::new(Vec::new()));
+    let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+    let fallback_bundle = rustc_errors::fallback_fluent_bundle(
+        vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
+        false,
+    );
+    let emitter = EmitterWriter::new(
+        Box::new(Shared { data: output.clone() }),
+        Some(source_map.clone()),
+        None,
+        fallback_bundle,
+        false,
+        false,
+        false,
+        Some(140),
+        false,
+        false,
+        TerminalUrl::No,
+    );
+    let handler = Handler::with_emitter(Box::new(emitter));
+    (handler, source_map, output)
+}
+
+/// Returns the result of parsing the given string via the given callback.
+///
+/// If there are any errors, this will panic.
 pub(crate) fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T
 where
     F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
@@ -32,6 +59,26 @@ where
     x
 }
 
+/// Verifies that parsing the given string using the given callback will
+/// generate an error that contains the given text.
+pub(crate) fn with_expected_parse_error<T, F>(source_str: &str, expected_output: &str, f: F)
+where
+    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
+{
+    let (handler, source_map, output) = create_test_handler();
+    let ps = ParseSess::with_span_handler(handler, source_map);
+    let mut p = string_to_parser(&ps, source_str.to_string());
+    let result = f(&mut p);
+    assert!(result.is_ok());
+
+    let bytes = output.lock().unwrap();
+    let actual_output = str::from_utf8(&bytes).unwrap();
+    println!("expected output:\n------\n{}------", expected_output);
+    println!("actual output:\n------\n{}------", actual_output);
+
+    assert!(actual_output.contains(expected_output))
+}
+
 /// Maps a string to tts, using a made-up filename.
 pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new(
@@ -130,13 +177,7 @@ impl<T: Write> Write for Shared<T> {
 
 fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
     create_default_session_if_not_set_then(|_| {
-        let output = Arc::new(Mutex::new(Vec::new()));
-
-        let fallback_bundle = rustc_errors::fallback_fluent_bundle(
-            vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
-            false,
-        );
-        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let (handler, source_map, output) = create_test_handler();
         source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
 
         let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
@@ -148,20 +189,6 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
             println!("text: {:?}", source_map.span_to_snippet(span));
         }
 
-        let emitter = EmitterWriter::new(
-            Box::new(Shared { data: output.clone() }),
-            Some(source_map.clone()),
-            None,
-            fallback_bundle,
-            false,
-            false,
-            false,
-            None,
-            false,
-            false,
-            TerminalUrl::No,
-        );
-        let handler = Handler::with_emitter(true, None, Box::new(emitter), None);
         #[allow(rustc::untranslatable_diagnostic)]
         handler.span_err(msp, "foo");
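
Note: create_test_handler and with_expected_parse_error above route diagnostics into a shared in-memory buffer so the tests can assert on the rendered message. A minimal sketch of that output-capturing idea in plain Rust, without rustc's Handler/EmitterWriter types:

use std::io::{self, Write};
use std::sync::{Arc, Mutex};

// A writer that appends everything into a shared, lockable buffer, so a test
// can inspect whatever a "diagnostic emitter" printed.
struct Shared {
    data: Arc<Mutex<Vec<u8>>>,
}

impl Write for Shared {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.data.lock().unwrap().extend_from_slice(buf);
        Ok(buf.len())
    }
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

fn main() {
    let output = Arc::new(Mutex::new(Vec::new()));
    let mut emitter = Shared { data: output.clone() };

    // Stand-in for a parser emitting a diagnostic through the writer.
    writeln!(emitter, "error: expected identifier, found keyword `return`").unwrap();

    let bytes = output.lock().unwrap();
    let actual = std::str::from_utf8(&bytes).unwrap();
    assert!(actual.contains("expected identifier, found keyword `return`"));
}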