about summary refs log tree commit diff
path: root/compiler/rustc_parse/src/parser
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/rustc_parse/src/parser')
-rw-r--r--compiler/rustc_parse/src/parser/attr.rs56
-rw-r--r--compiler/rustc_parse/src/parser/diagnostics.rs49
-rw-r--r--compiler/rustc_parse/src/parser/expr.rs40
-rw-r--r--compiler/rustc_parse/src/parser/item.rs32
-rw-r--r--compiler/rustc_parse/src/parser/mod.rs98
-rw-r--r--compiler/rustc_parse/src/parser/mut_visit/tests.rs71
-rw-r--r--compiler/rustc_parse/src/parser/path.rs19
-rw-r--r--compiler/rustc_parse/src/parser/stmt.rs8
-rw-r--r--compiler/rustc_parse/src/parser/tests.rs1422
-rw-r--r--compiler/rustc_parse/src/parser/tokenstream/tests.rs108
10 files changed, 1781 insertions, 122 deletions
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index ab5f51eedc3..d5d8060d909 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,7 +1,4 @@
-use crate::errors::{
-    InvalidMetaItem, InvalidMetaItemSuggQuoteIdent, InvalidMetaItemUnquotedIdent,
-    SuffixedLiteralInAttribute,
-};
+use crate::errors;
 use crate::fluent_generated as fluent;
 use crate::maybe_whole;
 
@@ -318,7 +315,7 @@ impl<'a> Parser<'a> {
         debug!("checking if {:?} is unsuffixed", lit);
 
         if !lit.kind.is_unsuffixed() {
-            self.dcx().emit_err(SuffixedLiteralInAttribute { span: lit.span });
+            self.dcx().emit_err(errors::SuffixedLiteralInAttribute { span: lit.span });
         }
 
         Ok(lit)
@@ -356,10 +353,11 @@ impl<'a> Parser<'a> {
         Ok(nmis)
     }
 
-    /// Matches the following grammar (per RFC 1559).
+    /// Parse a meta item per RFC 1559.
+    ///
     /// ```ebnf
-    /// meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
-    /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
+    /// MetaItem = SimplePath ( '=' UNSUFFIXED_LIT | '(' MetaSeq? ')' )? ;
+    /// MetaSeq = MetaItemInner (',' MetaItemInner)* ','? ;
     /// ```
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         // We can't use `maybe_whole` here because it would bump in the `None`
@@ -387,7 +385,6 @@ impl<'a> Parser<'a> {
         Ok(if self.eat(&token::Eq) {
             ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
-            // Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
             let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
             ast::MetaItemKind::List(list)
         } else {
@@ -395,38 +392,45 @@ impl<'a> Parser<'a> {
         })
     }
 
-    /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
+    /// Parse an inner meta item per RFC 1559.
+    ///
+    /// ```ebnf
+    /// MetaItemInner = UNSUFFIXED_LIT | MetaItem ;
+    /// ```
     fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
         match self.parse_unsuffixed_meta_item_lit() {
             Ok(lit) => return Ok(ast::NestedMetaItem::Lit(lit)),
-            Err(err) => err.cancel(),
+            Err(err) => err.cancel(), // we provide a better error below
         }
 
         match self.parse_meta_item() {
             Ok(mi) => return Ok(ast::NestedMetaItem::MetaItem(mi)),
-            Err(err) => err.cancel(),
+            Err(err) => err.cancel(), // we provide a better error below
         }
 
-        let token = self.token.clone();
+        let mut err = errors::InvalidMetaItem {
+            span: self.token.span,
+            token: self.token.clone(),
+            quote_ident_sugg: None,
+        };
 
-        // Check for unquoted idents in meta items, e.g.: #[cfg(key = foo)]
-        // `from_expansion()` ensures we don't suggest for cases such as
-        // `#[cfg(feature = $expr)]` in macros
-        if self.prev_token == token::Eq && !self.token.span.from_expansion() {
+        // Suggest quoting idents, e.g. in `#[cfg(key = value)]`. We don't use `Token::ident` and
+        // don't `uninterpolate` the token to avoid suggesting anything butchered or questionable
+        // when macro metavariables are involved.
+        if self.prev_token == token::Eq
+            && let token::Ident(..) = self.token.kind
+        {
             let before = self.token.span.shrink_to_lo();
-            while matches!(self.token.kind, token::Ident(..)) {
+            while let token::Ident(..) = self.token.kind {
                 self.bump();
             }
-            let after = self.prev_token.span.shrink_to_hi();
-            let sugg = InvalidMetaItemSuggQuoteIdent { before, after };
-            return Err(self.dcx().create_err(InvalidMetaItemUnquotedIdent {
-                span: token.span,
-                token,
-                sugg,
-            }));
+            err.quote_ident_sugg = Some(errors::InvalidMetaItemQuoteIdentSugg {
+                before,
+                after: self.prev_token.span.shrink_to_hi(),
+            });
         }
 
-        Err(self.dcx().create_err(InvalidMetaItem { span: token.span, token }))
+        Err(self.dcx().create_err(err))
     }
 }
 
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index f256dbf4360..50698dbf9c1 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -22,7 +22,6 @@ use crate::fluent_generated as fluent;
 use crate::parser;
 use crate::parser::attr::InnerAttrPolicy;
 use ast::token::IdentIsRaw;
-use parser::Recovered;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
@@ -31,7 +30,7 @@ use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
     AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
     BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
-    PatKind, Path, PathSegment, QSelf, Ty, TyKind,
+    PatKind, Path, PathSegment, QSelf, Recovered, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
@@ -527,14 +526,14 @@ impl<'a> Parser<'a> {
                 //
                 //   let x = 32:
                 //   let y = 42;
-                self.dcx().emit_err(ExpectedSemi {
+                let guar = self.dcx().emit_err(ExpectedSemi {
                     span: self.token.span,
                     token: self.token.clone(),
                     unexpected_token_label: None,
                     sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
                 });
                 self.bump();
-                return Ok(Recovered::Yes);
+                return Ok(Recovered::Yes(guar));
             } else if self.look_ahead(0, |t| {
                 t == &token::CloseDelim(Delimiter::Brace)
                     || ((t.can_begin_expr() || t.can_begin_item())
@@ -552,13 +551,13 @@ impl<'a> Parser<'a> {
                 //   let x = 32
                 //   let y = 42;
                 let span = self.prev_token.span.shrink_to_hi();
-                self.dcx().emit_err(ExpectedSemi {
+                let guar = self.dcx().emit_err(ExpectedSemi {
                     span,
                     token: self.token.clone(),
                     unexpected_token_label: Some(self.token.span),
                     sugg: ExpectedSemiSugg::AddSemi(span),
                 });
-                return Ok(Recovered::Yes);
+                return Ok(Recovered::Yes(guar));
             }
         }
 
@@ -712,8 +711,8 @@ impl<'a> Parser<'a> {
 
         if self.check_too_many_raw_str_terminators(&mut err) {
             if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
-                err.emit();
-                return Ok(Recovered::Yes);
+                let guar = err.emit();
+                return Ok(Recovered::Yes(guar));
             } else {
                 return Err(err);
             }
@@ -1224,7 +1223,11 @@ impl<'a> Parser<'a> {
             let x = self.parse_seq_to_before_end(
                 &token::Gt,
                 SeqSep::trailing_allowed(token::Comma),
-                |p| p.parse_generic_arg(None),
+                |p| match p.parse_generic_arg(None)? {
+                    Some(arg) => Ok(arg),
+                    // If we didn't eat a generic arg, then we should error.
+                    None => p.unexpected_any(),
+                },
             );
             match x {
                 Ok((_, _, Recovered::No)) => {
@@ -1251,7 +1254,7 @@ impl<'a> Parser<'a> {
                         }
                     }
                 }
-                Ok((_, _, Recovered::Yes)) => {}
+                Ok((_, _, Recovered::Yes(_))) => {}
                 Err(err) => {
                     err.cancel();
                 }
@@ -1284,13 +1287,13 @@ impl<'a> Parser<'a> {
 
     /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
     /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
-    /// parenthesising the leftmost comparison.
+    /// parenthesising the leftmost comparison. The return value indicates if recovery happened.
     fn attempt_chained_comparison_suggestion(
         &mut self,
         err: &mut ComparisonOperatorsCannotBeChained,
         inner_op: &Expr,
         outer_op: &Spanned<AssocOp>,
-    ) -> Recovered {
+    ) -> bool {
         if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
             if let ExprKind::Field(_, ident) = l1.kind
                 && ident.as_str().parse::<i32>().is_err()
@@ -1298,7 +1301,7 @@ impl<'a> Parser<'a> {
             {
                 // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
                 // suggestion being the only one to apply is high.
-                return Recovered::No;
+                return false;
             }
             return match (op.node, &outer_op.node) {
                 // `x == y == z`
@@ -1317,7 +1320,7 @@ impl<'a> Parser<'a> {
                         span: inner_op.span.shrink_to_hi(),
                         middle_term: expr_to_str(r1),
                     });
-                    Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`.
+                    false // Keep the current parse behavior, where the AST is `(x < y) < z`.
                 }
                 // `x == y < z`
                 (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
@@ -1331,12 +1334,12 @@ impl<'a> Parser<'a> {
                                 left: r1.span.shrink_to_lo(),
                                 right: r2.span.shrink_to_hi(),
                             });
-                            Recovered::Yes
+                            true
                         }
                         Err(expr_err) => {
                             expr_err.cancel();
                             self.restore_snapshot(snapshot);
-                            Recovered::Yes
+                            true
                         }
                     }
                 }
@@ -1351,19 +1354,19 @@ impl<'a> Parser<'a> {
                                 left: l1.span.shrink_to_lo(),
                                 right: r1.span.shrink_to_hi(),
                             });
-                            Recovered::Yes
+                            true
                         }
                         Err(expr_err) => {
                             expr_err.cancel();
                             self.restore_snapshot(snapshot);
-                            Recovered::No
+                            false
                         }
                     }
                 }
-                _ => Recovered::No,
+                _ => false
             };
         }
-        Recovered::No
+        false
     }
 
     /// Produces an error if comparison operators are chained (RFC #558).
@@ -1494,7 +1497,7 @@ impl<'a> Parser<'a> {
                         // misformatted turbofish, for instance), suggest a correct form.
                         let recovered = self
                             .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
-                        if matches!(recovered, Recovered::Yes) {
+                        if recovered {
                             let guar = self.dcx().emit_err(err);
                             mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
                         } else {
@@ -1503,10 +1506,10 @@ impl<'a> Parser<'a> {
                         }
                     };
                 }
-                let recover =
+                let recovered =
                     self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
                 let guar = self.dcx().emit_err(err);
-                if matches!(recover, Recovered::Yes) {
+                if recovered {
                     return mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar);
                 }
             }
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 8ed2a6edf1a..577003e94fb 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -3,7 +3,7 @@ use super::diagnostics::SnapshotParser;
 use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
-    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions,
+    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
     SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken,
 };
 
@@ -11,7 +11,7 @@ use crate::errors;
 use crate::maybe_recover_from_interpolated_ty_qpath;
 use ast::mut_visit::{noop_visit_expr, MutVisitor};
 use ast::token::IdentIsRaw;
-use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment};
+use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
 use core::mem;
 use core::ops::ControlFlow;
 use rustc_ast::ptr::P;
@@ -2629,7 +2629,7 @@ impl<'a> Parser<'a> {
 
         CondChecker::new(self).visit_expr(&mut cond);
 
-        if let ExprKind::Let(_, _, _, None) = cond.kind {
+        if let ExprKind::Let(_, _, _, Recovered::No) = cond.kind {
             // Remove the last feature gating of a `let` expression since it's stable.
             self.psess.gated_spans.ungate_last(sym::let_chains, cond.span);
         }
@@ -2639,7 +2639,7 @@ impl<'a> Parser<'a> {
 
     /// Parses a `let $pat = $expr` pseudo-expression.
     fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
-        let is_recovered = if !restrictions.contains(Restrictions::ALLOW_LET) {
+        let recovered = if !restrictions.contains(Restrictions::ALLOW_LET) {
             let err = errors::ExpectedExpressionFoundLet {
                 span: self.token.span,
                 reason: ForbiddenLetReason::OtherForbidden,
@@ -2650,10 +2650,10 @@ impl<'a> Parser<'a> {
                 // This was part of a closure, the that part of the parser recover.
                 return Err(self.dcx().create_err(err));
             } else {
-                Some(self.dcx().emit_err(err))
+                Recovered::Yes(self.dcx().emit_err(err))
             }
         } else {
-            None
+            Recovered::No
         };
         self.bump(); // Eat `let` token
         let lo = self.prev_token.span;
@@ -2674,7 +2674,7 @@ impl<'a> Parser<'a> {
         }
         let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), None.into())?;
         let span = lo.to(expr.span);
-        Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, is_recovered)))
+        Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
     }
 
     /// Parses an `else { ... }` expression (`else` token already eaten).
@@ -2998,7 +2998,7 @@ impl<'a> Parser<'a> {
         &mut self,
         first_expr: &P<Expr>,
         arrow_span: Span,
-    ) -> Option<P<Expr>> {
+    ) -> Option<(Span, ErrorGuaranteed)> {
         if self.token.kind != token::Semi {
             return None;
         }
@@ -3023,7 +3023,7 @@ impl<'a> Parser<'a> {
                     errors::MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
                 },
             });
-            this.mk_expr_err(span, guar)
+            (span, guar)
         };
         // We might have either a `,` -> `;` typo, or a block without braces. We need
         // a more subtle parsing strategy.
@@ -3143,9 +3143,12 @@ impl<'a> Parser<'a> {
                     arm_body = Some(expr);
                     this.eat(&token::Comma);
                     Ok(Recovered::No)
-                } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
+                } else if let Some((span, guar)) =
+                    this.parse_arm_body_missing_braces(&expr, arrow_span)
+                {
+                    let body = this.mk_expr_err(span, guar);
                     arm_body = Some(body);
-                    Ok(Recovered::Yes)
+                    Ok(Recovered::Yes(guar))
                 } else {
                     let expr_span = expr.span;
                     arm_body = Some(expr);
@@ -3223,10 +3226,10 @@ impl<'a> Parser<'a> {
                         .is_ok();
                     if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
                         err.cancel();
-                        this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
+                        let guar = this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
                             span: arm_span.shrink_to_hi(),
                         });
-                        return Ok(Recovered::Yes);
+                        return Ok(Recovered::Yes(guar));
                     }
                     Err(err)
                 });
@@ -3904,15 +3907,16 @@ impl MutVisitor for CondChecker<'_> {
 
         let span = e.span;
         match e.kind {
-            ExprKind::Let(_, _, _, ref mut is_recovered @ None) => {
+            ExprKind::Let(_, _, _, ref mut recovered @ Recovered::No) => {
                 if let Some(reason) = self.forbid_let_reason {
-                    *is_recovered =
-                        Some(self.parser.dcx().emit_err(errors::ExpectedExpressionFoundLet {
+                    *recovered = Recovered::Yes(self.parser.dcx().emit_err(
+                        errors::ExpectedExpressionFoundLet {
                             span,
                             reason,
                             missing_let: self.missing_let,
                             comparison: self.comparison,
-                        }));
+                        },
+                    ));
                 } else {
                     self.parser.psess.gated_spans.gate(sym::let_chains, span);
                 }
@@ -3980,7 +3984,7 @@ impl MutVisitor for CondChecker<'_> {
                 self.visit_expr(op);
                 self.forbid_let_reason = forbid_let_reason;
             }
-            ExprKind::Let(_, _, _, Some(_))
+            ExprKind::Let(_, _, _, Recovered::Yes(_))
             | ExprKind::Array(_)
             | ExprKind::ConstBlock(_)
             | ExprKind::Lit(_)
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 848277c4611..df6996dbc45 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,8 +1,7 @@
 use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
-    AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing,
-    TrailingToken,
+    AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, TrailingToken,
 };
 use crate::errors::{self, MacroExpandsToAdtField};
 use crate::fluent_generated as fluent;
@@ -336,7 +335,7 @@ impl<'a> Parser<'a> {
                 UseTreeKind::Glob => {
                     e.note("the wildcard token must be last on the path");
                 }
-                UseTreeKind::Nested(..) => {
+                UseTreeKind::Nested { .. } => {
                     e.note("glob-like brace syntax must be last on the path");
                 }
                 _ => (),
@@ -1056,7 +1055,11 @@ impl<'a> Parser<'a> {
         Ok(if self.eat(&token::BinOp(token::Star)) {
             UseTreeKind::Glob
         } else {
-            UseTreeKind::Nested(self.parse_use_tree_list()?)
+            let lo = self.token.span;
+            UseTreeKind::Nested {
+                items: self.parse_use_tree_list()?,
+                span: lo.to(self.prev_token.span),
+            }
         })
     }
 
@@ -1536,8 +1539,8 @@ impl<'a> Parser<'a> {
                                 this.bump(); // }
                                 err.span_label(span, "while parsing this enum");
                                 err.help(help);
-                                err.emit();
-                                (thin_vec![], Recovered::Yes)
+                                let guar = err.emit();
+                                (thin_vec![], Recovered::Yes(guar))
                             }
                         };
                     VariantData::Struct { fields, recovered: recovered.into() }
@@ -1695,16 +1698,15 @@ impl<'a> Parser<'a> {
         let mut recovered = Recovered::No;
         if self.eat(&token::OpenDelim(Delimiter::Brace)) {
             while self.token != token::CloseDelim(Delimiter::Brace) {
-                let field = self.parse_field_def(adt_ty).map_err(|e| {
-                    self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
-                    recovered = Recovered::Yes;
-                    e
-                });
-                match field {
-                    Ok(field) => fields.push(field),
+                match self.parse_field_def(adt_ty) {
+                    Ok(field) => {
+                        fields.push(field);
+                    }
                     Err(mut err) => {
+                        self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
                         err.span_label(ident_span, format!("while parsing this {adt_ty}"));
-                        err.emit();
+                        let guar = err.emit();
+                        recovered = Recovered::Yes(guar);
                         break;
                     }
                 }
@@ -2465,7 +2467,7 @@ impl<'a> Parser<'a> {
             // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
             // account for this.
             match self.expect_one_of(&[], &[]) {
-                Ok(Recovered::Yes) => {}
+                Ok(Recovered::Yes(_)) => {}
                 Ok(Recovered::No) => unreachable!(),
                 Err(mut err) => {
                     // Qualifier keywords ordering check
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 7486da33b21..bfb6c4a3885 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -19,6 +19,7 @@ pub(crate) use item::FnParseMode;
 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 pub use path::PathStyle;
 
+use core::fmt;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
@@ -26,13 +27,12 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, AnonConst, AttrArgs, AttrArgsEq, AttrId, ByRef, Const, CoroutineKind, DelimArgs,
-    Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, StrLit, Unsafe, Visibility,
+    Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, StrLit, Unsafe, Visibility,
     VisibilityKind, DUMMY_NODE_ID,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::PResult;
-use rustc_errors::{Applicability, Diag, FatalError, MultiSpan};
+use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{Span, DUMMY_SP};
@@ -45,8 +45,22 @@ use crate::errors::{
     self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
 };
 
+#[cfg(test)]
+mod tests;
+
+// Ideally, these tests would be in `rustc_ast`. But they depend on having a
+// parser, so they are here.
+#[cfg(test)]
+mod tokenstream {
+    mod tests;
+}
+#[cfg(test)]
+mod mut_visit {
+    mod tests;
+}
+
 bitflags::bitflags! {
-    #[derive(Clone, Copy)]
+    #[derive(Clone, Copy, Debug)]
     struct Restrictions: u8 {
         const STMT_EXPR         = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
@@ -72,7 +86,7 @@ enum BlockMode {
 
 /// Whether or not we should force collection of tokens for an AST node,
 /// regardless of whether or not it has attributes
-#[derive(Clone, Copy, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum ForceCollect {
     Yes,
     No,
@@ -120,7 +134,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
     };
 }
 
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
 pub enum Recovery {
     Allowed,
     Forbidden,
@@ -170,7 +184,7 @@ pub struct Parser<'a> {
     capture_state: CaptureState,
     /// This allows us to recover when the user forget to add braces around
     /// multiple statements in the closure body.
-    pub current_closure: Option<ClosureSpans>,
+    current_closure: Option<ClosureSpans>,
     /// Whether the parser is allowed to do recovery.
     /// This is disabled when parsing macro arguments, see #103534
     pub recovery: Recovery,
@@ -182,7 +196,7 @@ pub struct Parser<'a> {
 rustc_data_structures::static_assert_size!(Parser<'_>, 264);
 
 /// Stores span information about a closure.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct ClosureSpans {
     pub whole_closure: Span,
     pub closing_pipe: Span,
@@ -211,7 +225,7 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
 /// we will never need an `AttrTokenStream`.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, Debug)]
 pub enum Capturing {
     /// We aren't performing any capturing - this is the default mode.
     No,
@@ -219,7 +233,7 @@ pub enum Capturing {
     Yes,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct CaptureState {
     capturing: Capturing,
     replace_ranges: Vec<ReplaceRange>,
@@ -230,7 +244,7 @@ struct CaptureState {
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
 /// what the parser expects, for the most part.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct TokenCursor {
     // Cursor for the current (innermost) token stream. The delimiters for this
     // token stream are found in `self.stack.last()`; when that is `None` then
@@ -335,6 +349,7 @@ enum TokenExpectType {
 }
 
 /// A sequence separator.
+#[derive(Debug)]
 struct SeqSep {
     /// The separator token.
     sep: Option<TokenKind>,
@@ -352,31 +367,19 @@ impl SeqSep {
     }
 }
 
+#[derive(Debug)]
 pub enum FollowedByType {
     Yes,
     No,
 }
 
-/// Whether a function performed recovery
-#[derive(Copy, Clone, Debug)]
-pub enum Recovered {
-    No,
-    Yes,
-}
-
-impl From<Recovered> for bool {
-    fn from(r: Recovered) -> bool {
-        matches!(r, Recovered::Yes)
-    }
-}
-
 #[derive(Copy, Clone, Debug)]
 pub enum Trailing {
     No,
     Yes,
 }
 
-#[derive(Clone, Copy, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum TokenDescription {
     ReservedIdentifier,
     Keyword,
@@ -839,9 +842,9 @@ impl<'a> Parser<'a> {
                         Ok(Recovered::No) => {
                             self.current_closure.take();
                         }
-                        Ok(Recovered::Yes) => {
+                        Ok(Recovered::Yes(guar)) => {
                             self.current_closure.take();
-                            recovered = Recovered::Yes;
+                            recovered = Recovered::Yes(guar);
                             break;
                         }
                         Err(mut expect_err) => {
@@ -1534,6 +1537,47 @@ impl<'a> Parser<'a> {
             })
     }
 
+    // debug view of the parser's token stream, up to `{lookahead}` tokens
+    pub fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug + '_ {
+        struct DebugParser<'dbg> {
+            parser: &'dbg Parser<'dbg>,
+            lookahead: usize,
+        }
+
+        impl fmt::Debug for DebugParser<'_> {
+            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                let Self { parser, lookahead } = self;
+                let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
+
+                // we don't need N spans, but we want at least one, so print all of prev_token
+                dbg_fmt.field("prev_token", &parser.prev_token);
+                // make it easier to peek farther ahead by taking TokenKinds only until EOF
+                let tokens = (0..*lookahead)
+                    .map(|i| parser.look_ahead(i, |tok| tok.kind.clone()))
+                    .scan(parser.prev_token == TokenKind::Eof, |eof, tok| {
+                        let current = eof.then_some(tok.clone()); // include a trailing EOF token
+                        *eof |= &tok == &TokenKind::Eof;
+                        current
+                    });
+                dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
+                dbg_fmt.field("approx_token_stream_pos", &parser.num_bump_calls);
+
+                // some fields are interesting for certain values, as they relate to macro parsing
+                if let Some(subparser) = parser.subparser_name {
+                    dbg_fmt.field("subparser_name", &subparser);
+                }
+                if let Recovery::Forbidden = parser.recovery {
+                    dbg_fmt.field("recovery", &parser.recovery);
+                }
+
+                // imply there's "more to know" than this view
+                dbg_fmt.finish_non_exhaustive()
+            }
+        }
+
+        DebugParser { parser: self, lookahead }
+    }
+
     pub fn clear_expected_tokens(&mut self) {
         self.expected_tokens.clear();
     }
diff --git a/compiler/rustc_parse/src/parser/mut_visit/tests.rs b/compiler/rustc_parse/src/parser/mut_visit/tests.rs
new file mode 100644
index 00000000000..b3cb28af657
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/mut_visit/tests.rs
@@ -0,0 +1,71 @@
+use crate::parser::tests::{matches_codepattern, string_to_crate};
+use rustc_ast as ast;
+use rustc_ast::mut_visit::MutVisitor;
+use rustc_ast_pretty::pprust;
+use rustc_span::create_default_session_globals_then;
+use rustc_span::symbol::Ident;
+
+// This version doesn't care about getting comments or doc-strings in.
+fn print_crate_items(krate: &ast::Crate) -> String {
+    krate.items.iter().map(|i| pprust::item_to_string(i)).collect::<Vec<_>>().join(" ")
+}
+
+// Change every identifier to "zz".
+struct ToZzIdentMutVisitor;
+
+impl MutVisitor for ToZzIdentMutVisitor {
+    const VISIT_TOKENS: bool = true;
+
+    fn visit_ident(&mut self, ident: &mut Ident) {
+        *ident = Ident::from_str("zz");
+    }
+}
+
+// Maybe add to `expand.rs`.
+macro_rules! assert_pred {
+    ($pred:expr, $predname:expr, $a:expr , $b:expr) => {{
+        let pred_val = $pred;
+        let a_val = $a;
+        let b_val = $b;
+        if !(pred_val(&a_val, &b_val)) {
+            panic!("expected args satisfying {}, got {} and {}", $predname, a_val, b_val);
+        }
+    }};
+}
+
+// Make sure idents get transformed everywhere.
+#[test]
+fn ident_transformation() {
+    create_default_session_globals_then(|| {
+        let mut zz_visitor = ToZzIdentMutVisitor;
+        let mut krate =
+            string_to_crate("#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
+        zz_visitor.visit_crate(&mut krate);
+        assert_pred!(
+            matches_codepattern,
+            "matches_codepattern",
+            print_crate_items(&krate),
+            "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()
+        );
+    })
+}
+
+// Make sure idents get transformed even inside macro defs.
+#[test]
+fn ident_transformation_in_defs() {
+    create_default_session_globals_then(|| {
+        let mut zz_visitor = ToZzIdentMutVisitor;
+        let mut krate = string_to_crate(
+            "macro_rules! a {(b $c:expr $(d $e:token)f+ => \
+            (g $(d $d $e)+))} "
+                .to_string(),
+        );
+        zz_visitor.visit_crate(&mut krate);
+        assert_pred!(
+            matches_codepattern,
+            "matches_codepattern",
+            print_crate_items(&krate),
+            "macro_rules! zz{(zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+))}".to_string()
+        );
+    })
+}
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index b97ec8c613d..3636a357978 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -160,7 +160,7 @@ impl<'a> Parser<'a> {
         style: PathStyle,
         ty_generics: Option<&Generics>,
     ) -> PResult<'a, Path> {
-        let reject_generics_if_mod_style = |parser: &Parser<'_>, path: &Path| {
+        let reject_generics_if_mod_style = |parser: &Parser<'_>, path: Path| {
             // Ensure generic arguments don't end up in attribute paths, such as:
             //
             //     macro_rules! m {
@@ -178,21 +178,26 @@ impl<'a> Parser<'a> {
                     .map(|arg| arg.span())
                     .collect::<Vec<_>>();
                 parser.dcx().emit_err(errors::GenericsInPath { span });
+                // Ignore these arguments to prevent unexpected behaviors.
+                let segments = path
+                    .segments
+                    .iter()
+                    .map(|segment| PathSegment { ident: segment.ident, id: segment.id, args: None })
+                    .collect();
+                Path { segments, ..path }
+            } else {
+                path
             }
         };
 
-        maybe_whole!(self, NtPath, |path| {
-            reject_generics_if_mod_style(self, &path);
-            path.into_inner()
-        });
+        maybe_whole!(self, NtPath, |path| reject_generics_if_mod_style(self, path.into_inner()));
 
         if let token::Interpolated(nt) = &self.token.kind {
             if let token::NtTy(ty) = &nt.0 {
                 if let ast::TyKind::Path(None, path) = &ty.kind {
                     let path = path.clone();
                     self.bump();
-                    reject_generics_if_mod_style(self, &path);
-                    return Ok(path);
+                    return Ok(reject_generics_if_mod_style(self, path));
                 }
             }
         }
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 6601011665b..d70afebf1b2 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -11,14 +11,13 @@ use crate::errors;
 use crate::maybe_whole;
 
 use crate::errors::MalformedLoopLabel;
-use crate::parser::Recovered;
 use ast::Label;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, TokenKind};
 use rustc_ast::util::classify;
 use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
-use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
+use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Recovered, Stmt};
 use rustc_ast::{StmtKind, DUMMY_NODE_ID};
 use rustc_errors::{Applicability, Diag, PResult};
 use rustc_span::symbol::{kw, sym, Ident};
@@ -675,11 +674,8 @@ impl<'a> Parser<'a> {
                 let replace_with_err = 'break_recover: {
                     match expect_result {
                         Ok(Recovered::No) => None,
-                        Ok(Recovered::Yes) => {
+                        Ok(Recovered::Yes(guar)) => {
                             // Skip type error to avoid extra errors.
-                            let guar = self
-                                .dcx()
-                                .span_delayed_bug(self.prev_token.span, "expected `;` or `}`");
                             Some(guar)
                         }
                         Err(e) => {
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
new file mode 100644
index 00000000000..a31e350541a
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -0,0 +1,1422 @@
+use crate::parser::ForceCollect;
+use crate::{new_parser_from_source_str, parser::Parser, source_file_to_stream};
+use ast::token::IdentIsRaw;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, Delimiter, Token};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::visit;
+use rustc_ast::{self as ast, PatKind};
+use rustc_ast_pretty::pprust::item_to_string;
+use rustc_data_structures::sync::Lrc;
+use rustc_errors::emitter::HumanEmitter;
+use rustc_errors::{DiagCtxt, MultiSpan, PResult};
+use rustc_session::parse::ParseSess;
+use rustc_span::create_default_session_globals_then;
+use rustc_span::source_map::{FilePathMapping, SourceMap};
+use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::{BytePos, FileName, Pos, Span};
+use std::io;
+use std::io::prelude::*;
+use std::iter::Peekable;
+use std::path::{Path, PathBuf};
+use std::str;
+use std::sync::{Arc, Mutex};
+use termcolor::WriteColor;
+
+fn psess() -> ParseSess {
+    ParseSess::new(vec![crate::DEFAULT_LOCALE_RESOURCE, crate::DEFAULT_LOCALE_RESOURCE])
+}
+
+/// Maps a string to a parser (via tts).
+fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> {
+    new_parser_from_source_str(psess, PathBuf::from("bogofile").into(), source_str)
+}
+
+fn create_test_handler() -> (DiagCtxt, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
+    let output = Arc::new(Mutex::new(Vec::new()));
+    let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+    let fallback_bundle = rustc_errors::fallback_fluent_bundle(
+        vec![crate::DEFAULT_LOCALE_RESOURCE, crate::DEFAULT_LOCALE_RESOURCE],
+        false,
+    );
+    let emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), fallback_bundle)
+        .sm(Some(source_map.clone()))
+        .diagnostic_width(Some(140));
+    let dcx = DiagCtxt::new(Box::new(emitter));
+    (dcx, source_map, output)
+}
+
+/// Returns the result of parsing the given string via the given callback.
+///
+/// If there are any errors, this will panic.
+fn with_error_checking_parse<'a, T, F>(s: String, psess: &'a ParseSess, f: F) -> T
+where
+    F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
+{
+    let mut p = string_to_parser(&psess, s);
+    let x = f(&mut p).unwrap();
+    p.psess.dcx.abort_if_errors();
+    x
+}
+
+/// Verifies that parsing the given string using the given callback will
+/// generate an error that contains the given text.
+fn with_expected_parse_error<T, F>(source_str: &str, expected_output: &str, f: F)
+where
+    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
+{
+    let (handler, source_map, output) = create_test_handler();
+    let psess = ParseSess::with_dcx(handler, source_map);
+    let mut p = string_to_parser(&psess, source_str.to_string());
+    let result = f(&mut p);
+    assert!(result.is_ok());
+
+    let bytes = output.lock().unwrap();
+    let actual_output = str::from_utf8(&bytes).unwrap();
+    println!("expected output:\n------\n{}------", expected_output);
+    println!("actual output:\n------\n{}------", actual_output);
+
+    assert!(actual_output.contains(expected_output))
+}
+
+/// Maps a string to tts, using a made-up filename.
+pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
+    let psess = psess();
+    source_file_to_stream(
+        &psess,
+        psess.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str),
+        None,
+    )
+}
+
+/// Parses a string, returns a crate.
+pub(crate) fn string_to_crate(source_str: String) -> ast::Crate {
+    let psess = psess();
+    with_error_checking_parse(source_str, &psess, |p| p.parse_crate_mod())
+}
+
+/// Does the given string match the pattern? Whitespace in the first string
+/// may be deleted or replaced with other whitespace to match the pattern.
+/// This function is relatively Unicode-ignorant; fortunately, the careful design
+/// of UTF-8 mitigates this ignorance. It doesn't do Unicode normalization (e.g. NFC/NFKC).
+pub(crate) fn matches_codepattern(a: &str, b: &str) -> bool {
+    let mut a_iter = a.chars().peekable();
+    let mut b_iter = b.chars().peekable();
+
+    loop {
+        let (a, b) = match (a_iter.peek(), b_iter.peek()) {
+            (None, None) => return true,
+            (None, _) => return false,
+            (Some(&a), None) => {
+                if rustc_lexer::is_whitespace(a) {
+                    break; // Trailing whitespace check is out of loop for borrowck.
+                } else {
+                    return false;
+                }
+            }
+            (Some(&a), Some(&b)) => (a, b),
+        };
+
+        if rustc_lexer::is_whitespace(a) && rustc_lexer::is_whitespace(b) {
+            // Skip whitespace for `a` and `b`.
+            scan_for_non_ws_or_end(&mut a_iter);
+            scan_for_non_ws_or_end(&mut b_iter);
+        } else if rustc_lexer::is_whitespace(a) {
+            // Skip whitespace for `a`.
+            scan_for_non_ws_or_end(&mut a_iter);
+        } else if a == b {
+            a_iter.next();
+            b_iter.next();
+        } else {
+            return false;
+        }
+    }
+
+    // Check if a has *only* trailing whitespace.
+    a_iter.all(rustc_lexer::is_whitespace)
+}
+
+/// Advances the given peekable `Iterator` until it reaches a non-whitespace character.
+fn scan_for_non_ws_or_end<I: Iterator<Item = char>>(iter: &mut Peekable<I>) {
+    while iter.peek().copied().is_some_and(rustc_lexer::is_whitespace) {
+        iter.next();
+    }
+}
+
+/// Identifies a position in the text by the n'th occurrence of a string.
+struct Position {
+    string: &'static str,
+    count: usize,
+}
+
+struct SpanLabel {
+    start: Position,
+    end: Position,
+    label: &'static str,
+}
+
+struct Shared<T: Write> {
+    data: Arc<Mutex<T>>,
+}
+
+impl<T: Write> WriteColor for Shared<T> {
+    fn supports_color(&self) -> bool {
+        false
+    }
+
+    fn set_color(&mut self, _spec: &termcolor::ColorSpec) -> io::Result<()> {
+        Ok(())
+    }
+
+    fn reset(&mut self) -> io::Result<()> {
+        Ok(())
+    }
+}
+
+impl<T: Write> Write for Shared<T> {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.data.lock().unwrap().write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.data.lock().unwrap().flush()
+    }
+}
+
+#[allow(rustc::untranslatable_diagnostic)] // no translation needed for tests
+fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
+    create_default_session_globals_then(|| {
+        let (handler, source_map, output) = create_test_handler();
+        source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
+
+        let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
+        let mut msp = MultiSpan::from_span(primary_span);
+        for span_label in span_labels {
+            let span = make_span(&file_text, &span_label.start, &span_label.end);
+            msp.push_span_label(span, span_label.label);
+            println!("span: {:?} label: {:?}", span, span_label.label);
+            println!("text: {:?}", source_map.span_to_snippet(span));
+        }
+
+        handler.span_err(msp, "foo");
+
+        assert!(
+            expected_output.chars().next() == Some('\n'),
+            "expected output should begin with newline"
+        );
+        let expected_output = &expected_output[1..];
+
+        let bytes = output.lock().unwrap();
+        let actual_output = str::from_utf8(&bytes).unwrap();
+        println!("expected output:\n------\n{}------", expected_output);
+        println!("actual output:\n------\n{}------", actual_output);
+
+        assert!(expected_output == actual_output)
+    })
+}
+
+fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
+    let start = make_pos(file_text, start);
+    let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
+    assert!(start <= end);
+    Span::with_root_ctxt(BytePos(start as u32), BytePos(end as u32))
+}
+
+fn make_pos(file_text: &str, pos: &Position) -> usize {
+    let mut remainder = file_text;
+    let mut offset = 0;
+    for _ in 0..pos.count {
+        if let Some(n) = remainder.find(&pos.string) {
+            offset += n;
+            remainder = &remainder[n + 1..];
+        } else {
+            panic!("failed to find {} instances of {:?} in {:?}", pos.count, pos.string, file_text);
+        }
+    }
+    offset
+}
+
+#[test]
+fn ends_on_col0() {
+    test_harness(
+        r#"
+fn foo() {
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "{", count: 1 },
+            end: Position { string: "}", count: 1 },
+            label: "test",
+        }],
+        r#"
+error: foo
+ --> test.rs:2:10
+  |
+2 |   fn foo() {
+  |  __________^
+3 | | }
+  | |_^ test
+
+"#,
+    );
+}
+
+#[test]
+fn ends_on_col2() {
+    test_harness(
+        r#"
+fn foo() {
+
+
+  }
+"#,
+        vec![SpanLabel {
+            start: Position { string: "{", count: 1 },
+            end: Position { string: "}", count: 1 },
+            label: "test",
+        }],
+        r#"
+error: foo
+ --> test.rs:2:10
+  |
+2 |   fn foo() {
+  |  __________^
+... |
+5 | |   }
+  | |___^ test
+
+"#,
+    );
+}
+#[test]
+fn non_nested() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0
+  X1 Y1
+  X2 Y2
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |      X0 Y0
+  |   ___^__-
+  |  |___|
+  | ||
+4 | ||   X1 Y1
+5 | ||   X2 Y2
+  | ||____^__- `Y` is a good letter too
+  | |_____|
+  |       `X` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn nested() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0
+  Y1 X1
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y1", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |      X0 Y0
+  |   ___^__-
+  |  |___|
+  | ||
+4 | ||   Y1 X1
+  | ||____-__^ `X` is a good letter
+  |  |____|
+  |       `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn different_overlap() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "X3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |      X0 Y0 Z0
+  |  _______^
+4 | |    X1 Y1 Z1
+  | | _________-
+5 | ||   X2 Y2 Z2
+  | ||____^ `X` is a good letter
+6 |  |   X3 Y3 Z3
+  |  |____- `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn triple_overlap() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+            SpanLabel {
+                start: Position { string: "Z0", count: 1 },
+                end: Position { string: "Z2", count: 1 },
+                label: "`Z` label",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |       X0 Y0 Z0
+  |    ___^__-__-
+  |   |___|__|
+  |  ||___|
+  | |||
+4 | |||   X1 Y1 Z1
+5 | |||   X2 Y2 Z2
+  | |||____^__-__- `Z` label
+  | ||_____|__|
+  | |______|  `Y` is a good letter too
+  |        `X` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn triple_exact_overlap() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X2", count: 1 },
+                label: "`Z` label",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 | /   X0 Y0 Z0
+4 | |   X1 Y1 Z1
+5 | |   X2 Y2 Z2
+  | |    ^
+  | |    |
+  | |    `X` is a good letter
+  | |____`Y` is a good letter too
+  |      `Z` label
+
+"#,
+    );
+}
+
+#[test]
+fn minimum_depth() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y1", count: 1 },
+                end: Position { string: "Z2", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+            SpanLabel {
+                start: Position { string: "X2", count: 1 },
+                end: Position { string: "Y3", count: 1 },
+                label: "`Z`",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |      X0 Y0 Z0
+  |  _______^
+4 | |    X1 Y1 Z1
+  | | ____^_-
+  | ||____|
+  |  |    `X` is a good letter
+5 |  |   X2 Y2 Z2
+  |  |___-______- `Y` is a good letter too
+  |   ___|
+  |  |
+6 |  |   X3 Y3 Z3
+  |  |_______- `Z`
+
+"#,
+    );
+}
+
+#[test]
+fn non_overlapping() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "X0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Y2", count: 1 },
+                end: Position { string: "Z3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 | /   X0 Y0 Z0
+4 | |   X1 Y1 Z1
+  | |____^ `X` is a good letter
+5 |     X2 Y2 Z2
+  |  ______-
+6 | |   X3 Y3 Z3
+  | |__________- `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn overlapping_start_and_end() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "Z3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:6
+  |
+3 |      X0 Y0 Z0
+  |  _______^
+4 | |    X1 Y1 Z1
+  | | ____^____-
+  | ||____|
+  |  |    `X` is a good letter
+5 |  |   X2 Y2 Z2
+6 |  |   X3 Y3 Z3
+  |  |__________- `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_primary_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "c", count: 1 },
+                label: "",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:7
+  |
+3 |   a { b { c } d }
+  |   ----^^^^-^^-- `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_secondary_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^ `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_primary_without_message_2() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "`b` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "c", count: 1 },
+                label: "",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:7
+  |
+3 |   a { b { c } d }
+  |   ----^^^^-^^--
+  |       |
+  |       `b` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_secondary_without_message_2() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "`b` is a good letter",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^
+  |       |
+  |       `b` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_secondary_without_message_3() {
+    test_harness(
+        r#"
+fn foo() {
+  a  bc  d
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "b", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a  bc  d
+  |   ^^^^----
+  |   |
+  |   `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_without_message_2() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "",
+            },
+            SpanLabel {
+                start: Position { string: "c", count: 1 },
+                end: Position { string: "c", count: 1 },
+                label: "",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:7
+  |
+3 |   a { b { c } d }
+  |   ----^^^^-^^--
+
+"#,
+    );
+}
+
+#[test]
+fn multiple_labels_with_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "a", count: 1 },
+                end: Position { string: "d", count: 1 },
+                label: "`a` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "b", count: 1 },
+                end: Position { string: "}", count: 1 },
+                label: "`b` is a good letter",
+            },
+        ],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^-------^^
+  |   |   |
+  |   |   `b` is a good letter
+  |   `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn single_label_with_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "a", count: 1 },
+            end: Position { string: "d", count: 1 },
+            label: "`a` is a good letter",
+        }],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^^^^^^^^^^ `a` is a good letter
+
+"#,
+    );
+}
+
+#[test]
+fn single_label_without_message() {
+    test_harness(
+        r#"
+fn foo() {
+  a { b { c } d }
+}
+"#,
+        vec![SpanLabel {
+            start: Position { string: "a", count: 1 },
+            end: Position { string: "d", count: 1 },
+            label: "",
+        }],
+        r#"
+error: foo
+ --> test.rs:3:3
+  |
+3 |   a { b { c } d }
+  |   ^^^^^^^^^^^^^
+
+"#,
+    );
+}
+
+#[test]
+fn long_snippet() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+  X1 Y1 Z1
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
+  X2 Y2 Z2
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "X1", count: 1 },
+                label: "`X` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "Z3", count: 1 },
+                label: "`Y` is a good letter too",
+            },
+        ],
+        r#"
+error: foo
+  --> test.rs:3:6
+   |
+3  |      X0 Y0 Z0
+   |  _______^
+4  | |    X1 Y1 Z1
+   | | ____^____-
+   | ||____|
+   |  |    `X` is a good letter
+5  |  | 1
+6  |  | 2
+7  |  | 3
+...   |
+15 |  |   X2 Y2 Z2
+16 |  |   X3 Y3 Z3
+   |  |__________- `Y` is a good letter too
+
+"#,
+    );
+}
+
+#[test]
+fn long_snippet_multiple_spans() {
+    test_harness(
+        r#"
+fn foo() {
+  X0 Y0 Z0
+1
+2
+3
+  X1 Y1 Z1
+4
+5
+6
+  X2 Y2 Z2
+7
+8
+9
+10
+  X3 Y3 Z3
+}
+"#,
+        vec![
+            SpanLabel {
+                start: Position { string: "Y0", count: 1 },
+                end: Position { string: "Y3", count: 1 },
+                label: "`Y` is a good letter",
+            },
+            SpanLabel {
+                start: Position { string: "Z1", count: 1 },
+                end: Position { string: "Z2", count: 1 },
+                label: "`Z` is a good letter too",
+            },
+        ],
+        r#"
+error: foo
+  --> test.rs:3:6
+   |
+3  |      X0 Y0 Z0
+   |  _______^
+4  | |  1
+5  | |  2
+6  | |  3
+7  | |    X1 Y1 Z1
+   | | _________-
+8  | || 4
+9  | || 5
+10 | || 6
+11 | ||   X2 Y2 Z2
+   | ||__________- `Z` is a good letter too
+...  |
+15 | |  10
+16 | |    X3 Y3 Z3
+   | |________^ `Y` is a good letter
+
+"#,
+    );
+}
+
+/// Parses an item.
+///
+/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
+/// when a syntax error occurred.
+fn parse_item_from_source_str(
+    name: FileName,
+    source: String,
+    psess: &ParseSess,
+) -> PResult<'_, Option<P<ast::Item>>> {
+    new_parser_from_source_str(psess, name, source).parse_item(ForceCollect::No)
+}
+
+// Produces a `rustc_span::Span`.
+fn sp(a: u32, b: u32) -> Span {
+    Span::with_root_ctxt(BytePos(a), BytePos(b))
+}
+
+/// Parses a string, returns an expression.
+fn string_to_expr(source_str: String) -> P<ast::Expr> {
+    with_error_checking_parse(source_str, &psess(), |p| p.parse_expr())
+}
+
+/// Parses a string into an item; `None` when the source held no item.
+fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
+    let psess = psess();
+    with_error_checking_parse(source_str, &psess, |p| p.parse_item(ForceCollect::No))
+}
+
+#[test]
+fn bad_path_expr_1() {
+    // A keyword in path position must be rejected with a parse error.
+    create_default_session_globals_then(|| {
+        let src = "::abc::def::return";
+        let expected = "expected identifier, found keyword `return`";
+        with_expected_parse_error(src, expected, |p| p.parse_expr());
+    })
+}
+
+// Checks the token-tree-ization of macros.
+#[test]
+fn string_to_tts_macro() {
+    create_default_session_globals_then(|| {
+        let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
+        let tts = &stream.trees().collect::<Vec<_>>()[..];
+
+        // Top level must be exactly: `macro_rules` `!` `zip` `( ... )`.
+        match tts {
+            [
+                TokenTree::Token(
+                    Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
+                    _,
+                ),
+                TokenTree::Token(Token { kind: token::Not, .. }, _),
+                TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
+                TokenTree::Delimited(.., macro_delim, macro_tts),
+            ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
+                // Inside the macro's delimiters: `($a)` `=>` `($a)`.
+                let tts = &macro_tts.trees().collect::<Vec<_>>();
+                match &tts[..] {
+                    [
+                        TokenTree::Delimited(.., first_delim, first_tts),
+                        TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
+                        TokenTree::Delimited(.., second_delim, second_tts),
+                    ] if macro_delim == &Delimiter::Parenthesis => {
+                        // The matcher `($a)` holds `$` followed by the ident `a`.
+                        let tts = &first_tts.trees().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(
+                                    Token { kind: token::Ident(name, IdentIsRaw::No), .. },
+                                    _,
+                                ),
+                            ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
+                            }
+                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
+                        }
+                        // The transcriber `($a)` has the same shape.
+                        let tts = &second_tts.trees().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(
+                                    Token { kind: token::Ident(name, IdentIsRaw::No), .. },
+                                    _,
+                                ),
+                            ] if second_delim == &Delimiter::Parenthesis
+                                && name.as_str() == "a" => {}
+                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
+                        }
+                    }
+                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
+                }
+            }
+            _ => panic!("value: {:?}", tts),
+        }
+    })
+}
+
+// Checks the exact token trees, spans, and spacing produced when lexing
+// `fn a(b: i32) { b; }`.
+#[test]
+fn string_to_tts_1() {
+    create_default_session_globals_then(|| {
+        let tts = string_to_stream("fn a(b: i32) { b; }".to_string());
+
+        let expected = TokenStream::new(vec![
+            TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)),
+            TokenTree::token_joint_hidden(
+                token::Ident(Symbol::intern("a"), IdentIsRaw::No),
+                sp(3, 4),
+            ),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
+                // `JointHidden` because the `(` is followed immediately by
+                // `b`, `Alone` because the `)` is followed by whitespace.
+                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
+                Delimiter::Parenthesis,
+                TokenStream::new(vec![
+                    TokenTree::token_joint(
+                        token::Ident(Symbol::intern("b"), IdentIsRaw::No),
+                        sp(5, 6),
+                    ),
+                    TokenTree::token_alone(token::Colon, sp(6, 7)),
+                    // `JointHidden` because the `i32` is immediately followed by the `)`.
+                    TokenTree::token_joint_hidden(
+                        token::Ident(sym::i32, IdentIsRaw::No),
+                        sp(8, 11),
+                    ),
+                ])
+                .into(),
+            ),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(13, 14), sp(18, 19)),
+                // First `Alone` because the `{` is followed by whitespace,
+                // second `Alone` because the `}` is followed immediately by
+                // EOF.
+                DelimSpacing::new(Spacing::Alone, Spacing::Alone),
+                Delimiter::Brace,
+                TokenStream::new(vec![
+                    TokenTree::token_joint(
+                        token::Ident(Symbol::intern("b"), IdentIsRaw::No),
+                        sp(15, 16),
+                    ),
+                    // `Alone` because the `;` is followed by whitespace.
+                    TokenTree::token_alone(token::Semi, sp(16, 17)),
+                ])
+                .into(),
+            ),
+        ]);
+
+        assert_eq!(tts, expected);
+    })
+}
+
+#[test]
+fn parse_use() {
+    // `use` items must round-trip through parsing and pretty-printing.
+    create_default_session_globals_then(|| {
+        for use_s in ["use foo::bar::baz;", "use foo::bar as baz;"] {
+            let vitem = string_to_item(use_s.to_string()).unwrap();
+            assert_eq!(&item_to_string(&vitem)[..], use_s);
+        }
+    })
+}
+
+#[test]
+fn parse_extern_crate() {
+    // `extern crate` items must round-trip through parsing and pretty-printing.
+    create_default_session_globals_then(|| {
+        for ex_s in ["extern crate foo;", "extern crate foo as bar;"] {
+            let vitem = string_to_item(ex_s.to_string()).unwrap();
+            assert_eq!(&item_to_string(&vitem)[..], ex_s);
+        }
+    })
+}
+
+/// Collects the spans of all identifier patterns (`PatKind::Ident`) in the
+/// item parsed from `src`, in visit order.
+fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
+    let item = string_to_item(src.to_string()).unwrap();
+
+    // Records the span of every ident pattern; recurses into all other
+    // pattern kinds so nested patterns are covered too.
+    struct PatIdentVisitor {
+        spans: Vec<Span>,
+    }
+    impl<'a> visit::Visitor<'a> for PatIdentVisitor {
+        fn visit_pat(&mut self, p: &'a ast::Pat) {
+            if let PatKind::Ident(_, ident, _) = &p.kind {
+                self.spans.push(ident.span);
+            } else {
+                visit::walk_pat(self, p);
+            }
+        }
+    }
+    let mut v = PatIdentVisitor { spans: Vec::new() };
+    visit::walk_item(&mut v, &item);
+    // Idiomatic tail expression instead of an explicit `return`.
+    v.spans
+}
+
+#[test]
+fn span_of_self_arg_pat_idents_are_correct() {
+    // For every way of spelling a `self` receiver, the first ident pattern's
+    // span must cover exactly the text `self` in the source.
+    create_default_session_globals_then(|| {
+        let srcs = [
+            "impl z { fn a (&self, &myarg: i32) {} }",
+            "impl z { fn a (&mut self, &myarg: i32) {} }",
+            "impl z { fn a (&'a self, &myarg: i32) {} }",
+            "impl z { fn a (self, &myarg: i32) {} }",
+            "impl z { fn a (self: Foo, &myarg: i32) {} }",
+        ];
+
+        for src in srcs {
+            let spans = get_spans_of_pat_idents(src);
+            let first = spans[0];
+            let snippet = &src[first.lo().to_usize()..first.hi().to_usize()];
+            assert!("self" == snippet, "\"{}\" != \"self\". src=\"{}\"", snippet, src)
+        }
+    })
+}
+
+#[test]
+fn parse_exprs() {
+    // Only checks that these expressions parse without error.
+    create_default_session_globals_then(|| {
+        for src in ["3 + 4", "a::z.froob(b,&(987+3))"] {
+            string_to_expr(src.to_string());
+        }
+    })
+}
+
+#[test]
+fn attrs_fix_bug() {
+    // Per the test name, this input once triggered a parser bug: attributes
+    // on nested items inside a function body. Only checks that it parses.
+    create_default_session_globals_then(|| {
+        string_to_item(
+            "pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+                -> Result<Box<Writer>, String> {
+#[cfg(windows)]
+fn wb() -> c_int {
+    (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
+}
+
+#[cfg(unix)]
+fn wb() -> c_int { O_WRONLY as c_int }
+
+let mut fflags: c_int = wb();
+}"
+            .to_string(),
+        );
+    })
+}
+
+#[test]
+fn crlf_doc_comments() {
+    // CRLF line endings must not leak into doc comment contents.
+    create_default_session_globals_then(|| {
+        let psess = psess();
+
+        // Single line doc comment: no trailing `\r` in the doc string.
+        let name_1 = FileName::Custom("crlf_source_1".to_string());
+        let source = "/// doc comment\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_1, source, &psess).unwrap().unwrap();
+        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
+        assert_eq!(doc.as_str(), " doc comment");
+
+        // Two consecutive line doc comments become two separate doc strings.
+        let name_2 = FileName::Custom("crlf_source_2".to_string());
+        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_2, source, &psess).unwrap().unwrap();
+        let docs = item.attrs.iter().filter_map(|at| at.doc_str()).collect::<Vec<_>>();
+        let b: &[_] = &[Symbol::intern(" doc comment"), Symbol::intern(" line 2")];
+        assert_eq!(&docs[..], b);
+
+        // Block doc comment: internal CRLFs are normalized to `\n`.
+        let name_3 = FileName::Custom("clrf_source_3".to_string());
+        let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_3, source, &psess).unwrap().unwrap();
+        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
+        assert_eq!(doc.as_str(), " doc comment\n *  with CRLF ");
+    });
+}
+
+#[test]
+fn ttdelim_span() {
+    // The span of the last token tree in a macro invocation's arguments must
+    // map back to the correct source snippet (`{ body }`).
+    fn parse_expr_from_source_str(
+        name: FileName,
+        source: String,
+        psess: &ParseSess,
+    ) -> PResult<'_, P<ast::Expr>> {
+        new_parser_from_source_str(psess, name, source).parse_expr()
+    }
+
+    create_default_session_globals_then(|| {
+        let psess = psess();
+        let expr = parse_expr_from_source_str(
+            PathBuf::from("foo").into(),
+            "foo!( fn main() { body } )".to_string(),
+            &psess,
+        )
+        .unwrap();
+
+        let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
+        let span = mac.args.tokens.trees().last().unwrap().span();
+
+        match psess.source_map().span_to_snippet(span) {
+            Ok(s) => assert_eq!(&s[..], "{ body }"),
+            Err(_) => panic!("could not get snippet"),
+        }
+    });
+}
+
+// This tests that when parsing a string (rather than a file) we don't try
+// and read in a file for a module declaration and just parse a stub.
+// See `recurse_into_file_modules` in the parser.
+#[test]
+fn out_of_line_mod() {
+    create_default_session_globals_then(|| {
+        let item = parse_item_from_source_str(
+            PathBuf::from("foo").into(),
+            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
+            &psess(),
+        )
+        .unwrap()
+        .unwrap();
+
+        // The out-of-line `mod this_does_not_exist;` still counts as one of
+        // the loaded module's two items even though its file was never read.
+        let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() };
+        assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2));
+    });
+}
+
+#[test]
+fn eqmodws() {
+    // A whitespace run in the pattern matches any run of whitespace in the
+    // code, including none — but code may not contain whitespace where the
+    // pattern has none, and trailing code whitespace is not absorbed.
+    assert!(matches_codepattern("", ""));
+    assert!(!matches_codepattern("", "a"));
+    assert!(!matches_codepattern("a", ""));
+    assert!(matches_codepattern("a", "a"));
+    assert!(matches_codepattern("a b", "a   \n\t\r  b"));
+    assert!(matches_codepattern("a b ", "a   \n\t\r  b"));
+    assert!(!matches_codepattern("a b", "a   \n\t\r  b "));
+    assert!(matches_codepattern("a   b", "a b"));
+    assert!(!matches_codepattern("ab", "a b"));
+    assert!(matches_codepattern("a   b", "ab"));
+    assert!(matches_codepattern(" a   b", "ab"));
+}
+
+#[test]
+fn pattern_whitespace() {
+    // NEL (U+0085) is treated like any other whitespace character here.
+    assert!(!matches_codepattern("", "\x0C"));
+    assert!(matches_codepattern("a b ", "a   \u{0085}\n\t\r  b"));
+    assert!(!matches_codepattern("a b", "a   \u{0085}\n\t\r  b "));
+}
+
+#[test]
+fn non_pattern_whitespace() {
+    // These have the property 'White_Space' but not 'Pattern_White_Space',
+    // so they must never be treated as separator whitespace.
+    assert!(!matches_codepattern("a b", "a\u{2002}b"));
+    assert!(!matches_codepattern("a   b", "a\u{2002}b"));
+    assert!(!matches_codepattern("\u{205F}a   b", "ab"));
+    assert!(!matches_codepattern("a  \u{3000}b", "ab"));
+}
diff --git a/compiler/rustc_parse/src/parser/tokenstream/tests.rs b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
new file mode 100644
index 00000000000..9be00a14791
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
@@ -0,0 +1,108 @@
+use crate::parser::tests::string_to_stream;
+use rustc_ast::token::{self, IdentIsRaw};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_span::create_default_session_globals_then;
+use rustc_span::{BytePos, Span, Symbol};
+
+/// Lexes a string slice into a `TokenStream`.
+fn string_to_ts(string: &str) -> TokenStream {
+    string_to_stream(String::from(string))
+}
+
+/// Builds a root-context `Span` covering bytes `a..b`.
+fn sp(a: u32, b: u32) -> Span {
+    let (lo, hi) = (BytePos(a), BytePos(b));
+    Span::with_root_ctxt(lo, hi)
+}
+
+#[test]
+fn test_concat() {
+    // Pushing two streams onto an empty stream matches, modulo spans, the
+    // stream lexed from the concatenated source.
+    create_default_session_globals_then(|| {
+        let whole = string_to_ts("foo::bar::baz");
+        let first = string_to_ts("foo::bar");
+        let second = string_to_ts("::baz");
+        let mut combined = TokenStream::default();
+        combined.push_stream(first);
+        combined.push_stream(second);
+        assert_eq!(whole.trees().count(), 5);
+        assert_eq!(combined.trees().count(), 5);
+        assert!(whole.eq_unspanned(&combined));
+    })
+}
+
+#[test]
+fn test_to_from_bijection() {
+    // Collecting a stream's trees reproduces the original stream.
+    create_default_session_globals_then(|| {
+        let original = string_to_ts("foo::bar(baz)");
+        let roundtrip: TokenStream = original.trees().cloned().collect();
+        assert_eq!(original, roundtrip)
+    })
+}
+
+#[test]
+fn test_eq_0() {
+    // Streams lexed from identical single-token sources compare equal.
+    create_default_session_globals_then(|| {
+        let lhs = string_to_ts("foo");
+        let rhs = string_to_ts("foo");
+        assert_eq!(lhs, rhs)
+    })
+}
+
+#[test]
+fn test_eq_1() {
+    // Streams lexed from identical multi-token sources compare equal.
+    create_default_session_globals_then(|| {
+        let lhs = string_to_ts("::bar::baz");
+        let rhs = string_to_ts("::bar::baz");
+        assert_eq!(lhs, rhs)
+    })
+}
+
+#[test]
+fn test_eq_3() {
+    // Two streams lexed from empty sources compare equal.
+    create_default_session_globals_then(|| {
+        let lhs = string_to_ts("");
+        let rhs = string_to_ts("");
+        assert_eq!(lhs, rhs)
+    })
+}
+
+#[test]
+fn test_diseq_0() {
+    // A leading `::` makes the streams unequal.
+    create_default_session_globals_then(|| {
+        let lhs = string_to_ts("::bar::baz");
+        let rhs = string_to_ts("bar::baz");
+        assert_ne!(lhs, rhs)
+    })
+}
+
+#[test]
+fn test_diseq_1() {
+    // Wrapping parentheses make the streams unequal.
+    create_default_session_globals_then(|| {
+        let lhs = string_to_ts("(bar,baz)");
+        let rhs = string_to_ts("bar,baz");
+        assert_ne!(lhs, rhs)
+    })
+}
+
+#[test]
+fn test_is_empty() {
+    // Only the default (empty) stream reports `is_empty`.
+    create_default_session_globals_then(|| {
+        let empty = TokenStream::default();
+        let single =
+            TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1));
+        let lexed = string_to_ts("foo(bar::baz)");
+
+        assert!(empty.is_empty());
+        assert!(!single.is_empty());
+        assert!(!lexed.is_empty());
+    })
+}
+
+#[test]
+fn test_dotdotdot() {
+    // Three `.` tokens pushed with joint spacing end up as a single token
+    // tree, matching the stream lexed from `...` modulo spans.
+    create_default_session_globals_then(|| {
+        let mut dots = TokenStream::default();
+        dots.push_tree(TokenTree::token_joint(token::Dot, sp(0, 1)));
+        dots.push_tree(TokenTree::token_joint(token::Dot, sp(1, 2)));
+        dots.push_tree(TokenTree::token_alone(token::Dot, sp(2, 3)));
+        assert!(dots.eq_unspanned(&string_to_ts("...")));
+        assert_eq!(dots.trees().count(), 1);
+    })
+}