Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs                      3
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                   3
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs         3
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                 8
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs         10
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                 7
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs             3
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                 3
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                103
-rw-r--r--  compiler/rustc_parse/src/parser/mut_visit/tests.rs      3
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs          2
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                  9
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                 3
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                 3
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs               15
-rw-r--r--  compiler/rustc_parse/src/parser/tokenstream/tests.rs    8
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                   3
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs               8
18 files changed, 105 insertions, 92 deletions
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 4c4e03cdfa3..f78d9dc2bfc 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -13,8 +13,7 @@ use rustc_errors::{
 use rustc_macros::{Diagnostic, Subdiagnostic};
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::edition::{Edition, LATEST_STABLE_EDITION};
-use rustc_span::symbol::Ident;
-use rustc_span::{Span, Symbol};
+use rustc_span::{Ident, Span, Symbol};
 
 use crate::fluent_generated as fluent;
 use crate::parser::{ForbiddenLetReason, TokenDescription};
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 443ddfc94ec..792e2cc26ef 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -14,8 +14,7 @@ use rustc_session::lint::builtin::{
     TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
 };
 use rustc_session::parse::ParseSess;
-use rustc_span::symbol::Symbol;
-use rustc_span::{BytePos, Pos, Span};
+use rustc_span::{BytePos, Pos, Span, Symbol};
 use tracing::debug;
 
 use crate::lexer::diagnostics::TokenTreeDiagInfo;
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 42eef27803e..ef8d0f96b61 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -1,8 +1,7 @@
 //! Characters and their corresponding confusables were collected from
 //! <https://www.unicode.org/Public/security/10.0.0/confusables.txt>
 
-use rustc_span::symbol::kw;
-use rustc_span::{BytePos, Pos, Span};
+use rustc_span::{BytePos, Pos, Span, kw};
 
 use super::Lexer;
 use crate::errors::TokenSubstitution;
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index b6fa2099588..9da4ab5a788 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,10 +1,8 @@
-use rustc_ast as ast;
-use rustc_ast::attr;
 use rustc_ast::token::{self, Delimiter};
+use rustc_ast::{self as ast, Attribute, attr};
 use rustc_errors::codes::*;
 use rustc_errors::{Diag, PResult};
-use rustc_span::symbol::kw;
-use rustc_span::{BytePos, Span};
+use rustc_span::{BytePos, Span, kw};
 use thin_vec::ThinVec;
 use tracing::debug;
 
@@ -48,7 +46,7 @@ impl<'a> Parser<'a> {
         let start_pos = self.num_bump_calls;
         loop {
             let attr = if self.check(&token::Pound) {
-                let prev_outer_attr_sp = outer_attrs.last().map(|attr| attr.span);
+                let prev_outer_attr_sp = outer_attrs.last().map(|attr: &Attribute| attr.span);
 
                 let inner_error_reason = if just_parsed_doc_comment {
                     Some(InnerAttrForbiddenReason::AfterOuterDocComment {
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index e5edf605d82..8417701ac0c 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -22,8 +22,8 @@ use rustc_errors::{
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::edit_distance::find_best_match_for_name;
 use rustc_span::source_map::Spanned;
-use rustc_span::symbol::{AllKeywords, Ident, kw, sym};
-use rustc_span::{BytePos, DUMMY_SP, Span, SpanSnippetError, Symbol};
+use rustc_span::symbol::used_keywords;
+use rustc_span::{BytePos, DUMMY_SP, Ident, Span, SpanSnippetError, Symbol, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
 use tracing::{debug, trace};
 
@@ -811,12 +811,12 @@ impl<'a> Parser<'a> {
             // so that it gets generated only when the diagnostic needs it.
             // Also, it is unlikely that this list is generated multiple times because the
             // parser halts after execution hits this path.
-            let all_keywords = AllKeywords::new().collect_used(|| prev_ident.span.edition());
+            let all_keywords = used_keywords(|| prev_ident.span.edition());
 
             // Otherwise, check the previous token with all the keywords as possible candidates.
             // This handles code like `Struct Human;` and `While a < b {}`.
-            // We check the previous token only when the current token is an identifier to avoid false
-            // positives like suggesting keyword `for` for `extern crate foo {}`.
+            // We check the previous token only when the current token is an identifier to avoid
+            // false positives like suggesting keyword `for` for `extern crate foo {}`.
             if let Some(misspelled_kw) = find_similar_kw(prev_ident, &all_keywords) {
                 err.subdiagnostic(misspelled_kw);
                 // We don't want other suggestions to be added as they are most likely meaningless
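For context on the suggestion machinery touched above: the parser compares the previous identifier against the edition's keyword list by edit distance and, on a close match, emits a "misspelled keyword" subdiagnostic. The following is a self-contained sketch of that idea only; edit_distance and find_similar_kw here are simplified stand-ins, not rustc's used_keywords or find_best_match_for_name.

fn edit_distance(a: &str, b: &str) -> usize {
    // Classic Levenshtein distance via a rolling DP row.
    let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut curr = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            curr.push((prev[j] + cost).min(prev[j + 1] + 1).min(curr[j] + 1));
        }
        prev = curr;
    }
    prev[b.len()]
}

// Case-insensitive so that `Struct` still matches the keyword `struct`.
fn find_similar_kw<'a>(ident: &str, keywords: &[&'a str]) -> Option<&'a str> {
    let ident = ident.to_lowercase();
    keywords
        .iter()
        .copied()
        .filter(|kw| edit_distance(&ident, kw) <= 1)
        .min_by_key(|kw| edit_distance(&ident, kw))
}

fn main() {
    // Stand-in for the edition-dependent list returned by `used_keywords`.
    let keywords = ["struct", "while", "for", "fn", "impl"];
    assert_eq!(find_similar_kw("Struct", &keywords), Some("struct"));
    assert_eq!(find_similar_kw("While", &keywords), Some("while"));
    assert_eq!(find_similar_kw("Human", &keywords), None);
}
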
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index a2136399b0c..2f34dcb9308 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -8,6 +8,7 @@ use ast::token::IdentIsRaw;
 use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::tokenstream::TokenTree;
 use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
 use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par};
@@ -26,8 +27,7 @@ use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error};
 use rustc_session::lint::BuiltinLintDiag;
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
 use rustc_span::source_map::{self, Spanned};
-use rustc_span::symbol::{Ident, Symbol, kw, sym};
-use rustc_span::{BytePos, ErrorGuaranteed, Pos, Span};
+use rustc_span::{BytePos, ErrorGuaranteed, Ident, Pos, Span, Symbol, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
 use tracing::instrument;
 
@@ -2393,7 +2393,8 @@ impl<'a> Parser<'a> {
         }
 
         if self.token == TokenKind::Semi
-            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
+            && let Some(last) = self.token_cursor.stack.last()
+            && let Some(TokenTree::Delimited(_, _, Delimiter::Parenthesis, _)) = last.curr()
             && self.may_recover()
         {
             // It is likely that the closure body is a block but where the
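The rewritten condition above reflects the new cursor layout introduced in parser/mod.rs further below: the enclosing delimiter is no longer stored alongside each stack entry, so the parser instead asks the parent cursor which tree it is currently pointing at. A minimal sketch of that lookup follows, using simplified stand-in types rather than rustc's TokenTree/TokenTreeCursor.

#[derive(Clone, Debug)]
enum Delim {
    Paren,
    Brace,
}

#[derive(Clone, Debug)]
enum Tree {
    Token(char),
    Delimited(Delim, Vec<Tree>),
}

#[derive(Clone, Debug)]
struct TreeCursor {
    stream: Vec<Tree>,
    index: usize,
}

impl TreeCursor {
    fn curr(&self) -> Option<&Tree> {
        self.stream.get(self.index)
    }
}

/// True when the innermost enclosing token tree is parenthesized.
fn in_parens(stack: &[TreeCursor]) -> bool {
    matches!(
        stack.last().and_then(|parent| parent.curr()),
        Some(Tree::Delimited(Delim::Paren, _))
    )
}

fn main() {
    // Model sitting inside the parentheses of `f(|x| x)`: the parent cursor
    // still points at the parenthesized `Delimited` tree it descended into.
    let parent = TreeCursor {
        stream: vec![
            Tree::Token('f'),
            Tree::Delimited(Delim::Paren, vec![Tree::Token('|')]),
        ],
        index: 1,
    };
    assert!(in_parens(&[parent]));

    let parent = TreeCursor {
        stream: vec![Tree::Delimited(Delim::Brace, vec![])],
        index: 0,
    };
    assert!(!in_parens(&[parent]));
}
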
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 76ecb77d750..65e390c9a82 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -4,8 +4,7 @@ use rustc_ast::{
     WhereClause, token,
 };
 use rustc_errors::{Applicability, PResult};
-use rustc_span::Span;
-use rustc_span::symbol::{Ident, kw};
+use rustc_span::{Ident, Span, kw};
 use thin_vec::ThinVec;
 
 use super::{ForceCollect, Parser, Trailing, UsePreAttrPos};
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index e27fc963eb9..e6a8eda42e8 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -13,8 +13,7 @@ use rustc_errors::codes::*;
 use rustc_errors::{Applicability, PResult, StashKey, struct_span_code_err};
 use rustc_span::edit_distance::edit_distance;
 use rustc_span::edition::Edition;
-use rustc_span::symbol::{Ident, Symbol, kw, sym};
-use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, source_map};
+use rustc_span::{DUMMY_SP, ErrorGuaranteed, Ident, Span, Symbol, kw, source_map, sym};
 use thin_vec::{ThinVec, thin_vec};
 use tracing::debug;
 
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 37556c064d8..4bc8f5913b2 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -24,14 +24,12 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{
     self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
 };
-use rustc_ast::tokenstream::{
-    AttrsTarget, DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree, TokenTreeCursor,
-};
+use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
-    self as ast, AnonConst, AttrArgs, AttrArgsEq, AttrId, ByRef, Const, CoroutineKind,
-    DUMMY_NODE_ID, DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered,
-    Safety, StrLit, Visibility, VisibilityKind,
+    self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
+    DelimArgs, Expr, ExprKind, Extern, HasAttrs, HasTokens, Mutability, Recovered, Safety, StrLit,
+    Visibility, VisibilityKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
@@ -39,8 +37,7 @@ use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
 use rustc_index::interval::IntervalSet;
 use rustc_session::parse::ParseSess;
-use rustc_span::symbol::{Ident, Symbol, kw, sym};
-use rustc_span::{DUMMY_SP, Span};
+use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
 use thin_vec::ThinVec;
 use tracing::debug;
 
@@ -273,21 +270,48 @@ struct CaptureState {
     seen_attrs: IntervalSet<AttrId>,
 }
 
-/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
+#[derive(Clone, Debug)]
+struct TokenTreeCursor {
+    stream: TokenStream,
+    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
+    /// this can be any token tree. In `TokenCursor::stack`, this is always a
+    /// `TokenTree::Delimited`.
+    index: usize,
+}
+
+impl TokenTreeCursor {
+    #[inline]
+    fn new(stream: TokenStream) -> Self {
+        TokenTreeCursor { stream, index: 0 }
+    }
+
+    #[inline]
+    fn curr(&self) -> Option<&TokenTree> {
+        self.stream.get(self.index)
+    }
+
+    #[inline]
+    fn bump(&mut self) {
+        self.index += 1;
+    }
+}
+
+/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
 /// what the parser expects, for the most part.
 #[derive(Clone, Debug)]
 struct TokenCursor {
-    // Cursor for the current (innermost) token stream. The delimiters for this
-    // token stream are found in `self.stack.last()`; when that is `None` then
-    // we are in the outermost token stream which never has delimiters.
-    tree_cursor: TokenTreeCursor,
-
-    // Token streams surrounding the current one. The delimiters for stack[n]'s
-    // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
-    // because it's the outermost token stream which never has delimiters.
-    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
+    // Cursor for the current (innermost) token stream. The index within the
+    // cursor can point to any token tree in the stream (or one past the end).
+    // The delimiters for this token stream are found in `self.stack.last()`;
+    // if that is `None` we are in the outermost token stream which never has
+    // delimiters.
+    curr: TokenTreeCursor,
+
+    // Token streams surrounding the current one. The index within each cursor
+    // always points to a `TokenTree::Delimited`.
+    stack: Vec<TokenTreeCursor>,
 }
 
 impl TokenCursor {
@@ -302,32 +326,33 @@ impl TokenCursor {
             // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
             // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
             // below can be removed.
-            if let Some(tree) = self.tree_cursor.next_ref() {
+            if let Some(tree) = self.curr.curr() {
                 match tree {
                     &TokenTree::Token(ref token, spacing) => {
                         debug_assert!(!matches!(
                             token.kind,
                             token::OpenDelim(_) | token::CloseDelim(_)
                         ));
-                        return (token.clone(), spacing);
+                        let res = (token.clone(), spacing);
+                        self.curr.bump();
+                        return res;
                     }
                     &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
-                        let trees = tts.clone().into_trees();
-                        self.stack.push((
-                            mem::replace(&mut self.tree_cursor, trees),
-                            sp,
-                            spacing,
-                            delim,
-                        ));
+                        let trees = TokenTreeCursor::new(tts.clone());
+                        self.stack.push(mem::replace(&mut self.curr, trees));
                         if !delim.skip() {
                             return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                         }
                         // No open delimiter to return; continue on to the next iteration.
                     }
                 };
-            } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
+            } else if let Some(parent) = self.stack.pop() {
                 // We have exhausted this token stream. Move back to its parent token stream.
-                self.tree_cursor = tree_cursor;
+                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
+                    panic!("parent should be Delimited")
+                };
+                self.curr = parent;
+                self.curr.bump(); // move past the `Delimited`
                 if !delim.skip() {
                     return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                 }
@@ -466,7 +491,7 @@ impl<'a> Parser<'a> {
             capture_cfg: false,
             restrictions: Restrictions::empty(),
             expected_tokens: Vec::new(),
-            token_cursor: TokenCursor { tree_cursor: stream.into_trees(), stack: Vec::new() },
+            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
             num_bump_calls: 0,
             break_last_token: 0,
             unmatched_angle_bracket_count: 0,
@@ -1192,7 +1217,7 @@ impl<'a> Parser<'a> {
         if dist == 1 {
             // The index is zero because the tree cursor's index always points
             // to the next token to be gotten.
-            match self.token_cursor.tree_cursor.look_ahead(0) {
+            match self.token_cursor.curr.curr() {
                 Some(tree) => {
                     // Indexing stayed within the current token tree.
                     match tree {
@@ -1202,12 +1227,13 @@ impl<'a> Parser<'a> {
                                 return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                             }
                         }
-                    };
+                    }
                 }
                 None => {
                     // The tree cursor lookahead went (one) past the end of the
                     // current token tree. Try to return a close delimiter.
-                    if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
+                    if let Some(last) = self.token_cursor.stack.last()
+                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                         && !delim.skip()
                     {
                         // We are not in the outermost token stream, so we have
@@ -1376,7 +1402,7 @@ impl<'a> Parser<'a> {
             AttrArgs::Delimited(args)
         } else if self.eat(&token::Eq) {
             let eq_span = self.prev_token.span;
-            AttrArgs::Eq { eq_span, value: AttrArgsEq::Ast(self.parse_expr_force_collect()?) }
+            AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
         } else {
             AttrArgs::Empty
         })
@@ -1399,9 +1425,10 @@ impl<'a> Parser<'a> {
     pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
-                // Grab the tokens within the delimiters.
-                let stream = self.token_cursor.tree_cursor.stream.clone();
-                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
+                // Clone the `TokenTree::Delimited` that we are currently
+                // within. That's what we are going to return.
+                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
+                debug_assert_matches!(tree, TokenTree::Delimited(..));
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1421,7 +1448,7 @@ impl<'a> Parser<'a> {
 
                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, spacing, delim, stream)
+                tree
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
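To summarize the data-structure change in this file: TokenTreeCursor is now a plain (stream, index) pair defined in this module, and TokenCursor::stack holds parent cursors that keep pointing at the Delimited tree being traversed, so the close delimiter (and, in parse_token_tree, the whole delimited tree) can be read back off the parent instead of being carried in a side tuple. Below is a self-contained sketch of that flattening scheme; Tree, TreeCursor, and Cursor are simplified stand-ins, and invisible-delimiter skipping and spacing are omitted.

use std::mem;

#[derive(Clone, Debug)]
enum Tree {
    Token(char),
    Delimited(char, char, Vec<Tree>), // open delim, close delim, inner trees
}

#[derive(Clone, Debug)]
struct TreeCursor {
    stream: Vec<Tree>,
    // Points at the current tree. In `Cursor::curr` this can be any tree; in
    // `Cursor::stack` it is always a `Tree::Delimited`.
    index: usize,
}

impl TreeCursor {
    fn new(stream: Vec<Tree>) -> Self {
        TreeCursor { stream, index: 0 }
    }
    fn curr(&self) -> Option<&Tree> {
        self.stream.get(self.index)
    }
    fn bump(&mut self) {
        self.index += 1;
    }
}

struct Cursor {
    curr: TreeCursor,
    stack: Vec<TreeCursor>,
}

impl Cursor {
    /// Flattens the tree structure into a linear token sequence, synthesizing
    /// open/close delimiter tokens at the edges of each `Delimited`.
    fn next(&mut self) -> Option<char> {
        if let Some(tree) = self.curr.curr() {
            match tree {
                &Tree::Token(c) => {
                    self.curr.bump();
                    Some(c)
                }
                &Tree::Delimited(open, _, ref inner) => {
                    let inner = TreeCursor::new(inner.clone());
                    // Descend; the parent keeps pointing at this Delimited so
                    // its close delimiter can be recovered later.
                    self.stack.push(mem::replace(&mut self.curr, inner));
                    Some(open)
                }
            }
        } else if let Some(parent) = self.stack.pop() {
            // Inner stream exhausted: the parent's current tree is the
            // Delimited we were inside, so emit its close delimiter.
            let Some(&Tree::Delimited(_, close, _)) = parent.curr() else {
                panic!("parent should point at a Delimited");
            };
            self.curr = parent;
            self.curr.bump(); // move past the Delimited
            Some(close)
        } else {
            None // outermost stream exhausted
        }
    }
}

fn main() {
    // `a ( b [ c ] ) d`
    let stream = vec![
        Tree::Token('a'),
        Tree::Delimited('(', ')', vec![
            Tree::Token('b'),
            Tree::Delimited('[', ']', vec![Tree::Token('c')]),
        ]),
        Tree::Token('d'),
    ];
    let mut cursor = Cursor { curr: TreeCursor::new(stream), stack: Vec::new() };
    let mut out = String::new();
    while let Some(c) = cursor.next() {
        out.push(c);
    }
    assert_eq!(out, "a(b[c])d");
}
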
diff --git a/compiler/rustc_parse/src/parser/mut_visit/tests.rs b/compiler/rustc_parse/src/parser/mut_visit/tests.rs
index b82c295732d..46c678c3902 100644
--- a/compiler/rustc_parse/src/parser/mut_visit/tests.rs
+++ b/compiler/rustc_parse/src/parser/mut_visit/tests.rs
@@ -1,8 +1,7 @@
 use rustc_ast as ast;
 use rustc_ast::mut_visit::MutVisitor;
 use rustc_ast_pretty::pprust;
-use rustc_span::create_default_session_globals_then;
-use rustc_span::symbol::Ident;
+use rustc_span::{Ident, create_default_session_globals_then};
 
 use crate::parser::tests::{matches_codepattern, string_to_crate};
 
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 752a52b382b..67cabb757e9 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -9,7 +9,7 @@ use rustc_ast::token::{
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::PResult;
-use rustc_span::symbol::{Ident, kw};
+use rustc_span::{Ident, kw};
 
 use crate::errors::UnexpectedNonterminal;
 use crate::parser::pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 4cda887a02b..9b63100105b 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -13,8 +13,7 @@ use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, Diag, DiagArgValue, PResult, StashKey};
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::{Spanned, respan};
-use rustc_span::symbol::{Ident, kw, sym};
-use rustc_span::{BytePos, ErrorGuaranteed, Span};
+use rustc_span::{BytePos, ErrorGuaranteed, Ident, Span, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
 
 use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos};
@@ -1371,10 +1370,10 @@ impl<'a> Parser<'a> {
         self.bump();
         let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
             e.span_label(path.span, "while parsing the fields for this pattern");
-            e.emit();
+            let guar = e.emit();
             self.recover_stmt();
             // When recovering, pretend we had `Foo { .. }`, to avoid cascading errors.
-            (ThinVec::new(), PatFieldsRest::Rest)
+            (ThinVec::new(), PatFieldsRest::Recovered(guar))
         });
         self.bump();
         Ok(PatKind::Struct(qself, path, fields, etc))
@@ -1648,7 +1647,7 @@ impl<'a> Parser<'a> {
                 ident: prev_field,
             })
         } else {
-            self.dcx().create_err(AtInStructPattern { span: span })
+            self.dcx().create_err(AtInStructPattern { span })
         }
     }
 
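The PatFieldsRest::Recovered change above follows the usual recovery pattern: emit() hands back an ErrorGuaranteed proof value, and the synthesized `Foo { .. }` node carries it so later stages can tell that recovery happened. A small self-contained sketch of that pattern follows; Diag, ErrorGuaranteed, and FieldsRest here are simplified stand-ins, not rustc's diagnostics types.

#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed(());

struct Diag {
    msg: String,
}

impl Diag {
    fn emit(self) -> ErrorGuaranteed {
        eprintln!("error: {}", self.msg);
        ErrorGuaranteed(())
    }
}

#[derive(Debug)]
enum FieldsRest {
    /// The user wrote `..`.
    Rest,
    /// Nothing usable was parsed; an error has already been emitted.
    Recovered(ErrorGuaranteed),
    /// No rest pattern.
    None,
}

fn parse_fields(src: &str) -> (Vec<String>, FieldsRest) {
    if src == ".." {
        (Vec::new(), FieldsRest::Rest)
    } else if src.is_empty() {
        let guar = Diag { msg: "expected struct pattern fields".into() }.emit();
        // Pretend we saw `Foo { .. }` to avoid cascading errors, but record
        // the guarantee so downstream code knows this node is synthetic.
        (Vec::new(), FieldsRest::Recovered(guar))
    } else {
        (src.split(',').map(|s| s.trim().to_string()).collect(), FieldsRest::None)
    }
}

fn main() {
    let (fields, rest) = parse_fields("x, y");
    assert_eq!(fields, ["x", "y"]);
    assert!(matches!(rest, FieldsRest::None));

    let (fields, rest) = parse_fields("");
    assert!(fields.is_empty());
    assert!(matches!(rest, FieldsRest::Recovered(_)));
}
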
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 6a7029a8f1c..f2f0c6dfad5 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -9,8 +9,7 @@ use rustc_ast::{
     Path, PathSegment, QSelf,
 };
 use rustc_errors::{Applicability, Diag, PResult};
-use rustc_span::symbol::{Ident, kw, sym};
-use rustc_span::{BytePos, Span};
+use rustc_span::{BytePos, Ident, Span, kw, sym};
 use thin_vec::ThinVec;
 use tracing::debug;
 
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 5fa2e01fc86..0f32e396273 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -12,8 +12,7 @@ use rustc_ast::{
     LocalKind, MacCall, MacCallStmt, MacStmtStyle, Recovered, Stmt, StmtKind,
 };
 use rustc_errors::{Applicability, Diag, PResult};
-use rustc_span::symbol::{Ident, kw, sym};
-use rustc_span::{BytePos, ErrorGuaranteed, Span};
+use rustc_span::{BytePos, ErrorGuaranteed, Ident, Span, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
 
 use super::attr::InnerAttrForbiddenReason;
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 1813960dad0..3f8d66c2c95 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -18,8 +18,9 @@ use rustc_errors::emitter::{HumanEmitter, OutputTheme};
 use rustc_errors::{DiagCtxt, MultiSpan, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::{FilePathMapping, SourceMap};
-use rustc_span::symbol::{Symbol, kw, sym};
-use rustc_span::{BytePos, FileName, Pos, Span, create_default_session_globals_then};
+use rustc_span::{
+    BytePos, FileName, Pos, Span, Symbol, create_default_session_globals_then, kw, sym,
+};
 use termcolor::WriteColor;
 
 use crate::parser::{ForceCollect, Parser};
@@ -2285,7 +2286,7 @@ fn bad_path_expr_1() {
 fn string_to_tts_macro() {
     create_default_session_globals_then(|| {
         let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
-        let tts = &stream.trees().collect::<Vec<_>>()[..];
+        let tts = &stream.iter().collect::<Vec<_>>()[..];
 
         match tts {
             [
@@ -2297,14 +2298,14 @@ fn string_to_tts_macro() {
                 TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
                 TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
-                let tts = &macro_tts.trees().collect::<Vec<_>>();
+                let tts = &macro_tts.iter().collect::<Vec<_>>();
                 match &tts[..] {
                     [
                         TokenTree::Delimited(.., first_delim, first_tts),
                         TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
                         TokenTree::Delimited(.., second_delim, second_tts),
                     ] if macro_delim == &Delimiter::Parenthesis => {
-                        let tts = &first_tts.trees().collect::<Vec<_>>();
+                        let tts = &first_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2316,7 +2317,7 @@ fn string_to_tts_macro() {
                             }
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                         }
-                        let tts = &second_tts.trees().collect::<Vec<_>>();
+                        let tts = &second_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2544,7 +2545,7 @@ fn ttdelim_span() {
         .unwrap();
 
         let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
-        let span = mac.args.tokens.trees().last().unwrap().span();
+        let span = mac.args.tokens.iter().last().unwrap().span();
 
         match psess.source_map().span_to_snippet(span) {
             Ok(s) => assert_eq!(&s[..], "{ body }"),
diff --git a/compiler/rustc_parse/src/parser/tokenstream/tests.rs b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
index b13b68c266a..037b5b1a9de 100644
--- a/compiler/rustc_parse/src/parser/tokenstream/tests.rs
+++ b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
@@ -23,8 +23,8 @@ fn test_concat() {
         let mut eq_res = TokenStream::default();
         eq_res.push_stream(test_fst);
         eq_res.push_stream(test_snd);
-        assert_eq!(test_res.trees().count(), 5);
-        assert_eq!(eq_res.trees().count(), 5);
+        assert_eq!(test_res.iter().count(), 5);
+        assert_eq!(eq_res.iter().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
     })
 }
@@ -33,7 +33,7 @@ fn test_concat() {
 fn test_to_from_bijection() {
     create_default_session_globals_then(|| {
         let test_start = string_to_ts("foo::bar(baz)");
-        let test_end = test_start.trees().cloned().collect();
+        let test_end = test_start.iter().cloned().collect();
         assert_eq!(test_start, test_end)
     })
 }
@@ -105,6 +105,6 @@ fn test_dotdotdot() {
         stream.push_tree(TokenTree::token_joint(token::Dot, sp(1, 2)));
         stream.push_tree(TokenTree::token_alone(token::Dot, sp(2, 3)));
         assert!(stream.eq_unspanned(&string_to_ts("...")));
-        assert_eq!(stream.trees().count(), 1);
+        assert_eq!(stream.iter().count(), 1);
     })
 }
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index f696074e66a..6e720db4edf 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -8,8 +8,7 @@ use rustc_ast::{
     TyKind, UnsafeBinderTy,
 };
 use rustc_errors::{Applicability, PResult};
-use rustc_span::symbol::{Ident, kw, sym};
-use rustc_span::{ErrorGuaranteed, Span};
+use rustc_span::{ErrorGuaranteed, Ident, Span, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
 
 use super::{Parser, PathStyle, SeqSep, TokenType, Trailing};
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index aab3f10bc66..8b6b37c0f8f 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -3,8 +3,7 @@
 use rustc_ast::token::Delimiter;
 use rustc_ast::tokenstream::DelimSpan;
 use rustc_ast::{
-    self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MetaItem, MetaItemInner, MetaItemKind,
-    Safety,
+    self as ast, AttrArgs, Attribute, DelimArgs, MetaItem, MetaItemInner, MetaItemKind, Safety,
 };
 use rustc_errors::{Applicability, FatalError, PResult};
 use rustc_feature::{AttributeSafety, AttributeTemplate, BUILTIN_ATTRIBUTE_MAP, BuiltinAttribute};
@@ -70,7 +69,7 @@ pub fn parse_meta<'a>(psess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Met
                     parse_in(psess, tokens.clone(), "meta list", |p| p.parse_meta_seq_top())?;
                 MetaItemKind::List(nmis)
             }
-            AttrArgs::Eq { value: AttrArgsEq::Ast(expr), .. } => {
+            AttrArgs::Eq { expr, .. } => {
                 if let ast::ExprKind::Lit(token_lit) = expr.kind {
                     let res = ast::MetaItemLit::from_token_lit(token_lit, expr.span);
                     let res = match res {
@@ -116,9 +115,6 @@ pub fn parse_meta<'a>(psess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Met
                     return Err(err);
                 }
             }
-            AttrArgs::Eq { value: AttrArgsEq::Hir(lit), .. } => {
-                MetaItemKind::NameValue(lit.clone())
-            }
         },
     })
 }
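
The AttrArgs changes in this file and in parser/mod.rs drop the AttrArgsEq Ast/Hir wrapper: the Eq form now carries the expression directly, and meta parsing matches on the expression kind. A rough self-contained sketch of that shape, with simplified stand-in types rather than rustc's AST:

#[derive(Debug)]
enum ExprKind {
    Lit(String),
    Other,
}

#[derive(Debug)]
enum AttrArgs {
    Empty,
    Delimited(Vec<String>),
    Eq { expr: ExprKind },
}

#[derive(Debug, PartialEq)]
enum MetaItemKind {
    Word,
    List(Vec<String>),
    NameValue(String),
}

fn parse_meta(args: &AttrArgs) -> Result<MetaItemKind, String> {
    Ok(match args {
        AttrArgs::Empty => MetaItemKind::Word,
        AttrArgs::Delimited(items) => MetaItemKind::List(items.clone()),
        // The `Eq` form holds the expression directly; only literal
        // expressions become name/value meta items.
        AttrArgs::Eq { expr } => match expr {
            ExprKind::Lit(lit) => MetaItemKind::NameValue(lit.clone()),
            ExprKind::Other => return Err("unsupported expression in attribute".into()),
        },
    })
}

fn main() {
    // Roughly models `#[deprecated = "use bar instead"]`.
    let args = AttrArgs::Eq { expr: ExprKind::Lit("use bar instead".into()) };
    assert_eq!(parse_meta(&args), Ok(MetaItemKind::NameValue("use bar instead".into())));

    let list = AttrArgs::Delimited(vec!["derive".into(), "Debug".into()]);
    assert!(matches!(parse_meta(&list), Ok(MetaItemKind::List(_))));

    let bad = AttrArgs::Eq { expr: ExprKind::Other };
    assert!(parse_meta(&bad).is_err());
}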