Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs              2
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs      87
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs   14
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs             2
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs             35
5 files changed, 81 insertions, 59 deletions
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 92df2da8710..b1dc1f98777 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -67,7 +67,7 @@ pub(crate) fn parse_token_trees<'a>(
     let (stream, res, unmatched_delims) =
         tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
     match res {
-        Ok(()) if unmatched_delims.is_empty() => Ok(stream),
+        Ok(_open_spacing) if unmatched_delims.is_empty() => Ok(stream),
         _ => {
             // Return error if there are unmatched delimiters or unclosed delimiters.
             // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
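Note on the hunk above: the success arm now binds the opening-delimiter `Spacing` that the token-tree reader reports alongside the stream, rather than matching a unit value. The spacing pair for a delimited group is carried by `rustc_ast::tokenstream::DelimSpacing`, used later in this diff through `DelimSpacing::new` and its `open`/`close` fields. A minimal sketch of the two spacing types as this diff uses them (the real definitions live in rustc_ast and may carry extra derives and documentation):

#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Spacing {
    Alone,       // the following token is separated by whitespace (or is EOF)
    Joint,       // the following punctuation token starts immediately after this one
    JointHidden, // the following non-punctuation token starts immediately after this one
}

#[derive(Clone, Copy, Debug)]
pub struct DelimSpacing {
    pub open: Spacing,  // spacing recorded when the opening delimiter was consumed
    pub close: Spacing, // spacing recorded when the closing delimiter was consumed
}

impl DelimSpacing {
    pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
        DelimSpacing { open, close }
    }
}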
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index a80e8fac178..8cbadc26635 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -3,7 +3,7 @@ use super::diagnostics::same_indentation_level;
 use super::diagnostics::TokenTreeDiagInfo;
 use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
 use rustc_errors::{Applicability, PErr};
 use rustc_span::symbol::kw;
@@ -25,59 +25,46 @@ impl<'a> TokenTreesReader<'a> {
             token: Token::dummy(),
             diag_info: TokenTreeDiagInfo::default(),
         };
-        let (stream, res) = tt_reader.parse_token_trees(/* is_delimited */ false);
+        let (_open_spacing, stream, res) =
+            tt_reader.parse_token_trees(/* is_delimited */ false);
         (stream, res, tt_reader.diag_info.unmatched_delims)
     }
 
-    // Parse a stream of tokens into a list of `TokenTree`s.
+    // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
+    // the result is that of the opening delimiter.
     fn parse_token_trees(
         &mut self,
         is_delimited: bool,
-    ) -> (TokenStream, Result<(), Vec<PErr<'a>>>) {
-        self.token = self.string_reader.next_token().0;
+    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'a>>>) {
+        // Move past the opening delimiter.
+        let (_, open_spacing) = self.bump(false);
+
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
                 token::OpenDelim(delim) => {
                     buf.push(match self.parse_token_tree_open_delim(delim) {
                         Ok(val) => val,
-                        Err(errs) => return (TokenStream::new(buf), Err(errs)),
+                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
                     })
                 }
                 token::CloseDelim(delim) => {
                     return (
+                        open_spacing,
                         TokenStream::new(buf),
                         if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
                     );
                 }
                 token::Eof => {
                     return (
+                        open_spacing,
                         TokenStream::new(buf),
                         if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
                     );
                 }
                 _ => {
-                    // Get the next normal token. This might require getting multiple adjacent
-                    // single-char tokens and joining them together.
-                    let (this_spacing, next_tok) = loop {
-                        let (next_tok, is_next_tok_preceded_by_whitespace) =
-                            self.string_reader.next_token();
-                        if is_next_tok_preceded_by_whitespace {
-                            break (Spacing::Alone, next_tok);
-                        } else if let Some(glued) = self.token.glue(&next_tok) {
-                            self.token = glued;
-                        } else {
-                            let this_spacing = if next_tok.is_punct() {
-                                Spacing::Joint
-                            } else if next_tok.kind == token::Eof {
-                                Spacing::Alone
-                            } else {
-                                Spacing::JointHidden
-                            };
-                            break (this_spacing, next_tok);
-                        }
-                    };
-                    let this_tok = std::mem::replace(&mut self.token, next_tok);
+                    // Get the next normal token.
+                    let (this_tok, this_spacing) = self.bump(true);
                     buf.push(TokenTree::Token(this_tok, this_spacing));
                 }
             }
@@ -121,7 +108,7 @@ impl<'a> TokenTreesReader<'a> {
         // Parse the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let (tts, res) = self.parse_token_trees(/* is_delimited */ true);
+        let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
         if let Err(errs) = res {
             return Err(self.unclosed_delim_err(tts, errs));
         }
@@ -130,7 +117,7 @@ impl<'a> TokenTreesReader<'a> {
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
         let sm = self.string_reader.sess.source_map();
 
-        match self.token.kind {
+        let close_spacing = match self.token.kind {
             // Correct delimiter.
             token::CloseDelim(close_delim) if close_delim == open_delim => {
                 let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
@@ -152,7 +139,7 @@ impl<'a> TokenTreesReader<'a> {
                 }
 
                 // Move past the closing delimiter.
-                self.token = self.string_reader.next_token().0;
+                self.bump(false).1
             }
             // Incorrect delimiter.
             token::CloseDelim(close_delim) => {
@@ -196,18 +183,50 @@ impl<'a> TokenTreesReader<'a> {
                 //     bar(baz(
                 // }  // Incorrect delimiter but matches the earlier `{`
                 if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
-                    self.token = self.string_reader.next_token().0;
+                    self.bump(false).1
+                } else {
+                    // The choice of value here doesn't matter.
+                    Spacing::Alone
                 }
             }
             token::Eof => {
                 // Silently recover, the EOF token will be seen again
                 // and an error emitted then. Thus we don't pop from
-                // self.open_braces here.
+                // self.open_braces here. The choice of spacing value here
+                // doesn't matter.
+                Spacing::Alone
             }
             _ => unreachable!(),
-        }
+        };
+
+        let spacing = DelimSpacing::new(open_spacing, close_spacing);
 
-        Ok(TokenTree::Delimited(delim_span, open_delim, tts))
+        Ok(TokenTree::Delimited(delim_span, spacing, open_delim, tts))
+    }
+
+    // Move on to the next token, returning the current token and its spacing.
+    // Will glue adjacent single-char tokens together if `glue` is set.
+    fn bump(&mut self, glue: bool) -> (Token, Spacing) {
+        let (this_spacing, next_tok) = loop {
+            let (next_tok, is_next_tok_preceded_by_whitespace) = self.string_reader.next_token();
+
+            if is_next_tok_preceded_by_whitespace {
+                break (Spacing::Alone, next_tok);
+            } else if glue && let Some(glued) = self.token.glue(&next_tok) {
+                self.token = glued;
+            } else {
+                let this_spacing = if next_tok.is_punct() {
+                    Spacing::Joint
+                } else if next_tok.kind == token::Eof {
+                    Spacing::Alone
+                } else {
+                    Spacing::JointHidden
+                };
+                break (this_spacing, next_tok);
+            }
+        };
+        let this_tok = std::mem::replace(&mut self.token, next_tok);
+        (this_tok, this_spacing)
     }
 
     fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
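Note on the new `bump` helper above: the per-token spacing decision that used to be inlined in the main parsing loop is now shared between normal tokens (`bump(true)`, which also glues adjacent single-char tokens) and delimiters (`bump(false)`). The rule it applies can be read off the code; a self-contained sketch of just that rule (a hypothetical free-standing helper, not compiler code):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing { Alone, Joint, JointHidden }

// Spacing assigned to the *current* token, decided by looking at the token
// that follows it (mirrors the branch order in `bump`).
fn spacing_of_current(
    next_preceded_by_whitespace: bool,
    next_is_punct: bool,
    next_is_eof: bool,
) -> Spacing {
    if next_preceded_by_whitespace {
        Spacing::Alone
    } else if next_is_punct {
        Spacing::Joint
    } else if next_is_eof {
        Spacing::Alone
    } else {
        Spacing::JointHidden
    }
}

Per this rule, in `x ;` the `x` is Alone (whitespace follows), in `x!` it is Joint (the punctuation `!` follows immediately), and in `x.f` the `.` is JointHidden (the identifier `f` follows with no space).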
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index c66a7176aab..5e8447030f1 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,7 +1,7 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream};
-use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_errors::PResult;
@@ -389,7 +389,7 @@ fn make_token_stream(
     #[derive(Debug)]
     struct FrameData {
         // This is `None` for the first frame, `Some` for all others.
-        open_delim_sp: Option<(Delimiter, Span)>,
+        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
         inner: Vec<AttrTokenTree>,
     }
     let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
@@ -397,21 +397,23 @@ fn make_token_stream(
     while let Some((token, spacing)) = token_and_spacing {
         match token {
             FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
-                stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
+                stack
+                    .push(FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] });
             }
             FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
                 let frame_data = stack
                     .pop()
                     .unwrap_or_else(|| panic!("Token stack was empty for token: {token:?}"));
 
-                let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
+                let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
                 assert_eq!(
                     open_delim, delim,
                     "Mismatched open/close delims: open={open_delim:?} close={span:?}"
                 );
                 let dspan = DelimSpan::from_pair(open_sp, span);
+                let dspacing = DelimSpacing::new(open_spacing, spacing);
                 let stream = AttrTokenStream::new(frame_data.inner);
-                let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
+                let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
                 stack
                     .last_mut()
                     .unwrap_or_else(|| panic!("Bottom token frame is missing for token: {token:?}"))
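Note on the attr_wrapper.rs hunks above: when `make_token_stream` rebuilds nested token trees from a flat token sequence, each frame now remembers the spacing of its opening delimiter, and the matching close delimiter supplies the other half of the `DelimSpacing`. A self-contained sketch of that bookkeeping pattern with stand-in types (`char` tokens, `'('`/`')'` as the only delimiters); it is not the compiler's code:

#[derive(Clone, Copy, Debug)]
enum Spacing { Alone, Joint, JointHidden }

#[derive(Debug)]
enum Tree {
    Token(char, Spacing),
    Delimited { open: Spacing, close: Spacing, inner: Vec<Tree> },
}

// Rebuild nested groups from a flat (token, spacing) sequence. Each frame on
// the stack remembers the spacing of the '(' that opened it; the matching ')'
// supplies the other half of the pair.
fn group(flat: &[(char, Spacing)]) -> Vec<Tree> {
    let mut stack: Vec<(Option<Spacing>, Vec<Tree>)> = vec![(None, Vec::new())];
    for &(ch, spacing) in flat {
        match ch {
            '(' => stack.push((Some(spacing), Vec::new())),
            ')' => {
                let (open, inner) = stack.pop().expect("close delimiter without a frame");
                let open = open.expect("close delimiter without a matching open");
                let tree = Tree::Delimited { open, close: spacing, inner };
                stack.last_mut().expect("unbalanced input").1.push(tree);
            }
            _ => stack.last_mut().unwrap().1.push(Tree::Token(ch, spacing)),
        }
    }
    assert!(stack.len() == 1, "unclosed delimiter");
    stack.pop().unwrap().1
}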
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 406a6def019..3c0627526be 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -2276,7 +2276,7 @@ impl<'a> Parser<'a> {
         }
 
         if self.token.kind == TokenKind::Semi
-            && matches!(self.token_cursor.stack.last(), Some((_, Delimiter::Parenthesis, _)))
+            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
             && self.may_recover()
         {
             // It is likely that the closure body is a block but where the
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 29709d92fad..2baedb2766f 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -20,7 +20,7 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::AttrId;
@@ -240,7 +240,7 @@ struct TokenCursor {
     // Token streams surrounding the current one. The delimiters for stack[n]'s
     // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
     // because it's the outermost token stream which never has delimiters.
-    stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
+    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
 }
 
 impl TokenCursor {
@@ -264,24 +264,25 @@ impl TokenCursor {
                         ));
                         return (token.clone(), spacing);
                     }
-                    &TokenTree::Delimited(sp, delim, ref tts) => {
+                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                         let trees = tts.clone().into_trees();
-                        self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
+                        self.stack.push((
+                            mem::replace(&mut self.tree_cursor, trees),
+                            sp,
+                            spacing,
+                            delim,
+                        ));
                         if delim != Delimiter::Invisible {
-                            // FIXME: add two `Spacing` fields to `TokenTree::Delimited`
-                            // and use the open delim one here.
-                            return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
+                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                         }
                         // No open delimiter to return; continue on to the next iteration.
                     }
                 };
-            } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
+            } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
                 // We have exhausted this token stream. Move back to its parent token stream.
                 self.tree_cursor = tree_cursor;
                 if delim != Delimiter::Invisible {
-                    // FIXME: add two `Spacing` fields to `TokenTree::Delimited` and
-                    // use the close delim one here.
-                    return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
+                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                 }
                 // No close delimiter to return; continue on to the next iteration.
             } else {
@@ -1074,7 +1075,7 @@ impl<'a> Parser<'a> {
             return looker(&self.token);
         }
 
-        if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
+        if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
             && delim != Delimiter::Invisible
         {
             // We are not in the outermost token stream, and the token stream
@@ -1083,7 +1084,7 @@ impl<'a> Parser<'a> {
             let tree_cursor = &self.token_cursor.tree_cursor;
             let all_normal = (0..dist).all(|i| {
                 let token = tree_cursor.look_ahead(i);
-                !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
+                !matches!(token, Some(TokenTree::Delimited(.., Delimiter::Invisible, _)))
             });
             if all_normal {
                 // There were no skipped delimiters. Do lookahead by plain indexing.
@@ -1092,7 +1093,7 @@ impl<'a> Parser<'a> {
                         // Indexing stayed within the current token stream.
                         match tree {
                             TokenTree::Token(token, _) => looker(token),
-                            TokenTree::Delimited(dspan, delim, _) => {
+                            TokenTree::Delimited(dspan, _, delim, _) => {
                                 looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                             }
                         }
@@ -1270,7 +1271,7 @@ impl<'a> Parser<'a> {
             || self.check(&token::OpenDelim(Delimiter::Brace));
 
         delimited.then(|| {
-            let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else {
+            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                 unreachable!()
             };
             DelimArgs { dspan, delim, tokens }
@@ -1294,7 +1295,7 @@ impl<'a> Parser<'a> {
             token::OpenDelim(..) => {
                 // Grab the tokens within the delimiters.
                 let stream = self.token_cursor.tree_cursor.stream.clone();
-                let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
+                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1314,7 +1315,7 @@ impl<'a> Parser<'a> {
 
                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, delim, stream)
+                TokenTree::Delimited(span, spacing, delim, stream)
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
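Note on the parser/mod.rs hunks: the entries of `TokenCursor::stack` are reordered to `(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)`, which is also why the closure-recovery check in expr.rs now matches `Some((.., Delimiter::Parenthesis))`. With the spacing stored on the tree, the cursor can return `spacing.open` and `spacing.close` for the synthesized open/close delimiter tokens, resolving the two FIXMEs that previously hard-coded `Spacing::Alone`. A rough, self-contained sketch of that idea with stand-in types (a recursive walk rather than the compiler's explicit stack):

#[derive(Clone, Copy, Debug)]
enum Spacing { Alone, Joint, JointHidden }

#[derive(Clone, Copy, Debug)]
struct DelimSpacing { open: Spacing, close: Spacing }

#[derive(Debug)]
enum Tree {
    Token(char, Spacing),
    Delimited(DelimSpacing, Vec<Tree>),
}

// Re-emit a nested tree as a flat (token, spacing) sequence, with '(' and ')'
// standing in for the synthesized delimiter tokens. The stored DelimSpacing is
// consulted for them, where the old code always used Spacing::Alone.
fn flatten(trees: &[Tree], out: &mut Vec<(char, Spacing)>) {
    for tree in trees {
        match tree {
            Tree::Token(c, sp) => out.push((*c, *sp)),
            Tree::Delimited(spacing, inner) => {
                out.push(('(', spacing.open));
                flatten(inner, out);
                out.push((')', spacing.close));
            }
        }
    }
}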