| author | Konrad Borowski <konrad@borowski.pw> | 2018-12-23 16:47:11 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2018-12-23 16:47:11 +0100 |
| commit | 8ac5380ea0204dbdcbc8108d259928b67d5f8ebb (patch) | |
| tree | 174d912756fc2678af50d46ff457f7504750a975 | /src/libsyntax/parse |
| parent | b4a306c1e648c84f289c63e984941b7faad10af1 (diff) | |
| parent | ddab10a692aab2e2984b5c826ed9d78a57e94851 (diff) | |
| download | rust-8ac5380ea0204dbdcbc8108d259928b67d5f8ebb.tar.gz | rust-8ac5380ea0204dbdcbc8108d259928b67d5f8ebb.zip |
Merge branch 'master' into copied
Diffstat (limited to 'src/libsyntax/parse')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 6 |
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 28 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 40 |
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 25 |
| -rw-r--r-- | src/libsyntax/parse/lexer/unicode_chars.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 126 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 235 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 30 |
8 files changed, 279 insertions, 213 deletions
```diff
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index a240604bfe0..1bd0656846b 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -22,8 +22,8 @@ enum InnerAttributeParsePolicy<'a> {
     NotPermitted { reason: &'a str },
 }
 
-const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &'static str = "an inner attribute is not \
-                                                             permitted in this context";
+const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
+                                                     permitted in this context";
 
 impl<'a> Parser<'a> {
     /// Parse attributes that appear before an item
@@ -170,7 +170,7 @@ impl<'a> Parser<'a> {
                 token::CloseDelim(_) | token::Eof => self.unexpected()?,
                 _ => self.parse_token_tree(),
             };
-            TokenStream::concat(vec![eq.into(), tree.into()])
+            TokenStream::new(vec![eq.into(), tree.into()])
        } else {
            TokenStream::empty()
        };
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 172a48ddba2..d3039326c89 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -16,7 +16,6 @@ use syntax_pos::{BytePos, CharPos, Pos, FileName};
 use parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
 use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
 use print::pprust;
-use str::char_at;
 
 use std::io::Read;
 use std::usize;
@@ -207,20 +206,14 @@ fn read_line_comments(rdr: &mut StringReader,
 /// Otherwise returns Some(k) where k is first char offset after that leading
 /// whitespace. Note k may be outside bounds of s.
 fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
-    let len = s.len();
-    let mut col = col.to_usize();
-    let mut cursor: usize = 0;
-
-    while col > 0 && cursor < len {
-        let ch = char_at(s, cursor);
+    let mut idx = 0;
+    for (i, ch) in s.char_indices().take(col.to_usize()) {
         if !ch.is_whitespace() {
             return None;
         }
-        cursor += ch.len_utf8();
-        col -= 1;
+        idx = i + ch.len_utf8();
     }
-
-    Some(cursor)
+    Some(idx)
 }
 
 fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String>, s: String, col: CharPos) {
@@ -228,7 +221,7 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String>, s: String, col:
     let s1 = match all_whitespace(&s[..], col) {
         Some(col) => {
             if col < len {
-                (&s[col..len]).to_string()
+                s[col..len].to_string()
             } else {
                 String::new()
             }
@@ -247,20 +240,13 @@ fn read_block_comment(rdr: &mut StringReader,
     let mut lines: Vec<String> = Vec::new();
 
     // Count the number of chars since the start of the line by rescanning.
-    let mut src_index = rdr.src_index(rdr.source_file.line_begin_pos(rdr.pos));
+    let src_index = rdr.src_index(rdr.source_file.line_begin_pos(rdr.pos));
     let end_src_index = rdr.src_index(rdr.pos);
     assert!(src_index <= end_src_index,
             "src_index={}, end_src_index={}, line_begin_pos={}",
             src_index, end_src_index, rdr.source_file.line_begin_pos(rdr.pos).to_u32());
-    let mut n = 0;
-
-    while src_index < end_src_index {
-        let c = char_at(&rdr.src, src_index);
-        src_index += c.len_utf8();
-        n += 1;
-    }
-    let col = CharPos(n);
+
+    let col = CharPos(rdr.src[src_index..end_src_index].chars().count());
 
     rdr.bump();
     rdr.bump();
```
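The `all_whitespace` rewrite above is the recurring theme of this merge: manual byte cursors driven by the removed `str::char_at` helper become iterator pipelines. A minimal standalone sketch of the same idiom (the names and the `usize` column type are mine, not the compiler's):

```rust
/// Returns None if any of the first `col` chars of `s` is non-whitespace;
/// otherwise returns the byte offset just past that leading whitespace.
fn all_whitespace(s: &str, col: usize) -> Option<usize> {
    let mut idx = 0;
    for (i, ch) in s.char_indices().take(col) {
        if !ch.is_whitespace() {
            return None;
        }
        // Track the byte offset after this char; handles multi-byte whitespace.
        idx = i + ch.len_utf8();
    }
    Some(idx)
}

fn main() {
    assert_eq!(all_whitespace("  x", 2), Some(2)); // two leading spaces
    assert_eq!(all_whitespace("  x", 3), None);    // 'x' falls inside the first 3 chars
    assert_eq!(all_whitespace("\u{a0} x", 1), Some(2)); // NBSP advances idx by len_utf8
}
```

`char_indices` yields byte offsets alongside chars, so the offset arithmetic the old loop did by hand falls out of the iterator for free.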
```diff
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index c90c62c13f9..4be54e5bacc 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -13,12 +13,12 @@ use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
 use source_map::{SourceMap, FilePathMapping};
 use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
 use parse::{token, ParseSess};
-use str::char_at;
 use symbol::{Symbol, keywords};
 use core::unicode::property::Pattern_White_Space;
 
 use std::borrow::Cow;
 use std::char;
+use std::iter;
 use std::mem::replace;
 use rustc_data_structures::sync::Lrc;
 
@@ -459,45 +459,42 @@ impl<'a> StringReader<'a> {
 
     /// Converts CRLF to LF in the given string, raising an error on bare CR.
     fn translate_crlf<'b>(&self, start: BytePos, s: &'b str, errmsg: &'b str) -> Cow<'b, str> {
-        let mut i = 0;
-        while i < s.len() {
-            let ch = char_at(s, i);
-            let next = i + ch.len_utf8();
+        let mut chars = s.char_indices().peekable();
+        while let Some((i, ch)) = chars.next() {
             if ch == '\r' {
-                if next < s.len() && char_at(s, next) == '\n' {
-                    return translate_crlf_(self, start, s, errmsg, i).into();
+                if let Some((lf_idx, '\n')) = chars.peek() {
+                    return translate_crlf_(self, start, s, *lf_idx, chars, errmsg).into();
                 }
                 let pos = start + BytePos(i as u32);
-                let end_pos = start + BytePos(next as u32);
+                let end_pos = start + BytePos((i + ch.len_utf8()) as u32);
                 self.err_span_(pos, end_pos, errmsg);
             }
-            i = next;
         }
         return s.into();
 
         fn translate_crlf_(rdr: &StringReader,
                            start: BytePos,
                            s: &str,
-                           errmsg: &str,
-                           mut i: usize)
+                           mut j: usize,
+                           mut chars: iter::Peekable<impl Iterator<Item = (usize, char)>>,
+                           errmsg: &str)
                            -> String {
             let mut buf = String::with_capacity(s.len());
-            let mut j = 0;
-            while i < s.len() {
-                let ch = char_at(s, i);
-                let next = i + ch.len_utf8();
+            // Skip first CR
+            buf.push_str(&s[.. j - 1]);
+            while let Some((i, ch)) = chars.next() {
                 if ch == '\r' {
                     if j < i {
                         buf.push_str(&s[j..i]);
                     }
+                    let next = i + ch.len_utf8();
                     j = next;
-                    if next >= s.len() || char_at(s, next) != '\n' {
+                    if chars.peek().map(|(_, ch)| *ch) != Some('\n') {
                         let pos = start + BytePos(i as u32);
                         let end_pos = start + BytePos(next as u32);
                         rdr.err_span_(pos, end_pos, errmsg);
                     }
                 }
-                i = next;
             }
             if j < s.len() {
                 buf.push_str(&s[j..]);
@@ -1130,7 +1127,7 @@ impl<'a> StringReader<'a> {
                     "expected at least one digit in exponent"
                 );
                 if let Some(ch) = self.ch {
-                    // check for e.g. Unicode minus '−' (Issue #49746)
+                    // check for e.g., Unicode minus '−' (Issue #49746)
                     if unicode_chars::check_for_substitution(self, ch, &mut err) {
                         self.bump();
                         self.scan_digits(10, 10);
@@ -1858,6 +1855,11 @@ fn ident_continue(c: Option<char>) -> bool {
     (c > '\x7f' && c.is_xid_continue())
 }
 
+#[inline]
+fn char_at(s: &str, byte: usize) -> char {
+    s[byte..].chars().next().unwrap()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -1898,7 +1900,7 @@ mod tests {
                      sess: &'a ParseSess,
                      teststr: String)
                      -> StringReader<'a> {
-        let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
+        let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
         StringReader::new(sess, sf, None)
     }
```
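The CRLF pass now threads a single `Peekable<CharIndices>` through both phases instead of re-indexing with `char_at`. A self-contained sketch of the same approach (simplified: the error plumbing is elided, so a bare `\r` is kept rather than reported):

```rust
use std::borrow::Cow;

/// Convert CRLF to LF, borrowing the input when it contains no CRLF.
fn translate_crlf(s: &str) -> Cow<'_, str> {
    let mut chars = s.char_indices().peekable();
    while let Some((i, ch)) = chars.next() {
        if ch == '\r' && matches!(chars.peek(), Some((_, '\n'))) {
            // First CRLF found: copy everything before it, then filter the rest.
            let mut buf = String::with_capacity(s.len());
            buf.push_str(&s[..i]);
            let mut rest = s[i..].chars().peekable();
            while let Some(c) = rest.next() {
                if c == '\r' && rest.peek() == Some(&'\n') {
                    continue; // drop the CR, keep the LF that follows
                }
                buf.push(c);
            }
            return Cow::Owned(buf);
        }
    }
    Cow::Borrowed(s)
}

fn main() {
    assert_eq!(translate_crlf("a\r\nb\r\nc"), "a\nb\nc");
    assert_eq!(translate_crlf("no newlines"), "no newlines");
}
```

Returning `Cow::Borrowed` on the common no-CRLF path mirrors why the real function returns `Cow<'b, str>`: most source files never allocate here.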
```diff
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 8047ab01465..6f9dc247a78 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -11,7 +11,7 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use tokenstream::{Delimited, DelimSpan, TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -22,7 +22,7 @@ impl<'a> StringReader<'a> {
             tts.push(self.parse_token_tree()?);
         }
 
-        Ok(TokenStream::concat(tts))
+        Ok(TokenStream::new(tts))
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
@@ -30,14 +30,14 @@ impl<'a> StringReader<'a> {
         let mut tts = vec![];
         loop {
             if let token::CloseDelim(..) = self.token {
-                return TokenStream::concat(tts);
+                return TokenStream::new(tts);
             }
             match self.parse_token_tree() {
                 Ok(tree) => tts.push(tree),
                 Err(mut e) => {
                     e.emit();
-                    return TokenStream::concat(tts);
+                    return TokenStream::new(tts);
                 }
             }
         }
@@ -97,7 +97,15 @@ impl<'a> StringReader<'a> {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
-                        self.matching_delim_spans.push((open_brace, open_brace_span, self.span));
+                        if self.open_braces.len() == 0 {
+                            // Clear up these spans to avoid suggesting them as we've found
+                            // properly matched delimiters so far for an entire block.
+                            self.matching_delim_spans.clear();
+                        } else {
+                            self.matching_delim_spans.push(
+                                (open_brace, open_brace_span, self.span),
+                            );
+                        }
                         // Parse the close delimiter.
                         self.real_token();
                     }
@@ -155,10 +163,11 @@ impl<'a> StringReader<'a> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(delim_span, Delimited {
+                Ok(TokenTree::Delimited(
+                    delim_span,
                     delim,
-                    tts: tts.into(),
-                }).into())
+                    tts.into(),
+                ).into())
             },
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
```
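The `matching_delim_spans` change only keeps candidate spans while some delimiter is still open; once an entire top-level block has matched cleanly, the history is cleared so it cannot feed stale "unmatched delimiter" suggestions later. The same bookkeeping, reduced to a standalone brace matcher (a toy sketch, not the lexer's types):

```rust
/// Track (open, close) byte-index pairs of correctly matched braces, clearing
/// the history whenever a whole top-level block has matched.
fn match_braces(src: &str) -> Result<Vec<(usize, usize)>, usize> {
    let mut open_stack = Vec::new();
    let mut matched = Vec::new();
    for (i, ch) in src.char_indices() {
        match ch {
            '{' => open_stack.push(i),
            '}' => match open_stack.pop() {
                None => return Err(i), // unexpected close delimiter
                Some(open) => {
                    if open_stack.is_empty() {
                        // A full block matched: stale pairs can't improve any
                        // later diagnostic, so drop them.
                        matched.clear();
                    } else {
                        matched.push((open, i));
                    }
                }
            },
            _ => {}
        }
    }
    Ok(matched)
}

fn main() {
    assert_eq!(match_braces("{ { } }"), Ok(vec![])); // fully matched: history cleared
    assert_eq!(match_braces("}"), Err(0));           // close with nothing open
}
```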
```diff
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs
index 03bf1b5a4e1..8a620c8067d 100644
--- a/src/libsyntax/parse/lexer/unicode_chars.rs
+++ b/src/libsyntax/parse/lexer/unicode_chars.rs
@@ -306,7 +306,7 @@ const UNICODE_ARRAY: &[(char, &str, char)] = &[
     ('>', "Fullwidth Greater-Than Sign", '>'),
 ];
 
-const ASCII_ARRAY: &'static [(char, &'static str)] = &[
+const ASCII_ARRAY: &[(char, &str)] = &[
     (' ', "Space"),
     ('_', "Underscore"),
     ('-', "Minus/Hyphen"),
```
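This file and `attr.rs` both drop the redundant `&'static` from `const` items: in the type of a `const` or `static`, an elided reference lifetime already defaults to `'static`, so spelling it out adds nothing. A quick illustration:

```rust
// Equivalent declarations: in a const item the elided lifetime is 'static.
const OLD_STYLE: &'static [(char, &'static str)] = &[(' ', "Space")];
const NEW_STYLE: &[(char, &str)] = &[(' ', "Space")];

fn main() {
    assert_eq!(OLD_STYLE, NEW_STYLE);
}
```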
```diff
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index ac972f20f94..200b1cecc03 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -15,11 +15,10 @@ use ast::{self, CrateConfig, NodeId};
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
+use errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
-use str::char_at;
 use symbol::Symbol;
 use tokenstream::{TokenStream, TokenTree};
 use diagnostics::plugin::ErrorMap;
@@ -82,6 +81,7 @@ impl ParseSess {
         }
     }
 
+    #[inline]
     pub fn source_map(&self) -> &SourceMap {
         &self.source_map
     }
@@ -193,6 +193,14 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a>
     source_file_to_parser(sess, file_to_source_file(sess, path, None))
 }
 
+/// Create a new parser, returning buffered diagnostics if the file doesn't
+/// exist or from lexing the initial token stream.
+pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
+    -> Result<Parser<'a>, Vec<Diagnostic>> {
+    let file = try_file_to_source_file(sess, path, None).map_err(|db| vec![db])?;
+    maybe_source_file_to_parser(sess, file)
+}
+
 /// Given a session, a crate config, a path, and a span, add
 /// the file at the given path to the source_map, and return a parser.
 /// On an error, use the given span as the source of the problem.
@@ -238,17 +246,30 @@ pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
 // base abstractions
 
 /// Given a session and a path and an optional span (for error reporting),
+/// add the path to the session's source_map and return the new source_file or
+/// error when a file can't be read.
+fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
+                   -> Result<Lrc<SourceFile>, Diagnostic> {
+    sess.source_map().load_file(path)
+    .map_err(|e| {
+        let msg = format!("couldn't read {}: {}", path.display(), e);
+        let mut diag = Diagnostic::new(Level::Fatal, &msg);
+        if let Some(sp) = spanopt {
+            diag.set_span(sp);
+        }
+        diag
+    })
+}
+
+/// Given a session and a path and an optional span (for error reporting),
 /// add the path to the session's source_map and return the new source_file.
 fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                    -> Lrc<SourceFile> {
-    match sess.source_map().load_file(path) {
+    match try_file_to_source_file(sess, path, spanopt) {
         Ok(source_file) => source_file,
-        Err(e) => {
-            let msg = format!("couldn't read {}: {}", path.display(), e);
-            match spanopt {
-                Some(sp) => sess.span_diagnostic.span_fatal(sp, &msg).raise(),
-                None => sess.span_diagnostic.fatal(&msg).raise()
-            }
+        Err(d) => {
+            DiagnosticBuilder::new_diagnostic(&sess.span_diagnostic, d).emit();
+            FatalError.raise();
         }
     }
 }
@@ -436,9 +457,7 @@ fn raw_str_lit(lit: &str) -> String {
 
 // check if `s` looks like i32 or u1234 etc.
 fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-    s.len() > 1 &&
-        first_chars.contains(&char_at(s, 0)) &&
-        s[1..].chars().all(|c| '0' <= c && c <= '9')
+    s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
 }
 
 macro_rules! err {
@@ -645,11 +664,11 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
     let orig = s;
     let mut ty = ast::LitIntType::Unsuffixed;
 
-    if char_at(s, 0) == '0' && s.len() > 1 {
-        match char_at(s, 1) {
-            'x' => base = 16,
-            'o' => base = 8,
-            'b' => base = 2,
+    if s.starts_with('0') && s.len() > 1 {
+        match s.as_bytes()[1] {
+            b'x' => base = 16,
+            b'o' => base = 8,
+            b'b' => base = 2,
             _ => { }
         }
     }
@@ -767,7 +786,7 @@ mod tests {
     use attr::first_attr_value_str_by_name;
     use parse;
     use print::pprust::item_to_string;
-    use tokenstream::{self, DelimSpan, TokenTree};
+    use tokenstream::{DelimSpan, TokenTree};
     use util::parser_testing::string_to_stream;
     use util::parser_testing::{string_to_expr, string_to_item};
     use with_globals;
@@ -798,42 +817,41 @@ mod tests {
             Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
             Some(&TokenTree::Token(_, token::Not)),
             Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
-            Some(&TokenTree::Delimited(_, ref macro_delimed)),
+            Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
         )
         if name_macro_rules.name == "macro_rules" && name_zip.name == "zip" => {
-            let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
+            let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
             match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                 (
                     3,
-                    Some(&TokenTree::Delimited(_, ref first_delimed)),
+                    Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
                     Some(&TokenTree::Token(_, token::FatArrow)),
-                    Some(&TokenTree::Delimited(_, ref second_delimed)),
+                    Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                 )
-                if macro_delimed.delim == token::Paren => {
-                    let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
+                if macro_delim == token::Paren => {
+                    let tts = &first_tts.stream().trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1)) {
                         (
                             2,
                             Some(&TokenTree::Token(_, token::Dollar)),
                             Some(&TokenTree::Token(_, token::Ident(ident, false))),
                         )
-                        if first_delimed.delim == token::Paren && ident.name == "a" => {},
-                        _ => panic!("value 3: {:?}", *first_delimed),
+                        if first_delim == token::Paren && ident.name == "a" => {},
+                        _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                     }
-                    let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
+                    let tts = &second_tts.stream().trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1)) {
                         (
                             2,
                             Some(&TokenTree::Token(_, token::Dollar)),
                             Some(&TokenTree::Token(_, token::Ident(ident, false))),
                         )
-                        if second_delimed.delim == token::Paren
-                            && ident.name == "a" => {},
-                        _ => panic!("value 4: {:?}", *second_delimed),
+                        if second_delim == token::Paren && ident.name == "a" => {},
+                        _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                     }
                 },
-                _ => panic!("value 2: {:?}", *macro_delimed),
+                _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
             }
         },
         _ => panic!("value: {:?}",tts),
@@ -846,31 +864,29 @@ mod tests {
         with_globals(|| {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
-            let expected = TokenStream::concat(vec![
+            let expected = TokenStream::new(vec![
                 TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
                 TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
-                    tokenstream::Delimited {
-                        delim: token::DelimToken::Paren,
-                        tts: TokenStream::concat(vec![
-                            TokenTree::Token(sp(6, 7),
-                                             token::Ident(Ident::from_str("b"), false)).into(),
-                            TokenTree::Token(sp(8, 9), token::Colon).into(),
-                            TokenTree::Token(sp(10, 13),
-                                             token::Ident(Ident::from_str("i32"), false)).into(),
-                        ]).into(),
-                    }).into(),
+                    token::DelimToken::Paren,
+                    TokenStream::new(vec![
+                        TokenTree::Token(sp(6, 7),
+                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::Token(sp(8, 9), token::Colon).into(),
+                        TokenTree::Token(sp(10, 13),
+                                         token::Ident(Ident::from_str("i32"), false)).into(),
+                    ]).into(),
+                ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
-                    tokenstream::Delimited {
-                        delim: token::DelimToken::Brace,
-                        tts: TokenStream::concat(vec![
-                            TokenTree::Token(sp(17, 18),
-                                             token::Ident(Ident::from_str("b"), false)).into(),
-                            TokenTree::Token(sp(18, 19), token::Semi).into(),
-                        ]).into(),
-                    }).into()
+                    token::DelimToken::Brace,
+                    TokenStream::new(vec![
+                        TokenTree::Token(sp(17, 18),
+                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    ]).into(),
+                ).into()
             ]);
 
             assert_eq!(tts, expected);
@@ -977,23 +993,25 @@ mod tests {
         with_globals(|| {
             let sess = ParseSess::new(FilePathMapping::empty());
 
-            let name = FileName::Custom("source".to_string());
+            let name_1 = FileName::Custom("crlf_source_1".to_string());
             let source = "/// doc comment\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name.clone(), source, &sess)
+            let item = parse_item_from_source_str(name_1, source, &sess)
                 .unwrap().unwrap();
             let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
             assert_eq!(doc, "/// doc comment");
 
+            let name_2 = FileName::Custom("crlf_source_2".to_string());
             let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name.clone(), source, &sess)
+            let item = parse_item_from_source_str(name_2, source, &sess)
                 .unwrap().unwrap();
             let docs = item.attrs.iter().filter(|a| a.path == "doc")
                 .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
             let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
            assert_eq!(&docs[..], b);
 
+            let name_3 = FileName::Custom("clrf_source_3".to_string());
             let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
+            let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
             let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
             assert_eq!(doc, "/** doc comment\n * with CRLF */");
         });
```
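The new `try_file_to_source_file`/`maybe_new_parser_from_file` pair splits the old abort-on-error path in two: a fallible core that returns the `Diagnostic`, and the old infallible entry point layered on top of it. The same error-shape split in plain Rust, with hypothetical names standing in for the session machinery:

```rust
use std::fs;
use std::path::Path;

/// Fallible core: hand the caller the diagnostic instead of aborting.
fn try_load(path: &Path) -> Result<String, String> {
    fs::read_to_string(path)
        .map_err(|e| format!("couldn't read {}: {}", path.display(), e))
}

/// Infallible convenience wrapper, built on the fallible core: report and abort.
#[allow(dead_code)]
fn load(path: &Path) -> String {
    match try_load(path) {
        Ok(src) => src,
        Err(msg) => {
            eprintln!("error: {}", msg);
            std::process::exit(1);
        }
    }
}

fn main() {
    // The Result-returning form lets a caller degrade gracefully, which is
    // what `maybe_new_parser_from_file` now offers for missing files.
    assert!(try_load(Path::new("definitely/not/here.rs")).is_err());
}
```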
```diff
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 1cd5006f330..5a51b629826 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -48,13 +48,14 @@ use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
 use parse::{self, SeqSep, classify, token};
 use parse::lexer::TokenAndSpan;
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use parse::token::DelimToken;
 use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, Delimited, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 
 use std::borrow::Cow;
@@ -92,12 +93,12 @@ pub enum PathStyle {
     /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
     Expr,
     /// In other contexts, notably in types, no ambiguity exists and paths can be written
-    /// without the disambiguator, e.g. `x<y>` - unambiguously a path.
+    /// without the disambiguator, e.g., `x<y>` - unambiguously a path.
     /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
     Type,
-    /// A path with generic arguments disallowed, e.g. `foo::bar::Baz`, used in imports,
+    /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
     /// visibilities or attributes.
-    /// Technically, this variant is unnecessary and e.g. `Expr` can be used instead
+    /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
     /// (paths in "mod" contexts have to be checked later for absence of generic arguments
     /// anyway, due to macros), but it is used to avoid weird suggestions about expected
     /// tokens when something goes wrong.
@@ -293,13 +294,13 @@ enum LastToken {
 }
 
 impl TokenCursorFrame {
-    fn new(sp: DelimSpan, delimited: &Delimited) -> Self {
+    fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
         TokenCursorFrame {
-            delim: delimited.delim,
+            delim: delim,
             span: sp,
-            open_delim: delimited.delim == token::NoDelim,
-            tree_cursor: delimited.stream().into_trees(),
-            close_delim: delimited.delim == token::NoDelim,
+            open_delim: delim == token::NoDelim,
+            tree_cursor: tts.stream().into_trees(),
+            close_delim: delim == token::NoDelim,
             last_token: LastToken::Was(None),
         }
     }
@@ -310,14 +311,12 @@ impl TokenCursor {
         loop {
             let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
-                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .open_tt(self.frame.span.open)
+                TokenTree::open_tt(self.frame.span.open, self.frame.delim)
             } else if let Some(tree) = self.frame.tree_cursor.next() {
                 tree
             } else if !self.frame.close_delim {
                 self.frame.close_delim = true;
-                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .close_tt(self.frame.span.close)
+                TokenTree::close_tt(self.frame.span.close, self.frame.delim)
             } else if let Some(frame) = self.stack.pop() {
                 self.frame = frame;
                 continue
@@ -332,8 +331,8 @@ impl TokenCursor {
 
             match tree {
                 TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
-                TokenTree::Delimited(sp, ref delimited) => {
-                    let frame = TokenCursorFrame::new(sp, delimited);
+                TokenTree::Delimited(sp, delim, tts) => {
+                    let frame = TokenCursorFrame::new(sp, delim, &tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
                 }
             }
@@ -362,25 +361,28 @@ impl TokenCursor {
         }
 
         let delim_span = DelimSpan::from_single(sp);
-        let body = TokenTree::Delimited(delim_span, Delimited {
-            delim: token::Bracket,
-            tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
-                  TokenTree::Token(sp, token::Eq),
-                  TokenTree::Token(sp, token::Literal(
-                      token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
-                .iter().cloned().collect::<TokenStream>().into(),
-        });
+        let body = TokenTree::Delimited(
+            delim_span,
+            token::Bracket,
+            [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
+             TokenTree::Token(sp, token::Eq),
+             TokenTree::Token(sp, token::Literal(
+                token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+            ]
+            .iter().cloned().collect::<TokenStream>().into(),
+        );
 
-        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(delim_span, &Delimited {
-            delim: token::NoDelim,
-            tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
+            delim_span,
+            token::NoDelim,
+            &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
                 [TokenTree::Token(sp, token::Pound), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
-        })));
+        )));
 
         self.next()
     }
@@ -561,10 +563,11 @@ impl<'a> Parser<'a> {
             root_module_name: None,
             expected_tokens: Vec::new(),
             token_cursor: TokenCursor {
-                frame: TokenCursorFrame::new(DelimSpan::dummy(), &Delimited {
-                    delim: token::NoDelim,
-                    tts: tokens.into(),
-                }),
+                frame: TokenCursorFrame::new(
+                    DelimSpan::dummy(),
+                    token::NoDelim,
+                    &tokens.into(),
+                ),
                 stack: Vec::new(),
             },
             desugar_doc_comments,
@@ -1238,7 +1241,7 @@ impl<'a> Parser<'a> {
             f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
                 Some(tree) => match tree {
                     TokenTree::Token(_, tok) => tok,
-                    TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
+                    TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
                },
                 None => token::CloseDelim(self.token_cursor.frame.delim),
             })
@@ -1251,7 +1254,7 @@ impl<'a> Parser<'a> {
 
         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
             Some(TokenTree::Token(span, _)) => span,
-            Some(TokenTree::Delimited(span, _)) => span.entire(),
+            Some(TokenTree::Delimited(span, ..)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
     }
```
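Most of the churn above comes from one upstream change merged in here: the `tokenstream::Delimited` struct is gone, and `TokenTree::Delimited` now carries the span, the delimiter, and the token stream as three tuple fields. The shape of that refactor on a toy enum (all names hypothetical):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Delim { Paren, Brace }

// Before: the variant wrapped a struct.
//   enum Tree { Token(char), Delimited(u32, Delimited) }
//   struct Delimited { delim: Delim, tts: Vec<Tree> }

// After: the struct's fields are inlined into the variant.
#[derive(Debug, Clone, PartialEq)]
enum Tree {
    Token(char),
    Delimited(u32 /* span */, Delim, Vec<Tree>),
}

fn open_delim(tree: &Tree) -> Option<Delim> {
    match tree {
        // One less level of field access at every use site.
        Tree::Delimited(_, delim, _) => Some(*delim),
        Tree::Token(_) => None,
    }
}

fn main() {
    let t = Tree::Delimited(0, Delim::Paren, vec![Tree::Token('a')]);
    assert_eq!(open_delim(&t), Some(Delim::Paren));
}
```

The trade-off is visible in the diff itself: use sites get shorter (`TokenTree::Delimited(_, delim, _)`), at the cost of losing the named-field documentation the struct provided.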
```diff
@@ -1404,7 +1407,7 @@ impl<'a> Parser<'a> {
                 //     definition...
 
                 // We don't allow argument names to be left off in edition 2018.
-                p.parse_arg_general(p.span.rust_2018())
+                p.parse_arg_general(p.span.rust_2018(), true)
             })?;
             generics.where_clause = self.parse_where_clause()?;
@@ -1817,7 +1820,7 @@ impl<'a> Parser<'a> {
 
     /// This version of parse arg doesn't necessarily require
     /// identifier names.
-    fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
+    fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> {
         maybe_whole!(self, NtArg, |x| x);
 
         if let Ok(Some(_)) = self.parse_self_arg() {
@@ -1849,6 +1852,17 @@ impl<'a> Parser<'a> {
                         String::from("<identifier>: <type>"),
                         Applicability::HasPlaceholders,
                     );
+                } else if require_name && is_trait_item {
+                    if let PatKind::Ident(_, ident, _) = pat.node {
+                        err.span_suggestion_with_applicability(
+                            pat.span,
+                            "explicitly ignore parameter",
+                            format!("_: {}", ident),
+                            Applicability::MachineApplicable,
+                        );
+                    }
+
+                    err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
                 }
 
                 return Err(err);
@@ -1914,10 +1928,10 @@ impl<'a> Parser<'a> {
 
     /// Parse a single function argument
     crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
-        self.parse_arg_general(true)
+        self.parse_arg_general(true, false)
     }
 
-    /// Parse an argument in a lambda header e.g. |arg, arg|
+    /// Parse an argument in a lambda header e.g., |arg, arg|
     fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
         let pat = self.parse_pat(Some("argument name"))?;
         let t = if self.eat(&token::Colon) {
```
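With `is_trait_item` threaded through, `parse_arg_general` can point 2015-edition trait code at the 2018 rules: anonymous trait-method parameters were removed by RFC 1685, and the machine-applicable suggestion rewrites the name-less parameter as `_: Type`. The kind of source it targets, shown as an illustration (both forms in one file only compile on the 2015 edition):

```rust
// 2015 edition: legal, the parameter has a type but no name.
trait Greeter2015 {
    fn greet(&self, String);
}

// 2018 edition: the old form is an error; the new suggestion rewrites it
// to an explicitly ignored parameter.
trait Greeter2018 {
    fn greet(&self, _: String);
}
```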
```diff
@@ -2017,6 +2031,17 @@ impl<'a> Parser<'a> {
         }
     }
 
+    fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
+        match self.token {
+            token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+                let span = self.span;
+                self.bump();
+                Ok(Ident::new(ident.name, span))
+            }
+            _ => self.parse_ident(),
+        }
+    }
+
     /// Parses qualified path.
     /// Assumes that the leading `<` has been parsed already.
     ///
@@ -2082,7 +2107,7 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         let mod_sep_ctxt = self.span.ctxt();
         if self.eat(&token::ModSep) {
-            segments.push(PathSegment::crate_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
+            segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
         }
         self.parse_path_segments(&mut segments, style, enable_warning)?;
@@ -2306,8 +2331,8 @@ impl<'a> Parser<'a> {
                 return Err(err)
             }
         };
-        let delimited = match self.parse_token_tree() {
-            TokenTree::Delimited(_, delimited) => delimited,
+        let tts = match self.parse_token_tree() {
+            TokenTree::Delimited(_, _, tts) => tts,
             _ => unreachable!(),
         };
         let delim = match delim {
@@ -2316,14 +2341,14 @@ impl<'a> Parser<'a> {
             token::Brace => MacDelimiter::Brace,
             token::NoDelim => self.bug("unexpected no delimiter"),
         };
-        Ok((delim, delimited.stream().into()))
+        Ok((delim, tts.stream().into()))
     }
 
     /// At the bottom (top?) of the precedence hierarchy,
     /// parse things like parenthesized exprs,
     /// macros, return, etc.
     ///
-    /// NB: This does not parse outer attributes,
+    /// N.B., this does not parse outer attributes,
     ///     and is private because it only works
     ///     correctly if called from parse_dot_or_call_expr().
     fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
@@ -2881,10 +2906,11 @@ impl<'a> Parser<'a> {
                                           self.token_cursor.stack.pop().unwrap());
                 self.span = frame.span.entire();
                 self.bump();
-                TokenTree::Delimited(frame.span, Delimited {
-                    delim: frame.delim,
-                    tts: frame.tree_cursor.original_stream().into(),
-                })
+                TokenTree::Delimited(
+                    frame.span,
+                    frame.delim,
+                    frame.tree_cursor.original_stream().into(),
+                )
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
@@ -2913,7 +2939,7 @@ impl<'a> Parser<'a> {
                 _ => result.push(self.parse_token_tree().into()),
             }
         }
-        TokenStream::concat(result)
+        TokenStream::new(result)
     }
 
     /// Parse a prefix-unary-operator expr
@@ -3036,6 +3062,7 @@ impl<'a> Parser<'a> {
     ///
     /// This parses an expression accounting for associativity and precedence of the operators in
     /// the expression.
+    #[inline]
     fn parse_assoc_expr(&mut self,
                         already_parsed_attrs: Option<ThinVec<Attribute>>)
                         -> PResult<'a, P<Expr>> {
@@ -3696,6 +3723,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse an expression
+    #[inline]
     pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
         self.parse_expr_res(Restrictions::empty(), None)
     }
@@ -3715,13 +3743,14 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse an expression, subject to the given restrictions
+    #[inline]
     fn parse_expr_res(&mut self, r: Restrictions,
                       already_parsed_attrs: Option<ThinVec<Attribute>>)
                       -> PResult<'a, P<Expr>> {
         self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
     }
 
-    /// Parse the RHS of a local variable declaration (e.g. '= 14;')
+    /// Parse the RHS of a local variable declaration (e.g., '= 14;')
     fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
         if self.eat(&token::Eq) {
             Ok(Some(self.parse_expr()?))
@@ -4103,7 +4132,7 @@ impl<'a> Parser<'a> {
         self.parse_pat_with_range_pat(true, expected)
     }
 
-    /// Parse a pattern, with a setting whether modern range patterns e.g. `a..=b`, `a..b` are
+    /// Parse a pattern, with a setting whether modern range patterns e.g., `a..=b`, `a..b` are
     /// allowed.
     fn parse_pat_with_range_pat(
         &mut self,
@@ -4445,7 +4474,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a statement. This stops just before trailing semicolons on everything but items.
-    /// e.g. a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
+    /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
     pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
         Ok(self.parse_stmt_(true))
     }
@@ -4598,7 +4627,7 @@ impl<'a> Parser<'a> {
             let ident = self.parse_ident()?;
             let tokens = if self.check(&token::OpenDelim(token::Brace)) {
                 match self.parse_token_tree() {
-                    TokenTree::Delimited(_, ref delimited) => delimited.stream(),
+                    TokenTree::Delimited(_, _, tts) => tts.stream(),
                     _ => unreachable!(),
                 }
             } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -4609,7 +4638,7 @@ impl<'a> Parser<'a> {
                     self.unexpected()?;
                     unreachable!()
                 };
-                TokenStream::concat(vec![
+                TokenStream::new(vec![
                     args.into(),
                     TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
                     body.into(),
@@ -5042,9 +5071,9 @@ impl<'a> Parser<'a> {
 
     // Parse bounds of a type parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
     // BOUND = TY_BOUND | LT_BOUND
-    // LT_BOUND = LIFETIME (e.g. `'a`)
+    // LT_BOUND = LIFETIME (e.g., `'a`)
     // TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
-    // TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g. `?for<'a: 'b> m::Trait<'a>`)
+    // TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
     fn parse_generic_bounds_common(&mut self, allow_plus: bool) -> PResult<'a, GenericBounds> {
         let mut bounds = Vec::new();
         loop {
@@ -5099,7 +5128,7 @@ impl<'a> Parser<'a> {
     }
 
     // Parse bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
-    // BOUND = LT_BOUND (e.g. `'a`)
+    // BOUND = LT_BOUND (e.g., `'a`)
     fn parse_lt_param_bounds(&mut self) -> GenericBounds {
         let mut lifetimes = Vec::new();
         while self.check_lifetime() {
```
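`parse_ident_or_underscore` centralizes the two places that accept `_` where an identifier normally goes, and it stops gensym-ing the underscore along the way. The two surface forms it serves, both stable in today's Rust:

```rust
// `_` as a rename in a use item: bring a trait's methods into scope
// without binding its name.
use std::fmt::Write as _;

// `_` as a const name: the expression is type-checked and evaluated
// without creating a binding. (The `m.is_none()` check later in this diff
// keeps `static _` rejected; only `const _` is allowed.)
const _: usize = 4 + 4;

fn main() {
    let mut s = String::new();
    write!(s, "{}", 8).unwrap(); // works because Write is in scope via `as _`
    assert_eq!(s, "8");
}
```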
```diff
@@ -5458,7 +5487,7 @@ impl<'a> Parser<'a> {
                     }
                 }
             } else {
-                match p.parse_arg_general(named_args) {
+                match p.parse_arg_general(named_args, false) {
                     Ok(arg) => Ok(Some(arg)),
                     Err(mut e) => {
                         e.emit();
@@ -5508,7 +5537,7 @@ impl<'a> Parser<'a> {
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
-            this.look_ahead(n, |t| t.is_keyword(keywords::SelfValue)) &&
+            this.look_ahead(n, |t| t.is_keyword(keywords::SelfLower)) &&
             this.look_ahead(n + 1, |t| t != &token::ModSep)
         };
@@ -5800,20 +5829,14 @@ impl<'a> Parser<'a> {
     }
 
     fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) {
-        if let Err(mut err) = self.complain_if_pub_macro_diag(vis, sp) {
-            err.emit();
-        }
-    }
-
-    fn complain_if_pub_macro_diag(&mut self, vis: &VisibilityKind, sp: Span) -> PResult<'a, ()> {
         match *vis {
-            VisibilityKind::Inherited => Ok(()),
+            VisibilityKind::Inherited => {}
             _ => {
                 let is_macro_rules: bool = match self.token {
                     token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"),
                     _ => false,
                 };
-                if is_macro_rules {
+                let mut err = if is_macro_rules {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                     err.span_suggestion_with_applicability(
                         sp,
                         "try exporting the macro",
                         "#[macro_export]".to_owned(),
                         Applicability::MaybeIncorrect // speculative
                     );
-                    Err(err)
+                    err
                 } else {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro invocation with `pub`");
                     err.help("try adjusting the macro to put `pub` inside the invocation");
-                    Err(err)
-                }
+                    err
+                };
+                err.emit();
             }
         }
     }
@@ -6137,9 +6161,6 @@ impl<'a> Parser<'a> {
 
     fn consume_block(&mut self, delim: token::DelimToken) {
         let mut brace_depth = 0;
-        if !self.eat(&token::OpenDelim(delim)) {
-            return;
-        }
         loop {
             if self.eat(&token::OpenDelim(delim)) {
                 brace_depth += 1;
             } else if self.eat(&token::CloseDelim(delim)) {
                 if brace_depth == 0 {
                     return;
                 } else {
@@ -6150,7 +6171,7 @@ impl<'a> Parser<'a> {
                     brace_depth -= 1;
                     continue;
                 }
-            } else if self.eat(&token::Eof) || self.eat(&token::CloseDelim(token::NoDelim)) {
+            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
                 return;
             } else {
                 self.bump();
@@ -6282,7 +6303,7 @@ impl<'a> Parser<'a> {
 
     /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
     /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
-    /// If the following element can't be a tuple (i.e. it's a function definition,
+    /// If the following element can't be a tuple (i.e., it's a function definition,
     /// it's not a tuple struct field) and the contents within the parens
     /// isn't valid, emit a proper diagnostic.
     pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
```
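`complain_if_pub_macro` is collapsed into a single function that builds whichever diagnostic applies and emits it in one place, instead of round-tripping through a `Result`. The input it fires on, and the suggested rewrite, as an illustration:

```rust
// Rejected with "can't qualify macro_rules invocation with `pub`":
//
//     pub macro_rules! four { () => { 4 } }
//
// Suggested fix: export the macro instead of qualifying it.
#[macro_export]
macro_rules! four {
    () => { 4 };
}

fn main() {
    assert_eq!(four!(), 4);
}
```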
```diff
@@ -6330,7 +6351,7 @@ impl<'a> Parser<'a> {
                 return Ok(vis)
             } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
                       self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
-                                             t.is_keyword(keywords::SelfValue))
+                                             t.is_keyword(keywords::SelfLower))
             {
                 // `pub(self)` or `pub(super)`
                 self.bump(); // `(`
@@ -6434,13 +6455,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
-        let id = match self.token {
-            token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
-                self.bump(); // `_`
-                ident.gensym()
-            },
-            _ => self.parse_ident()?,
-        };
+        let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
         self.expect(&token::Colon)?;
         let ty = self.parse_ty()?;
         self.expect(&token::Eq)?;
@@ -6782,7 +6797,7 @@ impl<'a> Parser<'a> {
         let error_msg = "crate name using dashes are not valid in `extern crate` statements";
         let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
                               in the code";
-        let mut ident = if self.token.is_keyword(keywords::SelfValue) {
+        let mut ident = if self.token.is_keyword(keywords::SelfLower) {
             self.parse_path_segment_ident()
         } else {
             self.parse_ident()
@@ -7405,17 +7420,27 @@ impl<'a> Parser<'a> {
             return Err(err);
         } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
             let ident = self.parse_ident().unwrap();
+            self.bump();  // `(`
+            let kw_name = if let Ok(Some(_)) = self.parse_self_arg() {
+                "method"
+            } else {
+                "function"
+            };
             self.consume_block(token::Paren);
-            let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) ||
-                self.check(&token::OpenDelim(token::Brace))
-            {
-                ("fn", "method", false)
+            let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
+                self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
+                self.bump();  // `{`
+                ("fn", kw_name, false)
+            } else if self.check(&token::OpenDelim(token::Brace)) {
+                self.bump();  // `{`
+                ("fn", kw_name, false)
             } else if self.check(&token::Colon) {
                 let kw = "struct";
                 (kw, kw, false)
             } else {
-                ("fn` or `struct", "method or struct", true)
+                ("fn` or `struct", "function or struct", true)
             };
+            self.consume_block(token::Brace);
 
             let msg = format!("missing `{}` for {} definition", kw, kw_name);
             let mut err = self.diagnostic().struct_span_err(sp, &msg);
@@ -7442,6 +7467,32 @@ impl<'a> Parser<'a> {
                 }
             }
             return Err(err);
+        } else if self.look_ahead(1, |t| *t == token::Lt) {
+            let ident = self.parse_ident().unwrap();
+            self.eat_to_tokens(&[&token::Gt]);
+            self.bump();  // `>`
+            let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
+                if let Ok(Some(_)) = self.parse_self_arg() {
+                    ("fn", "method", false)
+                } else {
+                    ("fn", "function", false)
+                }
+            } else if self.check(&token::OpenDelim(token::Brace)) {
+                ("struct", "struct", false)
+            } else {
+                ("fn` or `struct", "function or struct", true)
+            };
+            let msg = format!("missing `{}` for {} definition", kw, kw_name);
+            let mut err = self.diagnostic().struct_span_err(sp, &msg);
+            if !ambiguous {
+                err.span_suggestion_short_with_applicability(
+                    sp,
+                    &format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name),
+                    format!(" {} ", kw),
+                    Applicability::MachineApplicable,
+                );
+            }
+            return Err(err);
         }
         self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
@@ -7685,7 +7736,7 @@ impl<'a> Parser<'a> {
             let mod_sep_ctxt = self.span.ctxt();
             if self.eat(&token::ModSep) {
                 prefix.segments.push(
-                    PathSegment::crate_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
+                    PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
                );
             }
 
@@ -7725,13 +7776,7 @@ impl<'a> Parser<'a> {
 
     fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
         if self.eat_keyword(keywords::As) {
-            match self.token {
-                token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
-                    self.bump(); // `_`
-                    Ok(Some(ident.gensym()))
-                }
-                _ => self.parse_ident().map(Some),
-            }
+            self.parse_ident_or_underscore().map(Some)
         } else {
             Ok(None)
         }
```
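The recovery path for `pub ident(...)`-shaped items now parses past the argument list (checking for a `self` argument to say "method" rather than "function") and consumes the body, and a new branch handles `pub ident<...>` with generics. The shapes it now diagnoses with a single "missing `fn`" suggestion, as an illustration:

```rust
// error: missing `fn` for function definition
//     pub double(x: i32) -> i32 { x * 2 }
//
// error: missing `fn` for function definition (the new `<` look-ahead branch)
//     pub identity<T>(t: T) -> T { t }

// With the machine-applicable suggestion applied:
pub fn double(x: i32) -> i32 { x * 2 }
pub fn identity<T>(t: T) -> T { t }

fn main() {
    assert_eq!(double(identity(21)), 42);
}
```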
prefix.segments.push( - PathSegment::crate_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)) + PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)) ); } @@ -7725,13 +7776,7 @@ impl<'a> Parser<'a> { fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> { if self.eat_keyword(keywords::As) { - match self.token { - token::Ident(ident, false) if ident.name == keywords::Underscore.name() => { - self.bump(); // `_` - Ok(Some(ident.gensym())) - } - _ => self.parse_ident().map(Some), - } + self.parse_ident_or_underscore().map(Some) } else { Ok(None) } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 4a5f3e240da..badcc4ed876 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -207,6 +207,10 @@ pub enum Token { Eof, } +// `Token` is used a lot. Make sure it doesn't unintentionally get bigger. +#[cfg(target_arch = "x86_64")] +static_assert!(MEM_SIZE_OF_STATEMENT: mem::size_of::<Token>() == 16); + impl Token { pub fn interpolated(nt: Nonterminal) -> Token { Token::Interpolated(Lrc::new((nt, LazyTokenStream::new()))) @@ -545,11 +549,12 @@ impl Token { let tokens_for_real = nt.1.force(|| { // FIXME(#43081): Avoid this pretty-print + reparse hack let source = pprust::token_to_string(self); - parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span)) + let filename = FileName::macro_expansion_source_code(&source); + parse_stream_from_source_str(filename, source, sess, Some(span)) }); // During early phases of the compiler the AST could get modified - // directly (e.g. attributes added or removed) and the internal cache + // directly (e.g., attributes added or removed) and the internal cache // of tokens my not be invalidated or updated. Consequently if the // "lossless" token stream disagrees with our actual stringification // (which has historically been much more battle-tested) then we go @@ -628,7 +633,9 @@ impl Token { (&Shebang(a), &Shebang(b)) => a == b, (&Lifetime(a), &Lifetime(b)) => a.name == b.name, - (&Ident(a, b), &Ident(c, d)) => a.name == c.name && b == d, + (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name || + a.name == keywords::DollarCrate.name() || + c.name == keywords::DollarCrate.name()), (&Literal(ref a, b), &Literal(ref c, d)) => { b == d && a.probably_equal_for_proc_macro(c) @@ -781,10 +788,12 @@ fn prepend_attrs(sess: &ParseSess, assert_eq!(attr.style, ast::AttrStyle::Outer, "inner attributes should prevent cached tokens from existing"); + let source = pprust::attr_to_string(attr); + let macro_filename = FileName::macro_expansion_source_code(&source); if attr.is_sugared_doc { let stream = parse_stream_from_source_str( - FileName::MacroExpansion, - pprust::attr_to_string(attr), + macro_filename, + source, sess, Some(span), ); @@ -805,8 +814,8 @@ fn prepend_attrs(sess: &ParseSess, // should eventually be removed. } else { let stream = parse_stream_from_source_str( - FileName::MacroExpansion, - pprust::path_to_string(&attr.path), + macro_filename, + source, sess, Some(span), ); @@ -815,16 +824,13 @@ fn prepend_attrs(sess: &ParseSess, brackets.push(attr.tokens.clone()); - let tokens = tokenstream::Delimited { - delim: DelimToken::Bracket, - tts: brackets.build().into(), - }; // The span we list here for `#` and for `[ ... ]` are both wrong in // that it encompasses more than each token, but it hopefully is "good // enough" for now at least. 
builder.push(tokenstream::TokenTree::Token(attr.span, Pound)); let delim_span = DelimSpan::from_single(attr.span); - builder.push(tokenstream::TokenTree::Delimited(delim_span, tokens)); + builder.push(tokenstream::TokenTree::Delimited( + delim_span, DelimToken::Bracket, brackets.build().into())); } builder.push(tokens.clone()); Some(builder.build()) |
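The new size assertion pins `Token` at 16 bytes on x86-64, so an accidental enum-variant growth surfaces as a compile error rather than a silent perf regression. `static_assert!` here is rustc's internal macro; one way to write an equivalent check in plain Rust (my sketch, not the compiler's definition):

```rust
macro_rules! static_assert {
    ($name:ident: $test:expr) => {
        // Indexing a one-element array with 1 fails during const evaluation,
        // so this item only compiles when `$test` is true.
        #[allow(dead_code)]
        const $name: () = [()][!($test) as usize];
    };
}

// Mirrors the intent of the Token check: fire if the type ever grows.
#[cfg(target_arch = "x86_64")]
static_assert!(USIZE_IS_8_BYTES: std::mem::size_of::<usize>() == 8);

fn main() {}
```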
