| author | gaurikholkar <f2013002@goa.bits-pilani.ac.in> | 2018-04-05 21:52:40 +0530 |
|---|---|---|
| committer | gaurikholkar <f2013002@goa.bits-pilani.ac.in> | 2018-04-05 21:52:40 +0530 |
| commit | 1b06fe1ef53775d8ff747528d429dc92054c20b2 (patch) | |
| tree | fe6cb06792c90c44f0a4f5b3c8ec3d0ab914dfcb /src/libsyntax/parse | |
| parent | 6c649fbed4d4d86aed16dff8c0245b4871353cd1 (diff) | |
| parent | 56714acc5eb0687ed9a7566fdebe5528657fc5b3 (diff) | |
Merge branch 'master' of https://github.com/rust-lang/rust into e0389
Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 11 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 341 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 618 |
| -rw-r--r-- | src/libsyntax/parse/obsolete.rs | 67 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 253 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 100 |
6 files changed, 711 insertions, 679 deletions
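The bulk of this merge pulls in raw-identifier support: `token::Ident` gains an `is_raw` flag, the lexer consumes an `r#` prefix before an identifier start, and each raw identifier's span is recorded in `ParseSess::raw_identifier_spans` for feature gating. As a rough sketch of the surface syntax this enables (not code from this diff; it assumes a toolchain where raw identifiers are available, as later stabilized with the 2018 edition):

```rust
// Keywords prefixed with `r#` lex as ordinary identifiers.
fn main() {
    // `match` is a keyword, but `r#match` is a plain identifier.
    let r#match = 42;
    println!("{}", r#match);

    // Item names can be raw identifiers too.
    r#loop();
}

// A function named after the `loop` keyword.
fn r#loop() {
    println!("inside r#loop");
}
```

Note that per the lexer change below, path-segment keywords (`self`, `super`, `crate`, `extern`) and `_` are rejected with `` `r#...` is not currently supported``.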
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 053746b579d..4c3f42d9c6b 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -90,7 +90,7 @@ impl<'a> Parser<'a> { debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); - let (span, path, tokens, mut style) = match self.token { + let (span, path, tokens, style) = match self.token { token::Pound => { let lo = self.span; self.bump(); @@ -129,15 +129,6 @@ impl<'a> Parser<'a> { } }; - if inner_parse_policy == InnerAttributeParsePolicy::Permitted && - self.token == token::Semi { - self.bump(); - self.span_warn(span, - "this inner attribute syntax is deprecated. The new syntax is \ - `#![foo]`, with a bang and no semicolon"); - style = ast::AttrStyle::Inner; - } - Ok(ast::Attribute { id: attr::mk_attr_id(), style, diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index cdf38453d7e..068929c8948 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -14,7 +14,7 @@ use codemap::{CodeMap, FilePathMapping}; use errors::{FatalError, DiagnosticBuilder}; use parse::{token, ParseSess}; use str::char_at; -use symbol::Symbol; +use symbol::{Symbol, keywords}; use std_unicode::property::Pattern_White_Space; use std::borrow::Cow; @@ -34,7 +34,7 @@ pub struct TokenAndSpan { impl Default for TokenAndSpan { fn default() -> Self { - TokenAndSpan { tok: token::Underscore, sp: syntax_pos::DUMMY_SP } + TokenAndSpan { tok: token::Whitespace, sp: syntax_pos::DUMMY_SP } } } @@ -126,7 +126,7 @@ impl<'a> StringReader<'a> { pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> { assert!(self.fatal_errs.is_empty()); let ret_val = TokenAndSpan { - tok: replace(&mut self.peek_tok, token::Underscore), + tok: replace(&mut self.peek_tok, token::Whitespace), sp: self.peek_span, }; self.advance_token()?; @@ -214,7 +214,7 @@ impl<'a> StringReader<'a> { // Make the range zero-length if the span is invalid. if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos { - span = span.with_hi(span.lo()); + span = span.shrink_to_lo(); } let mut sr = StringReader::new_raw_internal(sess, begin.fm); @@ -611,7 +611,7 @@ impl<'a> StringReader<'a> { // I guess this is the only way to figure out if // we're at the beginning of the file... let cmap = CodeMap::new(FilePathMapping::empty()); - cmap.files.borrow_mut().push(self.filemap.clone()); + cmap.files.borrow_mut().file_maps.push(self.filemap.clone()); let loc = cmap.lookup_char_pos_adj(self.pos); debug!("Skipping a shebang"); if loc.line == 1 && loc.col == CharPos(0) { @@ -1115,32 +1115,53 @@ impl<'a> StringReader<'a> { /// token, and updates the interner fn next_token_inner(&mut self) -> Result<token::Token, ()> { let c = self.ch; - if ident_start(c) && - match (c.unwrap(), self.nextch(), self.nextnextch()) { - // Note: r as in r" or r#" is part of a raw string literal, - // b as in b' is part of a byte literal. - // They are not identifiers, and are handled further down. - ('r', Some('"'), _) | - ('r', Some('#'), _) | - ('b', Some('"'), _) | - ('b', Some('\''), _) | - ('b', Some('r'), Some('"')) | - ('b', Some('r'), Some('#')) => false, - _ => true, - } { - let start = self.pos; - while ident_continue(self.ch) { - self.bump(); - } - return Ok(self.with_str_from(start, |string| { - if string == "_" { - token::Underscore - } else { - // FIXME: perform NFKC normalization here. 
(Issue #2253) - token::Ident(self.mk_ident(string)) + if ident_start(c) { + let (is_ident_start, is_raw_ident) = + match (c.unwrap(), self.nextch(), self.nextnextch()) { + // r# followed by an identifier starter is a raw identifier. + // This is an exception to the r# case below. + ('r', Some('#'), x) if ident_start(x) => (true, true), + // r as in r" or r#" is part of a raw string literal. + // b as in b' is part of a byte literal. + // They are not identifiers, and are handled further down. + ('r', Some('"'), _) | + ('r', Some('#'), _) | + ('b', Some('"'), _) | + ('b', Some('\''), _) | + ('b', Some('r'), Some('"')) | + ('b', Some('r'), Some('#')) => (false, false), + _ => (true, false), + }; + if is_ident_start { + let raw_start = self.pos; + if is_raw_ident { + // Consume the 'r#' characters. + self.bump(); + self.bump(); + } + + let start = self.pos; + while ident_continue(self.ch) { + self.bump(); } - })); + + return Ok(self.with_str_from(start, |string| { + // FIXME: perform NFKC normalization here. (Issue #2253) + let ident = self.mk_ident(string); + if is_raw_ident && (token::is_path_segment_keyword(ident) || + ident.name == keywords::Underscore.name()) { + self.fatal_span_(raw_start, self.pos, + &format!("`r#{}` is not currently supported.", ident.name) + ).raise(); + } + if is_raw_ident { + let span = self.mk_sp(raw_start, self.pos); + self.sess.raw_identifier_spans.borrow_mut().push(span); + } + token::Ident(ident, is_raw_ident) + })); + } } if is_dec_digit(c) { @@ -1766,6 +1787,7 @@ mod tests { use std::path::PathBuf; use diagnostics::plugin::ErrorMap; use rustc_data_structures::sync::Lock; + use with_globals; fn mk_sess(cm: Lrc<CodeMap>) -> ParseSess { let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), Some(cm.clone()), @@ -1778,6 +1800,7 @@ mod tests { included_mod_stack: RefCell::new(Vec::new()), code_map: cm, missing_fragment_specifiers: RefCell::new(HashSet::new()), + raw_identifier_spans: RefCell::new(Vec::new()), registered_diagnostics: Lock::new(ErrorMap::new()), non_modrs_mods: RefCell::new(vec![]), } @@ -1794,33 +1817,35 @@ mod tests { #[test] fn t1() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut string_reader = setup(&cm, - &sh, - "/* my source file */ fn main() { println!(\"zebra\"); }\n" - .to_string()); - let id = Ident::from_str("fn"); - assert_eq!(string_reader.next_token().tok, token::Comment); - assert_eq!(string_reader.next_token().tok, token::Whitespace); - let tok1 = string_reader.next_token(); - let tok2 = TokenAndSpan { - tok: token::Ident(id), - sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), - }; - assert_eq!(tok1, tok2); - assert_eq!(string_reader.next_token().tok, token::Whitespace); - // the 'main' id is already read: - assert_eq!(string_reader.pos.clone(), BytePos(28)); - // read another token: - let tok3 = string_reader.next_token(); - let tok4 = TokenAndSpan { - tok: token::Ident(Ident::from_str("main")), - sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), - }; - assert_eq!(tok3, tok4); - // the lparen is already read: - assert_eq!(string_reader.pos.clone(), BytePos(29)) + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut string_reader = setup(&cm, + &sh, + "/* my source file */ fn main() { println!(\"zebra\"); }\n" + .to_string()); + let id = Ident::from_str("fn"); + assert_eq!(string_reader.next_token().tok, token::Comment); + assert_eq!(string_reader.next_token().tok, 
token::Whitespace); + let tok1 = string_reader.next_token(); + let tok2 = TokenAndSpan { + tok: token::Ident(id, false), + sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), + }; + assert_eq!(tok1, tok2); + assert_eq!(string_reader.next_token().tok, token::Whitespace); + // the 'main' id is already read: + assert_eq!(string_reader.pos.clone(), BytePos(28)); + // read another token: + let tok3 = string_reader.next_token(); + let tok4 = TokenAndSpan { + tok: mk_ident("main"), + sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), + }; + assert_eq!(tok3, tok4); + // the lparen is already read: + assert_eq!(string_reader.pos.clone(), BytePos(29)) + }) } // check that the given reader produces the desired stream @@ -1833,118 +1858,138 @@ mod tests { // make the identifier by looking up the string in the interner fn mk_ident(id: &str) -> token::Token { - token::Ident(Ident::from_str(id)) + token::Token::from_ast_ident(Ident::from_str(id)) } #[test] fn doublecolonparsing() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a b".to_string()), - vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a b".to_string()), + vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); + }) } #[test] fn dcparsing_2() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a::b".to_string()), - vec![mk_ident("a"), token::ModSep, mk_ident("b")]); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a::b".to_string()), + vec![mk_ident("a"), token::ModSep, mk_ident("b")]); + }) } #[test] fn dcparsing_3() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a ::b".to_string()), - vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a ::b".to_string()), + vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); + }) } #[test] fn dcparsing_4() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - check_tokenization(setup(&cm, &sh, "a:: b".to_string()), - vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + check_tokenization(setup(&cm, &sh, "a:: b".to_string()), + vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); + }) } #[test] fn character_a() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern("a")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern("a")), None)); + }) } #[test] fn character_space() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "' 
'".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern(" ")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern(" ")), None)); + }) } #[test] fn character_escaped() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, - token::Literal(token::Char(Symbol::intern("\\n")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, + token::Literal(token::Char(Symbol::intern("\\n")), None)); + }) } #[test] fn lifetime_name() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, - token::Lifetime(Ident::from_str("'abc"))); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, + token::Lifetime(Ident::from_str("'abc"))); + }) } #[test] fn raw_string() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) - .next_token() - .tok, - token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) + .next_token() + .tok, + token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); + }) } #[test] fn literal_suffixes() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - macro_rules! test { - ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ - assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(Symbol::intern($tok_contents)), - Some(Symbol::intern("suffix")))); - // with a whitespace separator: - assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(Symbol::intern($tok_contents)), - None)); - }} - } - - test!("'a'", Char, "a"); - test!("b'a'", Byte, "a"); - test!("\"a\"", Str_, "a"); - test!("b\"a\"", ByteStr, "a"); - test!("1234", Integer, "1234"); - test!("0b101", Integer, "0b101"); - test!("0xABC", Integer, "0xABC"); - test!("1.0", Float, "1.0"); - test!("1.0e10", Float, "1.0e10"); - - assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, - token::Literal(token::Integer(Symbol::intern("2")), - Some(Symbol::intern("us")))); - assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::StrRaw(Symbol::intern("raw"), 3), - Some(Symbol::intern("suffix")))); - assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), - Some(Symbol::intern("suffix")))); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + macro_rules! 
test { + ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ + assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + Some(Symbol::intern("suffix")))); + // with a whitespace separator: + assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + None)); + }} + } + + test!("'a'", Char, "a"); + test!("b'a'", Byte, "a"); + test!("\"a\"", Str_, "a"); + test!("b\"a\"", ByteStr, "a"); + test!("1234", Integer, "1234"); + test!("0b101", Integer, "0b101"); + test!("0xABC", Integer, "0xABC"); + test!("1.0", Float, "1.0"); + test!("1.0e10", Float, "1.0e10"); + + assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, + token::Literal(token::Integer(Symbol::intern("2")), + Some(Symbol::intern("us")))); + assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, + token::Literal(token::StrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); + assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, + token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); + }) } #[test] @@ -1956,27 +2001,31 @@ mod tests { #[test] fn nested_block_comments() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); - match lexer.next_token().tok { - token::Comment => {} - _ => panic!("expected a comment!"), - } - assert_eq!(lexer.next_token().tok, - token::Literal(token::Char(Symbol::intern("a")), None)); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); + match lexer.next_token().tok { + token::Comment => {} + _ => panic!("expected a comment!"), + } + assert_eq!(lexer.next_token().tok, + token::Literal(token::Char(Symbol::intern("a")), None)); + }) } #[test] fn crlf_comments() { - let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); - let sh = mk_sess(cm.clone()); - let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); - let comment = lexer.next_token(); - assert_eq!(comment.tok, token::Comment); - assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); - assert_eq!(lexer.next_token().tok, token::Whitespace); - assert_eq!(lexer.next_token().tok, - token::DocComment(Symbol::intern("/// test"))); + with_globals(|| { + let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); + let sh = mk_sess(cm.clone()); + let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); + let comment = lexer.next_token(); + assert_eq!(comment.tok, token::Comment); + assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); + assert_eq!(lexer.next_token().tok, token::Whitespace); + assert_eq!(lexer.next_token().tok, + token::DocComment(Symbol::intern("/// test"))); + }) } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 3fb0c209f70..1483691a1ea 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -40,7 +40,6 @@ pub mod attr; pub mod common; pub mod classify; -pub mod obsolete; /// Info about a parsing session. 
pub struct ParseSess { @@ -48,6 +47,9 @@ pub struct ParseSess { pub unstable_features: UnstableFeatures, pub config: CrateConfig, pub missing_fragment_specifiers: RefCell<HashSet<Span>>, + /// Places where raw identifiers were used. This is used for feature gating + /// raw identifiers + pub raw_identifier_spans: RefCell<Vec<Span>>, /// The registered diagnostics codes pub registered_diagnostics: Lock<ErrorMap>, // Spans where a `mod foo;` statement was included in a non-mod.rs file. @@ -74,6 +76,7 @@ impl ParseSess { unstable_features: UnstableFeatures::from_environment(), config: HashSet::new(), missing_fragment_specifiers: RefCell::new(HashSet::new()), + raw_identifier_spans: RefCell::new(Vec::new()), registered_diagnostics: Lock::new(ErrorMap::new()), included_mod_stack: RefCell::new(vec![]), code_map, @@ -298,7 +301,6 @@ pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String { debug!("parse_str_lit: given {}", escape_default(lit)); let mut res = String::with_capacity(lit.len()); - // FIXME #8372: This could be a for-loop if it didn't borrow the iterator let error = |i| format!("lexer should have rejected {} at {}", lit, i); /// Eat everything up to a non-whitespace @@ -503,7 +505,6 @@ pub fn byte_lit(lit: &str) -> (u8, usize) { pub fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> { let mut res = Vec::with_capacity(lit.len()); - // FIXME #8372: This could be a for-loop if it didn't borrow the iterator let error = |i| format!("lexer should have rejected {} at {}", lit, i); /// Eat everything up to a non-whitespace @@ -680,6 +681,7 @@ mod tests { use util::parser_testing::{string_to_stream, string_to_parser}; use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt}; use util::ThinVec; + use with_globals; // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { @@ -691,156 +693,172 @@ mod tests { } #[test] fn path_exprs_1() { - assert!(string_to_expr("a".to_string()) == - P(ast::Expr{ - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, ast::Path { + with_globals(|| { + assert!(string_to_expr("a".to_string()) == + P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, ast::Path { + span: sp(0, 1), + segments: vec![str2seg("a", 0, 1)], + }), span: sp(0, 1), - segments: vec![str2seg("a", 0, 1)], - }), - span: sp(0, 1), - attrs: ThinVec::new(), - })) + attrs: ThinVec::new(), + })) + }) } #[test] fn path_exprs_2 () { - assert!(string_to_expr("::a::b".to_string()) == - P(ast::Expr { - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, ast::Path { + with_globals(|| { + assert!(string_to_expr("::a::b".to_string()) == + P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, ast::Path { + span: sp(0, 6), + segments: vec![ast::PathSegment::crate_root(sp(0, 0)), + str2seg("a", 2, 3), + str2seg("b", 5, 6)] + }), span: sp(0, 6), - segments: vec![ast::PathSegment::crate_root(sp(0, 2)), - str2seg("a", 2, 3), - str2seg("b", 5, 6)] - }), - span: sp(0, 6), - attrs: ThinVec::new(), - })) + attrs: ThinVec::new(), + })) + }) } #[should_panic] #[test] fn bad_path_expr_1() { - string_to_expr("::abc::def::return".to_string()); + with_globals(|| { + string_to_expr("::abc::def::return".to_string()); + }) } // check the token-tree-ization of macros #[test] fn string_to_tts_macro () { - let tts: Vec<_> = - string_to_stream("macro_rules! 
zip (($a)=>($a))".to_string()).trees().collect(); - let tts: &[TokenTree] = &tts[..]; - - match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { - ( - 4, - Some(&TokenTree::Token(_, token::Ident(name_macro_rules))), - Some(&TokenTree::Token(_, token::Not)), - Some(&TokenTree::Token(_, token::Ident(name_zip))), - Some(&TokenTree::Delimited(_, ref macro_delimed)), - ) - if name_macro_rules.name == "macro_rules" - && name_zip.name == "zip" => { - let tts = ¯o_delimed.stream().trees().collect::<Vec<_>>(); - match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { - ( - 3, - Some(&TokenTree::Delimited(_, ref first_delimed)), - Some(&TokenTree::Token(_, token::FatArrow)), - Some(&TokenTree::Delimited(_, ref second_delimed)), - ) - if macro_delimed.delim == token::Paren => { - let tts = &first_delimed.stream().trees().collect::<Vec<_>>(); - match (tts.len(), tts.get(0), tts.get(1)) { - ( - 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident))), - ) - if first_delimed.delim == token::Paren && ident.name == "a" => {}, - _ => panic!("value 3: {:?}", *first_delimed), - } - let tts = &second_delimed.stream().trees().collect::<Vec<_>>(); - match (tts.len(), tts.get(0), tts.get(1)) { - ( - 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident))), - ) - if second_delimed.delim == token::Paren - && ident.name == "a" => {}, - _ => panic!("value 4: {:?}", *second_delimed), - } - }, - _ => panic!("value 2: {:?}", *macro_delimed), - } - }, - _ => panic!("value: {:?}",tts), - } + with_globals(|| { + let tts: Vec<_> = + string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect(); + let tts: &[TokenTree] = &tts[..]; + + match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { + ( + 4, + Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))), + Some(&TokenTree::Token(_, token::Not)), + Some(&TokenTree::Token(_, token::Ident(name_zip, false))), + Some(&TokenTree::Delimited(_, ref macro_delimed)), + ) + if name_macro_rules.name == "macro_rules" + && name_zip.name == "zip" => { + let tts = ¯o_delimed.stream().trees().collect::<Vec<_>>(); + match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { + ( + 3, + Some(&TokenTree::Delimited(_, ref first_delimed)), + Some(&TokenTree::Token(_, token::FatArrow)), + Some(&TokenTree::Delimited(_, ref second_delimed)), + ) + if macro_delimed.delim == token::Paren => { + let tts = &first_delimed.stream().trees().collect::<Vec<_>>(); + match (tts.len(), tts.get(0), tts.get(1)) { + ( + 2, + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident, false))), + ) + if first_delimed.delim == token::Paren && ident.name == "a" => {}, + _ => panic!("value 3: {:?}", *first_delimed), + } + let tts = &second_delimed.stream().trees().collect::<Vec<_>>(); + match (tts.len(), tts.get(0), tts.get(1)) { + ( + 2, + Some(&TokenTree::Token(_, token::Dollar)), + Some(&TokenTree::Token(_, token::Ident(ident, false))), + ) + if second_delimed.delim == token::Paren + && ident.name == "a" => {}, + _ => panic!("value 4: {:?}", *second_delimed), + } + }, + _ => panic!("value 2: {:?}", *macro_delimed), + } + }, + _ => panic!("value: {:?}",tts), + } + }) } #[test] fn string_to_tts_1() { - let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); - - let expected = TokenStream::concat(vec![ - TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(), - TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(), 
- TokenTree::Delimited( - sp(5, 14), - tokenstream::Delimited { - delim: token::DelimToken::Paren, - tts: TokenStream::concat(vec![ - TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(), - TokenTree::Token(sp(8, 9), token::Colon).into(), - TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(), - ]).into(), - }).into(), - TokenTree::Delimited( - sp(15, 21), - tokenstream::Delimited { - delim: token::DelimToken::Brace, - tts: TokenStream::concat(vec![ - TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(), - TokenTree::Token(sp(18, 19), token::Semi).into(), - ]).into(), - }).into() - ]); - - assert_eq!(tts, expected); + with_globals(|| { + let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); + + let expected = TokenStream::concat(vec![ + TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(), + TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(), + TokenTree::Delimited( + sp(5, 14), + tokenstream::Delimited { + delim: token::DelimToken::Paren, + tts: TokenStream::concat(vec![ + TokenTree::Token(sp(6, 7), + token::Ident(Ident::from_str("b"), false)).into(), + TokenTree::Token(sp(8, 9), token::Colon).into(), + TokenTree::Token(sp(10, 13), + token::Ident(Ident::from_str("i32"), false)).into(), + ]).into(), + }).into(), + TokenTree::Delimited( + sp(15, 21), + tokenstream::Delimited { + delim: token::DelimToken::Brace, + tts: TokenStream::concat(vec![ + TokenTree::Token(sp(17, 18), + token::Ident(Ident::from_str("b"), false)).into(), + TokenTree::Token(sp(18, 19), token::Semi).into(), + ]).into(), + }).into() + ]); + + assert_eq!(tts, expected); + }) } #[test] fn ret_expr() { - assert!(string_to_expr("return d".to_string()) == - P(ast::Expr{ - id: ast::DUMMY_NODE_ID, - node:ast::ExprKind::Ret(Some(P(ast::Expr{ + with_globals(|| { + assert!(string_to_expr("return d".to_string()) == + P(ast::Expr{ id: ast::DUMMY_NODE_ID, - node:ast::ExprKind::Path(None, ast::Path{ - span: sp(7, 8), - segments: vec![str2seg("d", 7, 8)], - }), - span:sp(7,8), + node:ast::ExprKind::Ret(Some(P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node:ast::ExprKind::Path(None, ast::Path{ + span: sp(7, 8), + segments: vec![str2seg("d", 7, 8)], + }), + span:sp(7,8), + attrs: ThinVec::new(), + }))), + span:sp(0,8), attrs: ThinVec::new(), - }))), - span:sp(0,8), - attrs: ThinVec::new(), - })) + })) + }) } #[test] fn parse_stmt_1 () { - assert!(string_to_stmt("b;".to_string()) == - Some(ast::Stmt { - node: ast::StmtKind::Expr(P(ast::Expr { - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, ast::Path { - span:sp(0,1), - segments: vec![str2seg("b", 0, 1)], - }), - span: sp(0,1), - attrs: ThinVec::new()})), - id: ast::DUMMY_NODE_ID, - span: sp(0,1)})) - + with_globals(|| { + assert!(string_to_stmt("b;".to_string()) == + Some(ast::Stmt { + node: ast::StmtKind::Expr(P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, ast::Path { + span:sp(0,1), + segments: vec![str2seg("b", 0, 1)], + }), + span: sp(0,1), + attrs: ThinVec::new()})), + id: ast::DUMMY_NODE_ID, + span: sp(0,1)})) + }) } fn parser_done(p: Parser){ @@ -848,120 +866,128 @@ mod tests { } #[test] fn parse_ident_pat () { - let sess = ParseSess::new(FilePathMapping::empty()); - let mut parser = string_to_parser(&sess, "b".to_string()); - assert!(panictry!(parser.parse_pat()) - == P(ast::Pat{ - id: ast::DUMMY_NODE_ID, - node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), - Spanned{ span:sp(0, 1), - node: 
Ident::from_str("b") - }, - None), - span: sp(0,1)})); - parser_done(parser); + with_globals(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + let mut parser = string_to_parser(&sess, "b".to_string()); + assert!(panictry!(parser.parse_pat()) + == P(ast::Pat{ + id: ast::DUMMY_NODE_ID, + node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), + Spanned{ span:sp(0, 1), + node: Ident::from_str("b") + }, + None), + span: sp(0,1)})); + parser_done(parser); + }) } // check the contents of the tt manually: #[test] fn parse_fundecl () { - // this test depends on the intern order of "fn" and "i32" - let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| { - m.map(|mut m| { - m.tokens = None; - m - }) - }); - assert_eq!(item, - Some( - P(ast::Item{ident:Ident::from_str("a"), - attrs:Vec::new(), - id: ast::DUMMY_NODE_ID, - tokens: None, - node: ast::ItemKind::Fn(P(ast::FnDecl { - inputs: vec![ast::Arg{ - ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, - node: ast::TyKind::Path(None, ast::Path{ - span:sp(10,13), - segments: vec![str2seg("i32", 10, 13)], + with_globals(|| { + // this test depends on the intern order of "fn" and "i32" + let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| { + m.map(|mut m| { + m.tokens = None; + m + }) + }); + assert_eq!(item, + Some( + P(ast::Item{ident:Ident::from_str("a"), + attrs:Vec::new(), + id: ast::DUMMY_NODE_ID, + tokens: None, + node: ast::ItemKind::Fn(P(ast::FnDecl { + inputs: vec![ast::Arg{ + ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, + node: ast::TyKind::Path(None, ast::Path{ + span:sp(10,13), + segments: vec![str2seg("i32", 10, 13)], + }), + span:sp(10,13) }), - span:sp(10,13) - }), - pat: P(ast::Pat { - id: ast::DUMMY_NODE_ID, - node: PatKind::Ident( - ast::BindingMode::ByValue( - ast::Mutability::Immutable), - Spanned{ - span: sp(6,7), - node: Ident::from_str("b")}, - None - ), - span: sp(6,7) - }), - id: ast::DUMMY_NODE_ID - }], - output: ast::FunctionRetTy::Default(sp(15, 15)), - variadic: false - }), - ast::Unsafety::Normal, - Spanned { - span: sp(0,2), - node: ast::Constness::NotConst, - }, - Abi::Rust, - ast::Generics{ - params: Vec::new(), - where_clause: ast::WhereClause { + pat: P(ast::Pat { id: ast::DUMMY_NODE_ID, - predicates: Vec::new(), + node: PatKind::Ident( + ast::BindingMode::ByValue( + ast::Mutability::Immutable), + Spanned{ + span: sp(6,7), + node: Ident::from_str("b")}, + None + ), + span: sp(6,7) + }), + id: ast::DUMMY_NODE_ID + }], + output: ast::FunctionRetTy::Default(sp(15, 15)), + variadic: false + }), + ast::Unsafety::Normal, + Spanned { + span: sp(0,2), + node: ast::Constness::NotConst, + }, + Abi::Rust, + ast::Generics{ + params: Vec::new(), + where_clause: ast::WhereClause { + id: ast::DUMMY_NODE_ID, + predicates: Vec::new(), + span: syntax_pos::DUMMY_SP, + }, span: syntax_pos::DUMMY_SP, }, - span: syntax_pos::DUMMY_SP, - }, - P(ast::Block { - stmts: vec![ast::Stmt { - node: ast::StmtKind::Semi(P(ast::Expr{ + P(ast::Block { + stmts: vec![ast::Stmt { + node: ast::StmtKind::Semi(P(ast::Expr{ + id: ast::DUMMY_NODE_ID, + node: ast::ExprKind::Path(None, + ast::Path{ + span:sp(17,18), + segments: vec![str2seg("b", 17, 18)], + }), + span: sp(17,18), + attrs: ThinVec::new()})), id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Path(None, - ast::Path{ - span:sp(17,18), - segments: vec![str2seg("b", 17, 18)], - }), - span: sp(17,18), - attrs: ThinVec::new()})), + span: sp(17,19)}], id: ast::DUMMY_NODE_ID, - span: sp(17,19)}], - id: ast::DUMMY_NODE_ID, - rules: ast::BlockCheckMode::Default, 
// no idea - span: sp(15,21), - recovered: false, - })), - vis: respan(sp(0, 0), ast::VisibilityKind::Inherited), - span: sp(0,21)}))); + rules: ast::BlockCheckMode::Default, // no idea + span: sp(15,21), + recovered: false, + })), + vis: respan(sp(0, 0), ast::VisibilityKind::Inherited), + span: sp(0,21)}))); + }) } #[test] fn parse_use() { - let use_s = "use foo::bar::baz;"; - let vitem = string_to_item(use_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], use_s); - - let use_s = "use foo::bar as baz;"; - let vitem = string_to_item(use_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], use_s); + with_globals(|| { + let use_s = "use foo::bar::baz;"; + let vitem = string_to_item(use_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], use_s); + + let use_s = "use foo::bar as baz;"; + let vitem = string_to_item(use_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], use_s); + }) } #[test] fn parse_extern_crate() { - let ex_s = "extern crate foo;"; - let vitem = string_to_item(ex_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], ex_s); - - let ex_s = "extern crate foo as bar;"; - let vitem = string_to_item(ex_s.to_string()).unwrap(); - let vitem_s = item_to_string(&vitem); - assert_eq!(&vitem_s[..], ex_s); + with_globals(|| { + let ex_s = "extern crate foo;"; + let vitem = string_to_item(ex_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], ex_s); + + let ex_s = "extern crate foo as bar;"; + let vitem = string_to_item(ex_s.to_string()).unwrap(); + let vitem_s = item_to_string(&vitem); + assert_eq!(&vitem_s[..], ex_s); + }) } fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { @@ -988,31 +1014,36 @@ mod tests { } #[test] fn span_of_self_arg_pat_idents_are_correct() { - - let srcs = ["impl z { fn a (&self, &myarg: i32) {} }", - "impl z { fn a (&mut self, &myarg: i32) {} }", - "impl z { fn a (&'a self, &myarg: i32) {} }", - "impl z { fn a (self, &myarg: i32) {} }", - "impl z { fn a (self: Foo, &myarg: i32) {} }", - ]; - - for &src in &srcs { - let spans = get_spans_of_pat_idents(src); - let (lo, hi) = (spans[0].lo(), spans[0].hi()); - assert!("self" == &src[lo.to_usize()..hi.to_usize()], - "\"{}\" != \"self\". src=\"{}\"", - &src[lo.to_usize()..hi.to_usize()], src) - } + with_globals(|| { + + let srcs = ["impl z { fn a (&self, &myarg: i32) {} }", + "impl z { fn a (&mut self, &myarg: i32) {} }", + "impl z { fn a (&'a self, &myarg: i32) {} }", + "impl z { fn a (self, &myarg: i32) {} }", + "impl z { fn a (self: Foo, &myarg: i32) {} }", + ]; + + for &src in &srcs { + let spans = get_spans_of_pat_idents(src); + let (lo, hi) = (spans[0].lo(), spans[0].hi()); + assert!("self" == &src[lo.to_usize()..hi.to_usize()], + "\"{}\" != \"self\". src=\"{}\"", + &src[lo.to_usize()..hi.to_usize()], src) + } + }) } #[test] fn parse_exprs () { - // just make sure that they parse.... - string_to_expr("3 + 4".to_string()); - string_to_expr("a::z.froob(b,&(987+3))".to_string()); + with_globals(|| { + // just make sure that they parse.... 
+ string_to_expr("3 + 4".to_string()); + string_to_expr("a::z.froob(b,&(987+3))".to_string()); + }) } #[test] fn attrs_fix_bug () { - string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) + with_globals(|| { + string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) -> Result<Box<Writer>, String> { #[cfg(windows)] fn wb() -> c_int { @@ -1024,49 +1055,54 @@ mod tests { let mut fflags: c_int = wb(); }".to_string()); + }) } #[test] fn crlf_doc_comments() { - let sess = ParseSess::new(FilePathMapping::empty()); - - let name = FileName::Custom("source".to_string()); - let source = "/// doc comment\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name.clone(), source, &sess) - .unwrap().unwrap(); - let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(doc, "/// doc comment"); - - let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name.clone(), source, &sess) - .unwrap().unwrap(); - let docs = item.attrs.iter().filter(|a| a.path == "doc") - .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>(); - let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(&docs[..], b); - - let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap(); - let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(doc, "/** doc comment\n * with CRLF */"); + with_globals(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + + let name = FileName::Custom("source".to_string()); + let source = "/// doc comment\r\nfn foo() {}".to_string(); + let item = parse_item_from_source_str(name.clone(), source, &sess) + .unwrap().unwrap(); + let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); + assert_eq!(doc, "/// doc comment"); + + let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); + let item = parse_item_from_source_str(name.clone(), source, &sess) + .unwrap().unwrap(); + let docs = item.attrs.iter().filter(|a| a.path == "doc") + .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>(); + let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; + assert_eq!(&docs[..], b); + + let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); + let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap(); + let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); + assert_eq!(doc, "/** doc comment\n * with CRLF */"); + }); } #[test] fn ttdelim_span() { - let sess = ParseSess::new(FilePathMapping::empty()); - let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(), - "foo!( fn main() { body } )".to_string(), &sess).unwrap(); - - let tts: Vec<_> = match expr.node { - ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(), - _ => panic!("not a macro"), - }; + with_globals(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(), + "foo!( fn main() { body } )".to_string(), &sess).unwrap(); + + let tts: Vec<_> = match expr.node { + ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(), + _ => panic!("not a macro"), + }; - let span = tts.iter().rev().next().unwrap().span(); + let span = tts.iter().rev().next().unwrap().span(); - match sess.codemap().span_to_snippet(span) { - Ok(s) => assert_eq!(&s[..], "{ body }"), - 
Err(_) => panic!("could not get snippet"), - } + match sess.codemap().span_to_snippet(span) { + Ok(s) => assert_eq!(&s[..], "{ body }"), + Err(_) => panic!("could not get snippet"), + } + }); } // This tests that when parsing a string (rather than a file) we don't try @@ -1074,17 +1110,19 @@ mod tests { // See `recurse_into_file_modules` in the parser. #[test] fn out_of_line_mod() { - let sess = ParseSess::new(FilePathMapping::empty()); - let item = parse_item_from_source_str( - PathBuf::from("foo").into(), - "mod foo { struct S; mod this_does_not_exist; }".to_owned(), - &sess, - ).unwrap().unwrap(); - - if let ast::ItemKind::Mod(ref m) = item.node { - assert!(m.items.len() == 2); - } else { - panic!(); - } + with_globals(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + let item = parse_item_from_source_str( + PathBuf::from("foo").into(), + "mod foo { struct S; mod this_does_not_exist; }".to_owned(), + &sess, + ).unwrap().unwrap(); + + if let ast::ItemKind::Mod(ref m) = item.node { + assert!(m.items.len() == 2); + } else { + panic!(); + } + }); } } diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs deleted file mode 100644 index 49a697edf41..00000000000 --- a/src/libsyntax/parse/obsolete.rs +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Support for parsing unsupported, old syntaxes, for the purpose of reporting errors. Parsing of -//! these syntaxes is tested by compile-test/obsolete-syntax.rs. -//! -//! Obsolete syntax that becomes too hard to parse can be removed. - -use syntax_pos::Span; -use parse::parser; - -/// The specific types of unsupported syntax -#[derive(Copy, Clone, PartialEq, Eq, Hash)] -pub enum ObsoleteSyntax { - // Nothing here at the moment -} - -pub trait ParserObsoleteMethods { - /// Reports an obsolete syntax non-fatal error. - fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax); - fn report(&mut self, - sp: Span, - kind: ObsoleteSyntax, - kind_str: &str, - desc: &str, - error: bool); -} - -impl<'a> ParserObsoleteMethods for parser::Parser<'a> { - /// Reports an obsolete syntax non-fatal error. 
- #[allow(unused_variables)] - #[allow(unreachable_code)] - fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax) { - let (kind_str, desc, error) = match kind { - // Nothing here at the moment - }; - - self.report(sp, kind, kind_str, desc, error); - } - - fn report(&mut self, - sp: Span, - kind: ObsoleteSyntax, - kind_str: &str, - desc: &str, - error: bool) { - let mut err = if error { - self.diagnostic().struct_span_err(sp, &format!("obsolete syntax: {}", kind_str)) - } else { - self.diagnostic().struct_span_warn(sp, &format!("obsolete syntax: {}", kind_str)) - }; - - if !self.obsolete_set.contains(&kind) && - (error || self.sess.span_diagnostic.flags.can_emit_warnings) { - err.note(desc); - self.obsolete_set.insert(kind); - } - err.emit(); - } -} diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index f5aa01fb034..f5ab023b30e 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -48,7 +48,6 @@ use parse::{self, classify, token}; use parse::common::SeqSep; use parse::lexer::TokenAndSpan; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use parse::obsolete::ObsoleteSyntax; use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use util::parser::{AssocOp, Fixity}; use print::pprust; @@ -59,7 +58,6 @@ use symbol::{Symbol, keywords}; use util::ThinVec; use std::cmp; -use std::collections::HashSet; use std::mem; use std::path::{self, Path, PathBuf}; use std::slice; @@ -229,9 +227,6 @@ pub struct Parser<'a> { /// the previous token kind prev_token_kind: PrevTokenKind, pub restrictions: Restrictions, - /// The set of seen errors about obsolete syntax. Used to suppress - /// extra detail when the same error is seen twice - pub obsolete_set: HashSet<ObsoleteSyntax>, /// Used to determine the path to externally loaded source files pub directory: Directory, /// Whether to parse sub-modules in other files. 
@@ -358,7 +353,7 @@ impl TokenCursor { let body = TokenTree::Delimited(sp, Delimited { delim: token::Bracket, - tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))), + tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)), TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Literal( token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))] @@ -460,7 +455,7 @@ impl Error { ref dir_path } => { let mut err = struct_span_err!(handler, sp, E0583, "file not found for module `{}`", mod_name); - err.help(&format!("name the file either {} or {} inside the directory {:?}", + err.help(&format!("name the file either {} or {} inside the directory \"{}\"", default_path, secondary_path, dir_path)); @@ -549,13 +544,12 @@ impl<'a> Parser<'a> { -> Self { let mut parser = Parser { sess, - token: token::Underscore, + token: token::Whitespace, span: syntax_pos::DUMMY_SP, prev_span: syntax_pos::DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, restrictions: Restrictions::empty(), - obsolete_set: HashSet::new(), recurse_into_file_modules, directory: Directory { path: PathBuf::new(), @@ -784,7 +778,7 @@ impl<'a> Parser<'a> { fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { match self.token { - token::Ident(i) => { + token::Ident(i, _) => { if self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); if recover { @@ -800,11 +794,7 @@ impl<'a> Parser<'a> { Err(if self.prev_token_kind == PrevTokenKind::DocComment { self.span_fatal_err(self.prev_span, Error::UselessDocComment) } else { - let mut err = self.expected_ident_found(); - if self.token == token::Underscore { - err.note("`_` is a wildcard pattern, not an identifier"); - } - err + self.expected_ident_found() }) } } @@ -1512,7 +1502,7 @@ impl<'a> Parser<'a> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?)) } else { - Ok(FunctionRetTy::Default(self.span.with_hi(self.span.lo()))) + Ok(FunctionRetTy::Default(self.span.shrink_to_lo())) } } @@ -1602,7 +1592,7 @@ impl<'a> Parser<'a> { let e = self.parse_expr()?; self.expect(&token::CloseDelim(token::Paren))?; TyKind::Typeof(e) - } else if self.eat(&token::Underscore) { + } else if self.eat_keyword(keywords::Underscore) { // A type to be inferred `_` TyKind::Infer } else if self.token_is_bare_fn_keyword() { @@ -1796,7 +1786,7 @@ impl<'a> Parser<'a> { _ => 0, }; - self.look_ahead(offset, |t| t.is_ident() || t == &token::Underscore) && + self.look_ahead(offset, |t| t.is_ident()) && self.look_ahead(offset + 1, |t| t == &token::Colon) } @@ -1929,7 +1919,7 @@ impl<'a> Parser<'a> { pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { match self.token { - token::Ident(sid) if self.token.is_path_segment_keyword() => { + token::Ident(sid, _) if self.token.is_path_segment_keyword() => { self.bump(); Ok(sid) } @@ -1990,7 +1980,7 @@ impl<'a> Parser<'a> { let lo = self.meta_var_span.unwrap_or(self.span); let mut segments = Vec::new(); if self.eat(&token::ModSep) { - segments.push(PathSegment::crate_root(lo)); + segments.push(PathSegment::crate_root(lo.shrink_to_lo())); } self.parse_path_segments(&mut segments, style, enable_warning)?; @@ -2025,7 +2015,7 @@ impl<'a> Parser<'a> { loop { segments.push(self.parse_path_segment(style, enable_warning)?); - if self.is_import_coupler(false) || !self.eat(&token::ModSep) { + if self.is_import_coupler() || !self.eat(&token::ModSep) { return Ok(()); } } @@ -2744,11 +2734,14 @@ impl<'a> Parser<'a> { } pub fn 
process_potential_macro_variable(&mut self) { - let ident = match self.token { + let (ident, is_raw) = match self.token { token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() && self.look_ahead(1, |t| t.is_ident()) => { self.bump(); - let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() }; + let name = match self.token { + token::Ident(ident, _) => ident, + _ => unreachable!() + }; let mut err = self.fatal(&format!("unknown macro variable `{}`", name)); err.span_label(self.span, "unknown macro variable"); err.emit(); @@ -2757,13 +2750,13 @@ impl<'a> Parser<'a> { token::Interpolated(ref nt) => { self.meta_var_span = Some(self.span); match nt.0 { - token::NtIdent(ident) => ident, + token::NtIdent(ident, is_raw) => (ident, is_raw), _ => return, } } _ => return, }; - self.token = token::Ident(ident.node); + self.token = token::Ident(ident.node, is_raw); self.span = ident.span; } @@ -2782,7 +2775,7 @@ impl<'a> Parser<'a> { }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { - let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span); + let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span); self.bump(); TokenTree::Token(span, token) } @@ -2831,9 +2824,10 @@ impl<'a> Parser<'a> { let (span, e) = self.interpolated_or_expr_span(e)?; let span_of_tilde = lo; let mut err = self.diagnostic().struct_span_err(span_of_tilde, - "`~` can not be used as a unary operator"); - err.span_label(span_of_tilde, "did you mean `!`?"); - err.help("use `!` instead of `~` if you meant to perform bitwise negation"); + "`~` cannot be used as a unary operator"); + err.span_suggestion_short(span_of_tilde, + "use `!` to perform bitwise negation", + "!".to_owned()); err.emit(); (lo.to(span), self.mk_unary(UnOp::Not, e)) } @@ -2856,17 +2850,6 @@ impl<'a> Parser<'a> { let (span, e) = self.interpolated_or_expr_span(e)?; (lo.to(span), ExprKind::AddrOf(m, e)) } - token::Ident(..) if self.token.is_keyword(keywords::In) => { - self.bump(); - let place = self.parse_expr_res( - Restrictions::NO_STRUCT_LITERAL, - None, - )?; - let blk = self.parse_block()?; - let span = blk.span; - let blk_expr = self.mk_expr(span, ExprKind::Block(blk), ThinVec::new()); - (lo.to(span), ExprKind::InPlace(place, blk_expr)) - } token::Ident(..) 
if self.token.is_keyword(keywords::Box) => { self.bump(); let e = self.parse_prefix_expr(None); @@ -3029,8 +3012,6 @@ impl<'a> Parser<'a> { } AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()), - AssocOp::Inplace => - self.mk_expr(span, ExprKind::InPlace(lhs, rhs), ThinVec::new()), AssocOp::AssignOp(k) => { let aop = match k { token::Plus => BinOpKind::Add, @@ -3389,7 +3370,7 @@ impl<'a> Parser<'a> { None)?; if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { if self.token == token::Token::Semi { - e.span_note(match_span, "did you mean to remove this `match` keyword?"); + e.span_suggestion_short(match_span, "try removing this `match`", "".to_owned()); } return Err(e) } @@ -3621,7 +3602,7 @@ impl<'a> Parser<'a> { slice = Some(P(Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Wild, - span: self.span, + span: self.prev_span, })); before_slice = false; } @@ -3675,7 +3656,13 @@ impl<'a> Parser<'a> { if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); let mut err = self.fatal(&format!("expected `{}`, found `{}`", "}", token_str)); - err.span_label(self.span, "expected `}`"); + if self.token == token::Comma { // Issue #49257 + err.span_label(self.span, + "`..` must be in the last position, \ + and cannot have a trailing comma"); + } else { + err.span_label(self.span, "expected `}`"); + } return Err(err); } etc = true; @@ -3803,16 +3790,17 @@ impl<'a> Parser<'a> { /// Parse a pattern. pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> { + self.parse_pat_with_range_pat(true) + } + + /// Parse a pattern, with a setting whether modern range patterns e.g. `a..=b`, `a..b` are + /// allowed. + fn parse_pat_with_range_pat(&mut self, allow_range_pat: bool) -> PResult<'a, P<Pat>> { maybe_whole!(self, NtPat, |x| x); let lo = self.span; let pat; match self.token { - token::Underscore => { - // Parse _ - self.bump(); - pat = PatKind::Wild; - } token::BinOp(token::And) | token::AndAnd => { // Parse &pat / &mut pat self.expect_and()?; @@ -3823,7 +3811,7 @@ impl<'a> Parser<'a> { err.span_label(self.span, "unexpected lifetime"); return Err(err); } - let subpat = self.parse_pat()?; + let subpat = self.parse_pat_with_range_pat(false)?; pat = PatKind::Ref(subpat, mutbl); } token::OpenDelim(token::Paren) => { @@ -3842,8 +3830,11 @@ impl<'a> Parser<'a> { self.expect(&token::CloseDelim(token::Bracket))?; pat = PatKind::Slice(before, slice, after); } - // At this point, token != _, &, &&, (, [ - _ => if self.eat_keyword(keywords::Mut) { + // At this point, token != &, &&, (, [ + _ => if self.eat_keyword(keywords::Underscore) { + // Parse _ + pat = PatKind::Wild; + } else if self.eat_keyword(keywords::Mut) { // Parse mut ident @ pat / mut ref ident @ pat let mutref_span = self.prev_span.to(self.span); let binding_mode = if self.eat_keyword(keywords::Ref) { @@ -3862,7 +3853,7 @@ impl<'a> Parser<'a> { pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?; } else if self.eat_keyword(keywords::Box) { // Parse box pat - let subpat = self.parse_pat()?; + let subpat = self.parse_pat_with_range_pat(false)?; pat = PatKind::Box(subpat); } else if self.token.is_ident() && !self.token.is_reserved_ident() && self.parse_as_ident() { @@ -3967,6 +3958,25 @@ impl<'a> Parser<'a> { let pat = Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID }; let pat = self.maybe_recover_from_bad_qpath(pat, true)?; + if !allow_range_pat { + match pat.node { + PatKind::Range(_, _, RangeEnd::Included(RangeSyntax::DotDotDot)) => {} + PatKind::Range(..) 
                => {
+                    let mut err = self.struct_span_err(
+                        pat.span,
+                        "the range pattern here has ambiguous interpretation",
+                    );
+                    err.span_suggestion(
+                        pat.span,
+                        "add parentheses to clarify the precedence",
+                        format!("({})", pprust::pat_to_string(&pat)),
+                    );
+                    return Err(err);
+                }
+                _ => {}
+            }
+        }
+
         Ok(P(pat))
     }
@@ -4225,7 +4235,7 @@ impl<'a> Parser<'a> {
                        -> PResult<'a, Option<P<Item>>> {
         let token_lo = self.span;
         let (ident, def) = match self.token {
-            token::Ident(ident) if ident.name == keywords::Macro.name() => {
+            token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
                 self.bump();
                 let ident = self.parse_ident()?;
                 let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@@ -4253,7 +4263,7 @@ impl<'a> Parser<'a> {
                 (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
             }
 
-            token::Ident(ident) if ident.name == "macro_rules" &&
+            token::Ident(ident, _) if ident.name == "macro_rules" &&
                                    self.look_ahead(1, |t| *t == token::Not) => {
                 let prev_span = self.prev_span;
                 self.complain_if_pub_macro(&vis.node, prev_span);
@@ -4578,6 +4588,9 @@ impl<'a> Parser<'a> {
 
     /// Parse a statement, including the trailing semicolon.
     pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+        // skip looking for a trailing semicolon when we have an interpolated statement
+        maybe_whole!(self, NtStmt, |x| Some(x));
+
         let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
             Some(stmt) => stmt,
             None => return Ok(None),
@@ -5058,7 +5071,9 @@ impl<'a> Parser<'a> {
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
         let expect_ident = |this: &mut Self| match this.token {
             // Preserve hygienic context.
-            token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) }
+            token::Ident(ident, _) => {
+                let sp = this.span; this.bump(); codemap::respan(sp, ident)
+            }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
@@ -5355,13 +5370,15 @@ impl<'a> Parser<'a> {
             VisibilityKind::Inherited => Ok(()),
             _ => {
                 let is_macro_rules: bool = match self.token {
-                    token::Ident(sid) => sid.name == Symbol::intern("macro_rules"),
+                    token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"),
                     _ => false,
                 };
                 if is_macro_rules {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
-                    err.help("did you mean #[macro_export]?");
+                    err.span_suggestion(sp,
+                                        "try exporting the macro",
+                                        "#[macro_export]".to_owned());
                     Err(err)
                 } else {
                     let mut err = self.diagnostic()
@@ -5841,7 +5858,7 @@ impl<'a> Parser<'a> {
                 // `pub(in path)`
                 self.bump(); // `(`
                 self.bump(); // `in`
-                let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `path`
+                let path = self.parse_path(PathStyle::Mod)?; // `path`
                 self.expect(&token::CloseDelim(token::Paren))?; // `)`
                 let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
                     path: P(path),
@@ -5854,7 +5871,7 @@ impl<'a> Parser<'a> {
             {
                 // `pub(self)` or `pub(super)`
                 self.bump(); // `(`
-                let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `super`/`self`
+                let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
                 self.expect(&token::CloseDelim(token::Paren))?; // `)`
                 let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
                     path: P(path),
@@ -6263,23 +6280,17 @@ impl<'a> Parser<'a> {
                          lo: Span,
                          visibility: Visibility,
                          attrs: Vec<Attribute>)
-                         -> PResult<'a, P<Item>> {
-
-        let crate_name = self.parse_ident()?;
-        let (maybe_path, ident) = if let Some(ident) = self.parse_rename()? {
-            (Some(crate_name.name), ident)
+                         -> PResult<'a, P<Item>> {
+        let orig_name = self.parse_ident()?;
+        let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
+            (rename, Some(orig_name.name))
         } else {
-            (None, crate_name)
+            (orig_name, None)
         };
         self.expect(&token::Semi)?;
-        let prev_span = self.prev_span;
-
-        Ok(self.mk_item(lo.to(prev_span),
-                        ident,
-                        ItemKind::ExternCrate(maybe_path),
-                        visibility,
-                        attrs))
+        let span = lo.to(self.prev_span);
+        Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
     }
 
     /// Parse `extern` for foreign ABIs
@@ -6458,12 +6469,11 @@ impl<'a> Parser<'a> {
         if self.eat_keyword(keywords::Use) {
             // USE ITEM
-            let item_ = ItemKind::Use(P(self.parse_use_tree(false)?));
+            let item_ = ItemKind::Use(P(self.parse_use_tree()?));
             self.expect(&token::Semi)?;
 
-            let prev_span = self.prev_span;
-            let invalid = keywords::Invalid.ident();
-            let item = self.mk_item(lo.to(prev_span), invalid, item_, visibility, attrs);
+            let span = lo.to(self.prev_span);
+            let item = self.mk_item(span, keywords::Invalid.ident(), item_, visibility, attrs);
             return Ok(Some(item));
         }
@@ -6938,90 +6948,53 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    /// `{` or `::{` or `*` or `::*`
-    /// `::{` or `::*` (also `{` or `*` if unprefixed is true)
-    fn is_import_coupler(&mut self, unprefixed: bool) -> bool {
-        self.is_import_coupler_inner(&token::OpenDelim(token::Brace), unprefixed) ||
-        self.is_import_coupler_inner(&token::BinOp(token::Star), unprefixed)
-    }
-
-    fn is_import_coupler_inner(&mut self, token: &token::Token, unprefixed: bool) -> bool {
-        if self.check(&token::ModSep) {
-            self.look_ahead(1, |t| t == token)
-        } else if unprefixed {
-            self.check(token)
-        } else {
-            false
-        }
+    /// `::{` or `::*`
+    fn is_import_coupler(&mut self) -> bool {
+        self.check(&token::ModSep) &&
+        self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) ||
+                               *t == token::BinOp(token::Star))
     }
 
     /// Parse UseTree
     ///
-    /// USE_TREE = `*` |
-    ///            `{` USE_TREE_LIST `}` |
+    /// USE_TREE = [`::`] `*` |
+    ///            [`::`] `{` USE_TREE_LIST `}` |
     ///            PATH `::` `*` |
     ///            PATH `::` `{` USE_TREE_LIST `}` |
     ///            PATH [`as` IDENT]
-    fn parse_use_tree(&mut self, nested: bool) -> PResult<'a, UseTree> {
+    fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
         let lo = self.span;
-        let mut prefix = ast::Path {
-            segments: vec![],
-            span: lo.to(self.span),
-        };
-
-        let kind = if self.is_import_coupler(true) {
-            // `use *;` or `use ::*;` or `use {...};` `use ::{...};`
-
-            // Remove the first `::`
+        let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() };
+        let kind = if self.check(&token::OpenDelim(token::Brace)) ||
+                      self.check(&token::BinOp(token::Star)) ||
+                      self.is_import_coupler() {
+            // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
             if self.eat(&token::ModSep) {
-                prefix.segments.push(PathSegment::crate_root(self.prev_span));
-            } else if !nested {
-                prefix.segments.push(PathSegment::crate_root(self.span));
+                prefix.segments.push(PathSegment::crate_root(lo.shrink_to_lo()));
             }
 
             if self.eat(&token::BinOp(token::Star)) {
-                // `use *;`
                 UseTreeKind::Glob
-            } else if self.check(&token::OpenDelim(token::Brace)) {
-                // `use {...};`
-                UseTreeKind::Nested(self.parse_use_tree_list()?)
             } else {
-                return self.unexpected();
+                UseTreeKind::Nested(self.parse_use_tree_list()?)
             }
         } else {
-            // `use path::...;`
-            let mut parsed = self.parse_path(PathStyle::Mod)?;
-            if !nested {
-                parsed = parsed.default_to_global();
-            }
-
-            prefix.segments.append(&mut parsed.segments);
-            prefix.span = prefix.span.to(parsed.span);
+            // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
+            prefix = self.parse_path(PathStyle::Mod)?;
 
             if self.eat(&token::ModSep) {
                 if self.eat(&token::BinOp(token::Star)) {
-                    // `use path::*;`
                     UseTreeKind::Glob
-                } else if self.check(&token::OpenDelim(token::Brace)) {
-                    // `use path::{...};`
-                    UseTreeKind::Nested(self.parse_use_tree_list()?)
                 } else {
-                    return self.unexpected();
+                    UseTreeKind::Nested(self.parse_use_tree_list()?)
                 }
             } else {
-                // `use path::foo;` or `use path::foo as bar;`
-                let rename = self.parse_rename()?.
-                                  unwrap_or(prefix.segments.last().unwrap().identifier);
-                UseTreeKind::Simple(rename)
+                UseTreeKind::Simple(self.parse_rename()?)
            }
        };
 
-        Ok(UseTree {
-            span: lo.to(self.prev_span),
-            kind,
-            prefix,
-        })
+        Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
    }
 
     /// Parse UseTreeKind::Nested(list)
@@ -7031,13 +7004,19 @@ impl<'a> Parser<'a> {
         self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
                                  &token::CloseDelim(token::Brace),
                                  SeqSep::trailing_allowed(token::Comma), |this| {
-            Ok((this.parse_use_tree(true)?, ast::DUMMY_NODE_ID))
+            Ok((this.parse_use_tree()?, ast::DUMMY_NODE_ID))
         })
     }
 
     fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
         if self.eat_keyword(keywords::As) {
-            self.parse_ident().map(Some)
+            match self.token {
+                token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+                    self.bump(); // `_`
+                    Ok(Some(Ident { name: ident.name.gensymed(), ..ident }))
+                }
+                _ => self.parse_ident().map(Some),
+            }
         } else {
             Ok(None)
         }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 097a2eb89fd..e2dfca5d10a 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -91,8 +91,8 @@ impl Lit {
     }
 }
 
-fn ident_can_begin_expr(ident: ast::Ident) -> bool {
-    let ident_token: Token = Ident(ident);
+fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
+    let ident_token: Token = Ident(ident, is_raw);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -116,12 +116,13 @@ fn ident_can_begin_expr(ident: ast::Ident) -> bool {
     ].contains(&ident.name)
 }
 
-fn ident_can_begin_type(ident: ast::Ident) -> bool {
-    let ident_token: Token = Ident(ident);
+fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
+    let ident_token: Token = Ident(ident, is_raw);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
     [
+        keywords::Underscore.name(),
         keywords::For.name(),
         keywords::Impl.name(),
         keywords::Fn.name(),
@@ -131,6 +132,44 @@ fn ident_can_begin_type(ident: ast::Ident) -> bool {
     ].contains(&ident.name)
 }
 
+pub fn is_path_segment_keyword(id: ast::Ident) -> bool {
+    id.name == keywords::Super.name() ||
+    id.name == keywords::SelfValue.name() ||
+    id.name == keywords::SelfType.name() ||
+    id.name == keywords::Extern.name() ||
+    id.name == keywords::Crate.name() ||
+    id.name == keywords::CrateRoot.name() ||
+    id.name == keywords::DollarCrate.name()
+}
+
+// We see this identifier in a normal identifier position, like variable name or a type.
+// How was it written originally? Did it use the raw form? Let's try to guess.
+pub fn is_raw_guess(ident: ast::Ident) -> bool {
+    ident.name != keywords::Invalid.name() &&
+    is_reserved_ident(ident) && !is_path_segment_keyword(ident)
+}
+
+// Returns true for reserved identifiers used internally for elided lifetimes,
+// unnamed method parameters, crate root module, error recovery etc.
+pub fn is_special_ident(id: ast::Ident) -> bool {
+    id.name <= keywords::Underscore.name()
+}
+
+/// Returns `true` if the token is a keyword used in the language.
+pub fn is_used_keyword(id: ast::Ident) -> bool {
+    id.name >= keywords::As.name() && id.name <= keywords::While.name()
+}
+
+/// Returns `true` if the token is a keyword reserved for possible future use.
+pub fn is_unused_keyword(id: ast::Ident) -> bool {
+    id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name()
+}
+
+/// Returns `true` if the token is either a special identifier or a keyword.
+pub fn is_reserved_ident(id: ast::Ident) -> bool {
+    is_special_ident(id) || is_used_keyword(id) || is_unused_keyword(id)
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)]
 pub enum Token {
     /* Expression-operator symbols. */
@@ -174,8 +213,7 @@ pub enum Token {
     Literal(Lit, Option<ast::Name>),
 
     /* Name components */
-    Ident(ast::Ident),
-    Underscore,
+    Ident(ast::Ident, /* is_raw */ bool),
     Lifetime(ast::Ident),
 
     // The `LazyTokenStream` is a pure function of the `Nonterminal`,
@@ -203,6 +241,11 @@ impl Token {
         Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
     }
 
+    /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
+    pub fn from_ast_ident(ident: ast::Ident) -> Token {
+        Ident(ident, is_raw_guess(ident))
+    }
+
     /// Returns `true` if the token starts with '>'.
     pub fn is_like_gt(&self) -> bool {
         match *self {
@@ -214,7 +257,8 @@ impl Token {
     /// Returns `true` if the token can appear at the start of an expression.
     pub fn can_begin_expr(&self) -> bool {
         match *self {
-            Ident(ident)         => ident_can_begin_expr(ident), // value name or keyword
+            Ident(ident, is_raw) =>
+                ident_can_begin_expr(ident, is_raw), // value name or keyword
             OpenDelim(..)        | // tuple, array or block
             Literal(..)          | // literal
             Not                  | // operator not
@@ -239,10 +283,10 @@ impl Token {
     /// Returns `true` if the token can appear at the start of a type.
     pub fn can_begin_type(&self) -> bool {
         match *self {
-            Ident(ident)         => ident_can_begin_type(ident), // type name or keyword
+            Ident(ident, is_raw) =>
+                ident_can_begin_type(ident, is_raw), // type name or keyword
             OpenDelim(Paren)     | // tuple
             OpenDelim(Bracket)   | // array
-            Underscore           | // placeholder
             Not                  | // never
             BinOp(Star)          | // raw pointer
             BinOp(And)           | // reference
@@ -273,11 +317,11 @@ impl Token {
         }
     }
 
-    pub fn ident(&self) -> Option<ast::Ident> {
+    pub fn ident(&self) -> Option<(ast::Ident, bool)> {
         match *self {
-            Ident(ident) => Some(ident),
+            Ident(ident, is_raw) => Some((ident, is_raw)),
             Interpolated(ref nt) => match nt.0 {
-                NtIdent(ident) => Some(ident.node),
+                NtIdent(ident, is_raw) => Some((ident.node, is_raw)),
                 _ => None,
             },
             _ => None,
@@ -352,18 +396,13 @@ impl Token {
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
-        self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
+        self.ident().map(|(ident, is_raw)| ident.name == kw.name() && !is_raw).unwrap_or(false)
     }
 
     pub fn is_path_segment_keyword(&self) -> bool {
         match self.ident() {
-            Some(id) => id.name == keywords::Super.name() ||
-                        id.name == keywords::SelfValue.name() ||
-                        id.name == keywords::SelfType.name() ||
-                        id.name == keywords::Extern.name() ||
-                        id.name == keywords::Crate.name() ||
-                        id.name == keywords::DollarCrate.name(),
-            None => false,
+            Some((id, false)) => is_path_segment_keyword(id),
+            _ => false,
         }
     }
 
@@ -371,7 +410,7 @@ impl Token {
     // unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special_ident(&self) -> bool {
         match self.ident() {
-            Some(id) => id.name <= keywords::DollarCrate.name(),
+            Some((id, false)) => is_special_ident(id),
             _ => false,
         }
     }
@@ -379,7 +418,7 @@ impl Token {
     /// Returns `true` if the token is a keyword used in the language.
     pub fn is_used_keyword(&self) -> bool {
         match self.ident() {
-            Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(),
+            Some((id, false)) => is_used_keyword(id),
             _ => false,
         }
     }
@@ -387,7 +426,7 @@ impl Token {
     /// Returns `true` if the token is a keyword reserved for possible future use.
     pub fn is_unused_keyword(&self) -> bool {
         match self.ident() {
-            Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(),
+            Some((id, false)) => is_unused_keyword(id),
             _ => false,
         }
     }
@@ -441,7 +480,7 @@ impl Token {
             Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | DotEq |
             DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
-            Question | OpenDelim(..) | CloseDelim(..) | Underscore => return None,
+            Question | OpenDelim(..) | CloseDelim(..) => return None,
 
             Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
             Whitespace | Comment | Shebang(..) | Eof => return None,
@@ -460,7 +499,10 @@ impl Token {
     /// Returns `true` if the token is either a special identifier or a keyword.
     pub fn is_reserved_ident(&self) -> bool {
-        self.is_special_ident() || self.is_used_keyword() || self.is_unused_keyword()
+        match self.ident() {
+            Some((id, false)) => is_reserved_ident(id),
+            _ => false,
+        }
     }
 
     pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
@@ -496,8 +538,8 @@ impl Token {
             Nonterminal::NtImplItem(ref item) => {
                 tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
             }
-            Nonterminal::NtIdent(ident) => {
-                let token = Token::Ident(ident.node);
+            Nonterminal::NtIdent(ident, is_raw) => {
+                let token = Token::Ident(ident.node, is_raw);
                 tokens = Some(TokenTree::Token(ident.span, token).into());
             }
             Nonterminal::NtLifetime(lifetime) => {
@@ -529,7 +571,7 @@ pub enum Nonterminal {
     NtPat(P<ast::Pat>),
     NtExpr(P<ast::Expr>),
     NtTy(P<ast::Ty>),
-    NtIdent(ast::SpannedIdent),
+    NtIdent(ast::SpannedIdent, /* is_raw */ bool),
     /// Stuff inside brackets for attributes
     NtMeta(ast::MetaItem),
     NtPath(ast::Path),
@@ -573,7 +615,7 @@ impl fmt::Debug for Nonterminal {
 pub fn is_op(tok: &Token) -> bool {
     match *tok {
         OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
-        Ident(..) | Underscore | Lifetime(..) | Interpolated(..) |
+        Ident(..) | Lifetime(..) | Interpolated(..) |
         Whitespace | Comment | Shebang(..) | Eof => false,
         _ => true,
     }
 }
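The token.rs half of this diff replaces the separate `Underscore` token and the bare `Ident(ast::Ident)` variant with `Ident(ast::Ident, /* is_raw */ bool)`, and hoists the keyword checks out of `impl Token` into free functions that classify an identifier by comparing interned-name ordinals. Below is a minimal, self-contained sketch of that scheme; the `Name` ordinals and keyword layout are invented for illustration and are not rustc's actual `symbol` tables.

```rust
// Illustrative model of the ordinal-range keyword classification above.
// Assumption: keyword names are interned contiguously, special identifiers
// first (up to `_`), then used keywords (`as`..`while`), then keywords
// reserved for future use (`abstract`..`yield`).

/// Stand-in for an interned-name ordinal (rustc's `ast::Name`).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Name(u32);

// Hypothetical ordinals; only their relative order matters here.
const UNDERSCORE: Name = Name(5);
const KW_AS: Name = Name(6);
const KW_FN: Name = Name(10);
const KW_WHILE: Name = Name(40);
const KW_ABSTRACT: Name = Name(41);
const KW_YIELD: Name = Name(50);

#[derive(Clone, Copy, Debug)]
struct Ident { name: Name }

fn is_special_ident(id: Ident) -> bool { id.name <= UNDERSCORE }
fn is_used_keyword(id: Ident) -> bool { id.name >= KW_AS && id.name <= KW_WHILE }
fn is_unused_keyword(id: Ident) -> bool { id.name >= KW_ABSTRACT && id.name <= KW_YIELD }
fn is_reserved_ident(id: Ident) -> bool {
    is_special_ident(id) || is_used_keyword(id) || is_unused_keyword(id)
}

/// Why the `is_raw` flag matters: `r#fn` lexes to `Ident(fn, is_raw = true)`
/// and must not be treated as the `fn` keyword, only as a plain identifier.
fn is_keyword(ident: Ident, is_raw: bool, kw: Name) -> bool {
    ident.name == kw && !is_raw
}

fn main() {
    let fn_ident = Ident { name: KW_FN };
    assert!(is_keyword(fn_ident, false, KW_FN));  // plain `fn` is the keyword
    assert!(!is_keyword(fn_ident, true, KW_FN));  // `r#fn` is just an identifier
    assert!(is_reserved_ident(fn_ident));         // ...though still a reserved name
    assert!(is_special_ident(Ident { name: UNDERSCORE }));
    println!("classification checks passed");
}
```

The same ordering trick is what `is_raw_guess` in the diff leans on: a non-`Invalid` identifier whose name is reserved but is not a path-segment keyword could only have reached an ordinary identifier position by being written in its `r#` form.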

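To make the rewritten USE_TREE grammar concrete, here is a toy recursive-descent parser over a pre-split token list that accepts the same shapes. It is a sketch of the grammar only: the token handling is simplified, there are no spans or interned symbols, a leading `::` on PATH is omitted, and none of these names are libsyntax APIs.

```rust
// Toy parser for the USE_TREE grammar documented in the diff above:
//   USE_TREE = [`::`] `*`
//            | [`::`] `{` USE_TREE_LIST `}`
//            | PATH `::` `*`
//            | PATH `::` `{` USE_TREE_LIST `}`
//            | PATH [`as` IDENT]

#[derive(Debug)]
enum UseTreeKind {
    Glob,
    Nested(Vec<UseTree>),
    Simple(Option<String>), // optional `as` rename
}

#[derive(Debug)]
struct UseTree {
    prefix: Vec<String>, // path segments, possibly empty
    kind: UseTreeKind,
}

struct Parser<'a> {
    toks: &'a [&'a str],
    pos: usize,
}

impl<'a> Parser<'a> {
    fn peek(&self) -> Option<&'a str> { self.toks.get(self.pos).copied() }
    fn eat(&mut self, t: &str) -> bool {
        if self.peek() == Some(t) { self.pos += 1; true } else { false }
    }

    fn parse_use_tree(&mut self) -> Result<UseTree, String> {
        let mut prefix = Vec::new();
        // `*`, `::*`, `{...}`, `::{...}`: no leading path
        let kind = if self.peek() == Some("*") || self.peek() == Some("{")
            || (self.peek() == Some("::")
                && matches!(self.toks.get(self.pos + 1).copied(), Some("*") | Some("{")))
        {
            self.eat("::"); // optional global prefix
            if self.eat("*") { UseTreeKind::Glob } else { UseTreeKind::Nested(self.parse_list()?) }
        } else {
            // PATH, then optionally `:: *`, `:: { ... }`, or `as IDENT`
            prefix.push(self.ident()?);
            loop {
                if !self.eat("::") {
                    break UseTreeKind::Simple(self.parse_rename()?);
                }
                if self.eat("*") {
                    break UseTreeKind::Glob;
                }
                if self.peek() == Some("{") {
                    break UseTreeKind::Nested(self.parse_list()?);
                }
                prefix.push(self.ident()?);
            }
        };
        Ok(UseTree { prefix, kind })
    }

    fn parse_list(&mut self) -> Result<Vec<UseTree>, String> {
        // `{` USE_TREE (`,` USE_TREE)* [`,`] `}`
        if !self.eat("{") { return Err("expected `{`".into()); }
        let mut items = Vec::new();
        while !self.eat("}") {
            items.push(self.parse_use_tree()?);
            if !self.eat(",") && self.peek() != Some("}") {
                return Err("expected `,` or `}`".into());
            }
        }
        Ok(items)
    }

    fn parse_rename(&mut self) -> Result<Option<String>, String> {
        if self.eat("as") { Ok(Some(self.ident()?)) } else { Ok(None) }
    }

    fn ident(&mut self) -> Result<String, String> {
        match self.peek() {
            Some(t) if t.chars().all(|c| c.is_alphanumeric() || c == '_') && t != "as" => {
                self.pos += 1;
                Ok(t.to_string())
            }
            other => Err(format!("expected identifier, found {:?}", other)),
        }
    }
}

fn main() {
    // `use std::collections::{HashMap as Map, hash_map::*};`
    let toks = ["std", "::", "collections", "::", "{", "HashMap", "as", "Map",
                ",", "hash_map", "::", "*", "}"];
    let mut p = Parser { toks: &toks, pos: 0 };
    println!("{:#?}", p.parse_use_tree().unwrap());
}
```

Running this on the tokens of `use std::collections::{HashMap as Map, hash_map::*};` prints a nested tree whose outer prefix is `std::collections`, mirroring the shape `parse_use_tree` builds for the AST: the prefix path is parsed once, and the `::*`, `::{...}`, and `as` alternatives each close the tree.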