| author | bors <bors@rust-lang.org> | 2016-04-24 13:47:22 -0700 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2016-04-24 13:47:22 -0700 |
| commit | 19304837c86cc406ee042c99e12fa34debae4e8a (patch) | |
| tree | 180cf9b6c1b7e850bf19c7101e112729ad381b6e /src/libsyntax/parse | |
| parent | 91aea5cf87953788477ccaa3a37c3f2c855e7a0a (diff) | |
| parent | a31658de51444d1b5193ac203a1bd7ace5621f93 (diff) | |
Auto merge of #33179 - Manishearth:breaking-batch, r=Manishearth
Batch up breaking libsyntax changes

Contains:

- #33125
- #33041
- #33157

cc https://github.com/rust-lang/rust/issues/31645
Diffstat (limited to 'src/libsyntax/parse')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 49 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 36 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 349 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 358 |
4 files changed, 298 insertions, 494 deletions
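The change that drives most of the churn below is the removal of the `IdentStyle` payload (`Plain`/`ModName`) from identifier tokens: `token::Ident(ident, style)` becomes `token::Ident(ident)`, and the lexer no longer records whether `::` follows an identifier. A minimal before/after sketch, using simplified stand-in types rather than the real libsyntax definitions:

```rust
// Simplified model of the token change; the real types live in
// syntax::parse::token and carry interned names, spans, etc.

// Before this commit (sketch): identifiers carried a style marker.
#[derive(Debug, PartialEq)]
enum OldToken {
    Ident(String, IdentStyle),
    ModSep,
}

#[derive(Debug, PartialEq)]
enum IdentStyle {
    Plain,
    ModName, // identifier immediately followed by `::`
}

// After this commit (sketch): the style is gone; whether an identifier
// starts a path is decided by the parser looking ahead for `::`.
#[derive(Debug, PartialEq)]
enum NewToken {
    Ident(String),
    ModSep,
}

fn main() {
    // `a::b` tokenized under both models.
    let old = vec![
        OldToken::Ident("a".into(), IdentStyle::ModName),
        OldToken::ModSep,
        OldToken::Ident("b".into(), IdentStyle::Plain),
    ];
    let new = vec![
        NewToken::Ident("a".into()),
        NewToken::ModSep,
        NewToken::Ident("b".into()),
    ];
    println!("old: {:?}\nnew: {:?}", old, new);
}
```

With the style gone, helpers such as `mk_ident` in the lexer tests and the `TokenTree` fixtures in `parse/mod.rs` drop their extra argument, as the hunks below show.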
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a5cb5c7117e..2eda13adcb5 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -13,8 +13,7 @@ use codemap::{BytePos, CharPos, CodeMap, Pos, Span}; use codemap; use errors::{FatalError, Handler, DiagnosticBuilder}; use ext::tt::transcribe::tt_next_token; -use parse::token::str_to_ident; -use parse::token; +use parse::token::{self, keywords, str_to_ident}; use str::char_at; use rustc_unicode::property::Pattern_White_Space; @@ -1039,11 +1038,7 @@ impl<'a> StringReader<'a> { token::Underscore } else { // FIXME: perform NFKC normalization here. (Issue #2253) - if self.curr_is(':') && self.nextch_is(':') { - token::Ident(str_to_ident(string), token::ModName) - } else { - token::Ident(str_to_ident(string), token::Plain) - } + token::Ident(str_to_ident(string)) } }); } @@ -1231,17 +1226,11 @@ impl<'a> StringReader<'a> { let keyword_checking_ident = self.with_str_from(start, |lifetime_name| { str_to_ident(lifetime_name) }); - let keyword_checking_token = &token::Ident(keyword_checking_ident, - token::Plain); + let keyword_checking_token = &token::Ident(keyword_checking_ident); let last_bpos = self.last_pos; - if keyword_checking_token.is_keyword(token::keywords::SelfValue) { - self.err_span_(start, - last_bpos, - "invalid lifetime name: 'self is no longer a special \ - lifetime"); - } else if keyword_checking_token.is_any_keyword() && - !keyword_checking_token.is_keyword(token::keywords::Static) { - self.err_span_(start, last_bpos, "invalid lifetime name"); + if keyword_checking_token.is_any_keyword() && + !keyword_checking_token.is_keyword(keywords::Static) { + self.err_span_(start, last_bpos, "lifetimes cannot use keyword names"); } return token::Lifetime(ident); @@ -1687,7 +1676,7 @@ mod tests { assert_eq!(string_reader.next_token().tok, token::Whitespace); let tok1 = string_reader.next_token(); let tok2 = TokenAndSpan { - tok: token::Ident(id, token::Plain), + tok: token::Ident(id), sp: Span { lo: BytePos(21), hi: BytePos(23), @@ -1701,7 +1690,7 @@ mod tests { // read another token: let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan { - tok: token::Ident(str_to_ident("main"), token::Plain), + tok: token::Ident(str_to_ident("main")), sp: Span { lo: BytePos(24), hi: BytePos(28), @@ -1722,8 +1711,8 @@ mod tests { } // make the identifier by looking up the string in the interner - fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token { - token::Ident(str_to_ident(id), style) + fn mk_ident(id: &str) -> token::Token { + token::Ident(str_to_ident(id)) } #[test] @@ -1731,9 +1720,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); check_tokenization(setup(&cm, &sh, "a b".to_string()), - vec![mk_ident("a", token::Plain), - token::Whitespace, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); } #[test] @@ -1741,9 +1728,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); check_tokenization(setup(&cm, &sh, "a::b".to_string()), - vec![mk_ident("a", token::ModName), - token::ModSep, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::ModSep, mk_ident("b")]); } #[test] @@ -1751,10 +1736,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); check_tokenization(setup(&cm, &sh, "a ::b".to_string()), - vec![mk_ident("a", token::Plain), - token::Whitespace, - token::ModSep, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::Whitespace, 
token::ModSep, mk_ident("b")]); } #[test] @@ -1762,10 +1744,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); check_tokenization(setup(&cm, &sh, "a:: b".to_string()), - vec![mk_ident("a", token::ModName), - token::ModSep, - token::Whitespace, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); } #[test] diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 29b1d5b9aff..c2050d2a8f4 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -734,9 +734,9 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( 4, - Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(name_macro_rules))), Some(&TokenTree::Token(_, token::Not)), - Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(name_zip))), Some(&TokenTree::Delimited(_, ref macro_delimed)), ) if name_macro_rules.name.as_str() == "macro_rules" @@ -755,7 +755,7 @@ mod tests { ( 2, Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(ident))), ) if first_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, @@ -766,7 +766,7 @@ mod tests { ( 2, Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(ident))), ) if second_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, @@ -785,26 +785,17 @@ mod tests { let tts = string_to_tts("fn a (b : i32) { b; }".to_string()); let expected = vec![ - TokenTree::Token(sp(0, 2), - token::Ident(str_to_ident("fn"), - token::IdentStyle::Plain)), - TokenTree::Token(sp(3, 4), - token::Ident(str_to_ident("a"), - token::IdentStyle::Plain)), + TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))), + TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))), TokenTree::Delimited( sp(5, 14), Rc::new(ast::Delimited { delim: token::DelimToken::Paren, open_span: sp(5, 6), tts: vec![ - TokenTree::Token(sp(6, 7), - token::Ident(str_to_ident("b"), - token::IdentStyle::Plain)), - TokenTree::Token(sp(8, 9), - token::Colon), - TokenTree::Token(sp(10, 13), - token::Ident(str_to_ident("i32"), - token::IdentStyle::Plain)), + TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))), + TokenTree::Token(sp(8, 9), token::Colon), + TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))), ], close_span: sp(13, 14), })), @@ -814,11 +805,8 @@ mod tests { delim: token::DelimToken::Brace, open_span: sp(15, 16), tts: vec![ - TokenTree::Token(sp(17, 18), - token::Ident(str_to_ident("b"), - token::IdentStyle::Plain)), - TokenTree::Token(sp(18, 19), - token::Semi) + TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))), + TokenTree::Token(sp(18, 19), token::Semi), ], close_span: sp(20, 21), })) @@ -937,7 +925,7 @@ mod tests { Abi::Rust, ast::Generics{ // no idea on either of these: lifetimes: Vec::new(), - ty_params: P::empty(), + ty_params: P::new(), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index f3d3bbd9f99..8722fe9d79d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
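In the lexer hunk above, the special-cased `'self` diagnostic is dropped: any keyword other than `static` used as a lifetime name now produces the single message "lifetimes cannot use keyword names". A hedged sketch of that check, with an illustrative (not complete) keyword list standing in for the interned keyword table:

```rust
// Sketch of the lexer's new lifetime-name check: any keyword except
// `static` is rejected with one generic message; `'self` no longer gets
// its own error. The keyword list here is illustrative only.

const KEYWORDS: &[&str] = &["as", "break", "fn", "self", "static", "while"];

fn check_lifetime_name(name: &str) -> Result<(), String> {
    let is_keyword = KEYWORDS.contains(&name);
    if is_keyword && name != "static" {
        Err(format!("lifetimes cannot use keyword names: '{}", name))
    } else {
        Ok(())
    }
}

fn main() {
    assert!(check_lifetime_name("static").is_ok()); // `'static` stays legal
    assert!(check_lifetime_name("a").is_ok());
    assert!(check_lifetime_name("self").is_err());  // `'self` now uses the generic message
    println!("lifetime keyword checks behave as sketched");
}
```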
-pub use self::PathParsingMode::*; - use abi::{self, Abi}; use ast::BareFnTy; use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier}; @@ -51,7 +49,7 @@ use parse::common::SeqSep; use parse::lexer::{Reader, TokenAndSpan}; use parse::obsolete::{ParserObsoleteMethods, ObsoleteSyntax}; use parse::token::{self, intern, MatchNt, SubstNt, SpecialVarNt, InternedString}; -use parse::token::{keywords, special_idents, SpecialMacroVar}; +use parse::token::{keywords, SpecialMacroVar}; use parse::{new_sub_parser_from_file, ParseSess}; use util::parser::{AssocOp, Fixity}; use print::pprust; @@ -69,26 +67,24 @@ bitflags! { const RESTRICTION_STMT_EXPR = 1 << 0, const RESTRICTION_NO_STRUCT_LITERAL = 1 << 1, const NO_NONINLINE_MOD = 1 << 2, - const ALLOW_MODULE_PATHS = 1 << 3, } } type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute> >); -/// How to parse a path. There are four different kinds of paths, all of which +/// How to parse a path. There are three different kinds of paths, all of which /// are parsed somewhat differently. #[derive(Copy, Clone, PartialEq)] -pub enum PathParsingMode { - /// A path with no type parameters; e.g. `foo::bar::Baz` - NoTypesAllowed, - /// Same as `NoTypesAllowed`, but may end with `::{` or `::*`, which are left unparsed - ImportPrefix, +pub enum PathStyle { + /// A path with no type parameters, e.g. `foo::bar::Baz`, used in imports or visibilities. + Mod, /// A path with a lifetime and type parameters, with no double colons - /// before the type parameters; e.g. `foo::bar<'a>::Baz<T>` - LifetimeAndTypesWithoutColons, + /// before the type parameters; e.g. `foo::bar<'a>::Baz<T>`, used in types. + /// Paths using this style can be passed into macros expecting `path` nonterminals. + Type, /// A path with a lifetime and type parameters with double colons before - /// the type parameters; e.g. `foo::bar::<'a>::Baz::<T>` - LifetimeAndTypesWithColons, + /// the type parameters; e.g. `foo::bar::<'a>::Baz::<T>`, used in expressions or patterns. + Expr, } /// How to parse a bound, whether to allow bound modifiers such as `?`. @@ -292,13 +288,13 @@ impl TokenType { match *self { TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)), TokenType::Operator => "an operator".to_string(), - TokenType::Keyword(kw) => format!("`{}`", kw.to_name()), + TokenType::Keyword(kw) => format!("`{}`", kw.name()), } } } -fn is_plain_ident_or_underscore(t: &token::Token) -> bool { - t.is_plain_ident() || *t == token::Underscore +fn is_ident_or_underscore(t: &token::Token) -> bool { + t.is_ident() || *t == token::Underscore } /// Information about the path to a module. 
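The four-variant `PathParsingMode` (`NoTypesAllowed`, `ImportPrefix`, `LifetimeAndTypesWithoutColons`, `LifetimeAndTypesWithColons`) collapses into the three-variant `PathStyle` shown above. A small sketch of the resulting dispatch, with the segment parsers stubbed out as names only:

```rust
// Sketch of the new three-way path-style dispatch; the variant names mirror
// the diff (`PathStyle::Mod`/`Type`/`Expr`), but the segment parsers are stubs.

#[derive(Copy, Clone, PartialEq, Debug)]
enum PathStyle {
    /// No type parameters, e.g. `foo::bar::Baz` (imports, visibilities).
    Mod,
    /// Type parameters without `::` before them, e.g. `foo::Baz<T>` (types).
    Type,
    /// Type parameters with `::` before them, e.g. `foo::Baz::<T>` (expressions, patterns).
    Expr,
}

fn parse_path(style: PathStyle) -> &'static str {
    // The real parser picks one of three segment parsers here.
    match style {
        PathStyle::Type => "parse_path_segments_without_colons",
        PathStyle::Expr => "parse_path_segments_with_colons",
        PathStyle::Mod => "parse_path_segments_without_types",
    }
}

fn main() {
    for style in [PathStyle::Mod, PathStyle::Type, PathStyle::Expr] {
        println!("{:?} -> {}", style, parse_path(style));
    }
}
```

The old `ImportPrefix` behaviour of stopping before `::{` or `::*` is folded into `PathStyle::Mod`, whose segment parser now always consults `is_import_coupler`.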
@@ -398,6 +394,17 @@ impl<'a> Parser<'a> { Parser::token_to_string(&self.token) } + pub fn this_token_descr(&self) -> String { + let s = self.this_token_to_string(); + if self.token.is_strict_keyword() { + format!("keyword `{}`", s) + } else if self.token.is_reserved_keyword() { + format!("reserved keyword `{}`", s) + } else { + format!("`{}`", s) + } + } + pub fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; @@ -562,12 +569,10 @@ impl<'a> Parser<'a> { } pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { - if !self.restrictions.contains(Restrictions::ALLOW_MODULE_PATHS) { - self.check_strict_keywords(); - } + self.check_strict_keywords(); self.check_reserved_keywords(); match self.token { - token::Ident(i, _) => { + token::Ident(i) => { self.bump(); Ok(i) } @@ -585,12 +590,9 @@ impl<'a> Parser<'a> { } } - pub fn parse_ident_or_self_type(&mut self) -> PResult<'a, ast::Ident> { - if self.is_self_type_ident() { - self.expect_self_type_ident() - } else { - self.parse_ident() - } + fn parse_ident_into_path(&mut self) -> PResult<'a, ast::Path> { + let ident = self.parse_ident()?; + Ok(ast::Path::from_ident(self.last_span, ident)) } /// Check if the next token is `tok`, and return `true` if so. @@ -637,9 +639,8 @@ impl<'a> Parser<'a> { } pub fn check_contextual_keyword(&mut self, ident: Ident) -> bool { - let tok = token::Ident(ident, token::Plain); - self.expected_tokens.push(TokenType::Token(tok)); - if let token::Ident(ref cur_ident, _) = self.token { + self.expected_tokens.push(TokenType::Token(token::Ident(ident))); + if let token::Ident(ref cur_ident) = self.token { cur_ident.name == ident.name } else { false @@ -1159,7 +1160,7 @@ impl<'a> Parser<'a> { let other_bounds = if self.eat(&token::BinOp(token::Plus)) { self.parse_ty_param_bounds(BoundParsingMode::Bare)? 
} else { - P::empty() + P::new() }; let all_bounds = Some(TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None)).into_iter() @@ -1170,7 +1171,7 @@ impl<'a> Parser<'a> { } pub fn parse_ty_path(&mut self) -> PResult<'a, TyKind> { - Ok(TyKind::Path(None, self.parse_path(LifetimeAndTypesWithoutColons)?)) + Ok(TyKind::Path(None, self.parse_path(PathStyle::Type)?)) } /// parse a TyKind::BareFn type: @@ -1473,13 +1474,11 @@ impl<'a> Parser<'a> { } else if self.eat_lt() { let (qself, path) = - self.parse_qualified_path(NoTypesAllowed)?; + self.parse_qualified_path(PathStyle::Type)?; TyKind::Path(Some(qself), path) - } else if self.check(&token::ModSep) || - self.token.is_ident() || - self.token.is_path() { - let path = self.parse_path(LifetimeAndTypesWithoutColons)?; + } else if self.token.is_path_start() { + let path = self.parse_path(PathStyle::Type)?; if self.check(&token::Not) { // MACRO INVOCATION self.bump(); @@ -1497,9 +1496,8 @@ impl<'a> Parser<'a> { // TYPE TO BE INFERRED TyKind::Infer } else { - let this_token_str = self.this_token_to_string(); - let msg = format!("expected type, found `{}`", this_token_str); - return Err(self.fatal(&msg[..])); + let msg = format!("expected type, found {}", self.this_token_descr()); + return Err(self.fatal(&msg)); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1541,10 +1539,10 @@ impl<'a> Parser<'a> { debug!("parser is_named_argument offset:{}", offset); if offset == 0 { - is_plain_ident_or_underscore(&self.token) + is_ident_or_underscore(&self.token) && self.look_ahead(1, |t| *t == token::Colon) } else { - self.look_ahead(offset, |t| is_plain_ident_or_underscore(t)) + self.look_ahead(offset, |t| is_ident_or_underscore(t)) && self.look_ahead(offset + 1, |t| *t == token::Colon) } } @@ -1564,7 +1562,7 @@ impl<'a> Parser<'a> { } else { debug!("parse_arg_general ident_to_pat"); let sp = self.last_span; - let spanned = Spanned { span: sp, node: special_idents::invalid }; + let spanned = Spanned { span: sp, node: keywords::Invalid.ident() }; P(Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), @@ -1616,12 +1614,12 @@ impl<'a> Parser<'a> { } /// Matches token_lit = LIT_INTEGER | ... 
- pub fn lit_from_token(&self, tok: &token::Token) -> PResult<'a, LitKind> { - match *tok { + pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { + let out = match self.token { token::Interpolated(token::NtExpr(ref v)) => { match v.node { - ExprKind::Lit(ref lit) => { Ok(lit.node.clone()) } - _ => { return self.unexpected_last(tok); } + ExprKind::Lit(ref lit) => { lit.node.clone() } + _ => { return self.unexpected_last(&self.token); } } } token::Literal(lit, suf) => { @@ -1636,13 +1634,13 @@ impl<'a> Parser<'a> { (false, parse::integer_lit(&s.as_str(), suf.as_ref().map(|s| s.as_str()), &self.sess.span_diagnostic, - self.last_span)) + self.span)) } token::Float(s) => { (false, parse::float_lit(&s.as_str(), suf.as_ref().map(|s| s.as_str()), &self.sess.span_diagnostic, - self.last_span)) + self.span)) } token::Str_(s) => { @@ -1664,14 +1662,17 @@ impl<'a> Parser<'a> { }; if suffix_illegal { - let sp = self.last_span; + let sp = self.span; self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf) } - Ok(out) + out } - _ => { return self.unexpected_last(tok); } - } + _ => { return self.unexpected_last(&self.token); } + }; + + self.bump(); + Ok(out) } /// Matches lit = true | false | token_lit @@ -1682,8 +1683,7 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(keywords::False) { LitKind::Bool(false) } else { - let token = self.bump_and_get(); - let lit = self.lit_from_token(&token)?; + let lit = self.parse_lit_token()?; lit }; Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }) @@ -1707,6 +1707,16 @@ impl<'a> Parser<'a> { } } + pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { + match self.token { + token::Ident(sid) if self.token.is_path_segment_keyword() => { + self.bump(); + Ok(sid) + } + _ => self.parse_ident(), + } + } + /// Parses qualified path. /// /// Assumes that the leading `<` has been parsed already. @@ -1722,12 +1732,12 @@ impl<'a> Parser<'a> { /// /// `<T as U>::a` /// `<T as U>::F::a::<S>` - pub fn parse_qualified_path(&mut self, mode: PathParsingMode) + pub fn parse_qualified_path(&mut self, mode: PathStyle) -> PResult<'a, (QSelf, ast::Path)> { let span = self.last_span; let self_type = self.parse_ty_sum()?; let mut path = if self.eat_keyword(keywords::As) { - self.parse_path(LifetimeAndTypesWithoutColons)? + self.parse_path(PathStyle::Type)? } else { ast::Path { span: span, @@ -1745,14 +1755,14 @@ impl<'a> Parser<'a> { self.expect(&token::ModSep)?; let segments = match mode { - LifetimeAndTypesWithoutColons => { + PathStyle::Type => { self.parse_path_segments_without_colons()? } - LifetimeAndTypesWithColons => { + PathStyle::Expr => { self.parse_path_segments_with_colons()? } - NoTypesAllowed | ImportPrefix => { - self.parse_path_segments_without_types(mode == ImportPrefix)? + PathStyle::Mod => { + self.parse_path_segments_without_types()? } }; path.segments.extend(segments); @@ -1766,7 +1776,7 @@ impl<'a> Parser<'a> { /// mode. The `mode` parameter determines whether lifetimes, types, and/or /// bounds are permitted and whether `::` must precede type parameter /// groups. - pub fn parse_path(&mut self, mode: PathParsingMode) -> PResult<'a, ast::Path> { + pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { // Check for a whole path... let found = match self.token { token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()), @@ -1783,14 +1793,14 @@ impl<'a> Parser<'a> { // identifier followed by an optional lifetime and a set of types. 
// A bound set is a set of type parameter bounds. let segments = match mode { - LifetimeAndTypesWithoutColons => { + PathStyle::Type => { self.parse_path_segments_without_colons()? } - LifetimeAndTypesWithColons => { + PathStyle::Expr => { self.parse_path_segments_with_colons()? } - NoTypesAllowed | ImportPrefix => { - self.parse_path_segments_without_types(mode == ImportPrefix)? + PathStyle::Mod => { + self.parse_path_segments_without_types()? } }; @@ -1813,7 +1823,7 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. - let identifier = self.parse_ident_or_self_type()?; + let identifier = self.parse_path_segment_ident()?; // Parse types, optionally. let parameters = if self.eat_lt() { @@ -1866,7 +1876,7 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. - let identifier = self.parse_ident_or_self_type()?; + let identifier = self.parse_path_segment_ident()?; // If we do not see a `::`, stop. if !self.eat(&token::ModSep) { @@ -1905,15 +1915,14 @@ impl<'a> Parser<'a> { } } - /// Examples: /// - `a::b::c` - pub fn parse_path_segments_without_types(&mut self, import_prefix: bool) + pub fn parse_path_segments_without_types(&mut self) -> PResult<'a, Vec<ast::PathSegment>> { let mut segments = Vec::new(); loop { // First, parse an identifier. - let identifier = self.parse_ident_or_self_type()?; + let identifier = self.parse_path_segment_ident()?; // Assemble and push the result. segments.push(ast::PathSegment { @@ -1922,7 +1931,7 @@ impl<'a> Parser<'a> { }); // If we do not see a `::` or see `::{`/`::*`, stop. - if !self.check(&token::ModSep) || import_prefix && self.is_import_coupler() { + if !self.check(&token::ModSep) || self.is_import_coupler() { return Ok(segments); } else { self.bump(); @@ -2212,15 +2221,6 @@ impl<'a> Parser<'a> { let lo = self.span.lo; return self.parse_lambda_expr(lo, CaptureBy::Ref, attrs); }, - token::Ident(id @ ast::Ident { - name: token::SELF_KEYWORD_NAME, - ctxt: _ - }, token::Plain) => { - self.bump(); - let path = ast::Path::from_ident(mk_sp(lo, hi), id); - ex = ExprKind::Path(None, path); - hi = self.last_span.hi; - } token::OpenDelim(token::Bracket) => { self.bump(); @@ -2263,7 +2263,7 @@ impl<'a> Parser<'a> { _ => { if self.eat_lt() { let (qself, path) = - self.parse_qualified_path(LifetimeAndTypesWithColons)?; + self.parse_qualified_path(PathStyle::Expr)?; hi = path.span.hi; return Ok(self.mk_expr(lo, hi, ExprKind::Path(Some(qself), path), attrs)); } @@ -2350,12 +2350,8 @@ impl<'a> Parser<'a> { let mut db = self.fatal("expected expression, found statement (`let`)"); db.note("variable declaration using `let` is a statement"); return Err(db); - } else if self.check(&token::ModSep) || - self.token.is_ident() && - !self.check_keyword(keywords::True) && - !self.check_keyword(keywords::False) { - let pth = - self.parse_path(LifetimeAndTypesWithColons)?; + } else if self.token.is_path_start() { + let pth = self.parse_path(PathStyle::Expr)?; // `!`, as an operator, is prefix, so we know this isn't that if self.check(&token::Not) { @@ -2435,10 +2431,18 @@ impl<'a> Parser<'a> { hi = pth.span.hi; ex = ExprKind::Path(None, pth); } else { - // other literal expression - let lit = self.parse_lit()?; - hi = lit.span.hi; - ex = ExprKind::Lit(P(lit)); + match self.parse_lit() { + Ok(lit) => { + hi = lit.span.hi; + ex = ExprKind::Lit(P(lit)); + } + Err(mut err) => { + err.cancel(); + let msg = format!("expected expression, found {}", + self.this_token_descr()); + return Err(self.fatal(&msg)); + 
} + } } } } @@ -2577,7 +2581,7 @@ impl<'a> Parser<'a> { // expr.f if self.eat(&token::Dot) { match self.token { - token::Ident(i, _) => { + token::Ident(i) => { let dot_pos = self.last_span.hi; hi = self.span.hi; self.bump(); @@ -2632,7 +2636,7 @@ impl<'a> Parser<'a> { self.span_err(self.span, &format!("unexpected token: `{}`", actual)); let dot_pos = self.last_span.hi; - e = self.parse_dot_suffix(special_idents::invalid, + e = self.parse_dot_suffix(keywords::Invalid.ident(), mk_sp(dot_pos, dot_pos), e, lo)?; } @@ -2674,7 +2678,7 @@ impl<'a> Parser<'a> { // Parse unquoted tokens after a `$` in a token tree fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> { let mut sp = self.span; - let (name, namep) = match self.token { + let name = match self.token { token::Dollar => { self.bump(); @@ -2694,40 +2698,36 @@ impl<'a> Parser<'a> { op: repeat, num_captures: name_num }))); - } else if self.token.is_keyword_allow_following_colon(keywords::Crate) { + } else if self.token.is_keyword(keywords::Crate) { self.bump(); return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); } else { sp = mk_sp(sp.lo, self.span.hi); - let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain }; - let name = self.parse_ident()?; - (name, namep) + self.parse_ident()? } } - token::SubstNt(name, namep) => { + token::SubstNt(name) => { self.bump(); - (name, namep) + name } _ => unreachable!() }; // continue by trying to parse the `:ident` after `$name` - if self.token == token::Colon && self.look_ahead(1, |t| t.is_ident() && - !t.is_strict_keyword() && - !t.is_reserved_keyword()) { + if self.token == token::Colon && + self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) { self.bump(); sp = mk_sp(sp.lo, self.span.hi); - let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain }; let nt_kind = self.parse_ident()?; - Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp))) + Ok(TokenTree::Token(sp, MatchNt(name, nt_kind))) } else { - Ok(TokenTree::Token(sp, SubstNt(name, namep))) + Ok(TokenTree::Token(sp, SubstNt(name))) } } pub fn check_unknown_macro_variable(&mut self) { if self.quote_depth == 0 { match self.token { - token::SubstNt(name, _) => + token::SubstNt(name) => self.fatal(&format!("unknown macro variable `{}`", name)).emit(), _ => {} } @@ -3225,13 +3225,15 @@ impl<'a> Parser<'a> { Ok(self.mk_expr(lo, hi, ExprKind::IfLet(pat, expr, thn, els), attrs)) } - // `|args| expr` - pub fn parse_lambda_expr(&mut self, lo: BytePos, + // `move |args| expr` + pub fn parse_lambda_expr(&mut self, + lo: BytePos, capture_clause: CaptureBy, attrs: ThinAttributes) -> PResult<'a, P<Expr>> { let decl = self.parse_fn_block_decl()?; + let decl_hi = self.last_span.hi; let body = match decl.output { FunctionRetTy::Default(_) => { // If no explicit return type is given, parse any @@ -3255,7 +3257,8 @@ impl<'a> Parser<'a> { Ok(self.mk_expr( lo, body.span.hi, - ExprKind::Closure(capture_clause, decl, body), attrs)) + ExprKind::Closure(capture_clause, decl, body, mk_sp(lo, decl_hi)), + attrs)) } // `else` token already eaten @@ -3587,16 +3590,16 @@ impl<'a> Parser<'a> { } fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> { - if self.is_path_start() { + if self.token.is_path_start() { let lo = self.span.lo; let (qself, path) = if self.eat_lt() { // Parse a qualified path let (qself, path) = - self.parse_qualified_path(NoTypesAllowed)?; + self.parse_qualified_path(PathStyle::Expr)?; (Some(qself), path) } else { // Parse an unqualified path - (None, 
self.parse_path(LifetimeAndTypesWithColons)?) + (None, self.parse_path(PathStyle::Expr)?) }; let hi = self.last_span.hi; Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), None)) @@ -3605,12 +3608,6 @@ impl<'a> Parser<'a> { } } - fn is_path_start(&self) -> bool { - (self.token == token::Lt || self.token == token::ModSep - || self.token.is_ident() || self.token.is_path()) - && !self.token.is_keyword(keywords::True) && !self.token.is_keyword(keywords::False) - } - /// Parse a pattern. pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> { maybe_whole!(self, NtPat); @@ -3661,19 +3658,16 @@ impl<'a> Parser<'a> { // Parse box pat let subpat = self.parse_pat()?; pat = PatKind::Box(subpat); - } else if self.is_path_start() { + } else if self.token.is_path_start() { // Parse pattern starting with a path - if self.token.is_plain_ident() && self.look_ahead(1, |t| *t != token::DotDotDot && + if self.token.is_ident() && self.look_ahead(1, |t| *t != token::DotDotDot && *t != token::OpenDelim(token::Brace) && *t != token::OpenDelim(token::Paren) && - // Contrary to its definition, a plain ident can be followed by :: in macros *t != token::ModSep) { // Plain idents have some extra abilities here compared to general paths if self.look_ahead(1, |t| *t == token::Not) { // Parse macro invocation - let ident = self.parse_ident()?; - let ident_span = self.last_span; - let path = ast::Path::from_ident(ident_span, ident); + let path = self.parse_ident_into_path()?; self.bump(); let delim = self.expect_open_delim()?; let tts = self.parse_seq_to_end( @@ -3693,11 +3687,11 @@ impl<'a> Parser<'a> { let (qself, path) = if self.eat_lt() { // Parse a qualified path let (qself, path) = - self.parse_qualified_path(NoTypesAllowed)?; + self.parse_qualified_path(PathStyle::Expr)?; (Some(qself), path) } else { // Parse an unqualified path - (None, self.parse_path(LifetimeAndTypesWithColons)?) + (None, self.parse_path(PathStyle::Expr)?) }; match self.token { token::DotDotDot => { @@ -3754,12 +3748,20 @@ impl<'a> Parser<'a> { } } else { // Try to parse everything else as literal with optional minus - let begin = self.parse_pat_literal_maybe_minus()?; - if self.eat(&token::DotDotDot) { - let end = self.parse_pat_range_end()?; - pat = PatKind::Range(begin, end); - } else { - pat = PatKind::Lit(begin); + match self.parse_pat_literal_maybe_minus() { + Ok(begin) => { + if self.eat(&token::DotDotDot) { + let end = self.parse_pat_range_end()?; + pat = PatKind::Range(begin, end); + } else { + pat = PatKind::Lit(begin); + } + } + Err(mut err) => { + err.cancel(); + let msg = format!("expected pattern, found {}", self.this_token_descr()); + return Err(self.fatal(&msg)); + } } } } @@ -3956,11 +3958,11 @@ impl<'a> Parser<'a> { // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... - let pth = self.parse_path(NoTypesAllowed)?; + let pth = self.parse_ident_into_path()?; self.bump(); let id = match self.token { - token::OpenDelim(_) => token::special_idents::invalid, // no special identifier + token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier _ => self.parse_ident()?, }; @@ -3972,7 +3974,7 @@ impl<'a> Parser<'a> { _ => { // we only expect an ident if we didn't parse one // above. 
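Both the expression and pattern paths above switch to a speculative-parse-then-recover shape: the literal parser is tried first, and on failure its diagnostic is cancelled and replaced with a clearer "expected expression/pattern, found …" message built from the new `this_token_descr` helper. A toy sketch of that cancel-and-rephrase pattern; the `Diag` type here merely stands in for rustc's `DiagnosticBuilder`:

```rust
// Hedged sketch of the cancel-and-rephrase recovery used in the diff: the
// speculative literal parse's error is discarded and replaced with a more
// specific message phrased in terms of what was expected.

struct Diag {
    #[allow(dead_code)]
    msg: String,
}

impl Diag {
    fn cancel(self) {
        // Drop the diagnostic without emitting it.
    }
}

fn parse_literal(token: &str) -> Result<i64, Diag> {
    token
        .parse()
        .map_err(|_| Diag { msg: format!("unexpected token: `{}`", token) })
}

fn parse_pattern(token: &str) -> Result<String, String> {
    match parse_literal(token) {
        Ok(n) => Ok(format!("literal pattern {}", n)),
        Err(err) => {
            err.cancel(); // discard the low-level error...
            // ...and report one describing what was expected at this point.
            Err(format!("expected pattern, found `{}`", token))
        }
    }
}

fn main() {
    assert_eq!(parse_pattern("42").unwrap(), "literal pattern 42");
    println!("{}", parse_pattern("while").unwrap_err());
}
```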
- let ident_str = if id.name == token::special_idents::invalid.name { + let ident_str = if id.name == keywords::Invalid.name() { "identifier, " } else { "" @@ -3998,7 +4000,7 @@ impl<'a> Parser<'a> { MacStmtStyle::NoBraces }; - if id.name == token::special_idents::invalid.name { + if id.name == keywords::Invalid.name() { let mac = P(spanned(lo, hi, Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT })); let stmt = StmtKind::Mac(mac, style, attrs.into_thin_attrs()); spanned(lo, hi, stmt) @@ -4240,7 +4242,7 @@ impl<'a> Parser<'a> { -> PResult<'a, TyParamBounds> { if !self.eat(&token::Colon) { - Ok(P::empty()) + Ok(P::new()) } else { self.parse_ty_param_bounds(mode) } @@ -4626,17 +4628,12 @@ impl<'a> Parser<'a> { })) } - fn is_self_ident(&mut self) -> bool { - match self.token { - token::Ident(id, token::Plain) => id.name == special_idents::self_.name, - _ => false - } - } - fn expect_self_ident(&mut self) -> PResult<'a, ast::Ident> { match self.token { - token::Ident(id, token::Plain) if id.name == special_idents::self_.name => { + token::Ident(id) if id.name == keywords::SelfValue.name() => { self.bump(); + // The hygiene context of `id` needs to be preserved here, + // so we can't just return `SelfValue.ident()`. Ok(id) }, _ => { @@ -4647,27 +4644,6 @@ impl<'a> Parser<'a> { } } - fn is_self_type_ident(&mut self) -> bool { - match self.token { - token::Ident(id, token::Plain) => id.name == special_idents::type_self.name, - _ => false - } - } - - fn expect_self_type_ident(&mut self) -> PResult<'a, ast::Ident> { - match self.token { - token::Ident(id, token::Plain) if id.name == special_idents::type_self.name => { - self.bump(); - Ok(id) - }, - _ => { - let token_str = self.this_token_to_string(); - Err(self.fatal(&format!("expected `Self`, found `{}`", - token_str))) - } - } - } - /// Parse the argument list and result type of a function /// that may have a self type. fn parse_fn_decl_with_self<F>(&mut self, @@ -4736,16 +4712,16 @@ impl<'a> Parser<'a> { } else { Mutability::Immutable }; - if self.is_self_ident() { + if self.token.is_keyword(keywords::SelfValue) { let span = self.span; self.span_err(span, "cannot pass self by raw pointer"); self.bump(); } // error case, making bogus self ident: - SelfKind::Value(special_idents::self_) + SelfKind::Value(keywords::SelfValue.ident()) } token::Ident(..) => { - if self.is_self_ident() { + if self.token.is_keyword(keywords::SelfValue) { let self_ident = self.expect_self_ident()?; // Determine whether this is the fully explicit form, `self: @@ -4969,7 +4945,7 @@ impl<'a> Parser<'a> { Visibility::Inherited => (), _ => { let is_macro_rules: bool = match self.token { - token::Ident(sid, _) => sid.name == intern("macro_rules"), + token::Ident(sid) => sid.name == intern("macro_rules"), _ => false, }; if is_macro_rules { @@ -5002,7 +4978,7 @@ impl<'a> Parser<'a> { self.complain_if_pub_macro(&vis, last_span); let lo = self.span.lo; - let pth = self.parse_path(NoTypesAllowed)?; + let pth = self.parse_ident_into_path()?; self.expect(&token::Not)?; // eat a matched-delimiter token tree: @@ -5017,7 +4993,7 @@ impl<'a> Parser<'a> { if delim != token::Brace { self.expect(&token::Semi)? 
} - Ok((token::special_idents::invalid, vec![], ast::ImplItemKind::Macro(m))) + Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(m))) } else { let (constness, unsafety, abi) = self.parse_fn_front_matter()?; let ident = self.parse_ident()?; @@ -5112,7 +5088,7 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Brace))?; self.expect(&token::CloseDelim(token::Brace))?; - Ok((special_idents::invalid, + Ok((keywords::Invalid.ident(), ItemKind::DefaultImpl(unsafety, opt_trait.unwrap()), None)) } else { if opt_trait.is_some() { @@ -5128,7 +5104,7 @@ impl<'a> Parser<'a> { impl_items.push(self.parse_impl_item()?); } - Ok((special_idents::invalid, + Ok((keywords::Invalid.ident(), ItemKind::Impl(unsafety, polarity, generics, opt_trait, ty, impl_items), Some(attrs))) } @@ -5137,7 +5113,7 @@ impl<'a> Parser<'a> { /// Parse a::B<String,i32> fn parse_trait_ref(&mut self) -> PResult<'a, TraitRef> { Ok(ast::TraitRef { - path: self.parse_path(LifetimeAndTypesWithoutColons)?, + path: self.parse_path(PathStyle::Type)?, ref_id: ast::DUMMY_NODE_ID, }) } @@ -5297,8 +5273,7 @@ impl<'a> Parser<'a> { self.expect(&token::CloseDelim(token::Paren))?; Ok(Visibility::Crate(span)) } else { - let path = self.with_res(Restrictions::ALLOW_MODULE_PATHS, - |this| this.parse_path(NoTypesAllowed))?; + let path = self.parse_path(PathStyle::Mod)?; self.expect(&token::CloseDelim(token::Paren))?; Ok(Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }) } @@ -5306,7 +5281,7 @@ impl<'a> Parser<'a> { /// Parse defaultness: DEFAULT or nothing fn parse_defaultness(&mut self) -> PResult<'a, Defaultness> { - if self.eat_contextual_keyword(special_idents::DEFAULT) { + if self.eat_contextual_keyword(keywords::Default.ident()) { Ok(Defaultness::Default) } else { Ok(Defaultness::Final) @@ -5634,7 +5609,7 @@ impl<'a> Parser<'a> { }; Ok(self.mk_item(lo, last_span.hi, - special_idents::invalid, + keywords::Invalid.ident(), ItemKind::ForeignMod(m), visibility, attrs)) @@ -5773,7 +5748,7 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, - token::special_idents::invalid, + keywords::Invalid.ident(), item_, visibility, attrs); @@ -6044,7 +6019,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, Option<P<Item>>> { if macros_allowed && !self.token.is_any_keyword() && self.look_ahead(1, |t| *t == token::Not) - && (self.look_ahead(2, |t| t.is_plain_ident()) + && (self.look_ahead(2, |t| t.is_ident()) || self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren)) || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) { // MACRO INVOCATION ITEM @@ -6055,16 +6030,16 @@ impl<'a> Parser<'a> { let mac_lo = self.span.lo; // item macro. - let pth = self.parse_path(NoTypesAllowed)?; + let pth = self.parse_ident_into_path()?; self.expect(&token::Not)?; // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax // and remove this. - let id = if self.token.is_plain_ident() { + let id = if self.token.is_ident() { self.parse_ident()? 
} else { - token::special_idents::invalid // no special identifier + keywords::Invalid.ident() // no special identifier }; // eat a matched-delimiter token tree: let delim = self.expect_open_delim()?; @@ -6161,7 +6136,7 @@ impl<'a> Parser<'a> { let items = self.parse_path_list_items()?; Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items)))) } else { - let prefix = self.parse_path(ImportPrefix)?; + let prefix = self.parse_path(PathStyle::Mod)?; if self.is_import_coupler() { // `foo::bar::{a, b}` or `foo::bar::*` self.bump(); diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 16417ac0044..fcb6c3539db 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -11,7 +11,6 @@ pub use self::BinOpToken::*; pub use self::Nonterminal::*; pub use self::DelimToken::*; -pub use self::IdentStyle::*; pub use self::Lit::*; pub use self::Token::*; @@ -26,7 +25,6 @@ use std::fmt; use std::ops::Deref; use std::rc::Rc; -#[allow(non_camel_case_types)] #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum BinOpToken { Plus, @@ -53,13 +51,6 @@ pub enum DelimToken { } #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] -pub enum IdentStyle { - /// `::` follows the identifier with no whitespace in-between. - ModName, - Plain, -} - -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum SpecialMacroVar { /// `$crate` will be filled in with the name of the crate a macro was /// imported from, if any. @@ -99,7 +90,6 @@ impl Lit { } } -#[allow(non_camel_case_types)] #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)] pub enum Token { /* Expression-operator symbols. */ @@ -141,7 +131,7 @@ pub enum Token { Literal(Lit, Option<ast::Name>), /* Name components */ - Ident(ast::Ident, IdentStyle), + Ident(ast::Ident), Underscore, Lifetime(ast::Ident), @@ -151,11 +141,11 @@ pub enum Token { /// Doc comment DocComment(ast::Name), // In left-hand-sides of MBE macros: - /// Parse a nonterminal (name to bind, name of NT, styles of their idents) - MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle), + /// Parse a nonterminal (name to bind, name of NT) + MatchNt(ast::Ident, ast::Ident), // In right-hand-sides of MBE macros: /// A syntactic variable that will be filled in by macro expansion. - SubstNt(ast::Ident, IdentStyle), + SubstNt(ast::Ident), /// A macro variable with special meaning. SpecialVarNt(SpecialMacroVar), @@ -185,7 +175,7 @@ impl Token { pub fn can_begin_expr(&self) -> bool { match *self { OpenDelim(_) => true, - Ident(_, _) => true, + Ident(..) => true, Underscore => true, Tilde => true, Literal(_, _) => true, @@ -218,7 +208,7 @@ impl Token { /// Returns `true` if the token is an identifier. pub fn is_ident(&self) -> bool { match *self { - Ident(_, _) => true, + Ident(..) => true, _ => false, } } @@ -239,16 +229,6 @@ impl Token { } } - /// Returns `true` if the token is a path that is not followed by a `::` - /// token. - #[allow(non_upper_case_globals)] - pub fn is_plain_ident(&self) -> bool { - match *self { - Ident(_, Plain) => true, - _ => false, - } - } - /// Returns `true` if the token is a lifetime. 
pub fn is_lifetime(&self) -> bool { match *self { @@ -263,6 +243,11 @@ impl Token { self.is_keyword(keywords::Const) } + pub fn is_path_start(&self) -> bool { + self == &ModSep || self == &Lt || self.is_path() || + self.is_path_segment_keyword() || self.is_ident() && !self.is_any_keyword() + } + /// Maps a token to its corresponding binary operator. pub fn to_binop(&self) -> Option<BinOpKind> { match *self { @@ -289,77 +274,41 @@ impl Token { } /// Returns `true` if the token is a given keyword, `kw`. - #[allow(non_upper_case_globals)] pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { match *self { - Ident(sid, Plain) => kw.to_name() == sid.name, - _ => false, + Ident(id) => id.name == kw.name(), + _ => false, } } - pub fn is_keyword_allow_following_colon(&self, kw: keywords::Keyword) -> bool { + pub fn is_path_segment_keyword(&self) -> bool { match *self { - Ident(sid, _) => { kw.to_name() == sid.name } - _ => { false } + Ident(id) => id.name == keywords::Super.name() || + id.name == keywords::SelfValue.name() || + id.name == keywords::SelfType.name(), + _ => false, } } - /// Returns `true` if the token is either a special identifier, or a strict - /// or reserved keyword. - #[allow(non_upper_case_globals)] + /// Returns `true` if the token is either a strict or reserved keyword. pub fn is_any_keyword(&self) -> bool { - match *self { - Ident(sid, Plain) => { - let n = sid.name; - - n == SELF_KEYWORD_NAME - || n == STATIC_KEYWORD_NAME - || n == SUPER_KEYWORD_NAME - || n == SELF_TYPE_KEYWORD_NAME - || STRICT_KEYWORD_START <= n - && n <= RESERVED_KEYWORD_FINAL - }, - _ => false - } + self.is_strict_keyword() || self.is_reserved_keyword() } - /// Returns `true` if the token may not appear as an identifier. - #[allow(non_upper_case_globals)] + /// Returns `true` if the token is a strict keyword. pub fn is_strict_keyword(&self) -> bool { match *self { - Ident(sid, Plain) => { - let n = sid.name; - - n == SELF_KEYWORD_NAME - || n == STATIC_KEYWORD_NAME - || n == SUPER_KEYWORD_NAME - || n == SELF_TYPE_KEYWORD_NAME - || STRICT_KEYWORD_START <= n - && n <= STRICT_KEYWORD_FINAL - }, - Ident(sid, ModName) => { - let n = sid.name; - - n != SELF_KEYWORD_NAME - && n != SUPER_KEYWORD_NAME - && STRICT_KEYWORD_START <= n - && n <= STRICT_KEYWORD_FINAL - } + Ident(id) => id.name >= keywords::As.name() && + id.name <= keywords::While.name(), _ => false, } } - /// Returns `true` if the token is a keyword that has been reserved for - /// possible future use. - #[allow(non_upper_case_globals)] + /// Returns `true` if the token is a keyword reserved for possible future use. pub fn is_reserved_keyword(&self) -> bool { match *self { - Ident(sid, Plain) => { - let n = sid.name; - - RESERVED_KEYWORD_START <= n - && n <= RESERVED_KEYWORD_FINAL - }, + Ident(id) => id.name >= keywords::Abstract.name() && + id.name <= keywords::Yield.name(), _ => false, } } @@ -369,7 +318,7 @@ impl Token { /// See `styntax::ext::mtwt`. 
pub fn mtwt_eq(&self, other : &Token) -> bool { match (self, other) { - (&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) => + (&Ident(id1), &Ident(id2)) | (&Lifetime(id1), &Lifetime(id2)) => mtwt::resolve(id1) == mtwt::resolve(id2), _ => *self == *other } @@ -385,7 +334,7 @@ pub enum Nonterminal { NtPat(P<ast::Pat>), NtExpr(P<ast::Expr>), NtTy(P<ast::Ty>), - NtIdent(Box<ast::SpannedIdent>, IdentStyle), + NtIdent(Box<ast::SpannedIdent>), /// Stuff inside brackets for attributes NtMeta(P<ast::MetaItem>), NtPath(Box<ast::Path>), @@ -422,191 +371,104 @@ impl fmt::Debug for Nonterminal { } } - -// Get the first "argument" -macro_rules! first { - ( $first:expr, $( $remainder:expr, )* ) => ( $first ) -} - -// Get the last "argument" (has to be done recursively to avoid phoney local ambiguity error) -macro_rules! last { - ( $first:expr, $( $remainder:expr, )+ ) => ( last!( $( $remainder, )+ ) ); - ( $first:expr, ) => ( $first ) -} - // In this macro, there is the requirement that the name (the number) must be monotonically // increasing by one in the special identifiers, starting at 0; the same holds for the keywords, -// except starting from the next number instead of zero, and with the additional exception that -// special identifiers are *also* allowed (they are deduplicated in the important place, the -// interner), an exception which is demonstrated by "static" and "self". -macro_rules! declare_special_idents_and_keywords {( - // So now, in these rules, why is each definition parenthesised? - // Answer: otherwise we get a spurious local ambiguity bug on the "}" - pub mod special_idents { - $( ($si_name:expr, $si_static:ident, $si_str:expr); )* - } - - pub mod keywords { - 'strict: - $( ($sk_name:expr, $sk_variant:ident, $sk_str:expr); )* - 'reserved: - $( ($rk_name:expr, $rk_variant:ident, $rk_str:expr); )* - } +// except starting from the next number instead of zero. +macro_rules! declare_keywords {( + $( ($index: expr, $konst: ident, $string: expr) )* ) => { - const STRICT_KEYWORD_START: ast::Name = first!($( ast::Name($sk_name), )*); - const STRICT_KEYWORD_FINAL: ast::Name = last!($( ast::Name($sk_name), )*); - const RESERVED_KEYWORD_START: ast::Name = first!($( ast::Name($rk_name), )*); - const RESERVED_KEYWORD_FINAL: ast::Name = last!($( ast::Name($rk_name), )*); - - pub mod special_idents { - use ast; - $( - #[allow(non_upper_case_globals)] - pub const $si_static: ast::Ident = ast::Ident { - name: ast::Name($si_name), - ctxt: ast::EMPTY_CTXT, - }; - )* - } - - pub mod special_names { - use ast; - $( - #[allow(non_upper_case_globals)] - pub const $si_static: ast::Name = ast::Name($si_name); - )* - } - - /// All the valid words that have meaning in the Rust language. - /// - /// Rust keywords are either 'strict' or 'reserved'. Strict keywords may not - /// appear as identifiers at all. Reserved keywords are not used anywhere in - /// the language and may not appear as identifiers. 
pub mod keywords { - pub use self::Keyword::*; use ast; - - #[derive(Copy, Clone, PartialEq, Eq)] - pub enum Keyword { - $( $sk_variant, )* - $( $rk_variant, )* + #[derive(Clone, Copy, PartialEq, Eq)] + pub struct Keyword { + ident: ast::Ident, } - impl Keyword { - pub fn to_name(&self) -> ast::Name { - match *self { - $( $sk_variant => ast::Name($sk_name), )* - $( $rk_variant => ast::Name($rk_name), )* - } - } + #[inline] pub fn ident(self) -> ast::Ident { self.ident } + #[inline] pub fn name(self) -> ast::Name { self.ident.name } } + $( + #[allow(non_upper_case_globals)] + pub const $konst: Keyword = Keyword { + ident: ast::Ident::with_empty_ctxt(ast::Name($index)) + }; + )* } fn mk_fresh_ident_interner() -> IdentInterner { - let mut init_vec = Vec::new(); - $(init_vec.push($si_str);)* - $(init_vec.push($sk_str);)* - $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(&init_vec[..]) + interner::StrInterner::prefill(&[$($string,)*]) } }} -// If the special idents get renumbered, remember to modify these two as appropriate -pub const SELF_KEYWORD_NAME: ast::Name = ast::Name(SELF_KEYWORD_NAME_NUM); -const STATIC_KEYWORD_NAME: ast::Name = ast::Name(STATIC_KEYWORD_NAME_NUM); -pub const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM); -const SELF_TYPE_KEYWORD_NAME: ast::Name = ast::Name(SELF_TYPE_KEYWORD_NAME_NUM); - -pub const SELF_KEYWORD_NAME_NUM: u32 = 1; -const STATIC_KEYWORD_NAME_NUM: u32 = 2; -const SUPER_KEYWORD_NAME_NUM: u32 = 3; -const SELF_TYPE_KEYWORD_NAME_NUM: u32 = 10; - // NB: leaving holes in the ident table is bad! a different ident will get // interned with the id from the hole, but it will be between the min and max // of the reserved words, and thus tagged as "reserved". - -declare_special_idents_and_keywords! 
{ - pub mod special_idents { - // These ones are statics - (0, invalid, ""); - (super::SELF_KEYWORD_NAME_NUM, self_, "self"); - (super::STATIC_KEYWORD_NAME_NUM, statik, "static"); - (super::SUPER_KEYWORD_NAME_NUM, super_, "super"); - (4, static_lifetime, "'static"); - - // for matcher NTs - (5, tt, "tt"); - (6, matchers, "matchers"); - - // outside of libsyntax - (7, clownshoe_abi, "__rust_abi"); - (8, opaque, "<opaque>"); - (9, __unused1, "<__unused1>"); - (super::SELF_TYPE_KEYWORD_NAME_NUM, type_self, "Self"); - (11, prelude_import, "prelude_import"); - (12, DEFAULT, "default"); - } - - pub mod keywords { - // These ones are variants of the Keyword enum - - 'strict: - (13, As, "as"); - (14, Break, "break"); - (15, Crate, "crate"); - (16, Else, "else"); - (17, Enum, "enum"); - (18, Extern, "extern"); - (19, False, "false"); - (20, Fn, "fn"); - (21, For, "for"); - (22, If, "if"); - (23, Impl, "impl"); - (24, In, "in"); - (25, Let, "let"); - (26, Loop, "loop"); - (27, Match, "match"); - (28, Mod, "mod"); - (29, Move, "move"); - (30, Mut, "mut"); - (31, Pub, "pub"); - (32, Ref, "ref"); - (33, Return, "return"); - // Static and Self are also special idents (prefill de-dupes) - (super::STATIC_KEYWORD_NAME_NUM, Static, "static"); - (super::SELF_KEYWORD_NAME_NUM, SelfValue, "self"); - (super::SELF_TYPE_KEYWORD_NAME_NUM, SelfType, "Self"); - (34, Struct, "struct"); - (super::SUPER_KEYWORD_NAME_NUM, Super, "super"); - (35, True, "true"); - (36, Trait, "trait"); - (37, Type, "type"); - (38, Unsafe, "unsafe"); - (39, Use, "use"); - (40, While, "while"); - (41, Continue, "continue"); - (42, Box, "box"); - (43, Const, "const"); - (44, Where, "where"); - 'reserved: - (45, Virtual, "virtual"); - (46, Proc, "proc"); - (47, Alignof, "alignof"); - (48, Become, "become"); - (49, Offsetof, "offsetof"); - (50, Priv, "priv"); - (51, Pure, "pure"); - (52, Sizeof, "sizeof"); - (53, Typeof, "typeof"); - (54, Unsized, "unsized"); - (55, Yield, "yield"); - (56, Do, "do"); - (57, Abstract, "abstract"); - (58, Final, "final"); - (59, Override, "override"); - (60, Macro, "macro"); - } +// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, +// this should be rarely necessary though if the keywords are kept in alphabetic order. +declare_keywords! { + // Invalid identifier + (0, Invalid, "") + + // Strict keywords used in the language. + (1, As, "as") + (2, Box, "box") + (3, Break, "break") + (4, Const, "const") + (5, Continue, "continue") + (6, Crate, "crate") + (7, Else, "else") + (8, Enum, "enum") + (9, Extern, "extern") + (10, False, "false") + (11, Fn, "fn") + (12, For, "for") + (13, If, "if") + (14, Impl, "impl") + (15, In, "in") + (16, Let, "let") + (17, Loop, "loop") + (18, Match, "match") + (19, Mod, "mod") + (20, Move, "move") + (21, Mut, "mut") + (22, Pub, "pub") + (23, Ref, "ref") + (24, Return, "return") + (25, SelfValue, "self") + (26, SelfType, "Self") + (27, Static, "static") + (28, Struct, "struct") + (29, Super, "super") + (30, Trait, "trait") + (31, True, "true") + (32, Type, "type") + (33, Unsafe, "unsafe") + (34, Use, "use") + (35, Where, "where") + (36, While, "while") + + // Keywords reserved for future use. 
+ (37, Abstract, "abstract") + (38, Alignof, "alignof") + (39, Become, "become") + (40, Do, "do") + (41, Final, "final") + (42, Macro, "macro") + (43, Offsetof, "offsetof") + (44, Override, "override") + (45, Priv, "priv") + (46, Proc, "proc") + (47, Pure, "pure") + (48, Sizeof, "sizeof") + (49, Typeof, "typeof") + (50, Unsized, "unsized") + (51, Virtual, "virtual") + (52, Yield, "yield") + + // Weak keywords, have special meaning only in specific contexts. + (53, Default, "default") + (54, StaticLifetime, "'static") + (55, Union, "union") } // looks like we can get rid of this completely... @@ -779,6 +641,6 @@ mod tests { assert!(Gt.mtwt_eq(&Gt)); let a = str_to_ident("bac"); let a1 = mark_ident(a,92); - assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain))); + assert!(Ident(a).mtwt_eq(&Ident(a1))); } } |
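The largest structural change in `token.rs` is replacing the split `special_idents`/`keywords` tables with a single contiguous `declare_keywords!` list, so `is_strict_keyword` and `is_reserved_keyword` become plain index-range comparisons (`As..=While` and `Abstract..=Yield`). A simplified sketch of that range-based classification, with indices copied from the list above but the types reduced to a bare newtype:

```rust
// Sketch of the contiguous keyword table introduced in token.rs: each keyword
// gets a fixed index, and strict/reserved classification is an index range
// check. Indices mirror the diff; the types are deliberately simplified.

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Name(u32);

const INVALID: Name = Name(0);
const AS: Name = Name(1);        // first strict keyword
const WHILE: Name = Name(36);    // last strict keyword
const ABSTRACT: Name = Name(37); // first reserved keyword
const YIELD: Name = Name(52);    // last reserved keyword

fn is_strict_keyword(name: Name) -> bool {
    name >= AS && name <= WHILE
}

fn is_reserved_keyword(name: Name) -> bool {
    name >= ABSTRACT && name <= YIELD
}

fn is_any_keyword(name: Name) -> bool {
    is_strict_keyword(name) || is_reserved_keyword(name)
}

fn main() {
    assert!(is_strict_keyword(Name(25)));   // `self` is now an ordinary strict keyword
    assert!(is_reserved_keyword(Name(40))); // `do`
    assert!(!is_any_keyword(INVALID));
    assert!(!is_any_keyword(Name(53)));     // `default` is a weak keyword, neither strict nor reserved
    println!("keyword ranges check out");
}
```

Keeping the table contiguous is what makes the range check valid, which is why the comment in the diff warns against leaving holes in the ident table and asks that new keywords stay in alphabetical order.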
