| author | bors <bors@rust-lang.org> | 2015-02-03 03:44:05 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2015-02-03 03:44:05 +0000 |
| commit | 7858cb432d3f2efc0374424cb2b51518f697c172 | |
| tree | bccd460a861e61f758d2d459cb9da02b1ad8792b /src/libsyntax/parse | |
| parent | eaf4c5c784637f3df8bdebc6ec21dbd4bc69420a | |
| parent | 9ece22ee00033cdf0b6b418c451112c92c8ad922 | |
Auto merge of #21872 - alexcrichton:rollup, r=alexcrichton
Diffstat (limited to 'src/libsyntax/parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 9 |
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 10 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 28 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 39 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 152 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 2 |

6 files changed, 138 insertions(+), 102 deletions(-)
```diff
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 54ec9c7b146..06e8728d236 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -13,7 +13,7 @@
 use ast;
 use codemap::{spanned, Spanned, mk_sp, Span};
 use parse::common::*; //resolve bug?
 use parse::token;
-use parse::parser::Parser;
+use parse::parser::{Parser, TokenType};
 use ptr::P;
 
 /// A parser that can parse attributes.
@@ -69,7 +69,9 @@ impl<'a> ParserAttr for Parser<'a> {
         let lo = self.span.lo;
         self.bump();
 
-        let style = if self.eat(&token::Not) {
+        if permit_inner { self.expected_tokens.push(TokenType::Token(token::Not)); }
+        let style = if self.token == token::Not {
+            self.bump();
             if !permit_inner {
                 let span = self.span;
                 self.span_err(span,
@@ -96,7 +98,8 @@ impl<'a> ParserAttr for Parser<'a> {
             }
         };
 
-        if permit_inner && self.eat(&token::Semi) {
+        if permit_inner && self.token == token::Semi {
+            self.bump();
             self.span_warn(span, "this inner attribute syntax is deprecated. \
                            The new syntax is `#![foo]`, with a bang and no semicolon");
             style = ast::AttrInner;
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 926385ccd11..b17fc7fe82e 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -62,7 +62,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
 pub fn strip_doc_comment_decoration(comment: &str) -> String {
     /// remove whitespace-only lines from the start/end of lines
     fn vertical_trim(lines: Vec<String> ) -> Vec<String> {
-        let mut i = 0us;
+        let mut i = 0;
         let mut j = lines.len();
         // first line of all-stars should be omitted
         if lines.len() > 0 &&
@@ -90,7 +90,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         let mut i = usize::MAX;
         let mut can_trim = true;
         let mut first = true;
-        for line in lines.iter() {
+        for line in &lines {
             for (j, c) in line.chars().enumerate() {
                 if j > i || !"* \t".contains_char(c) {
                     can_trim = false;
@@ -125,7 +125,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
     // one-line comments lose their prefix
     static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
-    for prefix in ONLINERS.iter() {
+    for prefix in ONLINERS {
         if comment.starts_with(*prefix) {
             return (&comment[prefix.len()..]).to_string();
         }
     }
@@ -158,7 +158,7 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
 fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader,
                                            comments: &mut Vec<Comment>) {
     while is_whitespace(rdr.curr) && !rdr.is_eof() {
-        if rdr.col == CharPos(0us) && rdr.curr_is('\n') {
+        if rdr.col == CharPos(0) && rdr.curr_is('\n') {
             push_blank_line_comment(rdr, &mut *comments);
         }
         rdr.bump();
@@ -305,7 +305,7 @@ fn read_block_comment(rdr: &mut StringReader,
     let mut style = if code_to_the_left { Trailing } else { Isolated };
     rdr.consume_non_eol_whitespace();
-    if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1us {
+    if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1 {
         style = Mixed;
     }
     debug!("<<< block comment");
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 2cf6058a433..e6da47304ce 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -279,7 +279,7 @@ impl<'a> StringReader<'a> {
     /// Converts CRLF to LF in the given string, raising an error on bare CR.
     fn translate_crlf<'b>(&self, start: BytePos,
                           s: &'b str, errmsg: &'b str) -> CowString<'b> {
-        let mut i = 0us;
+        let mut i = 0;
         while i < s.len() {
             let str::CharRange { ch, next } = s.char_range_at(i);
             if ch == '\r' {
@@ -331,10 +331,10 @@ impl<'a> StringReader<'a> {
             let byte_offset_diff = next.next - current_byte_offset;
             self.pos = self.pos + Pos::from_usize(byte_offset_diff);
             self.curr = Some(next.ch);
-            self.col = self.col + CharPos(1us);
+            self.col = self.col + CharPos(1);
             if last_char == '\n' {
                 self.filemap.next_line(self.last_pos);
-                self.col = CharPos(0us);
+                self.col = CharPos(0);
             }
 
             if byte_offset_diff > 1 {
@@ -472,7 +472,7 @@ impl<'a> StringReader<'a> {
             cmap.files.borrow_mut().push(self.filemap.clone());
             let loc = cmap.lookup_char_pos_adj(self.last_pos);
             debug!("Skipping a shebang");
-            if loc.line == 1us && loc.col == CharPos(0us) {
+            if loc.line == 1 && loc.col == CharPos(0) {
                 // FIXME: Add shebang "token", return it
                 let start = self.last_pos;
                 while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
@@ -646,7 +646,7 @@ impl<'a> StringReader<'a> {
     /// Scan through any digits (base `radix`) or underscores, and return how
     /// many digits there were.
     fn scan_digits(&mut self, radix: usize) -> usize {
-        let mut len = 0us;
+        let mut len = 0;
         loop {
             let c = self.curr;
             if c == Some('_') { debug!("skipping a _"); self.bump(); continue; }
@@ -799,14 +799,14 @@ impl<'a> StringReader<'a> {
                     if self.curr == Some('{') {
                         self.scan_unicode_escape(delim)
                     } else {
-                        let res = self.scan_hex_digits(4us, delim, false);
+                        let res = self.scan_hex_digits(4, delim, false);
                         let sp = codemap::mk_sp(escaped_pos, self.last_pos);
                         self.old_escape_warning(sp);
                         res
                     }
                 }
                 'U' if !ascii_only => {
-                    let res = self.scan_hex_digits(8us, delim, false);
+                    let res = self.scan_hex_digits(8, delim, false);
                     let sp = codemap::mk_sp(escaped_pos, self.last_pos);
                     self.old_escape_warning(sp);
                     res
@@ -877,7 +877,7 @@ impl<'a> StringReader<'a> {
     fn scan_unicode_escape(&mut self, delim: char) -> bool {
         self.bump(); // past the {
         let start_bpos = self.last_pos;
-        let mut count = 0us;
+        let mut count = 0;
         let mut accum_int = 0;
 
         while !self.curr_is('}') && count <= 6 {
@@ -937,10 +937,10 @@ impl<'a> StringReader<'a> {
     /// error if it isn't.
     fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
         match base {
-            16us => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
+            16 => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
                                    supported"),
-            8us => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
-            2us => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
+            8 => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
+            2 => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
             _ => ()
         }
     }
@@ -1189,7 +1189,7 @@ impl<'a> StringReader<'a> {
           'r' => {
             let start_bpos = self.last_pos;
             self.bump();
-            let mut hash_count = 0us;
+            let mut hash_count = 0;
             while self.curr_is('#') {
                 self.bump();
                 hash_count += 1;
@@ -1374,7 +1374,7 @@ impl<'a> StringReader<'a> {
     fn scan_raw_byte_string(&mut self) -> token::Lit {
         let start_bpos = self.last_pos;
         self.bump();
-        let mut hash_count = 0us;
+        let mut hash_count = 0;
         while self.curr_is('#') {
             self.bump();
             hash_count += 1;
@@ -1526,7 +1526,7 @@ mod test {
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
     fn check_tokenization (mut string_reader: StringReader, expected: Vec<token::Token> ) {
-        for expected_tok in expected.iter() {
+        for expected_tok in &expected {
             assert_eq!(&string_reader.next_token().tok, expected_tok);
         }
     }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 8ac5b6e5274..eecd7d87185 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -181,7 +181,7 @@ pub fn parse_tts_from_source_str(name: String,
         name,
         source
     );
-    p.quote_depth += 1us;
+    p.quote_depth += 1;
     // right now this is re-creating the token trees from ... token trees.
     maybe_aborted(p.parse_all_token_trees(),p)
 }
@@ -324,7 +324,7 @@ pub mod with_hygiene {
             name,
            source
        );
-        p.quote_depth += 1us;
+        p.quote_depth += 1;
         // right now this is re-creating the token trees from ... token trees.
         maybe_aborted(p.parse_all_token_trees(),p)
     }
@@ -436,7 +436,7 @@ pub fn str_lit(lit: &str) -> String {
     let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a>(it: &mut iter::Peekable<(usize, char), str::CharIndices<'a>>) {
+    fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
@@ -605,7 +605,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
     let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<(usize, u8), I>) {
+    fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
@@ -683,9 +683,9 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
     match suffix {
         Some(suf) if looks_like_width_suffix(&['f'], suf) => {
             match base {
-                16us => sd.span_err(sp, "hexadecimal float literal is not supported"),
-                8us => sd.span_err(sp, "octal float literal is not supported"),
-                2us => sd.span_err(sp, "binary float literal is not supported"),
+                16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
+                8 => sd.span_err(sp, "octal float literal is not supported"),
+                2 => sd.span_err(sp, "binary float literal is not supported"),
                 _ => ()
             }
             let ident = token::intern_and_get_ident(&*s);
@@ -755,6 +755,7 @@ mod test {
     use ast;
     use abi;
     use attr::{first_attr_value_str_by_name, AttrMetaMethods};
+    use parse;
     use parse::parser::Parser;
     use parse::token::{str_to_ident};
     use print::pprust::item_to_string;
@@ -1163,7 +1164,7 @@ mod test {
             "impl z { fn a (self: Foo, &myarg: i32) {} }",
             ];
 
-        for &src in srcs.iter() {
+        for &src in &srcs {
             let spans = get_spans_of_pat_idents(src);
             let Span{ lo, hi, .. } = spans[0];
             assert!("self" == &src[lo.to_usize()..hi.to_usize()],
@@ -1214,4 +1215,26 @@ mod test {
         let doc = first_attr_value_str_by_name(item.attrs.as_slice(), "doc").unwrap();
         assert_eq!(doc.get(), "/** doc comment\n * with CRLF */");
     }
+
+    #[test]
+    fn ttdelim_span() {
+        let sess = parse::new_parse_sess();
+        let expr = parse::parse_expr_from_source_str("foo".to_string(),
+            "foo!( fn main() { body } )".to_string(), vec![], &sess);
+
+        let tts = match expr.node {
+            ast::ExprMac(ref mac) => {
+                let ast::MacInvocTT(_, ref tts, _) = mac.node;
+                tts.clone()
+            }
+            _ => panic!("not a macro"),
+        };
+
+        let span = tts.iter().rev().next().unwrap().get_span();
+
+        match sess.span_diagnostic.cm.span_to_snippet(span) {
+            Some(s) => assert_eq!(&s[], "{ body }"),
+            None => panic!("could not get snippet"),
+        }
+    }
 }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index d99095eeba3..c3182602a4b 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -290,6 +290,7 @@ pub struct Parser<'a> {
 #[derive(PartialEq, Eq, Clone)]
 pub enum TokenType {
     Token(token::Token),
+    Keyword(keywords::Keyword),
     Operator,
 }
 
@@ -298,6 +299,7 @@ impl TokenType {
         match *self {
             TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)),
             TokenType::Operator => "an operator".to_string(),
+            TokenType::Keyword(kw) => format!("`{}`", token::get_name(kw.to_name())),
         }
     }
 }
@@ -365,9 +367,9 @@ impl<'a> Parser<'a> {
                            token_str)[]);
     }
 
-    pub fn unexpected(&self) -> ! {
-        let this_token = self.this_token_to_string();
-        self.fatal(&format!("unexpected token: `{}`", this_token)[]);
+    pub fn unexpected(&mut self) -> ! {
+        self.expect_one_of(&[], &[]);
+        unreachable!()
     }
 
     /// Expect and consume the token t. Signal an error if
@@ -425,10 +427,13 @@ impl<'a> Parser<'a> {
             let expect = tokens_to_string(&expected[]);
             let actual = self.this_token_to_string();
             self.fatal(
-                &(if expected.len() != 1 {
+                &(if expected.len() > 1 {
                     (format!("expected one of {}, found `{}`",
                              expect,
                              actual))
+                } else if expected.len() == 0 {
+                    (format!("unexpected token: `{}`",
+                             actual))
                 } else {
                     (format!("expected {}, found `{}`",
                              expect,
@@ -515,7 +520,7 @@ impl<'a> Parser<'a> {
     pub fn parse_path_list_item(&mut self) -> ast::PathListItem {
         let lo = self.span.lo;
-        let node = if self.eat_keyword(keywords::Mod) {
+        let node = if self.eat_keyword_noexpect(keywords::Mod) {
             let span = self.last_span;
             self.span_warn(span,
                            "deprecated syntax; use the `self` keyword now");
             ast::PathListMod { id: ast::DUMMY_NODE_ID }
@@ -547,9 +552,23 @@ impl<'a> Parser<'a> {
         is_present
     }
 
+    pub fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
+        self.expected_tokens.push(TokenType::Keyword(kw));
+        self.token.is_keyword(kw)
+    }
+
     /// If the next token is the given keyword, eat it and return
     /// true. Otherwise, return false.
     pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
+        if self.check_keyword(kw) {
+            self.bump();
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
         if self.token.is_keyword(kw) {
             self.bump();
             true
@@ -563,10 +582,7 @@ impl<'a> Parser<'a> {
     /// Otherwise, eat it.
     pub fn expect_keyword(&mut self, kw: keywords::Keyword) {
         if !self.eat_keyword(kw) {
-            let id_interned_str = token::get_name(kw.to_name());
-            let token_str = self.this_token_to_string();
-            self.fatal(&format!("expected `{}`, found `{}`",
-                                id_interned_str, token_str)[])
+            self.expect_one_of(&[], &[]);
         }
     }
 
@@ -593,6 +609,7 @@ impl<'a> Parser<'a> {
     /// Expect and consume an `&`. If `&&` is seen, replace it with a single
     /// `&` and continue. If an `&` is not seen, signal an error.
     fn expect_and(&mut self) {
+        self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
         match self.token {
             token::BinOp(token::And) => self.bump(),
             token::AndAnd => {
@@ -601,12 +618,7 @@ impl<'a> Parser<'a> {
                 self.replace_token(token::BinOp(token::And), lo, span.hi)
             }
             _ => {
-                let token_str = self.this_token_to_string();
-                let found_token =
-                    Parser::token_to_string(&token::BinOp(token::And));
-                self.fatal(&format!("expected `{}`, found `{}`",
-                                    found_token,
-                                    token_str)[])
+                self.expect_one_of(&[], &[]);
             }
         }
     }
@@ -614,6 +626,7 @@ impl<'a> Parser<'a> {
     /// Expect and consume a `|`. If `||` is seen, replace it with a single
     /// `|` and continue. If a `|` is not seen, signal an error.
     fn expect_or(&mut self) {
+        self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
         match self.token {
             token::BinOp(token::Or) => self.bump(),
             token::OrOr => {
@@ -622,12 +635,7 @@ impl<'a> Parser<'a> {
                 self.replace_token(token::BinOp(token::Or), lo, span.hi)
             }
             _ => {
-                let found_token = self.this_token_to_string();
-                let token_str =
-                    Parser::token_to_string(&token::BinOp(token::Or));
-                self.fatal(&format!("expected `{}`, found `{}`",
-                                    token_str,
-                                    found_token)[])
+                self.expect_one_of(&[], &[]);
             }
         }
     }
@@ -652,6 +660,7 @@ impl<'a> Parser<'a> {
     /// This is meant to be used when parsing generics on a path to get the
     /// starting token.
     fn eat_lt(&mut self) -> bool {
+        self.expected_tokens.push(TokenType::Token(token::Lt));
         match self.token {
             token::Lt => { self.bump(); true }
             token::BinOp(token::Shl) => {
@@ -666,11 +675,7 @@ impl<'a> Parser<'a> {
 
     fn expect_lt(&mut self) {
         if !self.eat_lt() {
-            let found_token = self.this_token_to_string();
-            let token_str = Parser::token_to_string(&token::Lt);
-            self.fatal(&format!("expected `{}`, found `{}`",
-                                token_str,
-                                found_token)[])
+            self.expect_one_of(&[], &[]);
         }
     }
 
@@ -700,6 +705,7 @@ impl<'a> Parser<'a> {
     /// with a single > and continue. If a GT is not seen,
     /// signal an error.
     pub fn expect_gt(&mut self) {
+        self.expected_tokens.push(TokenType::Token(token::Gt));
         match self.token {
             token::Gt => self.bump(),
             token::BinOp(token::Shr) => {
@@ -740,7 +746,7 @@ impl<'a> Parser<'a> {
         // would encounter a `>` and stop. This lets the parser handle trailing
         // commas in generic parameters, because it can stop either after
         // parsing a type or after parsing a comma.
-        for i in iter::count(0us, 1) {
+        for i in iter::count(0, 1) {
             if self.check(&token::Gt)
                 || self.token == token::BinOp(token::Shr)
                 || self.token == token::Ge
@@ -917,7 +923,7 @@ impl<'a> Parser<'a> {
         };
         self.span = next.sp;
         self.token = next.tok;
-        self.tokens_consumed += 1us;
+        self.tokens_consumed += 1;
         self.expected_tokens.clear();
         // check after each token
         self.check_unknown_macro_variable();
@@ -998,14 +1004,14 @@ impl<'a> Parser<'a> {
     /// Is the current token one of the keywords that signals a bare function
     /// type?
     pub fn token_is_bare_fn_keyword(&mut self) -> bool {
-        self.token.is_keyword(keywords::Fn) ||
-            self.token.is_keyword(keywords::Unsafe) ||
-            self.token.is_keyword(keywords::Extern)
+        self.check_keyword(keywords::Fn) ||
+            self.check_keyword(keywords::Unsafe) ||
+            self.check_keyword(keywords::Extern)
     }
 
     /// Is the current token one of the keywords that signals a closure type?
     pub fn token_is_closure_keyword(&mut self) -> bool {
-        self.token.is_keyword(keywords::Unsafe)
+        self.check_keyword(keywords::Unsafe)
     }
 
     pub fn get_lifetime(&mut self) -> ast::Ident {
@@ -1035,7 +1041,7 @@ impl<'a> Parser<'a> {
         let lifetime_defs = self.parse_late_bound_lifetime_defs();
 
         // examine next token to decide to do
-        if self.eat_keyword(keywords::Proc) {
+        if self.eat_keyword_noexpect(keywords::Proc) {
             self.parse_proc_type(lifetime_defs)
         } else if self.token_is_bare_fn_keyword() || self.token_is_closure_keyword() {
             self.parse_ty_bare_fn_or_ty_closure(lifetime_defs)
@@ -1166,11 +1172,11 @@ impl<'a> Parser<'a> {
         // Closure:  [unsafe] <'lt> |S| [:Bounds] -> T
         // Fn:  [unsafe] [extern "ABI"] fn <'lt> (S) -> T
 
-        if self.token.is_keyword(keywords::Fn) {
+        if self.check_keyword(keywords::Fn) {
             self.parse_ty_bare_fn(lifetime_defs)
-        } else if self.token.is_keyword(keywords::Extern) {
+        } else if self.check_keyword(keywords::Extern) {
             self.parse_ty_bare_fn(lifetime_defs)
-        } else if self.token.is_keyword(keywords::Unsafe) {
+        } else if self.check_keyword(keywords::Unsafe) {
             if self.look_ahead(1, |t| t.is_keyword(keywords::Fn) ||
                                       t.is_keyword(keywords::Extern)) {
                 self.parse_ty_bare_fn(lifetime_defs)
@@ -1480,7 +1486,7 @@ impl<'a> Parser<'a> {
             // BORROWED POINTER
             self.expect_and();
             self.parse_borrowed_pointee()
-        } else if self.token.is_keyword(keywords::For) {
+        } else if self.check_keyword(keywords::For) {
             self.parse_for_in_type()
         } else if self.token_is_bare_fn_keyword() ||
                   self.token_is_closure_keyword() {
@@ -1494,14 +1500,14 @@ impl<'a> Parser<'a> {
                 })) {
             // CLOSURE
             self.parse_ty_closure(Vec::new())
-        } else if self.eat_keyword(keywords::Typeof) {
+        } else if self.eat_keyword_noexpect(keywords::Typeof) {
             // TYPEOF
             // In order to not be ambiguous, the type must be surrounded by parens.
             self.expect(&token::OpenDelim(token::Paren));
             let e = self.parse_expr();
             self.expect(&token::CloseDelim(token::Paren));
             TyTypeof(e)
-        } else if self.eat_keyword(keywords::Proc) {
+        } else if self.eat_keyword_noexpect(keywords::Proc) {
             self.parse_proc_type(Vec::new())
         } else if self.eat_lt() {
             // QUALIFIED PATH `<TYPE as TRAIT_REF>::item`
@@ -2092,6 +2098,7 @@ impl<'a> Parser<'a> {
     }
 
     fn expect_open_delim(&mut self) -> token::DelimToken {
+        self.expected_tokens.push(TokenType::Token(token::Gt));
         match self.token {
             token::OpenDelim(delim) => {
                 self.bump();
@@ -2233,7 +2240,7 @@ impl<'a> Parser<'a> {
         if self.eat_keyword(keywords::Move) {
             return self.parse_lambda_expr(CaptureByValue);
         }
-        if self.eat_keyword(keywords::Proc) {
+        if self.eat_keyword_noexpect(keywords::Proc) {
             let span = self.last_span;
             let _ = self.parse_proc_decl();
             let _ = self.parse_expr();
@@ -2307,8 +2314,8 @@ impl<'a> Parser<'a> {
             hi = self.span.hi;
         } else if self.check(&token::ModSep) ||
                 self.token.is_ident() &&
-                !self.token.is_keyword(keywords::True) &&
-                !self.token.is_keyword(keywords::False) {
+                !self.check_keyword(keywords::True) &&
+                !self.check_keyword(keywords::False) {
             let pth =
                 self.parse_path(LifetimeAndTypesWithColons);
 
@@ -2625,7 +2632,7 @@ impl<'a> Parser<'a> {
     }
 
     pub fn check_unknown_macro_variable(&mut self) {
-        if self.quote_depth == 0us {
+        if self.quote_depth == 0 {
             match self.token {
                 token::SubstNt(name, _) =>
                     self.fatal(&format!("unknown macro variable `{}`",
@@ -2694,7 +2701,7 @@ impl<'a> Parser<'a> {
                                token_str)[])
                 },
                 /* we ought to allow different depths of unquotation */
-                token::Dollar | token::SubstNt(..) if p.quote_depth > 0us => {
+                token::Dollar | token::SubstNt(..) if p.quote_depth > 0 => {
                     p.parse_unquoted()
                 }
                 _ => {
@@ -2706,7 +2713,7 @@ impl<'a> Parser<'a> {
         match self.token {
             token::Eof => {
                 let open_braces = self.open_braces.clone();
-                for sp in open_braces.iter() {
+                for sp in &open_braces {
                     self.span_help(*sp, "did you mean to close this delimiter?");
                 }
                 // There shouldn't really be a span, but it's easier for the test runner
@@ -2735,7 +2742,7 @@ impl<'a> Parser<'a> {
                 self.open_braces.pop().unwrap();
 
                 // Expand to cover the entire delimited token tree
-                let span = Span { hi: self.span.hi, ..pre_span };
+                let span = Span { hi: close_span.hi, ..pre_span };
 
                 TtDelimited(span, Rc::new(Delimited {
                     delim: delim,
@@ -2792,7 +2799,7 @@ impl<'a> Parser<'a> {
             ex = ExprAddrOf(m, e);
           }
           token::Ident(_, _) => {
-            if !self.token.is_keyword(keywords::Box) {
+            if !self.check_keyword(keywords::Box) {
                 return self.parse_dot_or_call_expr();
             }
 
@@ -2879,7 +2886,7 @@ impl<'a> Parser<'a> {
                 }
             }
             None => {
-                if AS_PREC >= min_prec && self.eat_keyword(keywords::As) {
+                if AS_PREC >= min_prec && self.eat_keyword_noexpect(keywords::As) {
                     let rhs = self.parse_ty();
                     let _as = self.mk_expr(lhs.span.lo,
                                            rhs.span.hi,
@@ -3002,7 +3009,7 @@ impl<'a> Parser<'a> {
 
     /// Parse an 'if' or 'if let' expression ('if' token already eaten)
     pub fn parse_if_expr(&mut self) -> P<Expr> {
-        if self.token.is_keyword(keywords::Let) {
+        if self.check_keyword(keywords::Let) {
             return self.parse_if_let_expr();
         }
         let lo = self.last_span.lo;
@@ -3655,7 +3662,7 @@ impl<'a> Parser<'a> {
         }
 
         let lo = self.span.lo;
-        if self.token.is_keyword(keywords::Let) {
+        if self.check_keyword(keywords::Let) {
             check_expected_item(self, &item_attrs[]);
             self.expect_keyword(keywords::Let);
             let decl = self.parse_let();
@@ -5200,7 +5207,7 @@ impl<'a> Parser<'a> {
             Some(i) => {
                 let mut err = String::from_str("circular modules: ");
                 let len = included_mod_stack.len();
-                for p in included_mod_stack[i.. len].iter() {
+                for p in &included_mod_stack[i.. len] {
                     err.push_str(&p.display().as_cow()[]);
                     err.push_str(" -> ");
                }
@@ -5302,7 +5309,7 @@ impl<'a> Parser<'a> {
         let (maybe_path, ident) = match self.token {
             token::Ident(..) => {
                 let the_ident = self.parse_ident();
-                let path = if self.eat_keyword(keywords::As) {
+                let path = if self.eat_keyword_noexpect(keywords::As) {
                     // skip the ident if there is one
                     if self.token.is_ident() { self.bump(); }
 
@@ -5445,7 +5452,7 @@ impl<'a> Parser<'a> {
                     seq_sep_trailing_allowed(token::Comma),
                     |p| p.parse_ty_sum()
                 );
-                for ty in arg_tys.into_iter() {
+                for ty in arg_tys {
                     args.push(ast::VariantArg {
                         ty: ty,
                         id: ast::DUMMY_NODE_ID,
@@ -5595,14 +5602,13 @@ impl<'a> Parser<'a> {
                               token_str)[]);
         }
 
-        if self.eat_keyword(keywords::Virtual) {
+        if self.eat_keyword_noexpect(keywords::Virtual) {
             let span = self.span;
             self.span_err(span, "`virtual` structs have been removed from the language");
         }
 
-        if self.token.is_keyword(keywords::Static) {
+        if self.eat_keyword(keywords::Static) {
             // STATIC ITEM
-            self.bump();
             let m = if self.eat_keyword(keywords::Mut) {MutMutable} else {MutImmutable};
             let (ident, item_, extra_attrs) = self.parse_item_const(Some(m));
             let last_span = self.last_span;
@@ -5614,9 +5620,8 @@ impl<'a> Parser<'a> {
                                     maybe_append(attrs, extra_attrs));
             return Ok(item);
         }
-        if self.token.is_keyword(keywords::Const) {
+        if self.eat_keyword(keywords::Const) {
             // CONST ITEM
-            self.bump();
             if self.eat_keyword(keywords::Mut) {
                 let last_span = self.last_span;
                 self.span_err(last_span, "const globals cannot be mutable");
@@ -5632,8 +5637,8 @@ impl<'a> Parser<'a> {
                                     maybe_append(attrs, extra_attrs));
             return Ok(item);
         }
-        if self.token.is_keyword(keywords::Unsafe) &&
-            self.look_ahead(1us, |t| t.is_keyword(keywords::Trait))
+        if self.check_keyword(keywords::Unsafe) &&
+            self.look_ahead(1, |t| t.is_keyword(keywords::Trait))
         {
             // UNSAFE TRAIT ITEM
             self.expect_keyword(keywords::Unsafe);
@@ -5649,8 +5654,8 @@ impl<'a> Parser<'a> {
                                     maybe_append(attrs, extra_attrs));
             return Ok(item);
         }
-        if self.token.is_keyword(keywords::Unsafe) &&
-            self.look_ahead(1us, |t| t.is_keyword(keywords::Impl))
+        if self.check_keyword(keywords::Unsafe) &&
+            self.look_ahead(1, |t| t.is_keyword(keywords::Impl))
         {
             // IMPL ITEM
             self.expect_keyword(keywords::Unsafe);
@@ -5665,7 +5670,7 @@ impl<'a> Parser<'a> {
                                     maybe_append(attrs, extra_attrs));
             return Ok(item);
         }
-        if self.token.is_keyword(keywords::Fn) {
+        if self.check_keyword(keywords::Fn) {
             // FUNCTION ITEM
             self.bump();
             let (ident, item_, extra_attrs) =
@@ -5679,8 +5684,8 @@ impl<'a> Parser<'a> {
                                     maybe_append(attrs, extra_attrs));
             return Ok(item);
         }
-        if self.token.is_keyword(keywords::Unsafe)
-            && self.look_ahead(1us, |t| *t != token::OpenDelim(token::Brace)) {
+        if self.check_keyword(keywords::Unsafe)
+            && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
             // UNSAFE FUNCTION ITEM
             self.bump();
             let abi = if self.eat_keyword(keywords::Extern) {
@@ -5784,11 +5789,11 @@ impl<'a> Parser<'a> {
 
         let visibility = self.parse_visibility();
 
-        if self.token.is_keyword(keywords::Static) {
+        if self.check_keyword(keywords::Static) {
             // FOREIGN STATIC ITEM
             return Ok(self.parse_item_foreign_static(visibility, attrs));
         }
-        if self.token.is_keyword(keywords::Fn) || self.token.is_keyword(keywords::Unsafe) {
+        if self.check_keyword(keywords::Fn) || self.check_keyword(keywords::Unsafe) {
             // FOREIGN FUNCTION ITEM
             return Ok(self.parse_item_foreign_fn(visibility, attrs));
         }
@@ -5912,9 +5917,9 @@ impl<'a> Parser<'a> {
                     self.bump();
 
                     match self.token {
-                      token::Ident(i, _) => {
-                          self.bump();
-                          path.push(i);
+                      token::Ident(..) => {
+                          let ident = self.parse_ident();
+                          path.push(ident);
                       }
 
                       // foo::bar::{a,b,c}
@@ -5954,11 +5959,16 @@ impl<'a> Parser<'a> {
                           return P(spanned(lo, self.span.hi, ViewPathGlob(path)));
                       }
 
+                      // fall-through for case foo::bar::;
+                      token::Semi => {
+                          self.span_err(self.span, "expected identifier or `{` or `*`, found `;`");
+                      }
+
                       _ => break
                     }
                 }
             }
-            let mut rename_to = path[path.len() - 1us];
+            let mut rename_to = path[path.len() - 1];
             let path = ast::Path {
                 span: mk_sp(lo, self.last_span.hi),
                 global: false,
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 5531ce7b119..5c3892e49c0 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -456,7 +456,7 @@ macro_rules! declare_special_idents_and_keywords {(
         pub use self::Keyword::*;
         use ast;
 
-        #[derive(Copy)]
+        #[derive(Copy, Clone, PartialEq, Eq)]
         pub enum Keyword {
             $( $sk_variant, )*
             $( $rk_variant, )*
```
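A note on the recurring pattern in the parser.rs changes above: call sites were switched from `self.token.is_keyword(..)`/`eat_keyword(..)` to `check_keyword`/the new `eat_keyword`, which push a `TokenType` onto `self.expected_tokens` before testing the current token; `bump()` clears that list, and `expect_one_of` drains it to build the "expected one of ..., found ..." diagnostics (note the new `expected.len() > 1` / `== 0` branches). Deprecated or unwanted suggestions (`proc`, `virtual`, `typeof`, the old `mod`/`as` forms) go through `eat_keyword_noexpect` so they never appear in error messages. The sketch below is a minimal standalone illustration of that bookkeeping; all names and the `Parser` shape are illustrative, not the actual libsyntax API:

```rust
// Minimal sketch of the expected-token bookkeeping technique used above.
// Illustrative only -- not the real libsyntax Parser.

#[derive(Clone, PartialEq, Eq)]
enum TokenType {
    Token(String),   // a concrete token such as "!"
    Keyword(String), // a keyword such as "fn"
}

struct Parser {
    tokens: Vec<String>,
    pos: usize,
    // Every probe since the last consumed token lands here,
    // mirroring `expected_tokens` in parser.rs.
    expected: Vec<TokenType>,
}

impl Parser {
    fn current(&self) -> &str {
        self.tokens.get(self.pos).map(|s| &s[..]).unwrap_or("<eof>")
    }

    // Record the probe first, then test: every *failed* check
    // leaves a trace for later diagnostics (cf. `check_keyword`).
    fn check_keyword(&mut self, kw: &str) -> bool {
        self.expected.push(TokenType::Keyword(kw.to_string()));
        self.current() == kw
    }

    fn eat_keyword(&mut self, kw: &str) -> bool {
        if self.check_keyword(kw) { self.bump(); true } else { false }
    }

    fn bump(&mut self) {
        self.pos += 1;
        self.expected.clear(); // consuming a token resets the candidate set
    }

    // On a hard error, report *all* recorded candidates, matching the
    // three-way branch added to `expect_one_of` above.
    fn unexpected(&self) -> String {
        let names: Vec<String> = self.expected.iter().map(|t| match t {
            TokenType::Token(s) | TokenType::Keyword(s) => format!("`{}`", s),
        }).collect();
        match names.len() {
            0 => format!("unexpected token: `{}`", self.current()),
            1 => format!("expected {}, found `{}`", names[0], self.current()),
            _ => format!("expected one of {}, found `{}`",
                         names.join(", "), self.current()),
        }
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec!["virtual".to_string()],
        pos: 0,
        expected: Vec::new(),
    };
    // Probe a few item starters, as `parse_item_` does above.
    if !p.eat_keyword("static") && !p.eat_keyword("const") && !p.eat_keyword("fn") {
        // Prints: expected one of `static`, `const`, `fn`, found `virtual`
        println!("{}", p.unexpected());
    }
}
```

Run against a stray token, the error lists every candidate probed since the last successful `bump()`, which is why a single flag per call site (expect vs. noexpect) is enough to keep deprecated keywords out of the suggestion set.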
