diff options
Diffstat (limited to 'src/libsyntax/parse')
 src/libsyntax/parse/attr.rs           |   6
 src/libsyntax/parse/lexer/comments.rs |  16
 src/libsyntax/parse/lexer/mod.rs      |  46
 src/libsyntax/parse/mod.rs            |  85
 src/libsyntax/parse/obsolete.rs       |   4
 src/libsyntax/parse/parser.rs         | 411
 src/libsyntax/parse/token.rs          | 141
7 files changed, 335 insertions, 374 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 41693d9d47a..4aad7f911db 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -32,7 +32,7 @@ impl<'a> ParserAttr for Parser<'a> { fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> { let mut attrs: Vec<ast::Attribute> = Vec::new(); loop { - debug!("parse_outer_attributes: self.token={}", + debug!("parse_outer_attributes: self.token={:?}", self.token); match self.token { token::Pound => { @@ -62,7 +62,7 @@ impl<'a> ParserAttr for Parser<'a> { /// If permit_inner is true, then a leading `!` indicates an inner /// attribute fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute { - debug!("parse_attributes: permit_inner={} self.token={}", + debug!("parse_attributes: permit_inner={:?} self.token={:?}", permit_inner, self.token); let (span, value, mut style) = match self.token { token::Pound => { @@ -92,7 +92,7 @@ impl<'a> ParserAttr for Parser<'a> { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `#`, found `{}`", token_str)[]); + self.fatal(format!("expected `#`, found `{}`", token_str).index(&FullRange)); } }; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 0d5592b57b1..e7fc5aac9c7 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { while j > i && lines[j - 1].trim().is_empty() { j -= 1; } - return lines[i..j].iter().map(|x| (*x).clone()).collect(); + return lines.index(&(i..j)).iter().map(|x| (*x).clone()).collect(); } /// remove a "[ \t]*\*" block from each line, if possible @@ -116,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { if can_trim { lines.iter().map(|line| { - line[i + 1..line.len()].to_string() + line.index(&((i + 1)..line.len())).to_string() }).collect() } else { lines @@ -127,12 +127,12 @@ pub fn 
strip_doc_comment_decoration(comment: &str) -> String { static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; for prefix in ONLINERS.iter() { if comment.starts_with(*prefix) { - return comment[prefix.len()..].to_string(); + return comment.index(&(prefix.len()..)).to_string(); } } if comment.starts_with("/*") { - let lines = comment[3u..comment.len() - 2u] + let lines = comment.index(&(3u..(comment.len() - 2u))) .lines_any() .map(|s| s.to_string()) .collect::<Vec<String> >(); @@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. - if is_doc_comment(line[]) { + if is_doc_comment(line.index(&FullRange)) { break; } lines.push(line); @@ -224,10 +224,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(s[], col) { + let s1 = match all_whitespace(s.index(&FullRange), col) { Some(col) => { if col < len { - s[col..len].to_string() + s.index(&(col..len)).to_string() } else { "".to_string() } @@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(curr_line[]) { + if is_block_doc_comment(curr_line.index(&FullRange)) { return } assert!(!curr_line.contains_char('\n')); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a50b97142c2..153b18b8760 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -25,7 +25,7 @@ use std::rc::Rc; use std::str; use std::string::CowString; -pub use ext::tt::transcribe::{TtReader, new_tt_reader}; +pub use ext::tt::transcribe::{TtReader, new_tt_reader, new_tt_reader_with_doc_flag}; pub mod comments; @@ -111,7 +111,7 @@ impl<'a> Reader for TtReader<'a> { } fn next_token(&mut self) -> TokenAndSpan { let r = 
tt_next_token(self); - debug!("TtReader: r={}", r); + debug!("TtReader: r={:?}", r); r } fn fatal(&self, m: &str) -> ! { @@ -196,7 +196,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, m[]); + self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -205,7 +205,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.err_span_(from_pos, to_pos, m[]); + self.err_span_(from_pos, to_pos, m.index(&FullRange)); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -214,8 +214,8 @@ impl<'a> StringReader<'a> { m.push_str(": "); let from = self.byte_offset(from_pos).to_uint(); let to = self.byte_offset(to_pos).to_uint(); - m.push_str(self.filemap.src[from..to]); - self.fatal_span_(from_pos, to_pos, m[]); + m.push_str(self.filemap.src.index(&(from..to))); + self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); } /// Advance peek_tok and peek_span to refer to the next token, and @@ -256,13 +256,13 @@ impl<'a> StringReader<'a> { /// adjusted 1 towards each other (assumes that on either side there is a /// single-byte delimiter). pub fn name_from(&self, start: BytePos) -> ast::Name { - debug!("taking an ident from {} to {}", start, self.last_pos); + debug!("taking an ident from {:?} to {:?}", start, self.last_pos); self.with_str_from(start, token::intern) } /// As name_from, with an explicit endpoint. 
pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name { - debug!("taking an ident from {} to {}", start, end); + debug!("taking an ident from {:?} to {:?}", start, end); self.with_str_from_to(start, end, token::intern) } @@ -301,7 +301,7 @@ impl<'a> StringReader<'a> { while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); if ch == '\r' { - if j < i { buf.push_str(s[j..i]); } + if j < i { buf.push_str(s.index(&(j..i))); } j = next; if next >= s.len() || s.char_at(next) != '\n' { let pos = start + BytePos(i as u32); @@ -311,7 +311,7 @@ impl<'a> StringReader<'a> { } i = next; } - if j < s.len() { buf.push_str(s[j..]); } + if j < s.len() { buf.push_str(s.index(&(j..))); } buf } } @@ -496,7 +496,7 @@ impl<'a> StringReader<'a> { // for skipping over all "junk" '/' | '#' => { let c = self.scan_comment(); - debug!("scanning a comment {}", c); + debug!("scanning a comment {:?}", c); c }, c if is_whitespace(Some(c)) => { @@ -506,7 +506,7 @@ impl<'a> StringReader<'a> { tok: token::Whitespace, sp: codemap::mk_sp(start_bpos, self.last_pos) }); - debug!("scanning whitespace: {}", c); + debug!("scanning whitespace: {:?}", c); c }, _ => None @@ -556,7 +556,7 @@ impl<'a> StringReader<'a> { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_cow() }; - token::DocComment(token::intern(string[])) + token::DocComment(token::intern(string.index(&FullRange))) } else { token::Comment }; @@ -592,8 +592,8 @@ impl<'a> StringReader<'a> { whence: &str) { match r.curr { Some(r_c) if r_c == c => r.bump(), - Some(r_c) => panic!("expected {}, hit {}, {}", described_c, r_c, whence), - None => panic!("expected {}, hit EOF, {}", described_c, whence), + Some(r_c) => panic!("expected {:?}, hit {:?}, {}", described_c, r_c, whence), + None => panic!("expected {:?}, hit EOF, {}", described_c, whence), } } @@ -614,7 +614,7 @@ impl<'a> StringReader<'a> { self.scan_digits(base); let encoded_name : u32 = 
self.with_str_from(start_bpos, |s| { num::from_str_radix(s, 10).unwrap_or_else(|| { - panic!("expected digits representing a name, got `{}`, {}, range [{},{}]", + panic!("expected digits representing a name, got {:?}, {}, range [{:?},{:?}]", s, whence, start_bpos, self.last_pos); }) }); @@ -632,7 +632,7 @@ impl<'a> StringReader<'a> { self.scan_digits(base); let encoded_ctxt : ast::SyntaxContext = self.with_str_from(start_bpos, |s| { num::from_str_radix(s, 10).unwrap_or_else(|| { - panic!("expected digits representing a ctxt, got `{}`, {}", s, whence); + panic!("expected digits representing a ctxt, got {:?}, {}", s, whence); }) }); @@ -652,7 +652,7 @@ impl<'a> StringReader<'a> { if c == Some('_') { debug!("skipping a _"); self.bump(); continue; } match c.and_then(|cc| cc.to_digit(radix)) { Some(_) => { - debug!("{} in scan_digits", c); + debug!("{:?} in scan_digits", c); len += 1; self.bump(); } @@ -728,7 +728,7 @@ impl<'a> StringReader<'a> { delim: char, below_0x7f_only: bool) -> bool { - debug!("scanning {} digits until {}", n_digits, delim); + debug!("scanning {} digits until {:?}", n_digits, delim); let start_bpos = self.last_pos; let mut accum_int = 0; @@ -990,7 +990,7 @@ impl<'a> StringReader<'a> { if is_dec_digit(c) { let num = self.scan_number(c.unwrap()); let suffix = self.scan_optional_raw_name(); - debug!("next_token_inner: scanned number {}, {}", num, suffix); + debug!("next_token_inner: scanned number {:?}, {:?}", num, suffix); return token::Literal(num, suffix) } @@ -1110,7 +1110,7 @@ impl<'a> StringReader<'a> { // expansion purposes. See #12512 for the gory details of why // this is necessary. 
let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(format!("'{}", lifetime_name)[]) + str_to_ident(format!("'{}", lifetime_name).index(&FullRange)) }); // Conjure up a "keyword checking ident" to make sure that @@ -1444,14 +1444,14 @@ fn is_dec_digit(c: Option<char>) -> bool { return in_range(c, '0', '9'); } pub fn is_doc_comment(s: &str) -> bool { let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') || s.starts_with("//!"); - debug!("is `{}` a doc comment? {}", s, res); + debug!("is {:?} a doc comment? {}", s, res); res } pub fn is_block_doc_comment(s: &str) -> bool { let res = (s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') || s.starts_with("/*!"); - debug!("is `{}` a doc comment? {}", s, res); + debug!("is {:?} a doc comment? {}", s, res); res } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index b0969a573e6..d26b3af67bd 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -24,8 +24,7 @@ use std::num::Int; use std::str; use std::iter; -#[cfg_attr(stage0, macro_escape)] -#[cfg_attr(not(stage0), macro_use)] +#[macro_use] pub mod parser; pub mod lexer; @@ -254,19 +253,19 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) let bytes = match File::open(path).read_to_end() { Ok(bytes) => bytes, Err(e) => { - err(format!("couldn't read {}: {}", + err(format!("couldn't read {:?}: {:?}", path.display(), - e)[]); + e).index(&FullRange)); unreachable!() } }; - match str::from_utf8(bytes[]).ok() { + match str::from_utf8(bytes.index(&FullRange)).ok() { Some(s) => { return string_to_filemap(sess, s.to_string(), path.as_str().unwrap().to_string()) } None => { - err(format!("{} is not UTF-8 encoded", path.display())[]) + err(format!("{:?} is not UTF-8 encoded", path.display()).index(&FullRange)) } } unreachable!() @@ -297,7 +296,9 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<ast::TokenTree>, cfg: ast::CrateConfig) -> 
Parser<'a> { let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts); - Parser::new(sess, cfg, box trdr) + let mut p = Parser::new(sess, cfg, box trdr); + p.check_unknown_macro_variable(); + p } // FIXME (Issue #16472): The `with_hygiene` mod should go away after @@ -398,10 +399,10 @@ pub fn char_lit(lit: &str) -> (char, int) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = msg[]; + let msg2 = msg.index(&FullRange); fn esc(len: uint, lit: &str) -> Option<(char, int)> { - num::from_str_radix(lit[2..len], 16) + num::from_str_radix(lit.index(&(2..len)), 16) .and_then(char::from_u32) .map(|x| (x, len as int)) } @@ -409,7 +410,7 @@ pub fn char_lit(lit: &str) -> (char, int) { let unicode_escape = |&: | -> Option<(char, int)> if lit.as_bytes()[2] == b'{' { let idx = lit.find('}').expect(msg2); - let subslice = lit[3..idx]; + let subslice = lit.index(&(3..idx)); num::from_str_radix(subslice, 16) .and_then(char::from_u32) .map(|x| (x, subslice.chars().count() as int + 4)) @@ -471,7 +472,7 @@ pub fn str_lit(lit: &str) -> String { eat(&mut chars); } else { // otherwise, a normal escape - let (c, n) = char_lit(lit[i..]); + let (c, n) = char_lit(lit.index(&(i..))); for _ in range(0, n - 1) { // we don't need to move past the first \ chars.next(); } @@ -534,12 +535,12 @@ pub fn raw_str_lit(lit: &str) -> String { fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s.len() > 1 && first_chars.contains(&s.char_at(0)) && - s[1..].chars().all(|c| '0' <= c && c <= '9') + s.index(&(1..)).chars().all(|c| '0' <= c && c <= '9') } fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> ast::Lit_ { - debug!("filtered_float_lit: {}, {}", data, suffix); + debug!("filtered_float_lit: {}, {:?}", data, suffix); match suffix { Some("f32") => ast::LitFloat(data, ast::TyF32), Some("f64") => ast::LitFloat(data, ast::TyF64), @@ -547,7 +548,7 @@ fn filtered_float_lit(data: 
token::InternedString, suffix: Option<&str>, if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it looks like a width, lets try to be helpful. sd.span_err(sp, &*format!("illegal width `{}` for float literal, \ - valid widths are 32 and 64", suf[1..])); + valid widths are 32 and 64", suf.index(&(1..)))); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \ valid suffixes are `f32` and `f64`", suf)); @@ -559,7 +560,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, } } pub fn float_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> ast::Lit_ { - debug!("float_lit: {}, {}", s, suffix); + debug!("float_lit: {:?}, {:?}", s, suffix); // FIXME #2252: bounds checking float literals is defered until trans let s = s.chars().filter(|&c| c != '_').collect::<String>(); let data = token::intern_and_get_ident(&*s); @@ -583,7 +584,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) { b'\'' => b'\'', b'0' => b'\0', _ => { - match ::std::num::from_str_radix::<u64>(lit[2..4], 16) { + match ::std::num::from_str_radix::<u64>(lit.index(&(2..4)), 16) { Some(c) => if c > 0xFF { panic!(err(2)) @@ -633,7 +634,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> { } _ => { // otherwise, a normal escape - let (c, n) = byte_lit(lit[i..]); + let (c, n) = byte_lit(lit.index(&(i..))); // we don't need to move past the first \ for _ in range(0, n - 1) { chars.next(); @@ -662,9 +663,9 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::<String>(); - let mut s = s2[]; + let mut s = s2.index(&FullRange); - debug!("integer_lit: {}, {}", s, suffix); + debug!("integer_lit: {}, {:?}", s, suffix); let mut base = 10; let orig = s; @@ -695,18 +696,20 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> } if base != 10 { - s = s[2..]; + s = s.index(&(2..)); } if let Some(suf) = 
suffix { if suf.is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")} ty = match suf { - "i" => ast::SignedIntLit(ast::TyI, ast::Plus), + "i" => ast::SignedIntLit(ast::TyIs, ast::Plus), + "is" => ast::SignedIntLit(ast::TyIs, ast::Plus), "i8" => ast::SignedIntLit(ast::TyI8, ast::Plus), "i16" => ast::SignedIntLit(ast::TyI16, ast::Plus), "i32" => ast::SignedIntLit(ast::TyI32, ast::Plus), "i64" => ast::SignedIntLit(ast::TyI64, ast::Plus), - "u" => ast::UnsignedIntLit(ast::TyU), + "u" => ast::UnsignedIntLit(ast::TyUs), + "us" => ast::UnsignedIntLit(ast::TyUs), "u8" => ast::UnsignedIntLit(ast::TyU8), "u16" => ast::UnsignedIntLit(ast::TyU16), "u32" => ast::UnsignedIntLit(ast::TyU32), @@ -717,7 +720,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> if looks_like_width_suffix(&['i', 'u'], suf) { sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \ valid widths are 8, 16, 32 and 64", - suf[1..])); + suf.index(&(1..)))); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf)); } @@ -727,8 +730,8 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> } } - debug!("integer_lit: the type is {}, base {}, the new string is {}, the original \ - string was {}, the original suffix was {}", ty, base, s, orig, suffix); + debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \ + string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix); let res: u64 = match ::std::num::from_str_radix(s, base) { Some(r) => r, @@ -815,7 +818,7 @@ mod test { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = tts[]; + let tts: &[ast::TokenTree] = tts.index(&FullRange); match tts { [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), ast::TtToken(_, token::Not), @@ -823,30 +826,30 @@ mod test { ast::TtDelimited(_, ref macro_delimed)] if name_macro_rules.as_str() == "macro_rules" && name_zip.as_str() == "zip" => { - match macro_delimed.tts[] { + match macro_delimed.tts.index(&FullRange) { [ast::TtDelimited(_, ref first_delimed), ast::TtToken(_, token::FatArrow), ast::TtDelimited(_, ref second_delimed)] if macro_delimed.delim == token::Paren => { - match first_delimed.tts[] { + match first_delimed.tts.index(&FullRange) { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if first_delimed.delim == token::Paren && name.as_str() == "a" => {}, - _ => panic!("value 3: {}", **first_delimed), + _ => panic!("value 3: {:?}", **first_delimed), } - match second_delimed.tts[] { + match second_delimed.tts.index(&FullRange) { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if second_delimed.delim == token::Paren && name.as_str() == "a" => {}, - _ => panic!("value 4: {}", **second_delimed), + _ => panic!("value 4: {:?}", **second_delimed), } }, - _ => panic!("value 2: {}", **macro_delimed), + _ => panic!("value 2: {:?}", **macro_delimed), } }, - _ => panic!("value: {}",tts), + _ => panic!("value: {:?}",tts), } } @@ -1113,24 +1116,24 @@ mod test { let use_s = "use foo::bar::baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], use_s); + assert_eq!(vitem_s.index(&FullRange), use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], use_s); + assert_eq!(vitem_s.index(&FullRange), use_s); } #[test] fn parse_extern_crate() { let ex_s = "extern crate foo;"; let vitem = 
string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], ex_s); + assert_eq!(vitem_s.index(&FullRange), ex_s); let ex_s = "extern crate \"foo\" as bar;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], ex_s); + assert_eq!(vitem_s.index(&FullRange), ex_s); } fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { @@ -1167,10 +1170,10 @@ mod test { for &src in srcs.iter() { let spans = get_spans_of_pat_idents(src); - let Span{lo:lo,hi:hi,..} = spans[0]; - assert!("self" == src[lo.to_uint()..hi.to_uint()], + let Span{ lo, hi, .. } = spans[0]; + assert!("self" == &src[lo.to_uint()..hi.to_uint()], "\"{}\" != \"self\". src=\"{}\"", - src[lo.to_uint()..hi.to_uint()], src) + &src[lo.to_uint()..hi.to_uint()], src) } } @@ -1209,7 +1212,7 @@ mod test { let docs = item.attrs.iter().filter(|a| a.name().get() == "doc") .map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(docs[], b); + assert_eq!(docs.index(&FullRange), b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index a49680d7e1c..23728c74ae8 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -127,13 +127,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { kind_str: &str, desc: &str) { self.span_err(sp, - format!("obsolete syntax: {}", kind_str)[]); + format!("obsolete syntax: {}", kind_str).index(&FullRange)); if !self.obsolete_set.contains(&kind) { self.sess .span_diagnostic .handler() - .note(format!("{}", desc)[]); + .note(format!("{}", desc).index(&FullRange)); self.obsolete_set.insert(kind); } } diff --git a/src/libsyntax/parse/parser.rs 
b/src/libsyntax/parse/parser.rs index 32f8f5ee3d6..92e0395eca4 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -63,7 +63,7 @@ use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; use ast::{Visibility, WhereClause}; use ast; use ast_util::{self, as_prec, ident_to_path, operator_prec}; -use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp, DUMMY_SP}; +use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp}; use diagnostic; use ext::tt::macro_parser; use parse; @@ -389,12 +389,12 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; self.span_fatal(last_span, format!("unexpected token: `{}`", - token_str)[]); + token_str).index(&FullRange)); } pub fn unexpected(&mut self) -> ! { let this_token = self.this_token_to_string(); - self.fatal(format!("unexpected token: `{}`", this_token)[]); + self.fatal(format!("unexpected token: `{}`", this_token).index(&FullRange)); } /// Expect and consume the token t. Signal an error if @@ -408,7 +408,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", token_str, - this_token_str)[]) + this_token_str).index(&FullRange)) } } else { self.expect_one_of(slice::ref_slice(t), &[]); @@ -449,7 +449,7 @@ impl<'a> Parser<'a> { expected.push_all(&*self.expected_tokens); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); - let expect = tokens_to_string(expected[]); + let expect = tokens_to_string(expected.index(&FullRange)); let actual = self.this_token_to_string(); self.fatal( (if expected.len() != 1 { @@ -460,7 +460,7 @@ impl<'a> Parser<'a> { (format!("expected {}, found `{}`", expect, actual)) - })[] + }).index(&FullRange) ) } } @@ -488,12 +488,12 @@ impl<'a> Parser<'a> { /// followed by some token from the set edible + inedible. Recover /// from anticipated input errors, discarding erroneous characters. 
pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token], inedible: &[token::Token]) { - debug!("commit_expr {}", e); + debug!("commit_expr {:?}", e); if let ExprPath(..) = e.node { // might be unit-struct construction; check for recoverableinput error. let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>(); expected.push_all(inedible); - self.check_for_erroneous_unit_struct_expecting(expected[]); + self.check_for_erroneous_unit_struct_expecting(expected.index(&FullRange)); } self.expect_one_of(edible, inedible) } @@ -510,9 +510,9 @@ impl<'a> Parser<'a> { .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>(); - expected.push_all(inedible[]); + expected.push_all(inedible.index(&FullRange)); self.check_for_erroneous_unit_struct_expecting( - expected[]); + expected.index(&FullRange)); } self.expect_one_of(edible, inedible) } @@ -535,7 +535,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal((format!("expected ident, found `{}`", - token_str))[]) + token_str)).index(&FullRange)) } } } @@ -593,7 +593,7 @@ impl<'a> Parser<'a> { let id_interned_str = token::get_name(kw.to_name()); let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", - id_interned_str, token_str)[]) + id_interned_str, token_str).index(&FullRange)) } } @@ -604,7 +604,7 @@ impl<'a> Parser<'a> { let span = self.span; self.span_err(span, format!("expected identifier, found keyword `{}`", - token_str)[]); + token_str).index(&FullRange)); } } @@ -613,7 +613,7 @@ impl<'a> Parser<'a> { if self.token.is_reserved_keyword() { let token_str = self.this_token_to_string(); self.fatal(format!("`{}` is a reserved keyword", - token_str)[]) + token_str).index(&FullRange)) } } @@ -633,7 +633,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BinOp(token::And)); self.fatal(format!("expected `{}`, found `{}`", found_token, - token_str)[]) + 
token_str).index(&FullRange)) } } } @@ -654,7 +654,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BinOp(token::Or)); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token)[]) + found_token).index(&FullRange)) } } } @@ -697,7 +697,7 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(&token::Lt); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token)[]) + found_token).index(&FullRange)) } } @@ -749,7 +749,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", gt_str, - this_token_str)[]) + this_token_str).index(&FullRange)) } } } @@ -946,6 +946,8 @@ impl<'a> Parser<'a> { self.token = next.tok; self.tokens_consumed += 1u; self.expected_tokens.clear(); + // check after each token + self.check_unknown_macro_variable(); } /// Advance the parser by one token and return the bumped token. @@ -1369,7 +1371,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = p.parse_inner_attrs_and_block(); let mut attrs = attrs; - attrs.push_all(inner_attrs[]); + attrs.push_all(inner_attrs.index(&FullRange)); ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, @@ -1388,7 +1390,7 @@ impl<'a> Parser<'a> { _ => { let token_str = p.this_token_to_string(); p.fatal((format!("expected `;` or `{{`, found `{}`", - token_str))[]) + token_str)).index(&FullRange)) } } } @@ -1584,7 +1586,7 @@ impl<'a> Parser<'a> { } else { let this_token_str = self.this_token_to_string(); let msg = format!("expected type, found `{}`", this_token_str); - self.fatal(msg[]); + self.fatal(msg.index(&FullRange)); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1726,14 +1728,14 @@ impl<'a> Parser<'a> { token::Str_(s) => { (true, - LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str())[]), + LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()), ast::CookedStr)) } token::StrRaw(s, n) => { (true, LitStr( token::intern_and_get_ident( - 
parse::raw_str_lit(s.as_str())[]), + parse::raw_str_lit(s.as_str()).index(&FullRange)), ast::RawStr(n))) } token::Binary(i) => @@ -1977,7 +1979,7 @@ impl<'a> Parser<'a> { }; } _ => { - self.fatal(format!("expected a lifetime name")[]); + self.fatal(format!("expected a lifetime name").index(&FullRange)); } } } @@ -2015,7 +2017,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `,` or `>` after lifetime \ name, found `{}`", this_token_str); - self.fatal(msg[]); + self.fatal(msg.index(&FullRange)); } } } @@ -2103,22 +2105,6 @@ impl<'a> Parser<'a> { ExprIndex(expr, idx) } - pub fn mk_slice(&mut self, - expr: P<Expr>, - start: Option<P<Expr>>, - end: Option<P<Expr>>, - _mutbl: Mutability) - -> ast::Expr_ { - // FIXME: we could give more accurate span info here. - let (lo, hi) = match (&start, &end) { - (&Some(ref s), &Some(ref e)) => (s.span.lo, e.span.hi), - (&Some(ref s), &None) => (s.span.lo, s.span.hi), - (&None, &Some(ref e)) => (e.span.lo, e.span.hi), - (&None, &None) => (DUMMY_SP.lo, DUMMY_SP.hi), - }; - ExprIndex(expr, self.mk_expr(lo, hi, ExprRange(start, end))) - } - pub fn mk_range(&mut self, start: Option<P<Expr>>, end: Option<P<Expr>>) @@ -2515,7 +2501,7 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let fstr = n.as_str(); self.span_err(last_span, - format!("unexpected token: `{}`", n.as_str())[]); + format!("unexpected token: `{}`", n.as_str()).index(&FullRange)); if fstr.chars().all(|x| "0123456789.".contains_char(x)) { let float = match fstr.parse::<f64>() { Some(f) => f, @@ -2524,7 +2510,7 @@ impl<'a> Parser<'a> { self.span_help(last_span, format!("try parenthesizing the first index; e.g., `(foo.{}){}`", float.trunc() as uint, - float.fract().to_string()[1..])[]); + float.fract().to_string().index(&(1..))).index(&FullRange)); } self.abort_if_errors(); @@ -2550,87 +2536,44 @@ impl<'a> Parser<'a> { } // expr[...] - // Could be either an index expression or a slicing expression. 
- // Any slicing non-terminal can have a mutable version with `mut` - // after the opening square bracket. + // An index expression. token::OpenDelim(token::Bracket) => { + let bracket_pos = self.span.lo; self.bump(); - let mutbl = if self.eat_keyword(keywords::Mut) { - MutMutable + + let mut found_dotdot = false; + if self.token == token::DotDot && + self.look_ahead(1, |t| t == &token::CloseDelim(token::Bracket)) { + // Using expr[..], which is a mistake, should be expr[] + self.bump(); + self.bump(); + found_dotdot = true; + } + + if found_dotdot || self.eat(&token::CloseDelim(token::Bracket)) { + // No expression, expand to a FullRange + // FIXME(#20516) It would be better to use a lang item or + // something for FullRange. + hi = self.last_span.hi; + let range = ExprStruct(ident_to_path(mk_sp(lo, hi), + token::special_idents::FullRange), + vec![], + None); + let ix = self.mk_expr(bracket_pos, hi, range); + let index = self.mk_index(e, ix); + e = self.mk_expr(lo, hi, index) } else { - MutImmutable - }; - match self.token { - // e[] - token::CloseDelim(token::Bracket) => { - self.bump(); - hi = self.span.hi; - let slice = self.mk_slice(e, None, None, mutbl); - e = self.mk_expr(lo, hi, slice) - } - // e[..e] - token::DotDot => { - self.bump(); - match self.token { - // e[..] 
- token::CloseDelim(token::Bracket) => { - self.bump(); - hi = self.span.hi; - let slice = self.mk_slice(e, None, None, mutbl); - e = self.mk_expr(lo, hi, slice); + let ix = self.parse_expr(); + hi = self.span.hi; + self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket)); + let index = self.mk_index(e, ix); + e = self.mk_expr(lo, hi, index) + } - self.span_err(e.span, "incorrect slicing expression: `[..]`"); - self.span_note(e.span, - "use `expr[]` to construct a slice of the whole of expr"); - } - // e[..e] - _ => { - hi = self.span.hi; - let e2 = self.parse_expr(); - self.commit_expr_expecting(&*e2, token::CloseDelim(token::Bracket)); - let slice = self.mk_slice(e, None, Some(e2), mutbl); - e = self.mk_expr(lo, hi, slice) - } - } - } - // e[e] | e[e..] | e[e..e] - _ => { - let ix = self.parse_expr_res(RESTRICTION_NO_DOTS); - match self.token { - // e[e..] | e[e..e] - token::DotDot => { - self.bump(); - let e2 = match self.token { - // e[e..] - token::CloseDelim(token::Bracket) => { - self.bump(); - None - } - // e[e..e] - _ => { - let e2 = self.parse_expr_res(RESTRICTION_NO_DOTS); - self.commit_expr_expecting(&*e2, - token::CloseDelim(token::Bracket)); - Some(e2) - } - }; - hi = self.span.hi; - let slice = self.mk_slice(e, Some(ix), e2, mutbl); - e = self.mk_expr(lo, hi, slice) - } - // e[e] - _ => { - if mutbl == ast::MutMutable { - self.span_err(e.span, - "`mut` keyword is invalid in index expressions"); - } - hi = self.span.hi; - self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket)); - let index = self.mk_index(e, ix); - e = self.mk_expr(lo, hi, index) - } - } - } + if found_dotdot { + self.span_err(e.span, "incorrect slicing expression: `[..]`"); + self.span_note(e.span, + "use `&expr[]` to construct a slice of the whole of expr"); } } @@ -2655,6 +2598,70 @@ impl<'a> Parser<'a> { return e; } + // Parse unquoted tokens after a `$` in a token tree + fn parse_unquoted(&mut self) -> TokenTree { + let mut sp = self.span; + let (name, 
namep) = match self.token { + token::Dollar => { + self.bump(); + + if self.token == token::OpenDelim(token::Paren) { + let Spanned { node: seq, span: seq_span } = self.parse_seq( + &token::OpenDelim(token::Paren), + &token::CloseDelim(token::Paren), + seq_sep_none(), + |p| p.parse_token_tree() + ); + let (sep, repeat) = self.parse_sep_and_kleene_op(); + let name_num = macro_parser::count_names(seq.as_slice()); + return TtSequence(mk_sp(sp.lo, seq_span.hi), + Rc::new(SequenceRepetition { + tts: seq, + separator: sep, + op: repeat, + num_captures: name_num + })); + } else if self.token.is_keyword_allow_following_colon(keywords::Crate) { + self.bump(); + return TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)); + } else { + sp = mk_sp(sp.lo, self.span.hi); + let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain }; + let name = self.parse_ident(); + (name, namep) + } + } + token::SubstNt(name, namep) => { + self.bump(); + (name, namep) + } + _ => unreachable!() + }; + // continue by trying to parse the `:ident` after `$name` + if self.token == token::Colon && self.look_ahead(1, |t| t.is_ident() && + !t.is_strict_keyword() && + !t.is_reserved_keyword()) { + self.bump(); + sp = mk_sp(sp.lo, self.span.hi); + let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain }; + let nt_kind = self.parse_ident(); + TtToken(sp, MatchNt(name, nt_kind, namep, kindp)) + } else { + TtToken(sp, SubstNt(name, namep)) + } + } + + pub fn check_unknown_macro_variable(&mut self) { + if self.quote_depth == 0u { + match self.token { + token::SubstNt(name, _) => + self.fatal(format!("unknown macro variable `{}`", + token::get_ident(name)).index(&FullRange)), + _ => {} + } + } + } + /// Parse an optional separator followed by a Kleene-style /// repetition token (+ or *). 
pub fn parse_sep_and_kleene_op(&mut self) -> (Option<token::Token>, ast::KleeneOp) { @@ -2701,63 +2708,25 @@ impl<'a> Parser<'a> { fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree { maybe_whole!(deref p, NtTT); match p.token { - token::CloseDelim(_) => { - // This is a conservative error: only report the last unclosed delimiter. The - // previous unclosed delimiters could actually be closed! The parser just hasn't - // gotten to them yet. - match p.open_braces.last() { - None => {} - Some(&sp) => p.span_note(sp, "unclosed delimiter"), - }; - let token_str = p.this_token_to_string(); - p.fatal(format!("incorrect close delimiter: `{}`", - token_str)[]) - }, - /* we ought to allow different depths of unquotation */ - token::Dollar if p.quote_depth > 0u => { - p.bump(); - let sp = p.span; - - if p.token == token::OpenDelim(token::Paren) { - let seq = p.parse_seq( - &token::OpenDelim(token::Paren), - &token::CloseDelim(token::Paren), - seq_sep_none(), - |p| p.parse_token_tree() - ); - let (sep, repeat) = p.parse_sep_and_kleene_op(); - let seq = match seq { - Spanned { node, .. } => node, + token::CloseDelim(_) => { + // This is a conservative error: only report the last unclosed delimiter. The + // previous unclosed delimiters could actually be closed! The parser just hasn't + // gotten to them yet. 
+ match p.open_braces.last() { + None => {} + Some(&sp) => p.span_note(sp, "unclosed delimiter"), }; - let name_num = macro_parser::count_names(seq[]); - TtSequence(mk_sp(sp.lo, p.span.hi), - Rc::new(SequenceRepetition { - tts: seq, - separator: sep, - op: repeat, - num_captures: name_num - })) - } else if p.token.is_keyword_allow_following_colon(keywords::Crate) { - p.bump(); - TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)) - } else { - // A nonterminal that matches or not - let namep = match p.token { token::Ident(_, p) => p, _ => token::Plain }; - let name = p.parse_ident(); - if p.token == token::Colon && p.look_ahead(1, |t| t.is_ident()) { - p.bump(); - let kindp = match p.token { token::Ident(_, p) => p, _ => token::Plain }; - let nt_kind = p.parse_ident(); - let m = TtToken(sp, MatchNt(name, nt_kind, namep, kindp)); - m - } else { - TtToken(sp, SubstNt(name, namep)) - } + let token_str = p.this_token_to_string(); + p.fatal(format!("incorrect close delimiter: `{}`", + token_str).index(&FullRange)) + }, + /* we ought to allow different depths of unquotation */ + token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => { + p.parse_unquoted() + } + _ => { + TtToken(p.span, p.bump_and_get()) } - } - _ => { - TtToken(p.span, p.bump_and_get()) - } } } @@ -2890,7 +2859,7 @@ impl<'a> Parser<'a> { let this_token_to_string = self.this_token_to_string(); self.span_err(span, format!("expected expression, found `{}`", - this_token_to_string)[]); + this_token_to_string).index(&FullRange)); let box_span = mk_sp(lo, self.last_span.hi); self.span_help(box_span, "perhaps you meant `box() (foo)` instead?"); @@ -3273,7 +3242,7 @@ impl<'a> Parser<'a> { if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", "}", - token_str)[]) + token_str).index(&FullRange)) } etc = true; break; @@ -3294,7 +3263,7 @@ impl<'a> Parser<'a> { BindByRef(..) 
| BindByValue(MutMutable) => { let token_str = self.this_token_to_string(); self.fatal(format!("unexpected `{}`", - token_str)[]) + token_str).index(&FullRange)) } _ => {} } @@ -3577,7 +3546,7 @@ impl<'a> Parser<'a> { let span = self.span; let tok_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected identifier, found `{}`", tok_str)[]); + format!("expected identifier, found `{}`", tok_str).index(&FullRange)); } let ident = self.parse_ident(); let last_span = self.last_span; @@ -3674,7 +3643,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; if self.token.is_keyword(keywords::Let) { - check_expected_item(self, item_attrs[]); + check_expected_item(self, item_attrs.index(&FullRange)); self.expect_keyword(keywords::Let); let decl = self.parse_let(); P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) @@ -3683,7 +3652,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| *t == token::Not) { // it's a macro invocation: - check_expected_item(self, item_attrs[]); + check_expected_item(self, item_attrs.index(&FullRange)); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... 
@@ -3711,7 +3680,7 @@ impl<'a> Parser<'a> { let tok_str = self.this_token_to_string(); self.fatal(format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str)[]) + tok_str).index(&FullRange)) }, }; @@ -3759,7 +3728,7 @@ impl<'a> Parser<'a> { } } else { let found_attrs = !item_attrs.is_empty(); - let item_err = Parser::expected_item_err(item_attrs[]); + let item_err = Parser::expected_item_err(item_attrs.index(&FullRange)); match self.parse_item_or_view_item(item_attrs, false) { IoviItem(i) => { let hi = i.span.hi; @@ -3803,7 +3772,7 @@ impl<'a> Parser<'a> { let sp = self.span; let tok = self.this_token_to_string(); self.span_fatal_help(sp, - format!("expected `{{`, found `{}`", tok)[], + format!("expected `{{`, found `{}`", tok).index(&FullRange), "place this code inside a block"); } @@ -3857,13 +3826,13 @@ impl<'a> Parser<'a> { while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. 
- attributes_box.push_all(self.parse_outer_attributes()[]); + attributes_box.push_all(self.parse_outer_attributes().index(&FullRange)); match self.token { token::Semi => { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box[])); + Parser::expected_item_err(attributes_box.index(&FullRange))); attributes_box = Vec::new(); } self.bump(); // empty @@ -3955,7 +3924,7 @@ impl<'a> Parser<'a> { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box[])); + Parser::expected_item_err(attributes_box.index(&FullRange))); } let hi = self.span.hi; @@ -4399,7 +4368,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `self`, found `{}`", - token_str)[]) + token_str).index(&FullRange)) } } } @@ -4553,7 +4522,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `,` or `)`, found `{}`", - token_str)[]) + token_str).index(&FullRange)) } } } @@ -4729,7 +4698,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let body_span = body.span; let mut new_attrs = attrs; - new_attrs.push_all(inner_attrs[]); + new_attrs.push_all(inner_attrs.index(&FullRange)); (ast::MethDecl(ident, generics, abi, @@ -4948,7 +4917,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone()))[]); + token::get_ident(class_name.clone())).index(&FullRange)); } self.bump(); @@ -4956,7 +4925,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.fatal(format!("expected `where`, or `{}` after struct \ name, found `{}`", "{", - token_str)[]); + token_str).index(&FullRange)); } fields @@ -4987,7 +4956,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct 
definition should be \ written as `struct {};`", - token::get_ident(class_name.clone()))[]); + token::get_ident(class_name.clone())).index(&FullRange)); } self.parse_where_clause(generics); @@ -5002,7 +4971,7 @@ impl<'a> Parser<'a> { } else { let token_str = self.this_token_to_string(); self.fatal(format!("expected `where`, `{}`, `(`, or `;` after struct \ - name, found `{}`", "{", token_str)[]); + name, found `{}`", "{", token_str).index(&FullRange)); } } @@ -5022,7 +4991,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal_help(span, format!("expected `,`, or `}}`, found `{}`", - token_str)[], + token_str).index(&FullRange), "struct fields should be separated by commas") } } @@ -5109,11 +5078,11 @@ impl<'a> Parser<'a> { let mut attrs = self.parse_outer_attributes(); if first { let mut tmp = attrs_remaining.clone(); - tmp.push_all(attrs[]); + tmp.push_all(attrs.index(&FullRange)); attrs = tmp; first = false; } - debug!("parse_mod_items: parse_item_or_view_item(attrs={})", + debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})", attrs); match self.parse_item_or_view_item(attrs, true /* macros allowed */) { @@ -5126,7 +5095,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected item, found `{}`", - token_str)[]) + token_str).index(&FullRange)) } } } @@ -5135,7 +5104,7 @@ impl<'a> Parser<'a> { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining[])); + Parser::expected_item_err(attrs_remaining.index(&FullRange))); } ast::Mod { @@ -5205,7 +5174,7 @@ impl<'a> Parser<'a> { -> (ast::Item_, Vec<ast::Attribute> ) { let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span)); prefix.pop(); - let mod_path = Path::new(".").join_many(self.mod_path_stack[]); + let mod_path = Path::new(".").join_many(self.mod_path_stack.index(&FullRange)); let dir_path 
= prefix.join(&mod_path); let mod_string = token::get_ident(id); let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name( @@ -5215,8 +5184,8 @@ impl<'a> Parser<'a> { let mod_name = mod_string.get().to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(default_path_str[]); - let secondary_path = dir_path.join(secondary_path_str[]); + let default_path = dir_path.join(default_path_str.index(&FullRange)); + let secondary_path = dir_path.join(secondary_path_str.index(&FullRange)); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -5231,13 +5200,13 @@ impl<'a> Parser<'a> { format!("maybe move this module `{0}` \ to its own directory via \ `{0}/mod.rs`", - this_module)[]); + this_module).index(&FullRange)); if default_exists || secondary_exists { self.span_note(id_sp, format!("... or maybe `use` the module \ `{}` instead of possibly \ redeclaring it", - mod_name)[]); + mod_name).index(&FullRange)); } self.abort_if_errors(); } @@ -5248,12 +5217,12 @@ impl<'a> Parser<'a> { (false, false) => { self.span_fatal_help(id_sp, format!("file not found for module `{}`", - mod_name)[], + mod_name).index(&FullRange), format!("name the file either {} or {} inside \ - the directory {}", + the directory {:?}", default_path_str, secondary_path_str, - dir_path.display())[]); + dir_path.display()).index(&FullRange)); } (true, true) => { self.span_fatal_help( @@ -5262,7 +5231,7 @@ impl<'a> Parser<'a> { and {}", mod_name, default_path_str, - secondary_path_str)[], + secondary_path_str).index(&FullRange), "delete or rename one of them to remove the ambiguity"); } } @@ -5284,11 +5253,11 @@ impl<'a> Parser<'a> { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in included_mod_stack.slice(i, len).iter() { - err.push_str(p.display().as_cow()[]); + 
err.push_str(p.display().as_cow().index(&FullRange)); err.push_str(" -> "); } - err.push_str(path.display().as_cow()[]); - self.span_fatal(id_sp, err[]); + err.push_str(path.display().as_cow().index(&FullRange)); + self.span_fatal(id_sp, err.index(&FullRange)); } None => () } @@ -5369,7 +5338,7 @@ impl<'a> Parser<'a> { if !attrs_remaining.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining[])); + Parser::expected_item_err(attrs_remaining.index(&FullRange))); } assert!(self.token == token::CloseDelim(token::Brace)); ast::ForeignMod { @@ -5410,7 +5379,7 @@ impl<'a> Parser<'a> { self.span_help(span, format!("perhaps you meant to enclose the crate name `{}` in \ a string?", - the_ident.as_str())[]); + the_ident.as_str()).index(&FullRange)); None } else { None @@ -5436,7 +5405,7 @@ impl<'a> Parser<'a> { self.span_fatal(span, format!("expected extern crate name but \ found `{}`", - token_str)[]); + token_str).index(&FullRange)); } }; @@ -5534,7 +5503,7 @@ impl<'a> Parser<'a> { self.span_err(start_span, format!("unit-like struct variant should be written \ without braces, as `{},`", - token::get_ident(ident))[]); + token::get_ident(ident)).index(&FullRange)); } kind = StructVariantKind(struct_def); } else if self.check(&token::OpenDelim(token::Paren)) { @@ -5619,7 +5588,7 @@ impl<'a> Parser<'a> { format!("illegal ABI: expected one of [{}], \ found `{}`", abi::all_names().connect(", "), - the_string)[]); + the_string).index(&FullRange)); None } } @@ -5681,7 +5650,7 @@ impl<'a> Parser<'a> { format!("`extern mod` is obsolete, use \ `extern crate` instead \ to refer to external \ - crates.")[]) + crates.").index(&FullRange)) } return self.parse_item_extern_crate(lo, visibility, attrs); } @@ -5709,7 +5678,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal(span, format!("expected `{}` or `fn`, found `{}`", "{", - token_str)[]); + token_str).index(&FullRange)); } if 
self.eat_keyword(keywords::Virtual) { @@ -5822,7 +5791,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = - self.parse_item_mod(attrs[]); + self.parse_item_mod(attrs.index(&FullRange)); let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, @@ -6162,7 +6131,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes()[]); + attrs.push_all(self.parse_outer_attributes().index(&FullRange)); // First, parse view items. let mut view_items : Vec<ast::ViewItem> = Vec::new(); let mut items = Vec::new(); @@ -6244,7 +6213,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes()[]); + attrs.push_all(self.parse_outer_attributes().index(&FullRange)); let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs, macros_allowed) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 094aacf3207..013bce1755b 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -392,6 +392,7 @@ impl fmt::Show for Nonterminal { } } + // Get the first "argument" macro_rules! first { ( $first:expr, $( $remainder:expr, )* ) => ( $first ) @@ -479,7 +480,7 @@ macro_rules! declare_special_idents_and_keywords {( $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(init_vec[]) + interner::StrInterner::prefill(init_vec.index(&FullRange)) } }} @@ -515,66 +516,66 @@ declare_special_idents_and_keywords! 
{ (9, unnamed_field, "<unnamed_field>"); (10, type_self, "Self"); (11, prelude_import, "prelude_import"); + (12, FullRange, "FullRange"); } pub mod keywords { // These ones are variants of the Keyword enum 'strict: - (12, As, "as"); - (13, Break, "break"); - (14, Crate, "crate"); - (15, Else, "else"); - (16, Enum, "enum"); - (17, Extern, "extern"); - (18, False, "false"); - (19, Fn, "fn"); - (20, For, "for"); - (21, If, "if"); - (22, Impl, "impl"); - (23, In, "in"); - (24, Let, "let"); - (25, Loop, "loop"); - (26, Match, "match"); - (27, Mod, "mod"); - (28, Move, "move"); - (29, Mut, "mut"); - (30, Pub, "pub"); - (31, Ref, "ref"); - (32, Return, "return"); + (13, As, "as"); + (14, Break, "break"); + (15, Crate, "crate"); + (16, Else, "else"); + (17, Enum, "enum"); + (18, Extern, "extern"); + (19, False, "false"); + (20, Fn, "fn"); + (21, For, "for"); + (22, If, "if"); + (23, Impl, "impl"); + (24, In, "in"); + (25, Let, "let"); + (26, Loop, "loop"); + (27, Match, "match"); + (28, Mod, "mod"); + (29, Move, "move"); + (30, Mut, "mut"); + (31, Pub, "pub"); + (32, Ref, "ref"); + (33, Return, "return"); // Static and Self are also special idents (prefill de-dupes) (super::STATIC_KEYWORD_NAME_NUM, Static, "static"); (super::SELF_KEYWORD_NAME_NUM, Self, "self"); - (33, Struct, "struct"); + (34, Struct, "struct"); (super::SUPER_KEYWORD_NAME_NUM, Super, "super"); - (34, True, "true"); - (35, Trait, "trait"); - (36, Type, "type"); - (37, Unsafe, "unsafe"); - (38, Use, "use"); - (39, Virtual, "virtual"); - (40, While, "while"); - (41, Continue, "continue"); - (42, Proc, "proc"); - (43, Box, "box"); - (44, Const, "const"); - (45, Where, "where"); - + (35, True, "true"); + (36, Trait, "trait"); + (37, Type, "type"); + (38, Unsafe, "unsafe"); + (39, Use, "use"); + (40, Virtual, "virtual"); + (41, While, "while"); + (42, Continue, "continue"); + (43, Proc, "proc"); + (44, Box, "box"); + (45, Const, "const"); + (46, Where, "where"); 'reserved: - (46, Alignof, "alignof"); - (47, Be, 
"be"); - (48, Offsetof, "offsetof"); - (49, Priv, "priv"); - (50, Pure, "pure"); - (51, Sizeof, "sizeof"); - (52, Typeof, "typeof"); - (53, Unsized, "unsized"); - (54, Yield, "yield"); - (55, Do, "do"); - (56, Abstract, "abstract"); - (57, Final, "final"); - (58, Override, "override"); - (59, Macro, "macro"); + (47, Alignof, "alignof"); + (48, Be, "be"); + (49, Offsetof, "offsetof"); + (50, Priv, "priv"); + (51, Pure, "pure"); + (52, Sizeof, "sizeof"); + (53, Typeof, "typeof"); + (54, Unsized, "unsized"); + (55, Yield, "yield"); + (56, Do, "do"); + (57, Abstract, "abstract"); + (58, Final, "final"); + (59, Override, "override"); + (60, Macro, "macro"); } } @@ -628,7 +629,7 @@ impl InternedString { #[inline] pub fn get<'a>(&'a self) -> &'a str { - self.string[] + self.string.index(&FullRange) } } @@ -652,59 +653,47 @@ impl BytesContainer for InternedString { impl fmt::Show for InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.string[]) + fmt::String::fmt(self, f) + } +} + +impl fmt::String for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.string.index(&FullRange)) } } impl<'a> PartialEq<&'a str> for InternedString { #[inline(always)] fn eq(&self, other: & &'a str) -> bool { - PartialEq::eq(self.string[], *other) + PartialEq::eq(self.string.index(&FullRange), *other) } #[inline(always)] fn ne(&self, other: & &'a str) -> bool { - PartialEq::ne(self.string[], *other) + PartialEq::ne(self.string.index(&FullRange), *other) } } impl<'a> PartialEq<InternedString > for &'a str { #[inline(always)] fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(*self, other.string[]) + PartialEq::eq(*self, other.string.index(&FullRange)) } #[inline(always)] fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(*self, other.string[]) + PartialEq::ne(*self, other.string.index(&FullRange)) } } -#[cfg(stage0)] -impl<D:Decoder<E>, E> Decodable<D, E> for InternedString { - fn 
decode(d: &mut D) -> Result<InternedString, E> { - Ok(get_name(get_ident_interner().intern( - try!(d.read_str())[]))) - } -} - -#[cfg(not(stage0))] impl Decodable for InternedString { fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> { - Ok(get_name(get_ident_interner().intern( - try!(d.read_str())[]))) - } -} - -#[cfg(stage0)] -impl<S:Encoder<E>, E> Encodable<S, E> for InternedString { - fn encode(&self, s: &mut S) -> Result<(), E> { - s.emit_str(self.string[]) + Ok(get_name(get_ident_interner().intern(try!(d.read_str()).index(&FullRange)))) } } -#[cfg(not(stage0))] impl Encodable for InternedString { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self.string[]) + s.emit_str(self.string.index(&FullRange)) } } |
