| author | Alex Crichton <alex@alexcrichton.com> | 2015-01-07 17:26:58 -0800 |
|---|---|---|
| committer | Alex Crichton <alex@alexcrichton.com> | 2015-01-07 17:26:58 -0800 |
| commit | 6e806bdefde91af102567ef4b5dbd3ccf0c5c2ec (patch) | |
| tree | b8627ad2c80976f618b661ec14695f6a322ad1b3 /src/libsyntax/parse | |
| parent | f6a7dc5528a9a9ac36867bbca2a6044b7be5bce2 (diff) | |
| parent | 7d72719efc25c6cdb8963c187e93df646ba65245 (diff) | |
rollup merge of #20721: japaric/snap
Conflicts:
	src/libcollections/vec.rs
	src/libcore/fmt/mod.rs
	src/librustc/lint/builtin.rs
	src/librustc/session/config.rs
	src/librustc_trans/trans/base.rs
	src/librustc_trans/trans/context.rs
	src/librustc_trans/trans/type_.rs
	src/librustc_typeck/check/_match.rs
	src/librustdoc/html/format.rs
	src/libsyntax/std_inject.rs
	src/libsyntax/util/interner.rs
	src/test/compile-fail/mut-pattern-mismatched.rs
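The conflicts and most of the diff below come from the snapshot's migration off explicit `Index`-trait calls (`.index(&FullRange)`, `.index(&(a..b))`) onto the then-new slicing syntax (`&x[]`, `&x[a..b]`). A minimal sketch of the same operations, written against current Rust, where the empty-bracket form `&x[]` has since been replaced by `&x[..]`; the variable and function names here are illustrative, not taken from the tree:

```rust
// Illustration only: the diff's `&s[]` form was transitional 2015 syntax;
// current Rust spells a full-range string slice as `&s[..]`.
fn report(msg: &str) {
    eprintln!("error: {}", msg);
}

fn main() {
    let owned: String = format!("expected `#`, found `{}`", "fn");

    // Pre-snapshot style:  owned.index(&FullRange)
    // This rollup's style: &owned[]
    // Current Rust:
    let as_str: &str = &owned[..];

    // Pre-snapshot style:  lit.index(&(2..4))
    // This rollup's style: &lit[2..4]
    // (grabs the hex digits of an escape such as `\x7f`)
    let lit = r"\x7f";
    let hex_digits: &str = &lit[2..4];

    // Typical call shape in the parser hunks: pass the borrowed slice to a
    // function that takes &str, e.g. self.fatal(&format!(...)[]).
    report(as_str);
    println!("hex digits: {}", hex_digits);
}
```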
Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 16 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 16 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 48 |
| -rw-r--r-- | src/libsyntax/parse/obsolete.rs | 4 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 217 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 18 |
7 files changed, 160 insertions, 161 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 4aad7f911db..54ec9c7b146 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -92,7 +92,7 @@ impl<'a> ParserAttr for Parser<'a> { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `#`, found `{}`", token_str).index(&FullRange)); + self.fatal(&format!("expected `#`, found `{}`", token_str)[]); } }; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index e7fc5aac9c7..16ade904be8 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { while j > i && lines[j - 1].trim().is_empty() { j -= 1; } - return lines.index(&(i..j)).iter().map(|x| (*x).clone()).collect(); + return lines[i..j].iter().map(|x| (*x).clone()).collect(); } /// remove a "[ \t]*\*" block from each line, if possible @@ -116,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { if can_trim { lines.iter().map(|line| { - line.index(&((i + 1)..line.len())).to_string() + (&line[(i + 1)..line.len()]).to_string() }).collect() } else { lines @@ -127,12 +127,12 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; for prefix in ONLINERS.iter() { if comment.starts_with(*prefix) { - return comment.index(&(prefix.len()..)).to_string(); + return (&comment[prefix.len()..]).to_string(); } } if comment.starts_with("/*") { - let lines = comment.index(&(3u..(comment.len() - 2u))) + let lines = comment[3u..(comment.len() - 2u)] .lines_any() .map(|s| s.to_string()) .collect::<Vec<String> >(); @@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. 
- if is_doc_comment(line.index(&FullRange)) { + if is_doc_comment(&line[]) { break; } lines.push(line); @@ -224,10 +224,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(s.index(&FullRange), col) { + let s1 = match all_whitespace(&s[], col) { Some(col) => { if col < len { - s.index(&(col..len)).to_string() + (&s[col..len]).to_string() } else { "".to_string() } @@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(curr_line.index(&FullRange)) { + if is_block_doc_comment(&curr_line[]) { return } assert!(!curr_line.contains_char('\n')); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 153b18b8760..4cdafb36eec 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -196,7 +196,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); + self.fatal_span_(from_pos, to_pos, &m[]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -205,7 +205,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.err_span_(from_pos, to_pos, m.index(&FullRange)); + self.err_span_(from_pos, to_pos, &m[]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -214,8 +214,8 @@ impl<'a> StringReader<'a> { m.push_str(": "); let from = self.byte_offset(from_pos).to_uint(); let to = self.byte_offset(to_pos).to_uint(); - m.push_str(self.filemap.src.index(&(from..to))); - self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); + m.push_str(&self.filemap.src[from..to]); + self.fatal_span_(from_pos, to_pos, &m[]); } /// Advance peek_tok and peek_span to refer to the next token, and @@ -301,7 +301,7 @@ impl<'a> StringReader<'a> { while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); if ch == '\r' { - if j < i { buf.push_str(s.index(&(j..i))); } + if j < i { buf.push_str(&s[j..i]); } j = next; if next >= s.len() || s.char_at(next) != '\n' { let pos = start + BytePos(i as u32); @@ -311,7 +311,7 @@ impl<'a> StringReader<'a> { } i = next; } - if j < s.len() { buf.push_str(s.index(&(j..))); } + if j < s.len() { buf.push_str(&s[j..]); } buf } } @@ -556,7 +556,7 @@ impl<'a> StringReader<'a> { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_cow() }; - token::DocComment(token::intern(string.index(&FullRange))) + token::DocComment(token::intern(&string[])) } else { token::Comment }; @@ -1110,7 +1110,7 @@ impl<'a> StringReader<'a> { // expansion purposes. See #12512 for the gory details of why // this is necessary. 
let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(format!("'{}", lifetime_name).index(&FullRange)) + str_to_ident(&format!("'{}", lifetime_name)[]) }); // Conjure up a "keyword checking ident" to make sure that diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index d26b3af67bd..c42a6beea2d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -253,19 +253,19 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) let bytes = match File::open(path).read_to_end() { Ok(bytes) => bytes, Err(e) => { - err(format!("couldn't read {:?}: {:?}", + err(&format!("couldn't read {:?}: {:?}", path.display(), - e).index(&FullRange)); + e)[]); unreachable!() } }; - match str::from_utf8(bytes.index(&FullRange)).ok() { + match str::from_utf8(&bytes[]).ok() { Some(s) => { return string_to_filemap(sess, s.to_string(), path.as_str().unwrap().to_string()) } None => { - err(format!("{:?} is not UTF-8 encoded", path.display()).index(&FullRange)) + err(&format!("{:?} is not UTF-8 encoded", path.display())[]) } } unreachable!() @@ -399,10 +399,10 @@ pub fn char_lit(lit: &str) -> (char, int) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = msg.index(&FullRange); + let msg2 = &msg[]; fn esc(len: uint, lit: &str) -> Option<(char, int)> { - num::from_str_radix(lit.index(&(2..len)), 16) + num::from_str_radix(&lit[2..len], 16) .and_then(char::from_u32) .map(|x| (x, len as int)) } @@ -410,7 +410,7 @@ pub fn char_lit(lit: &str) -> (char, int) { let unicode_escape = |&: | -> Option<(char, int)> if lit.as_bytes()[2] == b'{' { let idx = lit.find('}').expect(msg2); - let subslice = lit.index(&(3..idx)); + let subslice = &lit[3..idx]; num::from_str_radix(subslice, 16) .and_then(char::from_u32) .map(|x| (x, subslice.chars().count() as int + 4)) @@ -472,7 +472,7 @@ pub fn str_lit(lit: &str) -> String { eat(&mut chars); } else { // otherwise, a normal escape - let (c, n) = char_lit(lit.index(&(i..))); + let (c, n) = char_lit(&lit[i..]); for _ in range(0, n - 1) { // we don't need to move past the first \ chars.next(); } @@ -535,7 +535,7 @@ pub fn raw_str_lit(lit: &str) -> String { fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s.len() > 1 && first_chars.contains(&s.char_at(0)) && - s.index(&(1..)).chars().all(|c| '0' <= c && c <= '9') + s[1..].chars().all(|c| '0' <= c && c <= '9') } fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, @@ -548,7 +548,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it looks like a width, lets try to be helpful. 
sd.span_err(sp, &*format!("illegal width `{}` for float literal, \ - valid widths are 32 and 64", suf.index(&(1..)))); + valid widths are 32 and 64", &suf[1..])); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \ valid suffixes are `f32` and `f64`", suf)); @@ -584,7 +584,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) { b'\'' => b'\'', b'0' => b'\0', _ => { - match ::std::num::from_str_radix::<u64>(lit.index(&(2..4)), 16) { + match ::std::num::from_str_radix::<u64>(&lit[2..4], 16) { Some(c) => if c > 0xFF { panic!(err(2)) @@ -634,7 +634,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> { } _ => { // otherwise, a normal escape - let (c, n) = byte_lit(lit.index(&(i..))); + let (c, n) = byte_lit(&lit[i..]); // we don't need to move past the first \ for _ in range(0, n - 1) { chars.next(); @@ -663,7 +663,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::<String>(); - let mut s = s2.index(&FullRange); + let mut s = &s2[]; debug!("integer_lit: {}, {:?}", s, suffix); @@ -696,7 +696,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> } if base != 10 { - s = s.index(&(2..)); + s = &s[2..]; } if let Some(suf) = suffix { @@ -720,7 +720,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> if looks_like_width_suffix(&['i', 'u'], suf) { sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \ valid widths are 8, 16, 32 and 64", - suf.index(&(1..)))); + &suf[1..])); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf)); } @@ -818,7 +818,7 @@ mod test { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = tts.index(&FullRange); + let tts: &[ast::TokenTree] = &tts[]; match tts { [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), ast::TtToken(_, token::Not), @@ -826,19 +826,19 @@ mod test { ast::TtDelimited(_, ref macro_delimed)] if name_macro_rules.as_str() == "macro_rules" && name_zip.as_str() == "zip" => { - match macro_delimed.tts.index(&FullRange) { + match ¯o_delimed.tts[] { [ast::TtDelimited(_, ref first_delimed), ast::TtToken(_, token::FatArrow), ast::TtDelimited(_, ref second_delimed)] if macro_delimed.delim == token::Paren => { - match first_delimed.tts.index(&FullRange) { + match &first_delimed.tts[] { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if first_delimed.delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 3: {:?}", **first_delimed), } - match second_delimed.tts.index(&FullRange) { + match &second_delimed.tts[] { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if second_delimed.delim == token::Paren @@ -1116,24 +1116,24 @@ mod test { let use_s = "use foo::bar::baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), use_s); + assert_eq!(&vitem_s[], use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), use_s); + assert_eq!(&vitem_s[], use_s); } #[test] fn parse_extern_crate() { let ex_s = "extern crate foo;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), ex_s); + assert_eq!(&vitem_s[], ex_s); let ex_s = "extern crate \"foo\" as bar;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.index(&FullRange), ex_s); + assert_eq!(&vitem_s[], ex_s); } fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { @@ -1212,7 +1212,7 @@ mod test { let docs = item.attrs.iter().filter(|a| a.name().get() == "doc") .map(|a| a.value_str().unwrap().get().to_string()).collect::<Vec<_>>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(docs.index(&FullRange), b); + assert_eq!(&docs[], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 23728c74ae8..e9e207e7dbc 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -127,13 +127,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { kind_str: &str, desc: &str) { self.span_err(sp, - format!("obsolete syntax: {}", kind_str).index(&FullRange)); + &format!("obsolete syntax: {}", kind_str)[]); if !self.obsolete_set.contains(&kind) { self.sess .span_diagnostic .handler() - .note(format!("{}", desc).index(&FullRange)); + .note(&format!("{}", desc)[]); self.obsolete_set.insert(kind); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 3bb9e6692a0..3497bebd0bb 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -388,13 +388,13 @@ impl<'a> Parser<'a> { pub fn unexpected_last(&mut self, t: &token::Token) -> ! 
{ let token_str = Parser::token_to_string(t); let last_span = self.last_span; - self.span_fatal(last_span, format!("unexpected token: `{}`", - token_str).index(&FullRange)); + self.span_fatal(last_span, &format!("unexpected token: `{}`", + token_str)[]); } pub fn unexpected(&mut self) -> ! { let this_token = self.this_token_to_string(); - self.fatal(format!("unexpected token: `{}`", this_token).index(&FullRange)); + self.fatal(&format!("unexpected token: `{}`", this_token)[]); } /// Expect and consume the token t. Signal an error if @@ -406,9 +406,9 @@ impl<'a> Parser<'a> { } else { let token_str = Parser::token_to_string(t); let this_token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", token_str, - this_token_str).index(&FullRange)) + this_token_str)[]) } } else { self.expect_one_of(slice::ref_slice(t), &[]); @@ -449,10 +449,10 @@ impl<'a> Parser<'a> { expected.push_all(&*self.expected_tokens); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); - let expect = tokens_to_string(expected.index(&FullRange)); + let expect = tokens_to_string(&expected[]); let actual = self.this_token_to_string(); self.fatal( - (if expected.len() != 1 { + &(if expected.len() != 1 { (format!("expected one of {}, found `{}`", expect, actual)) @@ -460,7 +460,7 @@ impl<'a> Parser<'a> { (format!("expected {}, found `{}`", expect, actual)) - }).index(&FullRange) + }[]) ) } } @@ -493,7 +493,7 @@ impl<'a> Parser<'a> { // might be unit-struct construction; check for recoverableinput error. let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>(); expected.push_all(inedible); - self.check_for_erroneous_unit_struct_expecting(expected.index(&FullRange)); + self.check_for_erroneous_unit_struct_expecting(&expected[]); } self.expect_one_of(edible, inedible) } @@ -510,9 +510,9 @@ impl<'a> Parser<'a> { .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { let mut expected = edible.iter().map(|x| x.clone()).collect::<Vec<_>>(); - expected.push_all(inedible.index(&FullRange)); + expected.push_all(&inedible[]); self.check_for_erroneous_unit_struct_expecting( - expected.index(&FullRange)); + &expected[]); } self.expect_one_of(edible, inedible) } @@ -534,8 +534,8 @@ impl<'a> Parser<'a> { } _ => { let token_str = self.this_token_to_string(); - self.fatal((format!("expected ident, found `{}`", - token_str)).index(&FullRange)) + self.fatal(&format!("expected ident, found `{}`", + token_str)[]) } } } @@ -592,8 +592,8 @@ impl<'a> Parser<'a> { if !self.eat_keyword(kw) { let id_interned_str = token::get_name(kw.to_name()); let token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", - id_interned_str, token_str).index(&FullRange)) + self.fatal(&format!("expected `{}`, found `{}`", + id_interned_str, token_str)[]) } } @@ -603,8 +603,8 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); let span = self.span; self.span_err(span, - format!("expected identifier, found keyword `{}`", - token_str).index(&FullRange)); + &format!("expected identifier, found keyword `{}`", + token_str)[]); } } @@ -612,8 +612,8 @@ impl<'a> Parser<'a> { pub fn check_reserved_keywords(&mut self) { if self.token.is_reserved_keyword() { let token_str = self.this_token_to_string(); - self.fatal(format!("`{}` is a reserved keyword", - token_str).index(&FullRange)) + self.fatal(&format!("`{}` is a reserved keyword", + token_str)[]) } } @@ -631,9 +631,9 @@ impl<'a> Parser<'a> { let 
token_str = self.this_token_to_string(); let found_token = Parser::token_to_string(&token::BinOp(token::And)); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", found_token, - token_str).index(&FullRange)) + token_str)[]) } } } @@ -652,9 +652,9 @@ impl<'a> Parser<'a> { let found_token = self.this_token_to_string(); let token_str = Parser::token_to_string(&token::BinOp(token::Or)); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", token_str, - found_token).index(&FullRange)) + found_token)[]) } } } @@ -695,9 +695,9 @@ impl<'a> Parser<'a> { if !self.eat_lt() { let found_token = self.this_token_to_string(); let token_str = Parser::token_to_string(&token::Lt); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", token_str, - found_token).index(&FullRange)) + found_token)[]) } } @@ -747,9 +747,9 @@ impl<'a> Parser<'a> { _ => { let gt_str = Parser::token_to_string(&token::Gt); let this_token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", + self.fatal(&format!("expected `{}`, found `{}`", gt_str, - this_token_str).index(&FullRange)) + this_token_str)[]) } } } @@ -1371,7 +1371,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = p.parse_inner_attrs_and_block(); let mut attrs = attrs; - attrs.push_all(inner_attrs.index(&FullRange)); + attrs.push_all(&inner_attrs[]); ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, @@ -1389,8 +1389,8 @@ impl<'a> Parser<'a> { _ => { let token_str = p.this_token_to_string(); - p.fatal((format!("expected `;` or `{{`, found `{}`", - token_str)).index(&FullRange)) + p.fatal(&format!("expected `;` or `{{`, found `{}`", + token_str)[]) } } } @@ -1586,7 +1586,7 @@ impl<'a> Parser<'a> { } else { let this_token_str = self.this_token_to_string(); let msg = format!("expected type, found `{}`", this_token_str); - self.fatal(msg.index(&FullRange)); + self.fatal(&msg[]); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1734,8 +1734,7 @@ impl<'a> Parser<'a> { token::StrRaw(s, n) => { (true, LitStr( - token::intern_and_get_ident( - parse::raw_str_lit(s.as_str()).index(&FullRange)), + token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())[]), ast::RawStr(n))) } token::Binary(i) => @@ -1979,7 +1978,7 @@ impl<'a> Parser<'a> { }; } _ => { - self.fatal(format!("expected a lifetime name").index(&FullRange)); + self.fatal(&format!("expected a lifetime name")[]); } } } @@ -2017,7 +2016,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `,` or `>` after lifetime \ name, found `{}`", this_token_str); - self.fatal(msg.index(&FullRange)); + self.fatal(&msg[]); } } } @@ -2501,16 +2500,16 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let fstr = n.as_str(); self.span_err(last_span, - format!("unexpected token: `{}`", n.as_str()).index(&FullRange)); + &format!("unexpected token: `{}`", n.as_str())[]); if fstr.chars().all(|x| "0123456789.".contains_char(x)) { let float = match fstr.parse::<f64>() { Some(f) => f, None => continue, }; self.span_help(last_span, - format!("try parenthesizing the first index; e.g., `(foo.{}){}`", + &format!("try parenthesizing the first index; e.g., `(foo.{}){}`", float.trunc() as uint, - float.fract().to_string().index(&(1..))).index(&FullRange)); + &float.fract().to_string()[1..])[]); } self.abort_if_errors(); @@ -2655,8 +2654,8 @@ impl<'a> Parser<'a> { if self.quote_depth == 0u { match self.token { token::SubstNt(name, _) => - 
self.fatal(format!("unknown macro variable `{}`", - token::get_ident(name)).index(&FullRange)), + self.fatal(&format!("unknown macro variable `{}`", + token::get_ident(name))[]), _ => {} } } @@ -2717,8 +2716,8 @@ impl<'a> Parser<'a> { Some(&sp) => p.span_note(sp, "unclosed delimiter"), }; let token_str = p.this_token_to_string(); - p.fatal(format!("incorrect close delimiter: `{}`", - token_str).index(&FullRange)) + p.fatal(&format!("incorrect close delimiter: `{}`", + token_str)[]) }, /* we ought to allow different depths of unquotation */ token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => { @@ -2858,8 +2857,8 @@ impl<'a> Parser<'a> { let span = self.span; let this_token_to_string = self.this_token_to_string(); self.span_err(span, - format!("expected expression, found `{}`", - this_token_to_string).index(&FullRange)); + &format!("expected expression, found `{}`", + this_token_to_string)[]); let box_span = mk_sp(lo, self.last_span.hi); self.span_help(box_span, "perhaps you meant `box() (foo)` instead?"); @@ -3263,8 +3262,8 @@ impl<'a> Parser<'a> { self.bump(); if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `{}`, found `{}`", "}", - token_str).index(&FullRange)) + self.fatal(&format!("expected `{}`, found `{}`", "}", + token_str)[]) } etc = true; break; @@ -3284,8 +3283,8 @@ impl<'a> Parser<'a> { match bind_type { BindByRef(..) | BindByValue(MutMutable) => { let token_str = self.this_token_to_string(); - self.fatal(format!("unexpected `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("unexpected `{}`", + token_str)[]) } _ => {} } @@ -3568,7 +3567,7 @@ impl<'a> Parser<'a> { let span = self.span; let tok_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected identifier, found `{}`", tok_str).index(&FullRange)); + &format!("expected identifier, found `{}`", tok_str)[]); } let ident = self.parse_ident(); let last_span = self.last_span; @@ -3665,7 +3664,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; if self.token.is_keyword(keywords::Let) { - check_expected_item(self, item_attrs.index(&FullRange)); + check_expected_item(self, &item_attrs[]); self.expect_keyword(keywords::Let); let decl = self.parse_let(); P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) @@ -3674,7 +3673,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| *t == token::Not) { // it's a macro invocation: - check_expected_item(self, item_attrs.index(&FullRange)); + check_expected_item(self, &item_attrs[]); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... 
@@ -3700,9 +3699,9 @@ impl<'a> Parser<'a> { "" }; let tok_str = self.this_token_to_string(); - self.fatal(format!("expected {}`(` or `{{`, found `{}`", + self.fatal(&format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str).index(&FullRange)) + tok_str)[]) }, }; @@ -3750,7 +3749,7 @@ impl<'a> Parser<'a> { } } else { let found_attrs = !item_attrs.is_empty(); - let item_err = Parser::expected_item_err(item_attrs.index(&FullRange)); + let item_err = Parser::expected_item_err(&item_attrs[]); match self.parse_item_or_view_item(item_attrs, false) { IoviItem(i) => { let hi = i.span.hi; @@ -3794,7 +3793,7 @@ impl<'a> Parser<'a> { let sp = self.span; let tok = self.this_token_to_string(); self.span_fatal_help(sp, - format!("expected `{{`, found `{}`", tok).index(&FullRange), + &format!("expected `{{`, found `{}`", tok)[], "place this code inside a block"); } @@ -3848,13 +3847,13 @@ impl<'a> Parser<'a> { while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. - attributes_box.push_all(self.parse_outer_attributes().index(&FullRange)); + attributes_box.push_all(&self.parse_outer_attributes()[]); match self.token { token::Semi => { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box.index(&FullRange))); + Parser::expected_item_err(&attributes_box[])); attributes_box = Vec::new(); } self.bump(); // empty @@ -3946,7 +3945,7 @@ impl<'a> Parser<'a> { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box.index(&FullRange))); + Parser::expected_item_err(&attributes_box[])); } let hi = self.span.hi; @@ -4389,8 +4388,8 @@ impl<'a> Parser<'a> { }, _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `self`, found `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("expected `self`, found `{}`", + token_str)[]) } } } @@ -4543,8 +4542,8 @@ impl<'a> Parser<'a> { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `,` or `)`, found `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("expected `,` or `)`, found `{}`", + token_str)[]) } } } @@ -4720,7 +4719,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let body_span = body.span; let mut new_attrs = attrs; - new_attrs.push_all(inner_attrs.index(&FullRange)); + new_attrs.push_all(&inner_attrs[]); (ast::MethDecl(ident, generics, abi, @@ -4937,17 +4936,17 @@ impl<'a> Parser<'a> { } if fields.len() == 0 { - self.fatal(format!("unit-like struct definition should be \ + self.fatal(&format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone())).index(&FullRange)); + token::get_ident(class_name.clone()))[]); } self.bump(); } else { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `where`, or `{}` after struct \ + self.fatal(&format!("expected `where`, or `{}` after struct \ name, found `{}`", "{", - token_str).index(&FullRange)); + token_str)[]); } fields @@ -4976,9 +4975,9 @@ impl<'a> Parser<'a> { }); if fields.len() == 0 { - self.fatal(format!("unit-like struct definition should be \ + self.fatal(&format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone())).index(&FullRange)); + token::get_ident(class_name.clone()))[]); } 
self.parse_where_clause(generics); @@ -4992,8 +4991,8 @@ impl<'a> Parser<'a> { // This case is where we see: `struct Foo<T>;` } else { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `where`, `{}`, `(`, or `;` after struct \ - name, found `{}`", "{", token_str).index(&FullRange)); + self.fatal(&format!("expected `where`, `{}`, `(`, or `;` after struct \ + name, found `{}`", "{", token_str)[]); } } @@ -5012,8 +5011,8 @@ impl<'a> Parser<'a> { let span = self.span; let token_str = self.this_token_to_string(); self.span_fatal_help(span, - format!("expected `,`, or `}}`, found `{}`", - token_str).index(&FullRange), + &format!("expected `,`, or `}}`, found `{}`", + token_str)[], "struct fields should be separated by commas") } } @@ -5100,7 +5099,7 @@ impl<'a> Parser<'a> { let mut attrs = self.parse_outer_attributes(); if first { let mut tmp = attrs_remaining.clone(); - tmp.push_all(attrs.index(&FullRange)); + tmp.push_all(&attrs[]); attrs = tmp; first = false; } @@ -5116,8 +5115,8 @@ impl<'a> Parser<'a> { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected item, found `{}`", - token_str).index(&FullRange)) + self.fatal(&format!("expected item, found `{}`", + token_str)[]) } } } @@ -5126,7 +5125,7 @@ impl<'a> Parser<'a> { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining.index(&FullRange))); + Parser::expected_item_err(&attrs_remaining[])); } ast::Mod { @@ -5196,7 +5195,7 @@ impl<'a> Parser<'a> { -> (ast::Item_, Vec<ast::Attribute> ) { let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span)); prefix.pop(); - let mod_path = Path::new(".").join_many(self.mod_path_stack.index(&FullRange)); + let mod_path = Path::new(".").join_many(&self.mod_path_stack[]); let dir_path = prefix.join(&mod_path); let mod_string = token::get_ident(id); let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name( @@ -5206,8 +5205,8 @@ impl<'a> Parser<'a> { let mod_name = mod_string.get().to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(default_path_str.index(&FullRange)); - let secondary_path = dir_path.join(secondary_path_str.index(&FullRange)); + let default_path = dir_path.join(&default_path_str[]); + let secondary_path = dir_path.join(&secondary_path_str[]); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -5219,16 +5218,16 @@ impl<'a> Parser<'a> { None => self.root_module_name.as_ref().unwrap().clone(), }; self.span_note(id_sp, - format!("maybe move this module `{0}` \ + &format!("maybe move this module `{0}` \ to its own directory via \ `{0}/mod.rs`", - this_module).index(&FullRange)); + this_module)[]); if default_exists || secondary_exists { self.span_note(id_sp, - format!("... or maybe `use` the module \ + &format!("... 
or maybe `use` the module \ `{}` instead of possibly \ redeclaring it", - mod_name).index(&FullRange)); + mod_name)[]); } self.abort_if_errors(); } @@ -5238,22 +5237,22 @@ impl<'a> Parser<'a> { (false, true) => (secondary_path, true), (false, false) => { self.span_fatal_help(id_sp, - format!("file not found for module `{}`", - mod_name).index(&FullRange), - format!("name the file either {} or {} inside \ + &format!("file not found for module `{}`", + mod_name)[], + &format!("name the file either {} or {} inside \ the directory {:?}", default_path_str, secondary_path_str, - dir_path.display()).index(&FullRange)); + dir_path.display())[]); } (true, true) => { self.span_fatal_help( id_sp, - format!("file for module `{}` found at both {} \ + &format!("file for module `{}` found at both {} \ and {}", mod_name, default_path_str, - secondary_path_str).index(&FullRange), + secondary_path_str)[], "delete or rename one of them to remove the ambiguity"); } } @@ -5275,11 +5274,11 @@ impl<'a> Parser<'a> { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in included_mod_stack.slice(i, len).iter() { - err.push_str(p.display().as_cow().index(&FullRange)); + err.push_str(&p.display().as_cow()[]); err.push_str(" -> "); } - err.push_str(path.display().as_cow().index(&FullRange)); - self.span_fatal(id_sp, err.index(&FullRange)); + err.push_str(&path.display().as_cow()[]); + self.span_fatal(id_sp, &err[]); } None => () } @@ -5360,7 +5359,7 @@ impl<'a> Parser<'a> { if !attrs_remaining.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining.index(&FullRange))); + Parser::expected_item_err(&attrs_remaining[])); } assert!(self.token == token::CloseDelim(token::Brace)); ast::ForeignMod { @@ -5399,9 +5398,9 @@ impl<'a> Parser<'a> { self.span_err(span, "expected `;`, found `as`"); self.span_help(span, - format!("perhaps you meant to enclose the crate name `{}` in \ + &format!("perhaps you meant to enclose the crate name `{}` in \ a string?", - the_ident.as_str()).index(&FullRange)); + the_ident.as_str())[]); None } else { None @@ -5425,9 +5424,9 @@ impl<'a> Parser<'a> { let span = self.span; let token_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected extern crate name but \ + &format!("expected extern crate name but \ found `{}`", - token_str).index(&FullRange)); + token_str)[]); } }; @@ -5523,9 +5522,9 @@ impl<'a> Parser<'a> { let struct_def = self.parse_struct_def(); if struct_def.fields.len() == 0 { self.span_err(start_span, - format!("unit-like struct variant should be written \ + &format!("unit-like struct variant should be written \ without braces, as `{},`", - token::get_ident(ident)).index(&FullRange)); + token::get_ident(ident))[]); } kind = StructVariantKind(struct_def); } else if self.check(&token::OpenDelim(token::Paren)) { @@ -5607,10 +5606,10 @@ impl<'a> Parser<'a> { let last_span = self.last_span; self.span_err( last_span, - format!("illegal ABI: expected one of [{}], \ + &format!("illegal ABI: expected one of [{}], \ found `{}`", abi::all_names().connect(", "), - the_string).index(&FullRange)); + the_string)[]); None } } @@ -5669,10 +5668,10 @@ impl<'a> Parser<'a> { if next_is_mod { let last_span = self.last_span; self.span_err(mk_sp(lo, last_span.hi), - format!("`extern mod` is obsolete, use \ + &format!("`extern mod` is obsolete, use \ `extern crate` instead \ to refer to external \ - crates.").index(&FullRange)) + crates.")[]) } return self.parse_item_extern_crate(lo, 
visibility, attrs); } @@ -5699,8 +5698,8 @@ impl<'a> Parser<'a> { let span = self.span; let token_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected `{}` or `fn`, found `{}`", "{", - token_str).index(&FullRange)); + &format!("expected `{}` or `fn`, found `{}`", "{", + token_str)[]); } if self.eat_keyword(keywords::Virtual) { @@ -5813,7 +5812,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = - self.parse_item_mod(attrs.index(&FullRange)); + self.parse_item_mod(&attrs[]); let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, @@ -6153,7 +6152,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes().index(&FullRange)); + attrs.push_all(&self.parse_outer_attributes()[]); // First, parse view items. let mut view_items : Vec<ast::ViewItem> = Vec::new(); let mut items = Vec::new(); @@ -6235,7 +6234,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes().index(&FullRange)); + attrs.push_all(&self.parse_outer_attributes()[]); let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs, macros_allowed) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 43786738910..4b3573f84c5 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -480,7 +480,7 @@ macro_rules! declare_special_idents_and_keywords {( $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(init_vec.index(&FullRange)) + interner::StrInterner::prefill(&init_vec[]) } }} @@ -629,7 +629,7 @@ impl InternedString { #[inline] pub fn get<'a>(&'a self) -> &'a str { - self.string.index(&FullRange) + &self.string[] } } @@ -659,41 +659,41 @@ impl fmt::Show for InternedString { impl fmt::String for InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.string.index(&FullRange)) + write!(f, "{}", &self.string[]) } } impl<'a> PartialEq<&'a str> for InternedString { #[inline(always)] fn eq(&self, other: & &'a str) -> bool { - PartialEq::eq(self.string.index(&FullRange), *other) + PartialEq::eq(&self.string[], *other) } #[inline(always)] fn ne(&self, other: & &'a str) -> bool { - PartialEq::ne(self.string.index(&FullRange), *other) + PartialEq::ne(&self.string[], *other) } } impl<'a> PartialEq<InternedString > for &'a str { #[inline(always)] fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(*self, other.string.index(&FullRange)) + PartialEq::eq(*self, &other.string[]) } #[inline(always)] fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(*self, other.string.index(&FullRange)) + PartialEq::ne(*self, &other.string[]) } } impl Decodable for InternedString { fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> { - Ok(get_name(get_ident_interner().intern(try!(d.read_str()).index(&FullRange)))) + Ok(get_name(get_ident_interner().intern(&try!(d.read_str())[]))) } } impl Encodable for InternedString { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self.string.index(&FullRange)) + s.emit_str(&self.string[]) } } |
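The token.rs hunks at the end also rewrite `InternedString`'s comparison impls to reduce the backing string to `&str` on both sides before comparing. A rough sketch of that shape against current Rust, using a hypothetical `Symbol` wrapper rather than the real `InternedString` type:

```rust
use std::rc::Rc;

// Hypothetical stand-in for InternedString: a cheaply clonable wrapper
// around a shared string buffer.
#[derive(Clone, Debug)]
struct Symbol {
    string: Rc<str>,
}

impl Symbol {
    fn new(s: &str) -> Symbol {
        Symbol { string: Rc::from(s) }
    }

    // Counterpart of InternedString::get(): borrow the backing str.
    fn get(&self) -> &str {
        &self.string
    }
}

// Compare the wrapper against plain &str by slicing both sides down to &str,
// the same shape as the `PartialEq<&'a str>` impls in the token.rs hunk.
impl<'a> PartialEq<&'a str> for Symbol {
    fn eq(&self, other: &&'a str) -> bool {
        self.get() == *other
    }
}

impl<'a> PartialEq<Symbol> for &'a str {
    fn eq(&self, other: &Symbol) -> bool {
        *self == other.get()
    }
}

fn main() {
    let name = Symbol::new("macro_rules");
    assert!(name == "macro_rules");
    assert!("macro_rules" == name);
    println!("{:?} compares equal to its &str form", name);
}
```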
