| field | value | date |
|---|---|---|
| author | Niko Matsakis <niko@alum.mit.edu> | 2015-02-18 14:48:57 -0500 |
| committer | Niko Matsakis <niko@alum.mit.edu> | 2015-02-18 17:36:03 -0500 |
| commit | 9ea84aeed4ed3006eddb6a7b24e9714f2844cd22 | |
| tree | 566226c57e31172bd55c585a18651130786af96c /src/libsyntax/parse/lexer | |
| parent | 64cd30e0cacb6b509f9368004afb0b6bde7a5143 | |
| download | rust-9ea84aeed4ed3006eddb6a7b24e9714f2844cd22.tar.gz, rust-9ea84aeed4ed3006eddb6a7b24e9714f2844cd22.zip | |
Replace all uses of `&foo[]` with `&foo[..]` en masse.
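For context: `&foo[..]` slices the entire collection, which is the idiomatic way to borrow an owned `String` as a `&str` (or a `Vec<T>` as a `&[T]`); the older `&foo[]` spelling of the same borrow was removed from the language before Rust 1.0. A minimal sketch of the borrow this commit migrates to (illustrative only, not code from this commit):

```rust
fn takes_str(s: &str) -> usize {
    s.len()
}

fn takes_slice(xs: &[u8]) -> usize {
    xs.len()
}

fn main() {
    let owned = String::from("hello");
    let bytes = vec![1u8, 2, 3];

    // `&owned[..]` is a full-range slice: a `&str` borrowing the whole `String`.
    // `&bytes[..]` likewise yields a `&[u8]` borrowing the whole `Vec<u8>`.
    // The pre-1.0 form `&owned[]` expressed the same borrow but no longer compiles.
    assert_eq!(takes_str(&owned[..]), 5);
    assert_eq!(takes_slice(&bytes[..]), 3);
}
```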
Diffstat (limited to 'src/libsyntax/parse/lexer')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 6 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 8 |
2 files changed, 7 insertions, 7 deletions
```diff
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index b17fc7fe82e..88f7b33ad24 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
         let line = rdr.read_one_line_comment();
         debug!("{}", line);
         // Doc comments are not put in comments.
-        if is_doc_comment(&line[]) {
+        if is_doc_comment(&line[..]) {
             break;
         }
         lines.push(line);
@@ -224,7 +224,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
 fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> ,
                                         s: String, col: CharPos) {
     let len = s.len();
-    let s1 = match all_whitespace(&s[], col) {
+    let s1 = match all_whitespace(&s[..], col) {
         Some(col) => {
             if col < len {
                 (&s[col..len]).to_string()
@@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader,
             rdr.bump();
             rdr.bump();
         }
-        if is_block_doc_comment(&curr_line[]) {
+        if is_block_doc_comment(&curr_line[..]) {
             return
         }
         assert!(!curr_line.contains_char('\n'));
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 38ba0b38df5..cca641a7852 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -196,7 +196,7 @@ impl<'a> StringReader<'a> {
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.fatal_span_(from_pos, to_pos, &m[]);
+        self.fatal_span_(from_pos, to_pos, &m[..]);
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -205,7 +205,7 @@ impl<'a> StringReader<'a> {
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.err_span_(from_pos, to_pos, &m[]);
+        self.err_span_(from_pos, to_pos, &m[..]);
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -215,7 +215,7 @@ impl<'a> StringReader<'a> {
         let from = self.byte_offset(from_pos).to_usize();
         let to = self.byte_offset(to_pos).to_usize();
         m.push_str(&self.filemap.src[from..to]);
-        self.fatal_span_(from_pos, to_pos, &m[]);
+        self.fatal_span_(from_pos, to_pos, &m[..]);
     }
 
     /// Advance peek_tok and peek_span to refer to the next token, and
@@ -556,7 +556,7 @@ impl<'a> StringReader<'a> {
                     self.translate_crlf(start_bpos, string,
                                         "bare CR not allowed in block doc-comment")
                 } else { string.into_cow() };
-                token::DocComment(token::intern(&string[]))
+                token::DocComment(token::intern(&string[..]))
             } else {
                 token::Comment
             };
```
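The `mod.rs` hunks all follow the same shape: an error message is accumulated in an owned `String` and then reborrowed as a `&str` with `&m[..]` when passed to the span-reporting helpers. A rough, self-contained sketch of that pattern, with a stand-in `report` function in place of the real `fatal_span_`/`err_span_` methods:

```rust
// Stand-in for the lexer's span-reporting helpers, which accept a `&str`.
fn report(msg: &str) {
    println!("error: {}", msg);
}

fn main() {
    // Build the message in an owned `String`, as the lexer does.
    let mut m = String::from("example lexer error");
    m.push_str(": ");
    for c in '\n'.escape_default() {
        m.push(c); // append the offending character in escaped form, e.g. `\` then `n`
    }

    // `&m[..]` reborrows the whole `String` as a `&str` for the call;
    // before this commit the same borrow was spelled `&m[]`.
    report(&m[..]);
}
```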
