| author | bors <bors@rust-lang.org> | 2015-02-19 18:36:59 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2015-02-19 18:36:59 +0000 |
| commit | 522d09dfecbeca1595f25ac58c6d0178bbd21d7d (patch) | |
| tree | cc0252dd3413e5f890d0ebcfdaa096e5b002be0b /src/libsyntax/parse/lexer | |
| parent | 0b664bb8436f2cfda7f13a6f302ab486f332816f (diff) | |
| parent | 49771bafa5fca16486bfd06741dac3de2c587adf (diff) | |
| download | rust-522d09dfecbeca1595f25ac58c6d0178bbd21d7d.tar.gz rust-522d09dfecbeca1595f25ac58c6d0178bbd21d7d.zip | |
Auto merge of #22541 - Manishearth:rollup, r=Gankro (tag: 1.0.0-alpha.2)
Continued from #22520
Diffstat (limited to 'src/libsyntax/parse/lexer')
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 10 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 13 |
2 files changed, 11 insertions, 12 deletions
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index b17fc7fe82e..1f06db60027 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -61,7 +61,7 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
 pub fn strip_doc_comment_decoration(comment: &str) -> String {
     /// remove whitespace-only lines from the start/end of lines
-    fn vertical_trim(lines: Vec<String> ) -> Vec<String> {
+    fn vertical_trim(lines: Vec<String>) -> Vec<String> {
         let mut i = 0;
         let mut j = lines.len();
         // first line of all-stars should be omitted
@@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
-        return lines[i..j].iter().map(|x| (*x).clone()).collect();
+        lines[i..j].iter().cloned().collect()
     }

     /// remove a "[ \t]*\*" block from each line, if possible
@@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
         let line = rdr.read_one_line_comment();
         debug!("{}", line);
         // Doc comments are not put in comments.
-        if is_doc_comment(&line[]) {
+        if is_doc_comment(&line[..]) {
             break;
         }
         lines.push(line);
@@ -224,7 +224,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
 fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> ,
                                         s: String, col: CharPos) {
     let len = s.len();
-    let s1 = match all_whitespace(&s[], col) {
+    let s1 = match all_whitespace(&s[..], col) {
         Some(col) => {
             if col < len {
                 (&s[col..len]).to_string()
@@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader,
         rdr.bump();
         rdr.bump();
     }
-    if is_block_doc_comment(&curr_line[]) {
+    if is_block_doc_comment(&curr_line[..]) {
         return
     }
     assert!(!curr_line.contains_char('\n'));
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 38ba0b38df5..fd08cbd161b 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -16,14 +16,13 @@
 use ext::tt::transcribe::tt_next_token;
 use parse::token;
 use parse::token::{str_to_ident};
-use std::borrow::IntoCow;
+use std::borrow::{IntoCow, Cow};
 use std::char;
 use std::fmt;
 use std::mem::replace;
 use std::num;
 use std::rc::Rc;
 use std::str;
-use std::string::CowString;

 pub use ext::tt::transcribe::{TtReader, new_tt_reader, new_tt_reader_with_doc_flag};
@@ -196,7 +195,7 @@ impl<'a> StringReader<'a> {
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.fatal_span_(from_pos, to_pos, &m[]);
+        self.fatal_span_(from_pos, to_pos, &m[..]);
     }

     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -205,7 +204,7 @@
         let mut m = m.to_string();
         m.push_str(": ");
         for c in c.escape_default() { m.push(c) }
-        self.err_span_(from_pos, to_pos, &m[]);
+        self.err_span_(from_pos, to_pos, &m[..]);
     }

     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -215,7 +214,7 @@
         let from = self.byte_offset(from_pos).to_usize();
         let to = self.byte_offset(to_pos).to_usize();
         m.push_str(&self.filemap.src[from..to]);
-        self.fatal_span_(from_pos, to_pos, &m[]);
+        self.fatal_span_(from_pos, to_pos, &m[..]);
     }

     /// Advance peek_tok and peek_span to refer to the next token, and
@@ -278,7 +277,7 @@ impl<'a> StringReader<'a> {
     /// Converts CRLF to LF in the given string, raising an error on bare CR.
     fn translate_crlf<'b>(&self, start: BytePos,
-                          s: &'b str, errmsg: &'b str) -> CowString<'b> {
+                          s: &'b str, errmsg: &'b str) -> Cow<'b, str> {
         let mut i = 0;
         while i < s.len() {
             let str::CharRange { ch, next } = s.char_range_at(i);
@@ -556,7 +555,7 @@
                     self.translate_crlf(start_bpos, string,
                                         "bare CR not allowed in block doc-comment")
                 } else { string.into_cow() };
-                token::DocComment(token::intern(&string[]))
+                token::DocComment(token::intern(&string[..]))
             } else {
                 token::Comment
             };
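The lexer changes in this rollup are mechanical: the deprecated full-range index form `&s[]` becomes an explicit `&s[..]` slice, the removed `CowString<'a>` alias gives way to `Cow<'a, str>`, and a `map(|x| (*x).clone())` chain becomes `iter().cloned()`. The sketch below is not part of the commit; it is a minimal, standalone illustration of the same idioms on current stable Rust, with made-up function names (`shout`, `normalize_newlines`) used only for demonstration.

```rust
use std::borrow::Cow;

/// Illustrative only: full-range slicing with `[..]`, the form the diff
/// adopts in place of the removed `&s[]` syntax.
fn shout(s: &str) -> String {
    let upper = s.to_uppercase();
    // `&upper[..]` borrows the whole String as a `&str` slice.
    format!("{}!", &upper[..])
}

/// Illustrative only: `Cow<'a, str>` (which replaced the old `CowString<'a>`
/// alias) lets a function return either a borrowed or an owned string.
fn normalize_newlines(s: &str) -> Cow<'_, str> {
    if s.contains('\r') {
        // Allocation only happens when a CRLF actually needs rewriting.
        Cow::Owned(s.replace("\r\n", "\n"))
    } else {
        // Otherwise the input is passed through without copying.
        Cow::Borrowed(s)
    }
}

fn main() {
    // `.iter().cloned().collect()` clones each element, the same effect as
    // the `map(|x| (*x).clone())` pattern simplified in `vertical_trim`.
    let lines = vec!["a".to_string(), "b".to_string(), "c".to_string()];
    let kept: Vec<String> = lines[1..].iter().cloned().collect();

    println!("{}", shout("hello"));
    println!("{}", normalize_newlines("one\r\ntwo"));
    println!("{:?}", kept);
}
```

Returning a `Cow` keeps the borrow when no allocation is needed, which is why `translate_crlf` uses it: the common case of a string with no carriage returns can be handed back unchanged via `into_cow()`, as the final hunk shows.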
