| author | bors <bors@rust-lang.org> | 2014-12-08 02:32:31 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2014-12-08 02:32:31 +0000 |
| commit | 83a44c7fa676b4e5e546ce3d4624e585f9a1e899 (patch) | |
| tree | 36d7db1d2567d86816d4ac6a1ec86276974dbc65 /src/libsyntax | |
| parent | 8bca470c5acf13aa20022a2c462a89f72de721fc (diff) | |
| parent | 1fea900de7f11d665086141806246842c03b9fc5 (diff) | |
auto merge of #19378 : japaric/rust/no-as-slice, r=alexcrichton
Now that we have an overloaded comparison (`==`) operator, and that `Vec`/`String` deref to `[T]`/`str` on method calls, many `as_slice()`/`as_mut_slice()`/`to_string()` calls have become redundant. This patch removes them. These were the most common patterns:

- `assert_eq!(test_output.as_slice(), "ground truth")` -> `assert_eq!(test_output, "ground truth")`
- `assert_eq!(test_output, "ground truth".to_string())` -> `assert_eq!(test_output, "ground truth")`
- `vec.as_mut_slice().sort()` -> `vec.sort()`
- `vec.as_slice().slice(from, to)` -> `vec.slice(from, to)`

---

Note that e.g. `a_string.push_str(b_string.as_slice())` has been left untouched in this PR, since we first need to settle whether we want to favor the `&*b_string` or the `b_string[]` notation.

This is rebased on top of #19167.

cc @alexcrichton @aturon
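As a side note, here is a minimal sketch in present-day Rust (not the 2014 dialect this patch targets) of the mechanism the patterns above rely on: `String` derefs to `str`, `Vec<T>` derefs to `[T]`, and cross-type `PartialEq` impls compare them against literals directly. The variable names below are illustrative only, not taken from the patch.

```rust
fn main() {
    // `String` derefs to `str`, so `&str` methods resolve on a `String`
    // without an explicit `as_slice()`-style conversion.
    let name = String::from("blork.rs");
    assert!(name.starts_with("blork"));

    // `Vec<T>` derefs to `[T]`, so slice methods resolve on a `Vec` directly;
    // the old `vec.as_mut_slice().sort()` becomes just `vec.sort()`.
    let mut v = vec![3, 1, 2];
    v.sort();
    assert_eq!(v, [1, 2, 3]);

    // Cross-type `PartialEq` impls let a `String` compare against a `&str`
    // literal, so tests no longer need `"literal".to_string()` on one side.
    assert_eq!(name, "blork.rs");
}
```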
Diffstat (limited to 'src/libsyntax')
| -rw-r--r-- | src/libsyntax/ast_map/mod.rs | 2 |
| -rw-r--r-- | src/libsyntax/codemap.rs | 30 |
| -rw-r--r-- | src/libsyntax/ext/base.rs | 2 |
| -rw-r--r-- | src/libsyntax/feature_gate.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/lexer/comments.rs | 37 |
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 5 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 2 |
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 8 |
8 files changed, 43 insertions, 45 deletions
diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs
index 2913666a315..ce2fe6e7220 100644
--- a/src/libsyntax/ast_map/mod.rs
+++ b/src/libsyntax/ast_map/mod.rs
@@ -260,7 +260,7 @@ impl<'ast> Map<'ast> {
     }
 
     fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> {
-        self.map.borrow().as_slice().get(id as uint).map(|e| *e)
+        self.map.borrow().get(id as uint).map(|e| *e)
     }
 
     pub fn krate(&self) -> &'ast Crate {
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 27e8c265e5c..6bcf562204b 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -290,7 +290,7 @@ impl FileMap {
         lines.get(line_number).map(|&line| {
             let begin: BytePos = line - self.start_pos;
             let begin = begin.to_uint();
-            let slice = self.src.as_slice().slice_from(begin);
+            let slice = self.src.slice_from(begin);
             match slice.find('\n') {
                 Some(e) => slice.slice_to(e),
                 None => slice
@@ -308,8 +308,8 @@ impl FileMap {
     }
 
     pub fn is_real_file(&self) -> bool {
-        !(self.name.as_slice().starts_with("<") &&
-          self.name.as_slice().ends_with(">"))
+        !(self.name.starts_with("<") &&
+          self.name.ends_with(">"))
     }
 }
 
@@ -336,8 +336,8 @@ impl CodeMap {
         // Remove utf-8 BOM if any.
         // FIXME #12884: no efficient/safe way to remove from the start of a string
         // and reuse the allocation.
-        let mut src = if src.as_slice().starts_with("\ufeff") {
-            String::from_str(src.as_slice().slice_from(3))
+        let mut src = if src.starts_with("\ufeff") {
+            String::from_str(src.slice_from(3))
         } else {
             String::from_str(src.as_slice())
         };
@@ -346,7 +346,7 @@ impl CodeMap {
         // This is a workaround to prevent CodeMap.lookup_filemap_idx from accidentally
         // overflowing into the next filemap in case the last byte of span is also the last
         // byte of filemap, which leads to incorrect results from CodeMap.span_to_*.
-        if src.len() > 0 && !src.as_slice().ends_with("\n") {
+        if src.len() > 0 && !src.ends_with("\n") {
             src.push('\n');
         }
 
@@ -426,14 +426,14 @@ impl CodeMap {
         if begin.fm.start_pos != end.fm.start_pos {
             None
         } else {
-            Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
-                                               end.pos.to_uint()).to_string())
+            Some(begin.fm.src.slice(begin.pos.to_uint(),
+                                    end.pos.to_uint()).to_string())
         }
     }
 
     pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
         for fm in self.files.borrow().iter() {
-            if filename == fm.name.as_slice() {
+            if filename == fm.name {
                 return fm.clone();
             }
         }
@@ -614,11 +614,11 @@ mod test {
         let cm = init_code_map();
 
         let fmabp1 = cm.lookup_byte_offset(BytePos(22));
-        assert_eq!(fmabp1.fm.name, "blork.rs".to_string());
+        assert_eq!(fmabp1.fm.name, "blork.rs");
         assert_eq!(fmabp1.pos, BytePos(22));
 
         let fmabp2 = cm.lookup_byte_offset(BytePos(24));
-        assert_eq!(fmabp2.fm.name, "blork2.rs".to_string());
+        assert_eq!(fmabp2.fm.name, "blork2.rs");
         assert_eq!(fmabp2.pos, BytePos(0));
     }
 
@@ -640,12 +640,12 @@ mod test {
         let cm = init_code_map();
 
         let loc1 = cm.lookup_char_pos(BytePos(22));
-        assert_eq!(loc1.file.name, "blork.rs".to_string());
+        assert_eq!(loc1.file.name, "blork.rs");
         assert_eq!(loc1.line, 2);
         assert_eq!(loc1.col, CharPos(10));
 
         let loc2 = cm.lookup_char_pos(BytePos(24));
-        assert_eq!(loc2.file.name, "blork2.rs".to_string());
+        assert_eq!(loc2.file.name, "blork2.rs");
         assert_eq!(loc2.line, 1);
         assert_eq!(loc2.col, CharPos(0));
     }
@@ -701,7 +701,7 @@ mod test {
         let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION};
         let file_lines = cm.span_to_lines(span);
 
-        assert_eq!(file_lines.file.name, "blork.rs".to_string());
+        assert_eq!(file_lines.file.name, "blork.rs");
         assert_eq!(file_lines.lines.len(), 1);
         assert_eq!(file_lines.lines[0], 1u);
     }
@@ -723,6 +723,6 @@ mod test {
         let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION};
         let sstr = cm.span_to_string(span);
 
-        assert_eq!(sstr, "blork.rs:2:1: 2:12".to_string());
+        assert_eq!(sstr, "blork.rs:2:1: 2:12");
     }
 }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 8d0d399fa31..0787518f04f 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -527,7 +527,7 @@ impl<'a> ExtCtxt<'a> {
         let mut call_site = None;
         loop {
             let expn_info = self.codemap().with_expn_info(expn_id, |ei| {
-                ei.map(|ei| (ei.call_site, ei.callee.name.as_slice() == "include"))
+                ei.map(|ei| (ei.call_site, ei.callee.name == "include"))
             });
             match expn_info {
                 None => break,
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index 11c65d531f6..4af7b35079a 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -132,7 +132,7 @@ impl<'a> Context<'a> {
     }
 
     fn has_feature(&self, feature: &str) -> bool {
-        self.features.iter().any(|n| n.as_slice() == feature)
+        self.features.iter().any(|&n| n == feature)
     }
 }
 
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index b62d2d744c9..aeec6ee13fd 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -66,21 +66,20 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         let mut j = lines.len();
         // first line of all-stars should be omitted
         if lines.len() > 0 &&
-                lines[0].as_slice().chars().all(|c| c == '*') {
+                lines[0].chars().all(|c| c == '*') {
             i += 1;
         }
-        while i < j && lines[i].as_slice().trim().is_empty() {
+        while i < j && lines[i].trim().is_empty() {
             i += 1;
         }
         // like the first, a last line of all stars should be omitted
         if j > i && lines[j - 1]
-                        .as_slice()
                         .chars()
                         .skip(1)
                         .all(|c| c == '*') {
             j -= 1;
         }
-        while j > i && lines[j - 1].as_slice().trim().is_empty() {
+        while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
         return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
@@ -92,7 +91,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         let mut can_trim = true;
         let mut first = true;
         for line in lines.iter() {
-            for (j, c) in line.as_slice().chars().enumerate() {
+            for (j, c) in line.chars().enumerate() {
                 if j > i || !"* \t".contains_char(c) {
                     can_trim = false;
                     break;
@@ -117,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 
         if can_trim {
             lines.iter().map(|line| {
-                line.as_slice().slice(i + 1, line.len()).to_string()
+                line.slice(i + 1, line.len()).to_string()
             }).collect()
         } else {
             lines
@@ -228,7 +227,7 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String> ,
     let s1 = match all_whitespace(s.as_slice(), col) {
         Some(col) => {
             if col < len {
-                s.as_slice().slice(col, len).to_string()
+                s.slice(col, len).to_string()
             } else {
                 "".to_string()
             }
@@ -265,7 +264,7 @@ fn read_block_comment(rdr: &mut StringReader,
             if is_block_doc_comment(curr_line.as_slice()) {
                 return
             }
-            assert!(!curr_line.as_slice().contains_char('\n'));
+            assert!(!curr_line.contains_char('\n'));
             lines.push(curr_line);
         } else {
             let mut level: int = 1;
@@ -390,41 +389,41 @@ mod test {
     #[test] fn test_block_doc_comment_1() {
         let comment = "/**\n * Test \n ** Test\n * Test\n*/";
         let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " Test \n* Test\n Test".to_string());
+        assert_eq!(stripped, " Test \n* Test\n Test");
     }
 
     #[test] fn test_block_doc_comment_2() {
         let comment = "/**\n * Test\n * Test\n*/";
         let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " Test\n Test".to_string());
+        assert_eq!(stripped, " Test\n Test");
     }
 
     #[test] fn test_block_doc_comment_3() {
         let comment = "/**\n let a: *int;\n *a = 5;\n*/";
         let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " let a: *int;\n *a = 5;".to_string());
+        assert_eq!(stripped, " let a: *int;\n *a = 5;");
     }
 
     #[test] fn test_block_doc_comment_4() {
         let comment = "/*******************\n test\n *********************/";
         let stripped = strip_doc_comment_decoration(comment);
-        assert_eq!(stripped, " test".to_string());
+        assert_eq!(stripped, " test");
     }
 
     #[test] fn test_line_doc_comment() {
         let stripped = strip_doc_comment_decoration("/// test");
-        assert_eq!(stripped, " test".to_string());
+        assert_eq!(stripped, " test");
         let stripped = strip_doc_comment_decoration("///! test");
-        assert_eq!(stripped, " test".to_string());
+        assert_eq!(stripped, " test");
         let stripped = strip_doc_comment_decoration("// test");
-        assert_eq!(stripped, " test".to_string());
+        assert_eq!(stripped, " test");
         let stripped = strip_doc_comment_decoration("// test");
-        assert_eq!(stripped, " test".to_string());
+        assert_eq!(stripped, " test");
         let stripped = strip_doc_comment_decoration("///test");
-        assert_eq!(stripped, "test".to_string());
+        assert_eq!(stripped, "test");
        let stripped = strip_doc_comment_decoration("///!test");
-        assert_eq!(stripped, "test".to_string());
+        assert_eq!(stripped, "test");
         let stripped = strip_doc_comment_decoration("//test");
-        assert_eq!(stripped, "test".to_string());
+        assert_eq!(stripped, "test");
     }
 }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index b282db5ba2b..ab2c15d54c5 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -265,7 +265,7 @@ impl<'a> StringReader<'a> {
     /// Calls `f` with a string slice of the source text spanning from `start`
     /// up to but excluding `end`.
     fn with_str_from_to<T>(&self, start: BytePos, end: BytePos, f: |s: &str| -> T) -> T {
-        f(self.filemap.src.as_slice().slice(
+        f(self.filemap.src.slice(
                 self.byte_offset(start).to_uint(),
                 self.byte_offset(end).to_uint()))
     }
@@ -321,7 +321,6 @@ impl<'a> StringReader<'a> {
             let last_char = self.curr.unwrap();
             let next = self.filemap
                            .src
-                           .as_slice()
                            .char_range_at(current_byte_offset);
             let byte_offset_diff = next.next - current_byte_offset;
             self.pos = self.pos + Pos::from_uint(byte_offset_diff);
@@ -343,7 +342,7 @@ impl<'a> StringReader<'a> {
     pub fn nextch(&self) -> Option<char> {
         let offset = self.byte_offset(self.pos).to_uint();
         if offset < self.filemap.src.len() {
-            Some(self.filemap.src.as_slice().char_at(offset))
+            Some(self.filemap.src.char_at(offset))
         } else {
             None
         }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 8d0c2de048a..951fe11a470 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -954,7 +954,7 @@ mod test {
                     }\
                 ]\
             }\
-        ]".to_string()
+        ]"
         );
     }
 
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index e9937dc9b60..6ce0ee79c62 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -2844,7 +2844,7 @@ impl<'a> State<'a> {
             comments::BlankLine => {
                 // We need to do at least one, possibly two hardbreaks.
                 let is_semi = match self.s.last_token() {
-                    pp::String(s, _) => ";" == s.as_slice(),
+                    pp::String(s, _) => ";" == s,
                     _ => false
                 };
                 if is_semi || self.is_begin() || self.is_end() {
@@ -2961,9 +2961,9 @@ mod test {
            variadic: false
        };
        let generics = ast_util::empty_generics();
-        assert_eq!(&fun_to_string(&decl, ast::NormalFn, abba_ident,
+        assert_eq!(fun_to_string(&decl, ast::NormalFn, abba_ident,
                                  None, &generics),
-                   &"fn abba()".to_string());
+                   "fn abba()");
     }
 
     #[test]
@@ -2981,7 +2981,7 @@ mod test {
         });
 
         let varstr = variant_to_string(&var);
-        assert_eq!(&varstr,&"pub principal_skinner".to_string());
+        assert_eq!(varstr, "pub principal_skinner");
     }
 
     #[test]
