| | | |
|---|---|---|
| author | Jorge Aparicio <japaricious@gmail.com> | 2015-02-01 21:53:25 -0500 |
| committer | Jorge Aparicio <japaricious@gmail.com> | 2015-02-05 13:45:01 -0500 |
| commit | 17bc7d8d5be3be9674d702ccad2fa88c487d23b0 (patch) | |
| tree | 325defba0f55b48273cd3f0814fe6c083dee5d41 /src/libsyntax/parse | |
| parent | 2c05354211b04a52cc66a0b8ad8b2225eaf9e972 (diff) | |
| download | rust-17bc7d8d5be3be9674d702ccad2fa88c487d23b0.tar.gz, rust-17bc7d8d5be3be9674d702ccad2fa88c487d23b0.zip | |
cleanup: replace `as[_mut]_slice()` calls with deref coercions
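The cleanup relies on deref coercion: `String` derefs to `str` and `Vec<T>` derefs to `[T]`, so a `&String` or `&Vec<T>` is accepted wherever a `&str` or `&[T]` is expected, and `&*value` yields the slice explicitly. Below is a minimal, self-contained sketch in current Rust that illustrates the pattern; it is not code from this commit.

```rust
// Minimal sketch (not from this commit) of the deref coercions that make
// explicit `as_slice()` / `as_mut_slice()` calls redundant.

fn print_str(s: &str) {
    println!("{}", s);
}

fn sum(xs: &[i32]) -> i32 {
    xs.iter().sum()
}

fn double_all(xs: &mut [i32]) {
    for x in xs.iter_mut() {
        *x *= 2;
    }
}

fn main() {
    let owned_string = String::from("hello");
    let mut owned_vec = vec![1, 2, 3];

    // At a call site the borrow coerces automatically:
    // `&String -> &str`, `&Vec<i32> -> &[i32]`, `&mut Vec<i32> -> &mut [i32]`.
    print_str(&owned_string);
    let total = sum(&owned_vec);
    double_all(&mut owned_vec);

    // Where no signature drives the coercion, an explicit reborrow `&*`
    // produces the slice directly, mirroring `let s = &*self.filemap.src;`
    // in the diff below.
    let s = &*owned_string; // s: &str
    assert_eq!(s.len(), 5);
    assert_eq!(total, 6);
    assert_eq!(owned_vec, [2, 4, 6]);
}
```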
Diffstat (limited to 'src/libsyntax/parse')

| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 16 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 4 |
3 files changed, 11 insertions, 11 deletions
```diff
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index e6da47304ce..ecc39925a40 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -360,7 +360,7 @@ impl<'a> StringReader<'a> {

     pub fn nextnextch(&self) -> Option<char> {
         let offset = self.byte_offset(self.pos).to_usize();
-        let s = self.filemap.src.as_slice();
+        let s = &*self.filemap.src;
         if offset >= s.len() { return None }
         let str::CharRange { next, .. } = s.char_range_at(offset);
         if next < s.len() {
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 6ea2ffa507d..6ff5c77f507 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -454,7 +454,7 @@ pub fn str_lit(lit: &str) -> String {
         match c {
             '\\' => {
                 let ch = chars.peek().unwrap_or_else(|| {
-                    panic!("{}", error(i).as_slice())
+                    panic!("{}", error(i))
                 }).1;

                 if ch == '\n' {
@@ -462,7 +462,7 @@ pub fn str_lit(lit: &str) -> String {
                 } else if ch == '\r' {
                     chars.next();
                     let ch = chars.peek().unwrap_or_else(|| {
-                        panic!("{}", error(i).as_slice())
+                        panic!("{}", error(i))
                     }).1;

                     if ch != '\n' {
@@ -480,7 +480,7 @@ pub fn str_lit(lit: &str) -> String {
             },
             '\r' => {
                 let ch = chars.peek().unwrap_or_else(|| {
-                    panic!("{}", error(i).as_slice())
+                    panic!("{}", error(i))
                 }).1;

                 if ch != '\n' {
@@ -622,11 +622,11 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
         match chars.next() {
             Some((i, b'\\')) => {
                 let em = error(i);
-                match chars.peek().expect(em.as_slice()).1 {
+                match chars.peek().expect(&em).1 {
                     b'\n' => eat(&mut chars),
                     b'\r' => {
                         chars.next();
-                        if chars.peek().expect(em.as_slice()).1 != b'\n' {
+                        if chars.peek().expect(&em).1 != b'\n' {
                             panic!("lexer accepted bare CR");
                         }
                         eat(&mut chars);
@@ -644,7 +644,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
             },
             Some((i, b'\r')) => {
                 let em = error(i);
-                if chars.peek().expect(em.as_slice()).1 != b'\n' {
+                if chars.peek().expect(&em).1 != b'\n' {
                     panic!("lexer accepted bare CR");
                 }
                 chars.next();
@@ -1200,7 +1200,7 @@ mod test {
        let name = "<source>".to_string();
        let source = "/// doc comment\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
-        let doc = first_attr_value_str_by_name(item.attrs.as_slice(), "doc").unwrap();
+        let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
        assert_eq!(doc.get(), "/// doc comment");

        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
@@ -1212,7 +1212,7 @@ mod test {
        let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap();
-        let doc = first_attr_value_str_by_name(item.attrs.as_slice(), "doc").unwrap();
+        let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
        assert_eq!(doc.get(), "/** doc comment\n * with CRLF */");
    }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 26433d06482..22174494458 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1681,7 +1681,7 @@ impl<'a> Parser<'a> {
            token::Str_(s) => {
                (true,
-                 LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()),
+                 LitStr(token::intern_and_get_ident(&parse::str_lit(s.as_str())),
                        ast::CookedStr))
            }
            token::StrRaw(s, n) => {
@@ -2596,7 +2596,7 @@ impl<'a> Parser<'a> {
                |p| p.parse_token_tree()
            );
            let (sep, repeat) = self.parse_sep_and_kleene_op();
-            let name_num = macro_parser::count_names(seq.as_slice());
+            let name_num = macro_parser::count_names(&seq);
            return TtSequence(mk_sp(sp.lo, seq_span.hi),
                              Rc::new(SequenceRepetition {
                                  tts: seq,
```