| author | Niko Matsakis <niko@alum.mit.edu> | 2015-02-20 14:08:14 -0500 |
|---|---|---|
| committer | Niko Matsakis <niko@alum.mit.edu> | 2015-02-20 14:08:14 -0500 |
| commit | 68e5bb3f2caa34753edb7f921c0bcf1efd63cf88 | |
| tree | d895a5d4acf70d2c9ed4104bdaf1631b2daa351f /src/libsyntax | |
| parent | 42e155e13bf16b19251903ae51b2571925345771 | |
Remove remaining uses of `[]`. This time I tried to use deref coercions where possible.
Diffstat (limited to 'src/libsyntax')
25 files changed, 89 insertions, 90 deletions
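The change repeated throughout the diff is mechanical: the deprecated full-range indexing `&expr[]` is dropped, leaving either a plain borrow (a deref coercion from `String` to `str`, or from `Vec<T>` to `[T]`, then applies at the call site) or an explicit `&expr[..]` slice where no coercion fits that position (for example `&a.name()[..]`, `&decl.inputs[..]`, and the `PartialEq` impls in `token.rs` below). The following is a minimal sketch of the coercion, not taken from the commit; `takes_str` and `takes_slice` are made-up stand-ins for callees such as `span_err` or `intern_and_get_ident`, and modern Rust spells the old `&expr[]` as `&expr[..]`.

```rust
// Hypothetical callees that expect borrowed forms, as the real diagnostics
// and interner functions in this diff do.
fn takes_str(s: &str) -> usize { s.len() }
fn takes_slice(xs: &[i32]) -> usize { xs.len() }

fn main() {
    let owned: String = format!("={}", "r"); // an owned String, like the format! results in the diff
    let nums: Vec<i32> = vec![1, 2, 3];      // an owned Vec, like `attrs` or `tts` in the diff

    // Explicit full-range slice: what `&format!(...)[]` used to spell before this commit.
    let a = takes_str(&owned[..]);
    let b = takes_slice(&nums[..]);

    // Deref coercion: a plain borrow is enough, because &String coerces to &str
    // and &Vec<i32> coerces to &[i32] at the call boundary.
    let c = takes_str(&owned);
    let d = takes_slice(&nums);

    assert_eq!(a, c);
    assert_eq!(b, d);
}
```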
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs
index d8cba139fb5..009bfef8623 100644
--- a/src/libsyntax/ext/asm.rs
+++ b/src/libsyntax/ext/asm.rs
@@ -113,7 +113,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                     Some(('=', _)) => None,
                     Some(('+', operand)) => {
                         Some(token::intern_and_get_ident(&format!(
-                            "={}", operand)[]))
+                            "={}", operand)))
                     }
                     _ => {
                         cx.span_err(span, "output operand constraint lacks '=' or '+'");
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index d4ccabbd63b..2ef90f04f75 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -83,15 +83,15 @@ pub enum Annotatable {
 impl Annotatable {
     pub fn attrs(&self) -> &[ast::Attribute] {
         match *self {
-            Annotatable::Item(ref i) => &i.attrs[],
+            Annotatable::Item(ref i) => &i.attrs,
             Annotatable::TraitItem(ref i) => match *i {
-                ast::TraitItem::RequiredMethod(ref tm) => &tm.attrs[],
-                ast::TraitItem::ProvidedMethod(ref m) => &m.attrs[],
-                ast::TraitItem::TypeTraitItem(ref at) => &at.attrs[],
+                ast::TraitItem::RequiredMethod(ref tm) => &tm.attrs,
+                ast::TraitItem::ProvidedMethod(ref m) => &m.attrs,
+                ast::TraitItem::TypeTraitItem(ref at) => &at.attrs,
             },
             Annotatable::ImplItem(ref i) => match *i {
-                ast::ImplItem::MethodImplItem(ref m) => &m.attrs[],
-                ast::ImplItem::TypeImplItem(ref t) => &t.attrs[],
+                ast::ImplItem::MethodImplItem(ref m) => &m.attrs,
+                ast::ImplItem::TypeImplItem(ref t) => &t.attrs,
             }
         }
     }
@@ -639,7 +639,7 @@ impl<'a> ExtCtxt<'a> {
     pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
     pub fn mod_path(&self) -> Vec<ast::Ident> {
         let mut v = Vec::new();
-        v.push(token::str_to_ident(&self.ecfg.crate_name[]));
+        v.push(token::str_to_ident(&self.ecfg.crate_name));
         v.extend(self.mod_path.iter().cloned());
         return v;
     }
@@ -648,7 +648,7 @@ impl<'a> ExtCtxt<'a> {
         if self.recursion_count > self.ecfg.recursion_limit {
             self.span_fatal(ei.call_site,
                             &format!("recursion limit reached while expanding the macro `{}`",
-                                     ei.callee.name)[]);
+                                     ei.callee.name));
         }
         let mut call_site = ei.call_site;
@@ -773,7 +773,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
                       tts: &[ast::TokenTree],
                       name: &str) {
     if tts.len() != 0 {
-        cx.span_err(sp, &format!("{} takes no arguments", name)[]);
+        cx.span_err(sp, &format!("{} takes no arguments", name));
     }
 }
@@ -786,12 +786,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
                                -> Option<String> {
     let mut p = cx.new_parser_from_tts(tts);
     if p.token == token::Eof {
-        cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
+        cx.span_err(sp, &format!("{} takes 1 argument", name));
         return None
     }
     let ret = cx.expander().fold_expr(p.parse_expr());
     if p.token != token::Eof {
-        cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
+        cx.span_err(sp, &format!("{} takes 1 argument", name));
     }
     expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
         s.to_string()
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 5bfd4a9f611..8923290d655 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -762,7 +762,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
         let loc = self.codemap().lookup_char_pos(span.lo);
         let expr_file = self.expr_str(span,
-                                      token::intern_and_get_ident(&loc.file.name[]));
+                                      token::intern_and_get_ident(&loc.file.name));
         let expr_line = self.expr_usize(span, loc.line);
         let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
         let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs
index 38098e50dee..84f786e9780 100644
--- a/src/libsyntax/ext/concat.rs
+++ b/src/libsyntax/ext/concat.rs
@@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
             ast::LitInt(i, ast::UnsignedIntLit(_)) |
             ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) |
             ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => {
-                accumulator.push_str(&format!("{}", i)[]);
+                accumulator.push_str(&format!("{}", i));
             }
             ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) |
             ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => {
-                accumulator.push_str(&format!("-{}", i)[]);
+                accumulator.push_str(&format!("-{}", i));
             }
             ast::LitBool(b) => {
-                accumulator.push_str(&format!("{}", b)[]);
+                accumulator.push_str(&format!("{}", b));
             }
             ast::LitByte(..) |
             ast::LitBinary(..) => {
diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs
index 518fbcc80ee..5f460264216 100644
--- a/src/libsyntax/ext/deriving/clone.rs
+++ b/src/libsyntax/ext/deriving/clone.rs
@@ -81,11 +81,11 @@ fn cs_clone(
         EnumNonMatchingCollapsed (..) => {
             cx.span_bug(trait_span,
                         &format!("non-matching enum variants in \
-                                 `derive({})`", name)[])
+                                 `derive({})`", name))
         }
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span,
-                        &format!("static method in `derive({})`", name)[])
+                        &format!("static method in `derive({})`", name))
         }
     }
@@ -102,7 +102,7 @@ fn cs_clone(
                     None => {
                         cx.span_bug(trait_span,
                                     &format!("unnamed field in normal struct in \
-                                             `derive({})`", name)[])
+                                             `derive({})`", name))
                     }
                 };
                 cx.field_imm(field.span, ident, subcall(field))
diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs
index ab0f64e823f..f27bbc338e5 100644
--- a/src/libsyntax/ext/deriving/decodable.rs
+++ b/src/libsyntax/ext/deriving/decodable.rs
@@ -204,7 +204,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
     } else {
         let fields = fields.iter().enumerate().map(|(i, &span)| {
             getarg(cx, span,
-                   token::intern_and_get_ident(&format!("_field{}", i)[]),
+                   token::intern_and_get_ident(&format!("_field{}", i)),
                    i)
         }).collect();
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index dd609470599..8038074cee1 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -191,7 +191,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                 let name = match name {
                     Some(id) => token::get_ident(id),
                     None => {
-                        token::intern_and_get_ident(&format!("_field{}", i)[])
+                        token::intern_and_get_ident(&format!("_field{}", i))
                     }
                 };
                 let enc = cx.expr_method_call(span, self_.clone(),
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index b912ed34ae0..36bd8d39a83 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -363,7 +363,7 @@ impl<'a> TraitDef<'a> {
         // generated implementations are linted
         let mut attrs = newitem.attrs.clone();
         attrs.extend(item.attrs.iter().filter(|a| {
-            match &a.name()[] {
+            match &a.name()[..] {
                 "allow" | "warn" | "deny" | "forbid" => true,
                 _ => false,
             }
@@ -671,7 +671,7 @@ impl<'a> MethodDef<'a> {
         for (i, ty) in self.args.iter().enumerate() {
             let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
-            let ident = cx.ident_of(&format!("__arg_{}", i)[]);
+            let ident = cx.ident_of(&format!("__arg_{}", i));
             arg_tys.push((ident, ast_ty));
             let arg_expr = cx.expr_ident(trait_.span, ident);
@@ -778,7 +778,7 @@ impl<'a> MethodDef<'a> {
                                                         struct_path,
                                                         struct_def,
                                                         &format!("__self_{}",
-                                                                 i)[],
+                                                                 i),
                                                         ast::MutImmutable);
             patterns.push(pat);
             raw_fields.push(ident_expr);
@@ -971,7 +971,7 @@ impl<'a> MethodDef<'a> {
         let mut subpats = Vec::with_capacity(self_arg_names.len());
         let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
         let first_self_pat_idents = {
-            let (p, idents) = mk_self_pat(cx, &self_arg_names[0][]);
+            let (p, idents) = mk_self_pat(cx, &self_arg_names[0]);
             subpats.push(p);
             idents
         };
@@ -1289,7 +1289,7 @@ impl<'a> TraitDef<'a> {
                     cx.span_bug(sp, "a struct with named and unnamed fields in `derive`");
                 }
             };
-            let ident = cx.ident_of(&format!("{}_{}", prefix, i)[]);
+            let ident = cx.ident_of(&format!("{}_{}", prefix, i));
             paths.push(codemap::Spanned{span: sp, node: ident});
             let val = cx.expr(
                 sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
@@ -1335,7 +1335,7 @@ impl<'a> TraitDef<'a> {
         let mut ident_expr = Vec::new();
         for (i, va) in variant_args.iter().enumerate() {
             let sp = self.set_expn_info(cx, va.ty.span);
-            let ident = cx.ident_of(&format!("{}_{}", prefix, i)[]);
+            let ident = cx.ident_of(&format!("{}_{}", prefix, i));
             let path1 = codemap::Spanned{span: sp, node: ident};
             paths.push(path1);
             let expr_path = cx.expr_path(cx.path_ident(sp, ident));
@@ -1378,7 +1378,7 @@ pub fn cs_fold<F>(use_foldl: bool,
                                  field.span,
                                  old,
                                  field.self_.clone(),
-                                 &field.other[])
+                                 &field.other)
                 })
             } else {
                 all_fields.iter().rev().fold(base, |old, field| {
@@ -1386,7 +1386,7 @@ pub fn cs_fold<F>(use_foldl: bool,
                                  field.span,
                                  old,
                                  field.self_.clone(),
-                                 &field.other[])
+                                 &field.other)
                 })
             }
         },
diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs
index f8bc331bfcf..eee780f457c 100644
--- a/src/libsyntax/ext/deriving/mod.rs
+++ b/src/libsyntax/ext/deriving/mod.rs
@@ -157,7 +157,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
                         cx.span_err(titem.span,
                                     &format!("unknown `derive` \
                                              trait: `{}`",
-                                             *tname)[]);
+                                             *tname));
                     }
                 };
             }
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 9c04d1e9282..93f8ee5042b 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -83,7 +83,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         None => {
             token::intern_and_get_ident(&format!("environment variable `{}` \
                                                  not defined",
-                                                 var)[])
+                                                 var))
         }
         Some(second) => {
             match expr_to_string(cx, second, "expected string literal") {
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index d4dda7390a5..a2f9dc6b5f8 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -389,7 +389,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
             fld.cx.span_err(
                 pth.span,
                 &format!("macro undefined: '{}!'",
-                         &extnamestr)[]);
+                         &extnamestr));
             // let compilation continue
             None
@@ -426,7 +426,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                             pth.span,
                             &format!("non-expression macro in expression position: {}",
                                      &extnamestr[..]
-                                     )[]);
+                                     ));
                     return None;
                 }
             };
@@ -436,7 +436,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
             fld.cx.span_err(
                 pth.span,
                 &format!("'{}' is not a tt-style macro",
-                         &extnamestr)[]);
+                         &extnamestr));
             None
         }
     }
@@ -608,7 +608,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
         None => {
             fld.cx.span_err(path_span,
                             &format!("macro undefined: '{}!'",
-                                     extnamestr)[]);
+                                     extnamestr));
             // let compilation continue
             return SmallVector::zero();
         }
@@ -618,10 +618,9 @@ pub fn expand_item_mac(it: P<ast::Item>,
                 if it.ident.name != parse::token::special_idents::invalid.name {
                     fld.cx
                        .span_err(path_span,
-                                 &format!("macro {}! expects no ident argument, \
-                                          given '{}'",
-                                          extnamestr,
-                                          token::get_ident(it.ident))[]);
+                                 &format!("macro {}! expects no ident argument, given '{}'",
+                                          extnamestr,
+                                          token::get_ident(it.ident)));
                     return SmallVector::zero();
                 }
                 fld.cx.bt_push(ExpnInfo {
@@ -640,7 +639,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
                 if it.ident.name == parse::token::special_idents::invalid.name {
                     fld.cx.span_err(path_span,
                                     &format!("macro {}! expects an ident argument",
-                                             &extnamestr)[]);
+                                             &extnamestr));
                     return SmallVector::zero();
                 }
                 fld.cx.bt_push(ExpnInfo {
@@ -659,7 +658,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
                 if it.ident.name == parse::token::special_idents::invalid.name {
                     fld.cx.span_err(path_span,
                                     &format!("macro_rules! expects an ident argument")
-                                    []);
+                                    );
                     return SmallVector::zero();
                 }
                 fld.cx.bt_push(ExpnInfo {
@@ -691,7 +690,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
         _ => {
             fld.cx.span_err(it.span,
                             &format!("{}! is not legal in item position",
-                                     &extnamestr)[]);
+                                     &extnamestr));
             return SmallVector::zero();
         }
     }
@@ -710,7 +709,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
         None => {
             fld.cx.span_err(path_span,
                             &format!("non-item macro in item position: {}",
-                                     &extnamestr)[]);
+                                     &extnamestr));
             return SmallVector::zero();
         }
     };
@@ -954,7 +953,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
         None => {
             fld.cx.span_err(pth.span,
                             &format!("macro undefined: '{}!'",
-                                     extnamestr)[]);
+                                     extnamestr));
             // let compilation continue
             return DummyResult::raw_pat(span);
         }
@@ -983,7 +982,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                         &format!(
                             "non-pattern macro in pattern position: {}",
                             &extnamestr
-                            )[]
+                            )
                         );
                     return DummyResult::raw_pat(span);
                 }
@@ -995,7 +994,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
         _ => {
             fld.cx.span_err(span,
                             &format!("{}! is not legal in pattern position",
-                                     &extnamestr)[]);
+                                     &extnamestr));
             return DummyResult::raw_pat(span);
         }
     }
@@ -1981,7 +1980,7 @@ foo_module!();
     // the xx binding should bind all of the xx varrefs:
     for (idx,v) in varrefs.iter().filter(|p| {
         p.segments.len() == 1
-        && "xx" == &token::get_ident(p.segments[0].identifier)[]
+        && "xx" == &token::get_ident(p.segments[0].identifier)
     }).enumerate() {
         if mtwt::resolve(v.segments[0].identifier) != resolved_binding {
             println!("uh oh, xx binding didn't match xx varref:");
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index e17329d7d33..1c2374e31f1 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -113,7 +113,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 _ => {
                     ecx.span_err(p.span,
                                  &format!("expected ident for named argument, found `{}`",
-                                          p.this_token_to_string())[]);
+                                          p.this_token_to_string()));
                     return None;
                 }
             };
@@ -127,7 +127,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 Some(prev) => {
                     ecx.span_err(e.span,
                                  &format!("duplicate argument named `{}`",
-                                          name)[]);
+                                          name));
                     ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here");
                     continue
                 }
@@ -281,19 +281,19 @@ impl<'a, 'b> Context<'a, 'b> {
                                   &format!("argument redeclared with type `{}` when \
                                            it was previously `{}`",
                                            *ty,
-                                           *cur)[]);
+                                           *cur));
             }
             (&Known(ref cur), _) => {
                 self.ecx.span_err(sp,
                                   &format!("argument used to format with `{}` was \
                                            attempted to not be used for formatting",
-                                           *cur)[]);
+                                           *cur));
             }
             (_, &Known(ref ty)) => {
                 self.ecx.span_err(sp,
                                   &format!("argument previously used as a format \
                                            argument attempted to be used as `{}`",
-                                           *ty)[]);
+                                           *ty));
             }
             (_, _) => {
                 self.ecx.span_err(sp, "argument declared with multiple formats");
@@ -337,7 +337,7 @@ impl<'a, 'b> Context<'a, 'b> {
     /// Translate the accumulated string literals to a literal expression
     fn trans_literal_string(&mut self) -> P<ast::Expr> {
         let sp = self.fmtsp;
-        let s = token::intern_and_get_ident(&self.literal[]);
+        let s = token::intern_and_get_ident(&self.literal);
         self.literal.clear();
         self.ecx.expr_str(sp, s)
     }
@@ -494,7 +494,7 @@ impl<'a, 'b> Context<'a, 'b> {
                 None => continue // error already generated
             };
-            let name = self.ecx.ident_of(&format!("__arg{}", i)[]);
+            let name = self.ecx.ident_of(&format!("__arg{}", i));
             pats.push(self.ecx.pat_ident(e.span, name));
             locals.push(Context::format_arg(self.ecx, e.span, arg_ty,
                                             self.ecx.expr_ident(e.span, name)));
@@ -511,7 +511,7 @@ impl<'a, 'b> Context<'a, 'b> {
             };
             let lname = self.ecx.ident_of(&format!("__arg{}",
-                                                   *name)[]);
+                                                   *name));
             pats.push(self.ecx.pat_ident(e.span, lname));
             names[self.name_positions[*name]] = Some(Context::format_arg(self.ecx, e.span, arg_ty,
@@ -600,7 +600,7 @@ impl<'a, 'b> Context<'a, 'b> {
             _ => {
                 ecx.span_err(sp,
                              &format!("unknown format trait `{}`",
-                                      *tyname)[]);
+                                      *tyname));
                 "Dummy"
             }
         }
@@ -694,7 +694,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
     }
     if !parser.errors.is_empty() {
         cx.ecx.span_err(cx.fmtsp, &format!("invalid format string: {}",
-                                           parser.errors.remove(0))[]);
+                                           parser.errors.remove(0)));
         return DummyResult::raw_expr(sp);
     }
     if !cx.literal.is_empty() {
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 2c7bf713aad..554529b5cb2 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -466,7 +466,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
 }
 fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
-    strs.iter().map(|str| str_to_ident(&(*str)[])).collect()
+    strs.iter().map(|str| str_to_ident(&(*str))).collect()
 }
 fn id_ext(str: &str) -> ast::Ident {
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index c8d48750c75..ac82effeaea 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let topmost = cx.original_span_in_file();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
-    let filename = token::intern_and_get_ident(&loc.file.name[]);
+    let filename = token::intern_and_get_ident(&loc.file.name);
     base::MacExpr::new(cx.expr_str(topmost, filename))
 }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 664f7b3e088..ce513bc91f5 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -153,7 +153,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
             seq.num_captures
         }
         &TtDelimited(_, ref delim) => {
-            count_names(&delim.tts[])
+            count_names(&delim.tts)
         }
         &TtToken(_, MatchNt(..)) => {
             1
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 06e8728d236..a0e2b4dbf5a 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -94,7 +94,7 @@ impl<'a> ParserAttr for Parser<'a> {
             }
             _ => {
                 let token_str = self.this_token_to_string();
-                self.fatal(&format!("expected `#`, found `{}`", token_str)[]);
+                self.fatal(&format!("expected `#`, found `{}`", token_str));
             }
         };
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index fd08cbd161b..83d2bb0cc70 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1109,7 +1109,7 @@ impl<'a> StringReader<'a> {
             // expansion purposes. See #12512 for the gory details of why
            // this is necessary.
             let ident = self.with_str_from(start, |lifetime_name| {
-                str_to_ident(&format!("'{}", lifetime_name)[])
+                str_to_ident(&format!("'{}", lifetime_name))
             });
             // Conjure up a "keyword checking ident" to make sure that
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 7ed48bdbb92..f826e43528b 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -254,7 +254,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
         Ok(bytes) => bytes,
         Err(e) => {
             err(&format!("couldn't read {:?}: {}",
-                         path.display(), e)[]);
+                         path.display(), e));
             unreachable!()
         }
     };
@@ -264,7 +264,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                              path.as_str().unwrap().to_string())
         }
         None => {
-            err(&format!("{:?} is not UTF-8 encoded", path.display())[])
+            err(&format!("{:?} is not UTF-8 encoded", path.display()))
         }
     }
     unreachable!()
@@ -827,19 +827,19 @@ mod test {
              ast::TtDelimited(_, ref macro_delimed)]
             if name_macro_rules.as_str() == "macro_rules"
             && name_zip.as_str() == "zip" => {
-                match &macro_delimed.tts[] {
+                match &macro_delimed.tts {
                     [ast::TtDelimited(_, ref first_delimed),
                      ast::TtToken(_, token::FatArrow),
                     ast::TtDelimited(_, ref second_delimed)]
                     if macro_delimed.delim == token::Paren => {
-                        match &first_delimed.tts[] {
+                        match &first_delimed.tts {
                             [ast::TtToken(_, token::Dollar),
                              ast::TtToken(_, token::Ident(name, token::Plain))]
                             if first_delimed.delim == token::Paren
                             && name.as_str() == "a" => {},
                             _ => panic!("value 3: {:?}", **first_delimed),
                         }
-                        match &second_delimed.tts[] {
+                        match &second_delimed.tts {
                             [ast::TtToken(_, token::Dollar),
                              ast::TtToken(_, token::Ident(name, token::Plain))]
                             if second_delimed.delim == token::Paren
@@ -1207,7 +1207,7 @@ mod test {
         let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap();
-        let docs = item.attrs.iter().filter(|a| &a.name()[] == "doc")
+        let docs = item.attrs.iter().filter(|a| &a.name() == "doc")
                     .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
         assert_eq!(&docs[..], b);
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 8480772ce6c..e6bcb8ac745 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -106,16 +106,16 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
                          desc: &str,
                          error: bool) {
         if error {
-            self.span_err(sp, &format!("obsolete syntax: {}", kind_str)[]);
+            self.span_err(sp, &format!("obsolete syntax: {}", kind_str));
         } else {
-            self.span_warn(sp, &format!("obsolete syntax: {}", kind_str)[]);
+            self.span_warn(sp, &format!("obsolete syntax: {}", kind_str));
         }
         if !self.obsolete_set.contains(&kind) {
             self.sess
                 .span_diagnostic
                 .handler()
-                .note(&format!("{}", desc)[]);
+                .note(&format!("{}", desc));
             self.obsolete_set.insert(kind);
         }
     }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 1805543d787..88c34937159 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -5191,7 +5191,7 @@ impl<'a> Parser<'a> {
                           -> (ast::Item_, Vec<ast::Attribute> ) {
         let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span));
         prefix.pop();
-        let mod_path = Path::new(".").join_many(&self.mod_path_stack[]);
+        let mod_path = Path::new(".").join_many(&self.mod_path_stack);
         let dir_path = prefix.join(&mod_path);
         let mod_string = token::get_ident(id);
         let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name(
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 433c013591c..2797ef084d9 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -652,47 +652,47 @@ impl BytesContainer for InternedString {
 impl fmt::Debug for InternedString {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Debug::fmt(&self.string[], f)
+        fmt::Debug::fmt(&self.string, f)
     }
 }
 impl fmt::Display for InternedString {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.string[], f)
+        fmt::Display::fmt(&self.string, f)
     }
 }
 impl<'a> PartialEq<&'a str> for InternedString {
     #[inline(always)]
     fn eq(&self, other: & &'a str) -> bool {
-        PartialEq::eq(&self.string[], *other)
+        PartialEq::eq(&self.string[..], *other)
     }
     #[inline(always)]
     fn ne(&self, other: & &'a str) -> bool {
-        PartialEq::ne(&self.string[], *other)
+        PartialEq::ne(&self.string[..], *other)
     }
 }
 impl<'a> PartialEq<InternedString > for &'a str {
     #[inline(always)]
     fn eq(&self, other: &InternedString) -> bool {
-        PartialEq::eq(*self, &other.string[])
+        PartialEq::eq(*self, &other.string[..])
     }
     #[inline(always)]
     fn ne(&self, other: &InternedString) -> bool {
-        PartialEq::ne(*self, &other.string[])
+        PartialEq::ne(*self, &other.string[..])
     }
 }
 impl Decodable for InternedString {
     fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
-        Ok(get_name(get_ident_interner().intern(&try!(d.read_str())[])))
+        Ok(get_name(get_ident_interner().intern(&try!(d.read_str())[..])))
     }
 }
 impl Encodable for InternedString {
     fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        s.emit_str(&self.string[])
+        s.emit_str(&self.string)
     }
 }
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 1593bfb97fe..5b3fde8535b 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -139,7 +139,7 @@ pub fn buf_str(toks: &[Token],
         }
         s.push_str(&format!("{}={}",
                             szs[i],
-                            tok_str(&toks[i]))[]);
+                            tok_str(&toks[i])));
         i += 1;
         i %= n;
     }
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index f26578e7401..92e7f4d2870 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -2342,7 +2342,7 @@ impl<'a> State<'a> {
         // HACK(eddyb) ignore the separately printed self argument.
         let args = if first {
-            &decl.inputs[]
+            &decl.inputs[..]
         } else {
             &decl.inputs[1..]
         };
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index 4e4a571ede7..ac7cdb1b413 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -38,7 +38,7 @@ pub fn maybe_inject_prelude(krate: ast::Crate) -> ast::Crate {
 }
 pub fn use_std(krate: &ast::Crate) -> bool {
-    !attr::contains_name(&krate.attrs[], "no_std")
+    !attr::contains_name(&krate.attrs, "no_std")
 }
 fn no_prelude(attrs: &[ast::Attribute]) -> bool {
@@ -88,14 +88,14 @@ impl fold::Folder for PreludeInjector {
         // only add `use std::prelude::*;` if there wasn't a
         // `#![no_implicit_prelude]` at the crate level.
         // fold_mod() will insert glob path.
-        if !no_prelude(&krate.attrs[]) {
+        if !no_prelude(&krate.attrs) {
             krate.module = self.fold_mod(krate.module);
         }
         krate
     }
     fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
-        if !no_prelude(&item.attrs[]) {
+        if !no_prelude(&item.attrs) {
             // only recur if there wasn't `#![no_implicit_prelude]`
             // on this item, i.e. this means that the prelude is not
             // implicitly imported though the whole subtree
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 7b1fc91e45b..5bada41badf 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -73,14 +73,14 @@ pub fn modify_for_testing(sess: &ParseSess,
     // We generate the test harness when building in the 'test'
     // configuration, either with the '--test' or '--cfg test'
     // command line options.
-    let should_test = attr::contains_name(&krate.config[], "test");
+    let should_test = attr::contains_name(&krate.config, "test");
     // Check for #[reexport_test_harness_main = "some_name"] which
     // creates a `use some_name = __test::main;`. This needs to be
     // unconditional, so that the attribute is still marked as used in
     // non-test builds.
     let reexport_test_harness_main =
-        attr::first_attr_value_str_by_name(&krate.attrs[],
+        attr::first_attr_value_str_by_name(&krate.attrs,
                                            "reexport_test_harness_main");
     if should_test {
@@ -306,7 +306,7 @@ enum HasTestSignature {
 fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
-    let has_test_attr = attr::contains_name(&i.attrs[], "test");
+    let has_test_attr = attr::contains_name(&i.attrs, "test");
     fn has_test_signature(i: &ast::Item) -> HasTestSignature {
         match &i.node {
@@ -342,7 +342,7 @@ fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
 }
 fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
-    let has_bench_attr = attr::contains_name(&i.attrs[], "bench");
+    let has_bench_attr = attr::contains_name(&i.attrs, "bench");
     fn has_test_signature(i: &ast::Item) -> bool {
         match i.node {
@@ -562,7 +562,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
 }
 fn is_test_crate(krate: &ast::Crate) -> bool {
-    match attr::find_crate_name(&krate.attrs[]) {
+    match attr::find_crate_name(&krate.attrs) {
         Some(ref s) if "test" == &s[..] => true,
         _ => false
     }
