| author | bors <bors@rust-lang.org> | 2015-02-19 18:36:59 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2015-02-19 18:36:59 +0000 |
| commit | 522d09dfecbeca1595f25ac58c6d0178bbd21d7d (patch) | |
| tree | cc0252dd3413e5f890d0ebcfdaa096e5b002be0b /src/libsyntax/ext | |
| parent | 0b664bb8436f2cfda7f13a6f302ab486f332816f (diff) | |
| parent | 49771bafa5fca16486bfd06741dac3de2c587adf (diff) | |
| download | rust-522d09dfecbeca1595f25ac58c6d0178bbd21d7d.tar.gz, rust-522d09dfecbeca1595f25ac58c6d0178bbd21d7d.zip | |
Auto merge of #22541 - Manishearth:rollup, r=Gankro (tag: 1.0.0-alpha.2)
Continued from #22520
Diffstat (limited to 'src/libsyntax/ext')
| Mode | Path | Lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/ext/base.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/concat.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/concat_idents.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/deriving/bounds.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/deriving/generic/mod.rs | 42 |
| -rw-r--r-- | src/libsyntax/ext/deriving/hash.rs | 25 |
| -rw-r--r-- | src/libsyntax/ext/deriving/mod.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/deriving/show.rs | 2 |
| -rw-r--r-- | src/libsyntax/ext/env.rs | 6 |
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 26 |
| -rw-r--r-- | src/libsyntax/ext/format.rs | 8 |
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 4 |
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 16 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 18 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 14 |
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 4 |
16 files changed, 85 insertions, 90 deletions
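Nearly everything in the diff below is one of two mechanical cleanups: the deprecated full-range index `&foo[]` is rewritten as an explicit full-range slice `&foo[..]`, and element-copying closures such as `.map(|x| (*x).clone())` are replaced by `Iterator::cloned()`. A minimal standalone sketch of the two idioms (the `words` and `s` bindings are made-up names for illustration, not code from this commit):

```rust
fn main() {
    let words = vec!["alpha".to_string(), "beta".to_string()];

    // Old form being removed throughout this diff: copy elements with an explicit closure.
    let old_style: Vec<String> = words.iter().map(|w| w.clone()).collect();

    // New form used instead: `Iterator::cloned` expresses the same thing directly.
    let new_style: Vec<String> = words.iter().cloned().collect();
    assert_eq!(old_style, new_style);

    // The other recurring change: the old full-range index `&s[]` becomes
    // an explicit full-range slice `&s[..]`.
    let s = String::from("hello");
    let slice: &str = &s[..];
    assert_eq!(slice, "hello");
}
```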
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 8800ffd1e9b..d4ccabbd63b 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -640,7 +640,7 @@ impl<'a> ExtCtxt<'a> {
     pub fn mod_path(&self) -> Vec<ast::Ident> {
         let mut v = Vec::new();
         v.push(token::str_to_ident(&self.ecfg.crate_name[]));
-        v.extend(self.mod_path.iter().map(|a| *a));
+        v.extend(self.mod_path.iter().cloned());
         return v;
     }
     pub fn bt_push(&mut self, ei: ExpnInfo) {
diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs
index 80d128959ea..38098e50dee 100644
--- a/src/libsyntax/ext/concat.rs
+++ b/src/libsyntax/ext/concat.rs
@@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
     }
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(&accumulator[])))
+            token::intern_and_get_ident(&accumulator[..])))
 }
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index 63a8bd9ddf1..9410a51e7a5 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -49,7 +49,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         }
     }
-    let res = str_to_ident(&res_str[]);
+    let res = str_to_ident(&res_str[..]);
 
     let e = P(ast::Expr {
         id: ast::DUMMY_NODE_ID,
diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs
index 879718a6399..93098484ae0 100644
--- a/src/libsyntax/ext/deriving/bounds.rs
+++ b/src/libsyntax/ext/deriving/bounds.rs
@@ -24,7 +24,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
 {
     let name = match mitem.node {
         MetaWord(ref tname) => {
-            match &tname[] {
+            match &tname[..] {
                 "Copy" => "Copy",
                 "Send" | "Sync" => {
                     return cx.span_err(span,
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index f878cb5ca8b..b912ed34ae0 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -367,7 +367,7 @@ impl<'a> TraitDef<'a> {
                     "allow" | "warn" | "deny" | "forbid" => true,
                     _ => false,
                 }
-            }).map(|a| a.clone()));
+            }).cloned());
             push(P(ast::Item {
                 attrs: attrs,
                 ..(*newitem).clone()
@@ -410,7 +410,7 @@ impl<'a> TraitDef<'a> {
         let mut ty_params = ty_params.into_vec();
 
         // Copy the lifetimes
-        lifetimes.extend(generics.lifetimes.iter().map(|l| (*l).clone()));
+        lifetimes.extend(generics.lifetimes.iter().cloned());
 
         // Create the type parameters.
         ty_params.extend(generics.ty_params.iter().map(|ty_param| {
@@ -445,14 +445,14 @@ impl<'a> TraitDef<'a> {
                         span: self.span,
                         bound_lifetimes: wb.bound_lifetimes.clone(),
                         bounded_ty: wb.bounded_ty.clone(),
-                        bounds: OwnedSlice::from_vec(wb.bounds.iter().map(|b| b.clone()).collect())
+                        bounds: OwnedSlice::from_vec(wb.bounds.iter().cloned().collect())
                     })
                 }
                 ast::WherePredicate::RegionPredicate(ref rb) => {
                     ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
                         span: self.span,
                         lifetime: rb.lifetime,
-                        bounds: rb.bounds.iter().map(|b| b.clone()).collect()
+                        bounds: rb.bounds.iter().cloned().collect()
                     })
                 }
                 ast::WherePredicate::EqPredicate(ref we) => {
@@ -500,7 +500,7 @@ impl<'a> TraitDef<'a> {
         let opt_trait_ref = Some(trait_ref);
         let ident = ast_util::impl_pretty_name(&opt_trait_ref, &*self_type);
         let mut a = vec![attr];
-        a.extend(self.attributes.iter().map(|a| a.clone()));
+        a.extend(self.attributes.iter().cloned());
         cx.item(
             self.span,
             ident,
@@ -536,15 +536,15 @@ impl<'a> TraitDef<'a> {
                                               self,
                                               struct_def,
                                               type_ident,
-                                              &self_args[],
-                                              &nonself_args[])
+                                              &self_args[..],
+                                              &nonself_args[..])
             } else {
                 method_def.expand_struct_method_body(cx,
                                                      self,
                                                      struct_def,
                                                      type_ident,
-                                                     &self_args[],
-                                                     &nonself_args[])
+                                                     &self_args[..],
+                                                     &nonself_args[..])
             };
 
             method_def.create_method(cx,
@@ -576,15 +576,15 @@ impl<'a> TraitDef<'a> {
                                             self,
                                             enum_def,
                                             type_ident,
-                                            &self_args[],
-                                            &nonself_args[])
+                                            &self_args[..],
+                                            &nonself_args[..])
             } else {
                 method_def.expand_enum_method_body(cx,
                                                    self,
                                                    enum_def,
                                                    type_ident,
                                                    self_args,
-                                                   &nonself_args[])
+                                                   &nonself_args[..])
             };
 
             method_def.create_method(cx,
@@ -934,22 +934,22 @@ impl<'a> MethodDef<'a> {
             .collect::<Vec<String>>();
 
         let self_arg_idents = self_arg_names.iter()
-            .map(|name|cx.ident_of(&name[]))
+            .map(|name|cx.ident_of(&name[..]))
             .collect::<Vec<ast::Ident>>();
 
         // The `vi_idents` will be bound, solely in the catch-all, to
         // a series of let statements mapping each self_arg to a usize
         // corresponding to its variant index.
         let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
-            .map(|name| { let vi_suffix = format!("{}_vi", &name[]);
-                          cx.ident_of(&vi_suffix[]) })
+            .map(|name| { let vi_suffix = format!("{}_vi", &name[..]);
+                          cx.ident_of(&vi_suffix[..]) })
             .collect::<Vec<ast::Ident>>();
 
         // Builds, via callback to call_substructure_method, the
         // delegated expression that handles the catch-all case,
         // using `__variants_tuple` to drive logic if necessary.
         let catch_all_substructure = EnumNonMatchingCollapsed(
-            self_arg_idents, &variants[], &vi_idents[]);
+            self_arg_idents, &variants[..], &vi_idents[..]);
 
         // These arms are of the form:
         // (Variant1, Variant1, ...) => Body1
@@ -976,7 +976,7 @@ impl<'a> MethodDef<'a> {
             idents
         };
         for self_arg_name in self_arg_names.tail() {
-            let (p, idents) = mk_self_pat(cx, &self_arg_name[]);
+            let (p, idents) = mk_self_pat(cx, &self_arg_name[..]);
             subpats.push(p);
             self_pats_idents.push(idents);
         }
@@ -1032,7 +1032,7 @@ impl<'a> MethodDef<'a> {
                                                         &**variant,
                                                         field_tuples);
                 let arm_expr = self.call_substructure_method(
-                    cx, trait_, type_ident, &self_args[], nonself_args,
+                    cx, trait_, type_ident, &self_args[..], nonself_args,
                     &substructure);
 
                 cx.arm(sp, vec![single_pat], arm_expr)
@@ -1085,7 +1085,7 @@ impl<'a> MethodDef<'a> {
         }
 
         let arm_expr = self.call_substructure_method(
-            cx, trait_, type_ident, &self_args[], nonself_args,
+            cx, trait_, type_ident, &self_args[..], nonself_args,
             &catch_all_substructure);
 
         // Builds the expression:
@@ -1391,7 +1391,7 @@ pub fn cs_fold<F>(use_foldl: bool,
             }
         },
         EnumNonMatchingCollapsed(ref all_args, _, tuple) =>
-            enum_nonmatch_f(cx, trait_span, (&all_args[], tuple),
+            enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple),
                             substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `derive`")
@@ -1431,7 +1431,7 @@ pub fn cs_same_method<F>(f: F,
             f(cx, trait_span, called)
         },
         EnumNonMatchingCollapsed(ref all_self_args, _, tuple) =>
-            enum_nonmatch_f(cx, trait_span, (&all_self_args[], tuple),
+            enum_nonmatch_f(cx, trait_span, (&all_self_args[..], tuple),
                             substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `derive`")
diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs
index 5aa9f9a0c3e..2149c7a7f77 100644
--- a/src/libsyntax/ext/deriving/hash.rs
+++ b/src/libsyntax/ext/deriving/hash.rs
@@ -14,7 +14,6 @@ use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 use ext::deriving::generic::ty::*;
-use parse::token::InternedString;
 use ptr::P;
 
 pub fn expand_deriving_hash<F>(cx: &mut ExtCtxt,
@@ -26,30 +25,26 @@
 {
     let path = Path::new_(pathvec_std!(cx, core::hash::Hash), None,
-                          vec!(box Literal(Path::new_local("__S"))), true);
-    let generics = LifetimeBounds {
-        lifetimes: Vec::new(),
-        bounds: vec!(("__S",
-                      vec!(path_std!(cx, core::hash::Writer),
-                           path_std!(cx, core::hash::Hasher)))),
-    };
-    let args = Path::new_local("__S");
-    let inline = cx.meta_word(span, InternedString::new("inline"));
-    let attrs = vec!(cx.attribute(span, inline));
+                          vec!(), true);
+    let arg = Path::new_local("__H");
     let hash_trait_def = TraitDef {
         span: span,
         attributes: Vec::new(),
         path: path,
         additional_bounds: Vec::new(),
-        generics: generics,
+        generics: LifetimeBounds::empty(),
         methods: vec!(
             MethodDef {
                 name: "hash",
-                generics: LifetimeBounds::empty(),
+                generics: LifetimeBounds {
+                    lifetimes: Vec::new(),
+                    bounds: vec![("__H",
+                                  vec![path_std!(cx, core::hash::Hasher)])],
+                },
                 explicit_self: borrowed_explicit_self(),
-                args: vec!(Ptr(box Literal(args), Borrowed(None, MutMutable))),
+                args: vec!(Ptr(box Literal(arg), Borrowed(None, MutMutable))),
                 ret_ty: nil_ty(),
-                attributes: attrs,
+                attributes: vec![],
                 combine_substructure: combine_substructure(box |a, b, c| {
                     hash_substructure(a, b, c)
                 })
diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs
index 0ed9e85e576..f8bc331bfcf 100644
--- a/src/libsyntax/ext/deriving/mod.rs
+++ b/src/libsyntax/ext/deriving/mod.rs
@@ -102,7 +102,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
                                            |i| push(i)))
                 }
 
-                match &tname[] {
+                match &tname[..] {
                     "Clone" => expand!(clone::expand_deriving_clone),
 
                     "Hash" => expand!(hash::expand_deriving_hash),
diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs
index 3f5947672e0..281f23f9e61 100644
--- a/src/libsyntax/ext/deriving/show.rs
+++ b/src/libsyntax/ext/deriving/show.rs
@@ -128,7 +128,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
     let formatter = substr.nonself_args[0].clone();
 
     let meth = cx.ident_of("write_fmt");
-    let s = token::intern_and_get_ident(&format_string[]);
+    let s = token::intern_and_get_ident(&format_string[..]);
     let format_string = cx.expr_str(span, s);
 
     // phew, not our responsibility any more!
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 5d56707c87a..9c04d1e9282 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
         Some(v) => v
     };
 
-    let e = match env::var(&var[]) {
+    let e = match env::var(&var[..]) {
       Err(..) => {
           cx.expr_path(cx.path_all(sp,
                                    true,
@@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
                                       cx.ident_of("Some")),
                         vec!(cx.expr_str(sp,
                                          token::intern_and_get_ident(
-                                          &s[]))))
+                                          &s[..]))))
       }
     };
     MacExpr::new(e)
@@ -101,7 +101,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         }
     }
 
-    let e = match env::var(&var[]) {
+    let e = match env::var(&var[..]) {
         Err(_) => {
             cx.span_err(sp, &msg);
             cx.expr_usize(sp, 0)
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 6b7cecee815..d4dda7390a5 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -405,7 +405,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         },
                     });
                     let fm = fresh_mark();
-                    let marked_before = mark_tts(&tts[], fm);
+                    let marked_before = mark_tts(&tts[..], fm);
 
                     // The span that we pass to the expanders we want to
                     // be the root of the call stack. That's the most
@@ -416,7 +416,7 @@
                     let opt_parsed = {
                         let expanded = expandfun.expand(fld.cx,
                                                         mac_span,
-                                                        &marked_before[]);
+                                                        &marked_before[..]);
                         parse_thunk(expanded)
                     };
                     let parsed = match opt_parsed {
@@ -425,7 +425,7 @@
                             fld.cx.span_err(
                                 pth.span,
                                 &format!("non-expression macro in expression position: {}",
-                                        &extnamestr[]
+                                        &extnamestr[..]
                                         )[]);
                             return None;
                         }
@@ -633,8 +633,8 @@ pub fn expand_item_mac(it: P<ast::Item>,
                 }
             });
             // mark before expansion:
-            let marked_before = mark_tts(&tts[], fm);
-            expander.expand(fld.cx, it.span, &marked_before[])
+            let marked_before = mark_tts(&tts[..], fm);
+            expander.expand(fld.cx, it.span, &marked_before[..])
         }
         IdentTT(ref expander, span) => {
             if it.ident.name == parse::token::special_idents::invalid.name {
@@ -652,7 +652,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
                 }
            });
             // mark before expansion:
-            let marked_tts = mark_tts(&tts[], fm);
+            let marked_tts = mark_tts(&tts[..], fm);
             expander.expand(fld.cx, it.span, it.ident, marked_tts)
         }
         MacroRulesTT => {
@@ -971,11 +971,11 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                 });
                 let fm = fresh_mark();
-                let marked_before = mark_tts(&tts[], fm);
+                let marked_before = mark_tts(&tts[..], fm);
                 let mac_span = fld.cx.original_span();
 
                 let expanded = match expander.expand(fld.cx, mac_span,
-                                                     &marked_before[]).make_pat() {
+                                                     &marked_before[..]).make_pat() {
                     Some(e) => e,
                     None => {
                         fld.cx.span_err(
@@ -1128,7 +1128,7 @@ fn expand_annotatable(a: Annotatable,
             if valid_ident {
                 fld.cx.mod_push(it.ident);
             }
-            let macro_use = contains_macro_use(fld, &new_attrs[]);
+            let macro_use = contains_macro_use(fld, &new_attrs[..]);
             let result = with_exts_frame!(fld.cx.syntax_env,
                                           macro_use,
                                           noop_fold_item(it, fld));
@@ -1508,7 +1508,7 @@
             node: match node {
                 MacInvocTT(path, tts, ctxt) => {
                     MacInvocTT(self.fold_path(path),
-                               self.fold_tts(&tts[]),
+                               self.fold_tts(&tts[..]),
                                mtwt::apply_mark(self.mark, ctxt))
                 }
             },
@@ -1914,7 +1914,7 @@ mod test {
                         .collect();
                     println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name);
                     let string = token::get_ident(final_varref_ident);
-                    println!("varref's first segment's string: \"{}\"", &string[]);
+                    println!("varref's first segment's string: \"{}\"", &string[..]);
                     println!("binding #{}: {}, resolves to {}",
                              binding_idx, bindings[binding_idx], binding_name);
                     mtwt::with_sctable(|x| mtwt::display_sctable(x));
@@ -1967,10 +1967,10 @@ foo_module!();
         let cxbinds: Vec<&ast::Ident> = bindings.iter().filter(|b| {
             let ident = token::get_ident(**b);
-            let string = &ident[];
+            let string = &ident[..];
             "xx" == string
         }).collect();
 
-        let cxbinds: &[&ast::Ident] = &cxbinds[];
+        let cxbinds: &[&ast::Ident] = &cxbinds[..];
         let cxbind = match cxbinds {
             [b] => b,
             _ => panic!("expected just one binding for ext_cx")
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index 170a455a913..e17329d7d33 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -118,7 +118,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 }
             };
             let interned_name = token::get_ident(ident);
-            let name = &interned_name[];
+            let name = &interned_name[..];
 
             p.expect(&token::Eq);
             let e = p.parse_expr();
@@ -218,7 +218,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     let msg = format!("invalid reference to argument `{}` ({})",
                                       arg, self.describe_num_args());
 
-                    self.ecx.span_err(self.fmtsp, &msg[]);
+                    self.ecx.span_err(self.fmtsp, &msg[..]);
                     return;
                 }
                 {
@@ -238,7 +238,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     Some(e) => e.span,
                     None => {
                         let msg = format!("there is no argument named `{}`", name);
-                        self.ecx.span_err(self.fmtsp, &msg[]);
+                        self.ecx.span_err(self.fmtsp, &msg[..]);
                         return;
                     }
                 };
@@ -587,7 +587,7 @@ impl<'a, 'b> Context<'a, 'b> {
                       -> P<ast::Expr> {
         let trait_ = match *ty {
             Known(ref tyname) => {
-                match &tyname[] {
+                match &tyname[..] {
                     ""  => "Display",
                     "?" => "Debug",
                     "e" => "LowerExp",
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 67990895d07..2c7bf713aad 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -668,7 +668,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
             for i in 0..tt.len() {
                 seq.push(tt.get_tt(i));
             }
-            mk_tts(cx, &seq[])
+            mk_tts(cx, &seq[..])
         }
         ast::TtToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
@@ -757,7 +757,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
 
     let mut vector = vec!(stmt_let_sp, stmt_let_tt);
-    vector.extend(mk_tts(cx, &tts[]).into_iter());
+    vector.extend(mk_tts(cx, &tts[..]).into_iter());
     let block = cx.expr_block(
         cx.block_all(sp,
                      vector,
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 7a3a3562bdf..c8d48750c75 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         -> Box<base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
     base::MacExpr::new(cx.expr_str(sp,
-                                   token::intern_and_get_ident(&s[])))
+                                   token::intern_and_get_ident(&s[..])))
 }
 
 pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
@@ -78,7 +78,7 @@
                             .connect("::");
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(&string[])))
+            token::intern_and_get_ident(&string[..])))
 }
 
 /// include! : parse the given file as an expr
@@ -117,7 +117,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree
                 None => self.p.span_fatal(
                     self.p.span,
                     &format!("expected item, found `{}`",
                              self.p.this_token_to_string())
                 )
             }
         }
@@ -141,7 +141,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             cx.span_err(sp,
                         &format!("couldn't read {}: {}",
                                  file.display(),
-                                 e)[]);
+                                 e));
             return DummyResult::expr(sp);
         }
         Ok(bytes) => bytes,
@@ -151,7 +151,7 @@
             // Add this input file to the code map to make it available as
            // dependency information
             let filename = format!("{}", file.display());
-            let interned = token::intern_and_get_ident(&src[]);
+            let interned = token::intern_and_get_ident(&src[..]);
             cx.codemap().new_filemap(filename, src);
 
             base::MacExpr::new(cx.expr_str(sp, interned))
@@ -159,7 +159,7 @@
         Err(_) => {
             cx.span_err(sp,
                         &format!("{} wasn't a utf-8 file",
-                                 file.display())[]);
+                                 file.display()));
             return DummyResult::expr(sp);
         }
     }
@@ -175,11 +175,11 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     match File::open(&file).read_to_end() {
         Err(e) => {
             cx.span_err(sp,
-                        &format!("couldn't read {}: {}", file.display(), e)[]);
+                        &format!("couldn't read {}: {}", file.display(), e));
             return DummyResult::expr(sp);
         }
         Ok(bytes) => {
-            let bytes = bytes.iter().map(|x| *x).collect();
+            let bytes = bytes.iter().cloned().collect();
             base::MacExpr::new(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))
         }
     }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index d649e497ef7..664f7b3e088 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
 
 pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
                            -> Box<MatcherPos> {
-    let match_idx_hi = count_names(&ms[]);
+    let match_idx_hi = count_names(&ms[..]);
     let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect();
     box MatcherPos {
         stack: vec![],
@@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                     p_s.span_diagnostic
                        .span_fatal(sp,
                                    &format!("duplicated bind name: {}",
-                                            &string)[])
+                                            &string))
                 }
             }
         }
@@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess,
                      rdr: TtReader,
                      ms: Vec<TokenTree> )
                      -> HashMap<Ident, Rc<NamedMatch>> {
-    match parse(sess, cfg, rdr, &ms[]) {
+    match parse(sess, cfg, rdr, &ms[..]) {
         Success(m) => m,
         Failure(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, &str[])
+            sess.span_diagnostic.span_fatal(sp, &str[..])
        }
         Error(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, &str[])
+            sess.span_diagnostic.span_fatal(sp, &str[..])
         }
     }
 }
@@ -283,7 +283,7 @@ pub fn parse(sess: &ParseSess,
             -> ParseResult {
     let mut cur_eis = Vec::new();
     cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
-                                                .map(|x| (*x).clone())
+                                                .cloned()
                                                 .collect()),
                                      None,
                                      rdr.peek().sp.lo));
@@ -447,7 +447,7 @@ pub fn parse(sess: &ParseSess,
                 for dv in &mut (&mut eof_eis[0]).matches {
                     v.push(dv.pop().unwrap());
                 }
-                return Success(nameize(sess, ms, &v[]));
+                return Success(nameize(sess, ms, &v[..]));
             } else if eof_eis.len() > 1 {
                 return Error(sp, "ambiguity: multiple successful parses".to_string());
             } else {
@@ -533,7 +533,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
         _ => {
             let token_str = pprust::token_to_string(&p.token);
             p.fatal(&format!("expected ident, found {}",
-                             &token_str[])[])
+                             &token_str[..]))
         }
       },
       "path" => {
@@ -542,7 +542,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
       "meta" => token::NtMeta(p.parse_meta_item()),
       _ => {
          p.span_fatal_help(sp,
-                           &format!("invalid fragment specifier `{}`", name)[],
+                           &format!("invalid fragment specifier `{}`", name),
                            "valid fragment specifiers are `ident`, `block`, \
                             `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
                             and `item`")
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index f322cf8bad0..fa6d934a457 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -50,7 +50,7 @@ impl<'a> ParserAnyMacro<'a> {
                               following",
                              token_str);
             let span = parser.span;
-            parser.span_err(span, &msg[]);
+            parser.span_err(span, &msg[..]);
         }
     }
 }
@@ -123,8 +123,8 @@ impl TTMacroExpander for MacroRulesMacroExpander {
                          self.name,
                          self.imported_from,
                          arg,
-                         &self.lhses[],
-                         &self.rhses[])
+                         &self.lhses,
+                         &self.rhses)
     }
 }
 
@@ -151,7 +151,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
         match **lhs {
             MatchedNonterminal(NtTT(ref lhs_tt)) => {
                 let lhs_tt = match **lhs_tt {
-                    TtDelimited(_, ref delim) => &delim.tts[],
+                    TtDelimited(_, ref delim) => &delim.tts[..],
                     _ => cx.span_fatal(sp, "malformed macro lhs")
                 };
                 // `None` is because we're not interpolating
@@ -159,7 +159,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                                                None,
                                                None,
                                                arg.iter()
-                                                  .map(|x| (*x).clone())
+                                                  .cloned()
                                                   .collect(),
                                                true);
                 match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) {
@@ -192,13 +192,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                         best_fail_spot = sp;
                         best_fail_msg = (*msg).clone();
                     },
-                    Error(sp, ref msg) => cx.span_fatal(sp, &msg[])
+                    Error(sp, ref msg) => cx.span_fatal(sp, &msg[..])
                 }
             }
             _ => cx.bug("non-matcher found in parsed lhses")
         }
     }
-    cx.span_fatal(best_fail_spot, &best_fail_msg[]);
+    cx.span_fatal(best_fail_spot, &best_fail_msg[..]);
 }
 
 // Note that macro-by-example's input is also matched against a token tree:
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 83234e3b7a5..0d92bd761b4 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -255,7 +255,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 }
                 LisContradiction(ref msg) => {
                     // FIXME #2887 blame macro invoker instead
-                    r.sp_diag.span_fatal(sp.clone(), &msg[]);
+                    r.sp_diag.span_fatal(sp.clone(), &msg[..]);
                 }
                 LisConstraint(len, _) => {
                     if len == 0 {
@@ -309,7 +309,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                         r.sp_diag.span_fatal(
                             r.cur_span, /* blame the macro writer */
                             &format!("variable '{:?}' is still repeating at this depth",
-                                    token::get_ident(ident))[]);
+                                    token::get_ident(ident)));
                     }
                 }
             }
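The main non-mechanical change above is in `src/libsyntax/ext/deriving/hash.rs`: the generated impl no longer carries a trait-level `__S: Writer + Hasher` bound; instead the hasher becomes a type parameter of the `hash` method itself, bounded only by `Hasher`. A rough sketch of the impl shape this corresponds to, written against today's `std::hash` API rather than the 2015 `core::hash` paths in the diff (`Point` is a hypothetical type, and the field hashing is spelled out by hand here rather than built through `hash_substructure` as the real expansion does):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct Point {
    x: i32,
    y: i32,
}

// The hasher is a method-level type parameter, so the impl itself stays
// non-generic; any `Hasher` can be supplied at the call site.
impl Hash for Point {
    fn hash<__H: Hasher>(&self, state: &mut __H) {
        self.x.hash(state);
        self.y.hash(state);
    }
}

fn main() {
    let mut hasher = DefaultHasher::new();
    Point { x: 1, y: 2 }.hash(&mut hasher);
    println!("hash = {:x}", hasher.finish());
}
```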
