Diffstat (limited to 'src/libsyntax/ext')
 src/libsyntax/ext/asm.rs                   |  3
 src/libsyntax/ext/base.rs                  | 10
 src/libsyntax/ext/build.rs                 |  3
 src/libsyntax/ext/concat.rs                |  8
 src/libsyntax/ext/concat_idents.rs         |  2
 src/libsyntax/ext/deriving/bounds.rs       |  3
 src/libsyntax/ext/deriving/clone.rs        |  9
 src/libsyntax/ext/deriving/decodable.rs    |  2
 src/libsyntax/ext/deriving/encodable.rs    |  3
 src/libsyntax/ext/deriving/generic/mod.rs  | 46
 src/libsyntax/ext/deriving/mod.rs          |  2
 src/libsyntax/ext/deriving/show.rs         |  2
 src/libsyntax/ext/env.rs                   |  8
 src/libsyntax/ext/expand.rs                | 57
 src/libsyntax/ext/format.rs                | 27
 src/libsyntax/ext/quote.rs                 |  6
 src/libsyntax/ext/source_util.rs           | 16
 src/libsyntax/ext/tt/macro_parser.rs       | 21
 src/libsyntax/ext/tt/macro_rules.rs        | 12
 src/libsyntax/ext/tt/transcribe.rs         |  4
 20 files changed, 116 insertions(+), 128 deletions(-)
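The change is mechanical throughout the per-file diffs below: calls to `.as_slice()` on owned `String` values (almost always the result of `format!`) are replaced by the full-range index sugar `expr[]`, which rustc of this era (late 2014, pre-1.0) accepted as shorthand for slicing a whole `String` to a `&str`. Both spellings have since been removed from the language. As a rough, modern restatement of the same conversion (a sketch, not code from this commit; the `report` helper and names are made up for illustration):

```rust
// Turning an owned `String` from `format!` into a `&str` for an API that wants
// a string slice. In the era of this diff: `msg.as_slice()` before the commit,
// `msg[]` after it. In current Rust, `&msg[..]` (or deref coercion) does the job.
fn report(msg: &str) {
    // Hypothetical stand-in for `cx.span_err(sp, msg)`.
    eprintln!("error: {}", msg);
}

fn main() {
    let name = "foo";
    let msg: String = format!("macro undefined: '{}!'", name);
    report(&msg[..]);
}
```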
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs
index b138811187b..b77b822a6b2 100644
--- a/src/libsyntax/ext/asm.rs
+++ b/src/libsyntax/ext/asm.rs
@@ -100,8 +100,7 @@
pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(('=', _)) => None, Some(('+', operand)) => { Some(token::intern_and_get_ident(format!( - "={}", - operand).as_slice())) + "={}", operand)[])) } _ => { cx.span_err(span, "output operand constraint lacks '=' or '+'");
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index aefbb2a1fea..62fe718b522 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -549,7 +549,7 @@
impl<'a> ExtCtxt<'a> { pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } pub fn mod_path(&self) -> Vec<ast::Ident> { let mut v = Vec::new(); - v.push(token::str_to_ident(self.ecfg.crate_name.as_slice())); + v.push(token::str_to_ident(self.ecfg.crate_name[])); v.extend(self.mod_path.iter().map(|a| *a)); return v; }
@@ -558,7 +558,7 @@
if self.recursion_count > self.ecfg.recursion_limit { self.span_fatal(ei.call_site, format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name).as_slice()); + ei.callee.name)[]); } let mut call_site = ei.call_site;
@@ -669,7 +669,7 @@
pub fn check_zero_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], name: &str) { if tts.len() != 0 { - cx.span_err(sp, format!("{} takes no arguments", name).as_slice()); + cx.span_err(sp, format!("{} takes no arguments", name)[]); } }
@@ -682,12 +682,12 @@
pub fn get_single_str_from_tts(cx: &mut ExtCtxt, -> Option<String> { let mut p = cx.new_parser_from_tts(tts); if p.token == token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name).as_slice()); + cx.span_err(sp, format!("{} takes 1 argument", name)[]); return None } let ret = cx.expander().fold_expr(p.parse_expr()); if p.token != token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name).as_slice()); + cx.span_err(sp, format!("{} takes 1 argument", name)[]); } expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| { s.get().to_string()
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 9d4992f7453..77165168746 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -712,8 +712,7 @@
impl<'a> AstBuilder for ExtCtxt<'a> { let loc = self.codemap().lookup_char_pos(span.lo); let expr_file = self.expr_str(span, token::intern_and_get_ident(loc.file - .name - .as_slice())); + .name[])); let expr_line = self.expr_uint(span, loc.line); let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line)); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs
index e2867c2fbab..03dd08fdf7f 100644
--- a/src/libsyntax/ext/concat.rs
+++ b/src/libsyntax/ext/concat.rs
@@ -40,14 +40,14 @@
pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, ast::LitInt(i, ast::UnsignedIntLit(_)) | ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => { - accumulator.push_str(format!("{}", i).as_slice()); + accumulator.push_str(format!("{}", i)[]); } ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => { - accumulator.push_str(format!("-{}", i).as_slice()); + accumulator.push_str(format!("-{}", i)[]); } ast::LitBool(b) => { - accumulator.push_str(format!("{}", b).as_slice()); + accumulator.push_str(format!("{}", b)[]); } ast::LitByte(..) | ast::LitBinary(..) => {
@@ -62,5 +62,5 @@
pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(accumulator.as_slice()))) + token::intern_and_get_ident(accumulator[]))) }
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index aa18b1be31a..2cf60d30a1b 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -40,7 +40,7 @@
pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(res_str.as_slice()); + let res = str_to_ident(res_str[]); let e = P(ast::Expr { id: ast::DUMMY_NODE_ID,
diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs
index 3145b3bb1a4..c27a27fce6a 100644
--- a/src/libsyntax/ext/deriving/bounds.rs
+++ b/src/libsyntax/ext/deriving/bounds.rs
@@ -31,8 +31,7 @@
pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt, ref tname => { cx.span_bug(span, format!("expected built-in trait name but \ - found {}", - *tname).as_slice()) + found {}", *tname)[]) } } },
diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs
index a34764221b3..eedec6f37c8 100644
--- a/src/libsyntax/ext/deriving/clone.rs
+++ b/src/libsyntax/ext/deriving/clone.rs
@@ -80,13 +80,11 @@
fn cs_clone( EnumNonMatchingCollapsed (..) => { cx.span_bug(trait_span, format!("non-matching enum variants in \ - `deriving({})`", - name).as_slice()) + `deriving({})`", name)[]) } StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, - format!("static method in `deriving({})`", - name).as_slice()) + format!("static method in `deriving({})`", name)[]) } }
@@ -103,8 +101,7 @@
fn cs_clone( None => { cx.span_bug(trait_span, format!("unnamed field in normal struct in \ - `deriving({})`", - name).as_slice()) + `deriving({})`", name)[]) } }; cx.field_imm(field.span, ident, subcall(field))
diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs
index 0a8d59da896..a4c70ebbc8e 100644
--- a/src/libsyntax/ext/deriving/decodable.rs
+++ b/src/libsyntax/ext/deriving/decodable.rs
@@ -174,7 +174,7 @@
fn decode_static_fields<F>(cx: &mut ExtCtxt, let fields = fields.iter().enumerate().map(|(i, &span)| { getarg(cx, span, token::intern_and_get_ident(format!("_field{}", - i).as_slice()), + i)[]), i) }).collect();
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index 30851ebeaae..aac515ed81a 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -162,8 +162,7 @@
fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let name = match name { Some(id) => token::get_ident(id), None => { - token::intern_and_get_ident(format!("_field{}", - i).as_slice()) + token::intern_and_get_ident(format!("_field{}", i)[]) } }; let enc = cx.expr_method_call(span, self_.clone(),
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index d8de3d2db97..a2b5c0b9e96 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -514,15 +514,15 @@
impl<'a> TraitDef<'a> { self, struct_def, type_ident, - self_args.as_slice(), - nonself_args.as_slice()) + self_args[], + nonself_args[]) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - self_args.as_slice(), - nonself_args.as_slice()) + self_args[], + nonself_args[]) }; method_def.create_method(cx,
@@ -554,15 +554,15 @@
impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args.as_slice(), - nonself_args.as_slice()) + self_args[], + nonself_args[]) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, self_args, - nonself_args.as_slice()) + nonself_args[]) }; method_def.create_method(cx,
@@ -649,7 +649,7 @@
impl<'a> MethodDef<'a> { for (i, ty) in self.args.iter().enumerate() { let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); - let ident = cx.ident_of(format!("__arg_{}", i).as_slice()); + let ident = cx.ident_of(format!("__arg_{}", i)[]); arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(trait_.span, ident);
@@ -756,7 +756,7 @@
impl<'a> MethodDef<'a> { struct_path, struct_def, format!("__self_{}", - i).as_slice(), + i)[], ast::MutImmutable); patterns.push(pat); raw_fields.push(ident_expr);
@@ -912,22 +912,22 @@
impl<'a> MethodDef<'a> { .collect::<Vec<String>>(); let self_arg_idents = self_arg_names.iter() - .map(|name|cx.ident_of(name.as_slice())) + .map(|name|cx.ident_of(name[])) .collect::<Vec<ast::Ident>>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a uint // corresponding to its variant index. let vi_idents: Vec<ast::Ident> = self_arg_names.iter() - .map(|name| { let vi_suffix = format!("{}_vi", name.as_slice()); - cx.ident_of(vi_suffix.as_slice()) }) + .map(|name| { let vi_suffix = format!("{}_vi", name[]); + cx.ident_of(vi_suffix[]) }) .collect::<Vec<ast::Ident>>(); // Builds, via callback to call_substructure_method, the // delegated expression that handles the catch-all case, // using `__variants_tuple` to drive logic if necessary. let catch_all_substructure = EnumNonMatchingCollapsed( - self_arg_idents, variants.as_slice(), vi_idents.as_slice()); + self_arg_idents, variants[], vi_idents[]); // These arms are of the form: // (Variant1, Variant1, ...) => Body1
@@ -949,12 +949,12 @@
impl<'a> MethodDef<'a> { let mut subpats = Vec::with_capacity(self_arg_names.len()); let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); let first_self_pat_idents = { - let (p, idents) = mk_self_pat(cx, self_arg_names[0].as_slice()); + let (p, idents) = mk_self_pat(cx, self_arg_names[0][]); subpats.push(p); idents }; for self_arg_name in self_arg_names.tail().iter() { - let (p, idents) = mk_self_pat(cx, self_arg_name.as_slice()); + let (p, idents) = mk_self_pat(cx, self_arg_name[]); subpats.push(p); self_pats_idents.push(idents); }
@@ -1010,7 +1010,7 @@
impl<'a> MethodDef<'a> { &**variant, field_tuples); let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args.as_slice(), nonself_args, + cx, trait_, type_ident, self_args[], nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr)
@@ -1063,7 +1063,7 @@
impl<'a> MethodDef<'a> { } let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args.as_slice(), nonself_args, + cx, trait_, type_ident, self_args[], nonself_args, &catch_all_substructure); // Builds the expression:
@@ -1267,7 +1267,7 @@
impl<'a> TraitDef<'a> { cx.span_bug(sp, "a struct with named and unnamed fields in `deriving`"); } }; - let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice()); + let ident = cx.ident_of(format!("{}_{}", prefix, i)[]); paths.push(codemap::Spanned{span: sp, node: ident}); let val = cx.expr( sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
@@ -1313,7 +1313,7 @@
impl<'a> TraitDef<'a> { let mut ident_expr = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); - let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice()); + let ident = cx.ident_of(format!("{}_{}", prefix, i)[]); let path1 = codemap::Spanned{span: sp, node: ident}; paths.push(path1); let expr_path = cx.expr_path(cx.path_ident(sp, ident));
@@ -1356,7 +1356,7 @@
pub fn cs_fold<F>(use_foldl: bool, field.span, old, field.self_.clone(), - field.other.as_slice()) + field.other[]) }) } else { all_fields.iter().rev().fold(base, |old, field| {
@@ -1364,12 +1364,12 @@
pub fn cs_fold<F>(use_foldl: bool, field.span, old, field.self_.clone(), - field.other.as_slice()) + field.other[]) }) } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (all_args.as_slice(), tuple), + enum_nonmatch_f(cx, trait_span, (all_args[], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `deriving`")
@@ -1409,7 +1409,7 @@
pub fn cs_same_method<F>(f: F, f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (all_self_args.as_slice(), tuple), + enum_nonmatch_f(cx, trait_span, (all_self_args[], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `deriving`")
diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs
index 839e99c81d1..4a9076b07b5 100644
--- a/src/libsyntax/ext/deriving/mod.rs
+++ b/src/libsyntax/ext/deriving/mod.rs
@@ -115,7 +115,7 @@
pub fn expand_meta_deriving(cx: &mut ExtCtxt, cx.span_err(titem.span, format!("unknown `deriving` \ trait: `{}`", - *tname).as_slice()); + *tname)[]); } }; }
diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs
index a68b521bbc9..19b45a1e610 100644
--- a/src/libsyntax/ext/deriving/show.rs
+++ b/src/libsyntax/ext/deriving/show.rs
@@ -127,7 +127,7 @@
fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0].clone(); let meth = cx.ident_of("write_fmt"); - let s = token::intern_and_get_ident(format_string.as_slice()); + let s = token::intern_and_get_ident(format_string[]); let format_string = cx.expr_str(span, s); // phew, not our responsibility any more!
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 8c17b31f458..9fedc4a158e 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -30,7 +30,7 @@
pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT Some(v) => v }; - let e = match os::getenv(var.as_slice()) { + let e = match os::getenv(var[]) { None => { cx.expr_path(cx.path_all(sp, true,
@@ -56,7 +56,7 @@
pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT cx.ident_of("Some")), vec!(cx.expr_str(sp, token::intern_and_get_ident( - s.as_slice())))) + s[])))) } }; MacExpr::new(e)
@@ -83,7 +83,7 @@
pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) None => { token::intern_and_get_ident(format!("environment variable `{}` \ not defined", - var).as_slice()) + var)[]) } Some(second) => { match expr_to_string(cx, second, "expected string literal") {
@@ -106,7 +106,7 @@
pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, msg.get()); cx.expr_uint(sp, 0) } - Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.as_slice())) + Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s[])) }; MacExpr::new(e) }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index b10ae7a09db..f2b6f6bfe16 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -293,7 +293,7 @@
fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("macro undefined: '{}!'", - extnamestr.get()).as_slice()); + extnamestr.get())[]); // let compilation continue None
@@ -309,7 +309,7 @@
fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, }, }); let fm = fresh_mark(); - let marked_before = mark_tts(tts.as_slice(), fm); + let marked_before = mark_tts(tts[], fm); // The span that we pass to the expanders we want to // be the root of the call stack. That's the most
@@ -320,7 +320,7 @@
fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, let opt_parsed = { let expanded = expandfun.expand(fld.cx, mac_span, - marked_before.as_slice()); + marked_before[]); parse_thunk(expanded) }; let parsed = match opt_parsed {
@@ -329,8 +329,8 @@
fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("non-expression macro in expression position: {}", - extnamestr.get().as_slice() - ).as_slice()); + extnamestr.get()[] + )[]); return None; } };
@@ -340,7 +340,7 @@
fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("'{}' is not a tt-style macro", - extnamestr.get()).as_slice()); + extnamestr.get())[]); None } }
@@ -445,7 +445,7 @@
pub fn expand_item(it: P<ast::Item>, fld: &mut MacroExpander) if valid_ident { fld.cx.mod_push(it.ident); } - let macro_escape = contains_macro_escape(new_attrs.as_slice()); + let macro_escape = contains_macro_escape(new_attrs[]); let result = with_exts_frame!(fld.cx.syntax_env, macro_escape, noop_fold_item(it, fld));
@@ -553,7 +553,7 @@
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander) None => { fld.cx.span_err(path_span, format!("macro undefined: '{}!'", - extnamestr).as_slice()); + extnamestr)[]); // let compilation continue return SmallVector::zero(); }
@@ -566,7 +566,7 @@
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander) format!("macro {}! expects no ident argument, \ given '{}'", extnamestr, - token::get_ident(it.ident)).as_slice()); + token::get_ident(it.ident))[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo {
@@ -578,14 +578,14 @@
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_before = mark_tts(tts.as_slice(), fm); - expander.expand(fld.cx, it.span, marked_before.as_slice()) + let marked_before = mark_tts(tts[], fm); + expander.expand(fld.cx, it.span, marked_before[]) } IdentTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, format!("macro {}! expects an ident argument", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo {
@@ -597,14 +597,14 @@
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_tts = mark_tts(tts.as_slice(), fm); + let marked_tts = mark_tts(tts[], fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } LetSyntaxTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, format!("macro {}! expects an ident argument", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo {
@@ -621,7 +621,7 @@
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander) _ => { fld.cx.span_err(it.span, format!("{}! is not legal in item position", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } }
@@ -639,8 +639,8 @@
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander) // result of expanding a LetSyntaxTT, and thus doesn't // need to be marked. Not that it could be marked anyway. // create issue to recommend refactoring here? - fld.cx.syntax_env.insert(intern(name.as_slice()), ext); - if attr::contains_name(it.attrs.as_slice(), "macro_export") { + fld.cx.syntax_env.insert(intern(name[]), ext); + if attr::contains_name(it.attrs[], "macro_export") { fld.cx.exported_macros.push(it); } SmallVector::zero()
@@ -654,7 +654,7 @@
pub fn expand_item_mac(it: P<ast::Item>, fld: &mut MacroExpander) Right(None) => { fld.cx.span_err(path_span, format!("non-item macro in item position: {}", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } };
@@ -903,7 +903,7 @@
fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { None => { fld.cx.span_err(pth.span, format!("macro undefined: '{}!'", - extnamestr).as_slice()); + extnamestr)[]); // let compilation continue return DummyResult::raw_pat(span); }
@@ -920,11 +920,11 @@
fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { }); let fm = fresh_mark(); - let marked_before = mark_tts(tts.as_slice(), fm); + let marked_before = mark_tts(tts[], fm); let mac_span = fld.cx.original_span(); let expanded = match expander.expand(fld.cx, mac_span, - marked_before.as_slice()).make_pat() { + marked_before[]).make_pat() { Some(e) => e, None => { fld.cx.span_err(
@@ -932,7 +932,7 @@
fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { format!( "non-pattern macro in pattern position: {}", extnamestr.get() - ).as_slice() + )[] ); return DummyResult::raw_pat(span); }
@@ -944,7 +944,7 @@
fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> { _ => { fld.cx.span_err(span, format!("{}! is not legal in pattern position", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return DummyResult::raw_pat(span); } }
@@ -1192,8 +1192,7 @@
pub fn expand_crate(parse_sess: &parse::ParseSess, let mut expander = MacroExpander::new(&mut cx); for ExportedMacros { crate_name, macros } in imported_macros.into_iter() { - let name = format!("<{} macros>", token::get_ident(crate_name)) - .into_string(); + let name = format!("<{} macros>", token::get_ident(crate_name)); for source in macros.into_iter() { let item = parse::parse_item_from_source_str(name.clone(),
@@ -1238,7 +1237,7 @@
impl Folder for Marker { node: match node { MacInvocTT(path, tts, ctxt) => { MacInvocTT(self.fold_path(path), - self.fold_tts(tts.as_slice()), + self.fold_tts(tts[]), mtwt::apply_mark(self.mark, ctxt)) } },
@@ -1415,9 +1414,9 @@
mod test { let attr2 = make_dummy_attr ("bar"); let escape_attr = make_dummy_attr ("macro_escape"); let attrs1 = vec!(attr1.clone(), escape_attr, attr2.clone()); - assert_eq!(contains_macro_escape(attrs1.as_slice()),true); + assert_eq!(contains_macro_escape(attrs1[]),true); let attrs2 = vec!(attr1,attr2); - assert_eq!(contains_macro_escape(attrs2.as_slice()),false); + assert_eq!(contains_macro_escape(attrs2[]),false); } // make a MetaWord outer attribute with the given name
@@ -1729,7 +1728,7 @@
foo_module!(); let string = ident.get(); "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = cxbinds.as_slice(); + let cxbinds: &[&ast::Ident] = cxbinds[]; let cxbind = match cxbinds { [b] => b, _ => panic!("expected just one binding for ext_cx")
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index 95c7fcc564a..aad4045f00a 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -136,7 +136,7 @@
fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, _ => { ecx.span_err(p.span, format!("expected ident for named argument, found `{}`", - p.this_token_to_string()).as_slice()); + p.this_token_to_string())[]); return (invocation, None); } };
@@ -149,7 +149,7 @@
fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, Some(prev) => { ecx.span_err(e.span, format!("duplicate argument named `{}`", - name).as_slice()); + name)[]); ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here"); continue }
@@ -240,7 +240,7 @@
impl<'a, 'b> Context<'a, 'b> { let msg = format!("invalid reference to argument `{}` ({})", arg, self.describe_num_args()); - self.ecx.span_err(self.fmtsp, msg.as_slice()); + self.ecx.span_err(self.fmtsp, msg[]); return; } {
@@ -260,7 +260,7 @@
impl<'a, 'b> Context<'a, 'b> { Some(e) => e.span, None => { let msg = format!("there is no argument named `{}`", name); - self.ecx.span_err(self.fmtsp, msg.as_slice()); + self.ecx.span_err(self.fmtsp, msg[]); return; } };
@@ -303,19 +303,19 @@
impl<'a, 'b> Context<'a, 'b> { format!("argument redeclared with type `{}` when \ it was previously `{}`", *ty, - *cur).as_slice()); + *cur)[]); } (&Known(ref cur), _) => { self.ecx.span_err(sp, format!("argument used to format with `{}` was \ attempted to not be used for formatting", - *cur).as_slice()); + *cur)[]); } (_, &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument previously used as a format \ argument attempted to be used as `{}`", - *ty).as_slice()); + *ty)[]); } (_, _) => { self.ecx.span_err(sp, "argument declared with multiple formats");
@@ -380,7 +380,7 @@
impl<'a, 'b> Context<'a, 'b> { /// Translate the accumulated string literals to a literal expression fn trans_literal_string(&mut self) -> P<ast::Expr> { let sp = self.fmtsp; - let s = token::intern_and_get_ident(self.literal.as_slice()); + let s = token::intern_and_get_ident(self.literal[]); self.literal.clear(); self.ecx.expr_str(sp, s) }
@@ -552,7 +552,7 @@
impl<'a, 'b> Context<'a, 'b> { None => continue // error already generated }; - let name = self.ecx.ident_of(format!("__arg{}", i).as_slice()); + let name = self.ecx.ident_of(format!("__arg{}", i)[]); pats.push(self.ecx.pat_ident(e.span, name)); locals.push(Context::format_arg(self.ecx, e.span, arg_ty, self.ecx.expr_ident(e.span, name)));
@@ -569,7 +569,7 @@
impl<'a, 'b> Context<'a, 'b> { }; let lname = self.ecx.ident_of(format!("__arg{}", - *name).as_slice()); + *name)[]); pats.push(self.ecx.pat_ident(e.span, lname)); names[self.name_positions[*name]] = Some(Context::format_arg(self.ecx, e.span, arg_ty,
@@ -652,7 +652,7 @@
impl<'a, 'b> Context<'a, 'b> { -> P<ast::Expr> { let trait_ = match *ty { Known(ref tyname) => { - match tyname.as_slice() { + match tyname[] { "" => "Show", "?" => "Show", "e" => "LowerExp",
@@ -665,7 +665,7 @@
impl<'a, 'b> Context<'a, 'b> { _ => { ecx.span_err(sp, format!("unknown format trait `{}`", - *tyname).as_slice()); + *tyname)[]); "Dummy" } }
@@ -760,8 +760,7 @@
pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, match parser.errors.remove(0) { Some(error) => { cx.ecx.span_err(cx.fmtsp, - format!("invalid format string: {}", - error).as_slice()); + format!("invalid format string: {}", error)[]); return DummyResult::raw_expr(sp); } None => {}
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index c7cb41e2ece..368d4fa8447 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -474,7 +474,7 @@
pub fn expand_quote_stmt(cx: &mut ExtCtxt, } fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> { - strs.iter().map(|str| str_to_ident((*str).as_slice())).collect() + strs.iter().map(|str| str_to_ident((*str)[])).collect() } fn id_ext(str: &str) -> ast::Ident {
@@ -676,7 +676,7 @@
fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> { for i in range(0, tt.len()) { seq.push(tt.get_tt(i)); } - mk_tts(cx, seq.as_slice()) + mk_tts(cx, seq[]) } ast::TtToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp"));
@@ -765,7 +765,7 @@
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); let mut vector = vec!(stmt_let_sp, stmt_let_tt); - vector.extend(mk_tts(cx, tts.as_slice()).into_iter()); + vector.extend(mk_tts(cx, tts[]).into_iter()); let block = cx.expr_block( cx.block_all(sp, Vec::new(),
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 570231940aa..7c2c5c1530c 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -57,7 +57,7 @@
pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = cx.original_span_in_file(); let loc = cx.codemap().lookup_char_pos(topmost.lo); - let filename = token::intern_and_get_ident(loc.file.name.as_slice()); + let filename = token::intern_and_get_ident(loc.file.name[]); base::MacExpr::new(cx.expr_str(topmost, filename)) }
@@ -65,7 +65,7 @@
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box<base::MacResult+'static> { let s = pprust::tts_to_string(tts); base::MacExpr::new(cx.expr_str(sp, - token::intern_and_get_ident(s.as_slice()))) + token::intern_and_get_ident(s[]))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
@@ -78,7 +78,7 @@
.connect("::"); base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(string.as_slice()))) + token::intern_and_get_ident(string[]))) } /// include! : parse the given file as an expr
@@ -137,7 +137,7 @@
pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, format!("couldn't read {}: {}", file.display(), - e).as_slice()); + e)[]); return DummyResult::expr(sp); } Ok(bytes) => bytes,
@@ -147,7 +147,7 @@
pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information let filename = file.display().to_string(); - let interned = token::intern_and_get_ident(src.as_slice()); + let interned = token::intern_and_get_ident(src[]); cx.codemap().new_filemap(filename, src); base::MacExpr::new(cx.expr_str(sp, interned))
@@ -155,7 +155,7 @@
pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Err(_) => { cx.span_err(sp, format!("{} wasn't a utf-8 file", - file.display()).as_slice()); + file.display())[]); return DummyResult::expr(sp); } }
@@ -171,9 +171,7 @@
pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) match File::open(&file).read_to_end() { Err(e) => { cx.span_err(sp, - format!("couldn't read {}: {}", - file.display(), - e).as_slice()); + format!("couldn't read {}: {}", file.display(), e)[]); return DummyResult::expr(sp); } Ok(bytes) => {
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index bc639c32380..73ef18b8449 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -153,7 +153,7 @@
pub fn count_names(ms: &[TokenTree]) -> uint { seq.num_captures } &TtDelimited(_, ref delim) => { - count_names(delim.tts.as_slice()) + count_names(delim.tts[]) } &TtToken(_, MatchNt(..)) => { 1
@@ -165,7 +165,7 @@
pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos) -> Box<MatcherPos> { - let match_idx_hi = count_names(ms.as_slice()); + let match_idx_hi = count_names(ms[]); let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new()); box MatcherPos { stack: vec![],
@@ -229,7 +229,7 @@
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) p_s.span_diagnostic .span_fatal(sp, format!("duplicated bind name: {}", - string.get()).as_slice()) + string.get())[]) } } }
@@ -254,13 +254,13 @@
pub fn parse_or_else(sess: &ParseSess, rdr: TtReader, ms: Vec<TokenTree> ) -> HashMap<Ident, Rc<NamedMatch>> { - match parse(sess, cfg, rdr, ms.as_slice()) { + match parse(sess, cfg, rdr, ms[]) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.as_slice()) + sess.span_diagnostic.span_fatal(sp, str[]) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.as_slice()) + sess.span_diagnostic.span_fatal(sp, str[]) } } }
@@ -416,7 +416,7 @@
pub fn parse(sess: &ParseSess, } } TtToken(sp, SubstNt(..)) => { - return Error(sp, "Cannot transcribe in macro LHS".into_string()) + return Error(sp, "Cannot transcribe in macro LHS".to_string()) } seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => { let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
@@ -446,7 +446,7 @@
pub fn parse(sess: &ParseSess, for dv in eof_eis[0].matches.iter_mut() { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, v.as_slice())); + return Success(nameize(sess, ms, v[])); } else if eof_eis.len() > 1u { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else {
@@ -521,7 +521,7 @@
pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { _ => { let token_str = pprust::token_to_string(&p.token); p.fatal((format!("expected ident, found {}", - token_str.as_slice())).as_slice()) + token_str[]))[]) } }, "path" => {
@@ -535,8 +535,7 @@
pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { res } _ => { - p.fatal(format!("unsupported builtin nonterminal parser: {}", - name).as_slice()) + p.fatal(format!("unsupported builtin nonterminal parser: {}", name)[]) } } }
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 92c68b7a9c7..08014dc1338 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -52,7 +52,7 @@
impl<'a> ParserAnyMacro<'a> { following", token_str); let span = parser.span; - parser.span_err(span, msg.as_slice()); + parser.span_err(span, msg[]); } } }
@@ -124,8 +124,8 @@
impl TTMacroExpander for MacroRulesMacroExpander { sp, self.name, arg, - self.lhses.as_slice(), - self.rhses.as_slice()) + self.lhses[], + self.rhses[]) } }
@@ -160,7 +160,7 @@
fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => delim.tts.as_slice(), + TtDelimited(_, ref delim) => delim.tts[], _ => cx.span_fatal(sp, "malformed macro lhs") }; // `None` is because we're not interpolating
@@ -198,13 +198,13 @@
fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice()) + Error(sp, ref msg) => cx.span_fatal(sp, msg[]) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, best_fail_msg.as_slice()); + cx.span_fatal(best_fail_spot, best_fail_msg[]); } // Note that macro-by-example's input is also matched against a token tree:
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 378dbba07fa..deed0b78e87 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -223,7 +223,7 @@
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - r.sp_diag.span_fatal(sp.clone(), msg.as_slice()); + r.sp_diag.span_fatal(sp.clone(), msg[]); } LisConstraint(len, _) => { if len == 0 {
@@ -280,7 +280,7 @@
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ format!("variable '{}' is still repeating at this depth", - token::get_ident(ident)).as_slice()); + token::get_ident(ident))[]); } } }
