about summary refs log tree commit diff
path: root/src/libsyntax/ext
diff options
context:
space:
mode:
author    bors <bors@rust-lang.org>  2015-01-07 05:31:23 +0000
committer bors <bors@rust-lang.org>  2015-01-07 05:31:23 +0000
commit    9e4e524e0eb17c8f463e731f23b544003e8709c6 (patch)
tree      916024d35e08f0826c20654f629ec596b5cb1f14 /src/libsyntax/ext
parent    ea6f65c5f1a3f84e010d2cef02a0160804e9567a (diff)
parent    a64000820f0fc32be4d7535a9a92418a434fa4ba (diff)
download  rust-9e4e524e0eb17c8f463e731f23b544003e8709c6.tar.gz
          rust-9e4e524e0eb17c8f463e731f23b544003e8709c6.zip
auto merge of #20677 : alexcrichton/rust/rollup, r=alexcrichton
Diffstat (limited to 'src/libsyntax/ext')
-rw-r--r--src/libsyntax/ext/asm.rs2
-rw-r--r--src/libsyntax/ext/base.rs10
-rw-r--r--src/libsyntax/ext/build.rs6
-rw-r--r--src/libsyntax/ext/concat.rs8
-rw-r--r--src/libsyntax/ext/concat_idents.rs2
-rw-r--r--src/libsyntax/ext/deriving/bounds.rs7
-rw-r--r--src/libsyntax/ext/deriving/clone.rs6
-rw-r--r--src/libsyntax/ext/deriving/decodable.rs2
-rw-r--r--src/libsyntax/ext/deriving/encodable.rs2
-rw-r--r--src/libsyntax/ext/deriving/generic/mod.rs54
-rw-r--r--src/libsyntax/ext/deriving/mod.rs4
-rw-r--r--src/libsyntax/ext/deriving/show.rs6
-rw-r--r--src/libsyntax/ext/env.rs8
-rw-r--r--src/libsyntax/ext/expand.rs49
-rw-r--r--src/libsyntax/ext/format.rs28
-rw-r--r--src/libsyntax/ext/mtwt.rs8
-rw-r--r--src/libsyntax/ext/quote.rs10
-rw-r--r--src/libsyntax/ext/source_util.rs20
-rw-r--r--src/libsyntax/ext/tt/macro_parser.rs45
-rw-r--r--src/libsyntax/ext/tt/macro_rules.rs229
-rw-r--r--src/libsyntax/ext/tt/transcribe.rs42
21 files changed, 375 insertions, 173 deletions
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs
index b77b822a6b2..04dec0e8028 100644
--- a/src/libsyntax/ext/asm.rs
+++ b/src/libsyntax/ext/asm.rs
@@ -100,7 +100,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         Some(('=', _)) => None,
                         Some(('+', operand)) => {
                             Some(token::intern_and_get_ident(format!(
-                                        "={}", operand)[]))
+                                        "={}", operand).index(&FullRange)))
                         }
                         _ => {
                             cx.span_err(span, "output operand constraint lacks '=' or '+'");
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 91ae7396ea4..52e402689ba 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -539,7 +539,7 @@ impl<'a> ExtCtxt<'a> {
     pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
     pub fn mod_path(&self) -> Vec<ast::Ident> {
         let mut v = Vec::new();
-        v.push(token::str_to_ident(self.ecfg.crate_name[]));
+        v.push(token::str_to_ident(self.ecfg.crate_name.index(&FullRange)));
         v.extend(self.mod_path.iter().map(|a| *a));
         return v;
     }
@@ -548,7 +548,7 @@ impl<'a> ExtCtxt<'a> {
         if self.recursion_count > self.ecfg.recursion_limit {
             self.span_fatal(ei.call_site,
                             format!("recursion limit reached while expanding the macro `{}`",
-                                    ei.callee.name)[]);
+                                    ei.callee.name).index(&FullRange));
         }
 
         let mut call_site = ei.call_site;
@@ -670,7 +670,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
                       tts: &[ast::TokenTree],
                       name: &str) {
     if tts.len() != 0 {
-        cx.span_err(sp, format!("{} takes no arguments", name)[]);
+        cx.span_err(sp, format!("{} takes no arguments", name).index(&FullRange));
     }
 }
 
@@ -683,12 +683,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
                                -> Option<String> {
     let mut p = cx.new_parser_from_tts(tts);
     if p.token == token::Eof {
-        cx.span_err(sp, format!("{} takes 1 argument", name)[]);
+        cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange));
         return None
     }
     let ret = cx.expander().fold_expr(p.parse_expr());
     if p.token != token::Eof {
-        cx.span_err(sp, format!("{} takes 1 argument", name)[]);
+        cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange));
     }
     expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
         s.get().to_string()
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index ea345f3a458..bd4f295401c 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -642,10 +642,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.expr(sp, ast::ExprLit(P(respan(sp, lit))))
     }
     fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr> {
-        self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyU)))
+        self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyUs)))
     }
     fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr> {
-        self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyI, ast::Sign::new(i))))
+        self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyIs, ast::Sign::new(i))))
     }
     fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr> {
         self.expr_lit(sp, ast::LitInt(u as u64, ast::UnsignedIntLit(ast::TyU8)))
@@ -709,7 +709,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         let loc = self.codemap().lookup_char_pos(span.lo);
         let expr_file = self.expr_str(span,
                                       token::intern_and_get_ident(loc.file
-                                                                  .name[]));
+                                                                  .name.index(&FullRange)));
         let expr_line = self.expr_uint(span, loc.line);
         let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
         let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs
index 03dd08fdf7f..1f1781dceb3 100644
--- a/src/libsyntax/ext/concat.rs
+++ b/src/libsyntax/ext/concat.rs
@@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                     ast::LitInt(i, ast::UnsignedIntLit(_)) |
                     ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) |
                     ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => {
-                        accumulator.push_str(format!("{}", i)[]);
+                        accumulator.push_str(format!("{}", i).index(&FullRange));
                     }
                     ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) |
                     ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => {
-                        accumulator.push_str(format!("-{}", i)[]);
+                        accumulator.push_str(format!("-{}", i).index(&FullRange));
                     }
                     ast::LitBool(b) => {
-                        accumulator.push_str(format!("{}", b)[]);
+                        accumulator.push_str(format!("{}", b).index(&FullRange));
                     }
                     ast::LitByte(..) |
                     ast::LitBinary(..) => {
@@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
     }
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(accumulator[])))
+            token::intern_and_get_ident(accumulator.index(&FullRange))))
 }
diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs
index 2cf60d30a1b..02f702248cb 100644
--- a/src/libsyntax/ext/concat_idents.rs
+++ b/src/libsyntax/ext/concat_idents.rs
@@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         }
     }
-    let res = str_to_ident(res_str[]);
+    let res = str_to_ident(res_str.index(&FullRange));
 
     let e = P(ast::Expr {
         id: ast::DUMMY_NODE_ID,
diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs
index cf29bb048d6..8ac7e57bb81 100644
--- a/src/libsyntax/ext/deriving/bounds.rs
+++ b/src/libsyntax/ext/deriving/bounds.rs
@@ -29,12 +29,13 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
                 "Send" | "Sync" => {
                     return cx.span_err(span,
                                        format!("{} is an unsafe trait and it \
-                                               should be implemented explicitly", *tname)[])
+                                                should be implemented explicitly",
+                                               *tname).as_slice())
                 }
                 ref tname => {
                     cx.span_bug(span,
                                 format!("expected built-in trait name but \
-                                         found {}", *tname)[])
+                                         found {}", *tname).as_slice())
                 }
             }
         },
@@ -47,7 +48,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
     let trait_def = TraitDef {
         span: span,
         attributes: Vec::new(),
-        path: Path::new(vec!("std", "kinds", name)),
+        path: Path::new(vec!("std", "marker", name)),
         additional_bounds: Vec::new(),
         generics: LifetimeBounds::empty(),
         methods: vec!()
diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs
index 3c74a9f4431..d9d6cebd05c 100644
--- a/src/libsyntax/ext/deriving/clone.rs
+++ b/src/libsyntax/ext/deriving/clone.rs
@@ -80,11 +80,11 @@ fn cs_clone(
         EnumNonMatchingCollapsed (..) => {
             cx.span_bug(trait_span,
                         format!("non-matching enum variants in \
-                                 `deriving({})`", name)[])
+                                 `deriving({})`", name).index(&FullRange))
         }
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span,
-                        format!("static method in `deriving({})`", name)[])
+                        format!("static method in `deriving({})`", name).index(&FullRange))
         }
     }
 
@@ -101,7 +101,7 @@ fn cs_clone(
                 None => {
                     cx.span_bug(trait_span,
                                 format!("unnamed field in normal struct in \
-                                         `deriving({})`", name)[])
+                                         `deriving({})`", name).index(&FullRange))
                 }
             };
             cx.field_imm(field.span, ident, subcall(field))
diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs
index 8094f0d3de8..a9289f0175a 100644
--- a/src/libsyntax/ext/deriving/decodable.rs
+++ b/src/libsyntax/ext/deriving/decodable.rs
@@ -198,7 +198,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
                 let fields = fields.iter().enumerate().map(|(i, &span)| {
                     getarg(cx, span,
                            token::intern_and_get_ident(format!("_field{}",
-                                                               i)[]),
+                                                               i).index(&FullRange)),
                            i)
                 }).collect();
 
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index 0fceb0fbfda..7114217d51d 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -183,7 +183,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
                 let name = match name {
                     Some(id) => token::get_ident(id),
                     None => {
-                        token::intern_and_get_ident(format!("_field{}", i)[])
+                        token::intern_and_get_ident(format!("_field{}", i).index(&FullRange))
                     }
                 };
                 let enc = cx.expr_method_call(span, self_.clone(),
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index 1aa430c4a08..50b3559f369 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -510,15 +510,15 @@ impl<'a> TraitDef<'a> {
                     self,
                     struct_def,
                     type_ident,
-                    self_args[],
-                    nonself_args[])
+                    self_args.index(&FullRange),
+                    nonself_args.index(&FullRange))
             } else {
                 method_def.expand_struct_method_body(cx,
                                                      self,
                                                      struct_def,
                                                      type_ident,
-                                                     self_args[],
-                                                     nonself_args[])
+                                                     self_args.index(&FullRange),
+                                                     nonself_args.index(&FullRange))
             };
 
             method_def.create_method(cx,
@@ -550,15 +550,15 @@ impl<'a> TraitDef<'a> {
                     self,
                     enum_def,
                     type_ident,
-                    self_args[],
-                    nonself_args[])
+                    self_args.index(&FullRange),
+                    nonself_args.index(&FullRange))
             } else {
                 method_def.expand_enum_method_body(cx,
                                                    self,
                                                    enum_def,
                                                    type_ident,
                                                    self_args,
-                                                   nonself_args[])
+                                                   nonself_args.index(&FullRange))
             };
 
             method_def.create_method(cx,
@@ -602,7 +602,7 @@ impl<'a> MethodDef<'a> {
         };
         let mut f = self.combine_substructure.borrow_mut();
         let f: &mut CombineSubstructureFunc = &mut *f;
-        f.call_mut((cx, trait_.span, &substructure))
+        f(cx, trait_.span, &substructure)
     }
 
     fn get_ret_ty(&self,
@@ -645,7 +645,7 @@ impl<'a> MethodDef<'a> {
 
         for (i, ty) in self.args.iter().enumerate() {
             let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
-            let ident = cx.ident_of(format!("__arg_{}", i)[]);
+            let ident = cx.ident_of(format!("__arg_{}", i).index(&FullRange));
             arg_tys.push((ident, ast_ty));
 
             let arg_expr = cx.expr_ident(trait_.span, ident);
@@ -752,7 +752,7 @@ impl<'a> MethodDef<'a> {
                                              struct_path,
                                              struct_def,
                                              format!("__self_{}",
-                                                     i)[],
+                                                     i).index(&FullRange),
                                              ast::MutImmutable);
             patterns.push(pat);
             raw_fields.push(ident_expr);
@@ -908,22 +908,22 @@ impl<'a> MethodDef<'a> {
             .collect::<Vec<String>>();
 
         let self_arg_idents = self_arg_names.iter()
-            .map(|name|cx.ident_of(name[]))
+            .map(|name|cx.ident_of(name.index(&FullRange)))
             .collect::<Vec<ast::Ident>>();
 
         // The `vi_idents` will be bound, solely in the catch-all, to
         // a series of let statements mapping each self_arg to a uint
         // corresponding to its variant index.
         let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
-            .map(|name| { let vi_suffix = format!("{}_vi", name[]);
-                          cx.ident_of(vi_suffix[]) })
+            .map(|name| { let vi_suffix = format!("{}_vi", name.index(&FullRange));
+                          cx.ident_of(vi_suffix.index(&FullRange)) })
             .collect::<Vec<ast::Ident>>();
 
         // Builds, via callback to call_substructure_method, the
         // delegated expression that handles the catch-all case,
         // using `__variants_tuple` to drive logic if necessary.
         let catch_all_substructure = EnumNonMatchingCollapsed(
-            self_arg_idents, variants[], vi_idents[]);
+            self_arg_idents, variants.index(&FullRange), vi_idents.index(&FullRange));
 
         // These arms are of the form:
         // (Variant1, Variant1, ...) => Body1
@@ -945,12 +945,12 @@ impl<'a> MethodDef<'a> {
                 let mut subpats = Vec::with_capacity(self_arg_names.len());
                 let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
                 let first_self_pat_idents = {
-                    let (p, idents) = mk_self_pat(cx, self_arg_names[0][]);
+                    let (p, idents) = mk_self_pat(cx, self_arg_names[0].index(&FullRange));
                     subpats.push(p);
                     idents
                 };
                 for self_arg_name in self_arg_names.tail().iter() {
-                    let (p, idents) = mk_self_pat(cx, self_arg_name[]);
+                    let (p, idents) = mk_self_pat(cx, self_arg_name.index(&FullRange));
                     subpats.push(p);
                     self_pats_idents.push(idents);
                 }
@@ -1006,7 +1006,7 @@ impl<'a> MethodDef<'a> {
                                                 &**variant,
                                                 field_tuples);
                 let arm_expr = self.call_substructure_method(
-                    cx, trait_, type_ident, self_args[], nonself_args,
+                    cx, trait_, type_ident, self_args.index(&FullRange), nonself_args,
                     &substructure);
 
                 cx.arm(sp, vec![single_pat], arm_expr)
@@ -1031,7 +1031,7 @@ impl<'a> MethodDef<'a> {
             let arms: Vec<ast::Arm> = variants.iter().enumerate()
                 .map(|(index, variant)| {
                     let pat = variant_to_pat(cx, sp, type_ident, &**variant);
-                    let lit = ast::LitInt(index as u64, ast::UnsignedIntLit(ast::TyU));
+                    let lit = ast::LitInt(index as u64, ast::UnsignedIntLit(ast::TyUs));
                     cx.arm(sp, vec![pat], cx.expr_lit(sp, lit))
                 }).collect();
 
@@ -1059,7 +1059,7 @@ impl<'a> MethodDef<'a> {
             }
 
             let arm_expr = self.call_substructure_method(
-                cx, trait_, type_ident, self_args[], nonself_args,
+                cx, trait_, type_ident, self_args.index(&FullRange), nonself_args,
                 &catch_all_substructure);
 
             // Builds the expression:
@@ -1263,7 +1263,7 @@ impl<'a> TraitDef<'a> {
                     cx.span_bug(sp, "a struct with named and unnamed fields in `derive`");
                 }
             };
-            let ident = cx.ident_of(format!("{}_{}", prefix, i)[]);
+            let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange));
             paths.push(codemap::Spanned{span: sp, node: ident});
             let val = cx.expr(
                 sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident)))));
@@ -1309,7 +1309,7 @@ impl<'a> TraitDef<'a> {
                 let mut ident_expr = Vec::new();
                 for (i, va) in variant_args.iter().enumerate() {
                     let sp = self.set_expn_info(cx, va.ty.span);
-                    let ident = cx.ident_of(format!("{}_{}", prefix, i)[]);
+                    let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange));
                     let path1 = codemap::Spanned{span: sp, node: ident};
                     paths.push(path1);
                     let expr_path = cx.expr_path(cx.path_ident(sp, ident));
@@ -1352,7 +1352,7 @@ pub fn cs_fold<F>(use_foldl: bool,
                       field.span,
                       old,
                       field.self_.clone(),
-                      field.other[])
+                      field.other.index(&FullRange))
                 })
             } else {
                 all_fields.iter().rev().fold(base, |old, field| {
@@ -1360,13 +1360,13 @@ pub fn cs_fold<F>(use_foldl: bool,
                       field.span,
                       old,
                       field.self_.clone(),
-                      field.other[])
+                      field.other.index(&FullRange))
                 })
             }
         },
         EnumNonMatchingCollapsed(ref all_args, _, tuple) =>
-            enum_nonmatch_f.call_mut((cx, trait_span, (all_args[], tuple),
-                                      substructure.nonself_args)),
+            enum_nonmatch_f(cx, trait_span, (all_args.index(&FullRange), tuple),
+                            substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `derive`")
         }
@@ -1405,8 +1405,8 @@ pub fn cs_same_method<F>(f: F,
             f(cx, trait_span, called)
         },
         EnumNonMatchingCollapsed(ref all_self_args, _, tuple) =>
-            enum_nonmatch_f.call_mut((cx, trait_span, (all_self_args[], tuple),
-                                     substructure.nonself_args)),
+            enum_nonmatch_f(cx, trait_span, (all_self_args.index(&FullRange), tuple),
+                            substructure.nonself_args),
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, "static function in `derive`")
         }
diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs
index e72c83b67c8..43a0e0606f8 100644
--- a/src/libsyntax/ext/deriving/mod.rs
+++ b/src/libsyntax/ext/deriving/mod.rs
@@ -73,7 +73,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
                     MetaWord(ref tname) => {
                         macro_rules! expand {
                             ($func:path) => ($func(cx, titem.span, &**titem, item,
-                                                   |i| push.call_mut((i,))))
+                                                   |i| push(i)))
                         }
 
                         match tname.get() {
@@ -123,7 +123,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
                                 cx.span_err(titem.span,
                                             format!("unknown `derive` \
                                                      trait: `{}`",
-                                                    *tname)[]);
+                                                    *tname).index(&FullRange));
                             }
                         };
                     }
diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs
index eceac4e9a83..fa9a7899a12 100644
--- a/src/libsyntax/ext/deriving/show.rs
+++ b/src/libsyntax/ext/deriving/show.rs
@@ -90,7 +90,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
                 for (i, field) in fields.iter().enumerate() {
                     if i != 0 { format_string.push_str(", "); }
 
-                    format_string.push_str("{}");
+                    format_string.push_str("{:?}");
 
                     exprs.push(field.self_.clone());
                 }
@@ -107,7 +107,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
                     let name = token::get_ident(field.name.unwrap());
                     format_string.push_str(" ");
                     format_string.push_str(name.get());
-                    format_string.push_str(": {}");
+                    format_string.push_str(": {:?}");
 
                     exprs.push(field.self_.clone());
                 }
@@ -127,7 +127,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
     let formatter = substr.nonself_args[0].clone();
 
     let meth = cx.ident_of("write_fmt");
-    let s = token::intern_and_get_ident(format_string[]);
+    let s = token::intern_and_get_ident(format_string.index(&FullRange));
     let format_string = cx.expr_str(span, s);
 
     // phew, not our responsibility any more!
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 9fedc4a158e..eb3544e3c5c 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
         Some(v) => v
     };
 
-    let e = match os::getenv(var[]) {
+    let e = match os::getenv(var.index(&FullRange)) {
       None => {
           cx.expr_path(cx.path_all(sp,
                                    true,
@@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
                                    cx.ident_of("Some")),
                               vec!(cx.expr_str(sp,
                                                token::intern_and_get_ident(
-                                          s[]))))
+                                          s.index(&FullRange)))))
       }
     };
     MacExpr::new(e)
@@ -83,7 +83,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         None => {
             token::intern_and_get_ident(format!("environment variable `{}` \
                                                  not defined",
-                                                var)[])
+                                                var).index(&FullRange))
         }
         Some(second) => {
             match expr_to_string(cx, second, "expected string literal") {
@@ -106,7 +106,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             cx.span_err(sp, msg.get());
             cx.expr_uint(sp, 0)
         }
-        Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s[]))
+        Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.index(&FullRange)))
     };
     MacExpr::new(e)
 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 212ec3b0903..3e1bccf394a 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -36,7 +36,7 @@ pub fn expand_type(t: P<ast::Ty>,
                    fld: &mut MacroExpander,
                    impl_ty: Option<P<ast::Ty>>)
                    -> P<ast::Ty> {
-    debug!("expanding type {} with impl_ty {}", t, impl_ty);
+    debug!("expanding type {:?} with impl_ty {:?}", t, impl_ty);
     let t = match (t.node.clone(), impl_ty) {
         // Expand uses of `Self` in impls to the concrete type.
         (ast::Ty_::TyPath(ref path, _), Some(ref impl_ty)) => {
@@ -287,7 +287,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                     fld.cx.span_err(
                         pth.span,
                         format!("macro undefined: '{}!'",
-                                extnamestr.get())[]);
+                                extnamestr.get()).index(&FullRange));
 
                     // let compilation continue
                     None
@@ -303,7 +303,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                                 },
                             });
                         let fm = fresh_mark();
-                        let marked_before = mark_tts(tts[], fm);
+                        let marked_before = mark_tts(tts.index(&FullRange), fm);
 
                         // The span that we pass to the expanders we want to
                         // be the root of the call stack. That's the most
@@ -314,7 +314,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         let opt_parsed = {
                             let expanded = expandfun.expand(fld.cx,
                                                             mac_span,
-                                                            marked_before[]);
+                                                            marked_before.index(&FullRange));
                             parse_thunk(expanded)
                         };
                         let parsed = match opt_parsed {
@@ -323,8 +323,8 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                                 fld.cx.span_err(
                                     pth.span,
                                     format!("non-expression macro in expression position: {}",
-                                            extnamestr.get()[]
-                                            )[]);
+                                            extnamestr.get().index(&FullRange)
+                                            ).index(&FullRange));
                                 return None;
                             }
                         };
@@ -334,7 +334,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         fld.cx.span_err(
                             pth.span,
                             format!("'{}' is not a tt-style macro",
-                                    extnamestr.get())[]);
+                                    extnamestr.get()).index(&FullRange));
                         None
                     }
                 }
@@ -439,7 +439,7 @@ pub fn expand_item(it: P<ast::Item>, fld: &mut MacroExpander)
             if valid_ident {
                 fld.cx.mod_push(it.ident);
             }
-            let macro_use = contains_macro_use(fld, new_attrs[]);
+            let macro_use = contains_macro_use(fld, new_attrs.index(&FullRange));
             let result = with_exts_frame!(fld.cx.syntax_env,
                                           macro_use,
                                           noop_fold_item(it, fld));
@@ -566,7 +566,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
             None => {
                 fld.cx.span_err(path_span,
                                 format!("macro undefined: '{}!'",
-                                        extnamestr)[]);
+                                        extnamestr).index(&FullRange));
                 // let compilation continue
                 return SmallVector::zero();
             }
@@ -579,7 +579,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
                                       format!("macro {}! expects no ident argument, \
                                         given '{}'",
                                       extnamestr,
-                                      token::get_ident(it.ident))[]);
+                                      token::get_ident(it.ident)).index(&FullRange));
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -591,14 +591,14 @@ pub fn expand_item_mac(it: P<ast::Item>,
                         }
                     });
                     // mark before expansion:
-                    let marked_before = mark_tts(tts[], fm);
-                    expander.expand(fld.cx, it.span, marked_before[])
+                    let marked_before = mark_tts(tts.index(&FullRange), fm);
+                    expander.expand(fld.cx, it.span, marked_before.index(&FullRange))
                 }
                 IdentTT(ref expander, span) => {
                     if it.ident.name == parse::token::special_idents::invalid.name {
                         fld.cx.span_err(path_span,
                                         format!("macro {}! expects an ident argument",
-                                                extnamestr.get())[]);
+                                                extnamestr.get()).index(&FullRange));
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -610,13 +610,14 @@ pub fn expand_item_mac(it: P<ast::Item>,
                         }
                     });
                     // mark before expansion:
-                    let marked_tts = mark_tts(tts[], fm);
+                    let marked_tts = mark_tts(tts.index(&FullRange), fm);
                     expander.expand(fld.cx, it.span, it.ident, marked_tts)
                 }
                 MacroRulesTT => {
                     if it.ident.name == parse::token::special_idents::invalid.name {
                         fld.cx.span_err(path_span,
-                                        format!("macro_rules! expects an ident argument")[]);
+                                        format!("macro_rules! expects an ident argument")
+                                            .index(&FullRange));
                         return SmallVector::zero();
                     }
                     fld.cx.bt_push(ExpnInfo {
@@ -648,7 +649,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
                 _ => {
                     fld.cx.span_err(it.span,
                                     format!("{}! is not legal in item position",
-                                            extnamestr.get())[]);
+                                            extnamestr.get()).index(&FullRange));
                     return SmallVector::zero();
                 }
             }
@@ -667,7 +668,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
         None => {
             fld.cx.span_err(path_span,
                             format!("non-item macro in item position: {}",
-                                    extnamestr.get())[]);
+                                    extnamestr.get()).index(&FullRange));
             return SmallVector::zero();
         }
     };
@@ -913,7 +914,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
             None => {
                 fld.cx.span_err(pth.span,
                                 format!("macro undefined: '{}!'",
-                                        extnamestr)[]);
+                                        extnamestr).index(&FullRange));
                 // let compilation continue
                 return DummyResult::raw_pat(span);
             }
@@ -930,11 +931,11 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                     });
 
                     let fm = fresh_mark();
-                    let marked_before = mark_tts(tts[], fm);
+                    let marked_before = mark_tts(tts.index(&FullRange), fm);
                     let mac_span = fld.cx.original_span();
                     let expanded = match expander.expand(fld.cx,
                                         mac_span,
-                                        marked_before[]).make_pat() {
+                                        marked_before.index(&FullRange)).make_pat() {
                         Some(e) => e,
                         None => {
                             fld.cx.span_err(
@@ -942,7 +943,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                                 format!(
                                     "non-pattern macro in pattern position: {}",
                                     extnamestr.get()
-                                )[]
+                                ).index(&FullRange)
                             );
                             return DummyResult::raw_pat(span);
                         }
@@ -954,7 +955,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                 _ => {
                     fld.cx.span_err(span,
                                     format!("{}! is not legal in pattern position",
-                                            extnamestr.get())[]);
+                                            extnamestr.get()).index(&FullRange));
                     return DummyResult::raw_pat(span);
                 }
             }
@@ -1231,7 +1232,7 @@ impl Folder for Marker {
             node: match node {
                 MacInvocTT(path, tts, ctxt) => {
                     MacInvocTT(self.fold_path(path),
-                               self.fold_tts(tts[]),
+                               self.fold_tts(tts.index(&FullRange)),
                                mtwt::apply_mark(self.mark, ctxt))
                 }
             },
@@ -1712,7 +1713,7 @@ foo_module!();
                 let string = ident.get();
                 "xx" == string
             }).collect();
-        let cxbinds: &[&ast::Ident] = cxbinds[];
+        let cxbinds: &[&ast::Ident] = cxbinds.index(&FullRange);
         let cxbind = match cxbinds {
             [b] => b,
             _ => panic!("expected just one binding for ext_cx")
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index 1f39555f496..44a596d2657 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -113,7 +113,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 _ => {
                     ecx.span_err(p.span,
                                  format!("expected ident for named argument, found `{}`",
-                                         p.this_token_to_string())[]);
+                                         p.this_token_to_string()).index(&FullRange));
                     return None;
                 }
             };
@@ -126,7 +126,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 Some(prev) => {
                     ecx.span_err(e.span,
                                  format!("duplicate argument named `{}`",
-                                         name)[]);
+                                         name).index(&FullRange));
                     ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here");
                     continue
                 }
@@ -217,7 +217,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     let msg = format!("invalid reference to argument `{}` ({})",
                                       arg, self.describe_num_args());
 
-                    self.ecx.span_err(self.fmtsp, msg[]);
+                    self.ecx.span_err(self.fmtsp, msg.index(&FullRange));
                     return;
                 }
                 {
@@ -237,7 +237,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     Some(e) => e.span,
                     None => {
                         let msg = format!("there is no argument named `{}`", name);
-                        self.ecx.span_err(self.fmtsp, msg[]);
+                        self.ecx.span_err(self.fmtsp, msg.index(&FullRange));
                         return;
                     }
                 };
@@ -280,19 +280,19 @@ impl<'a, 'b> Context<'a, 'b> {
                                   format!("argument redeclared with type `{}` when \
                                            it was previously `{}`",
                                           *ty,
-                                          *cur)[]);
+                                          *cur).index(&FullRange));
             }
             (&Known(ref cur), _) => {
                 self.ecx.span_err(sp,
                                   format!("argument used to format with `{}` was \
                                            attempted to not be used for formatting",
-                                           *cur)[]);
+                                           *cur).index(&FullRange));
             }
             (_, &Known(ref ty)) => {
                 self.ecx.span_err(sp,
                                   format!("argument previously used as a format \
                                            argument attempted to be used as `{}`",
-                                           *ty)[]);
+                                           *ty).index(&FullRange));
             }
             (_, _) => {
                 self.ecx.span_err(sp, "argument declared with multiple formats");
@@ -357,7 +357,7 @@ impl<'a, 'b> Context<'a, 'b> {
     /// Translate the accumulated string literals to a literal expression
     fn trans_literal_string(&mut self) -> P<ast::Expr> {
         let sp = self.fmtsp;
-        let s = token::intern_and_get_ident(self.literal[]);
+        let s = token::intern_and_get_ident(self.literal.index(&FullRange));
         self.literal.clear();
         self.ecx.expr_str(sp, s)
     }
@@ -509,7 +509,7 @@ impl<'a, 'b> Context<'a, 'b> {
                 None => continue // error already generated
             };
 
-            let name = self.ecx.ident_of(format!("__arg{}", i)[]);
+            let name = self.ecx.ident_of(format!("__arg{}", i).index(&FullRange));
             pats.push(self.ecx.pat_ident(e.span, name));
             locals.push(Context::format_arg(self.ecx, e.span, arg_ty,
                                             self.ecx.expr_ident(e.span, name)));
@@ -526,7 +526,7 @@ impl<'a, 'b> Context<'a, 'b> {
             };
 
             let lname = self.ecx.ident_of(format!("__arg{}",
-                                                  *name)[]);
+                                                  *name).index(&FullRange));
             pats.push(self.ecx.pat_ident(e.span, lname));
             names[self.name_positions[*name]] =
                 Some(Context::format_arg(self.ecx, e.span, arg_ty,
@@ -606,8 +606,8 @@ impl<'a, 'b> Context<'a, 'b> {
                   -> P<ast::Expr> {
         let trait_ = match *ty {
             Known(ref tyname) => {
-                match tyname[] {
-                    ""  => "Show",
+                match tyname.index(&FullRange) {
+                    ""  => "String",
                     "?" => "Show",
                     "e" => "LowerExp",
                     "E" => "UpperExp",
@@ -619,7 +619,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     _ => {
                         ecx.span_err(sp,
                                      format!("unknown format trait `{}`",
-                                             *tyname)[]);
+                                             *tyname).index(&FullRange));
                         "Dummy"
                     }
                 }
@@ -710,7 +710,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
     }
     if !parser.errors.is_empty() {
         cx.ecx.span_err(cx.fmtsp, format!("invalid format string: {}",
-                                          parser.errors.remove(0))[]);
+                                          parser.errors.remove(0)).index(&FullRange));
         return DummyResult::raw_expr(sp);
     }
     if !cx.literal.is_empty() {
diff --git a/src/libsyntax/ext/mtwt.rs b/src/libsyntax/ext/mtwt.rs
index 4075b208f78..bebd803ac4f 100644
--- a/src/libsyntax/ext/mtwt.rs
+++ b/src/libsyntax/ext/mtwt.rs
@@ -66,7 +66,7 @@ pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
 /// Extend a syntax context with a given mark and sctable (explicit memoization)
 fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
     let key = (ctxt, m);
-    * table.mark_memo.borrow_mut().entry(&key).get().unwrap_or_else(
+    * table.mark_memo.borrow_mut().entry(key).get().unwrap_or_else(
           |vacant_entry|
               vacant_entry.insert(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt))))
 }
@@ -84,7 +84,7 @@ fn apply_rename_internal(id: Ident,
                        table: &SCTable) -> SyntaxContext {
     let key = (ctxt, id, to);
 
-    * table.rename_memo.borrow_mut().entry(&key).get().unwrap_or_else(
+    * table.rename_memo.borrow_mut().entry(key).get().unwrap_or_else(
           |vacant_entry|
               vacant_entry.insert(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt))))
 }
@@ -121,7 +121,7 @@ fn new_sctable_internal() -> SCTable {
 pub fn display_sctable(table: &SCTable) {
     error!("SC table:");
     for (idx,val) in table.table.borrow().iter().enumerate() {
-        error!("{:4} : {}",idx,val);
+        error!("{:4} : {:?}",idx,val);
     }
 }
 
@@ -223,7 +223,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name) -> Vec<Mrk> {
 }
 
 // the internal function for computing marks
-// it's not clear to me whether it's better to use a [] mutable
+// it's not clear to me whether it's better to use a .index(&FullRange) mutable
 // vector or a cons-list for this.
 fn marksof_internal(ctxt: SyntaxContext,
                     stopname: Name,
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index f1b52fa33c3..77aea0c370a 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -273,13 +273,13 @@ pub mod rt {
         );
     }
 
-    impl_to_source_int! { signed, int, TyI }
+    impl_to_source_int! { signed, int, TyIs }
     impl_to_source_int! { signed, i8,  TyI8 }
     impl_to_source_int! { signed, i16, TyI16 }
     impl_to_source_int! { signed, i32, TyI32 }
     impl_to_source_int! { signed, i64, TyI64 }
 
-    impl_to_source_int! { unsigned, uint, TyU }
+    impl_to_source_int! { unsigned, uint, TyUs }
     impl_to_source_int! { unsigned, u8,   TyU8 }
     impl_to_source_int! { unsigned, u16,  TyU16 }
     impl_to_source_int! { unsigned, u32,  TyU32 }
@@ -473,7 +473,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
 }
 
 fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
-    strs.iter().map(|str| str_to_ident((*str)[])).collect()
+    strs.iter().map(|str| str_to_ident((*str).index(&FullRange))).collect()
 }
 
 fn id_ext(str: &str) -> ast::Ident {
@@ -675,7 +675,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
             for i in range(0, tt.len()) {
                 seq.push(tt.get_tt(i));
             }
-            mk_tts(cx, seq[])
+            mk_tts(cx, seq.index(&FullRange))
         }
         ast::TtToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
@@ -764,7 +764,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
 
     let mut vector = vec!(stmt_let_sp, stmt_let_tt);
-    vector.extend(mk_tts(cx, tts[]).into_iter());
+    vector.extend(mk_tts(cx, tts.index(&FullRange)).into_iter());
     let block = cx.expr_block(
         cx.block_all(sp,
                      Vec::new(),
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index a49df457cb3..1ba91dd371c 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
     let topmost = cx.original_span_in_file();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
-    let filename = token::intern_and_get_ident(loc.file.name[]);
+    let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange));
     base::MacExpr::new(cx.expr_str(topmost, filename))
 }
 
@@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         -> Box<base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
     base::MacExpr::new(cx.expr_str(sp,
-                                   token::intern_and_get_ident(s[])))
+                                   token::intern_and_get_ident(s.index(&FullRange))))
 }
 
 pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
@@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                    .connect("::");
     base::MacExpr::new(cx.expr_str(
             sp,
-            token::intern_and_get_ident(string[])))
+            token::intern_and_get_ident(string.index(&FullRange))))
 }
 
 /// include! : parse the given file as an expr
@@ -135,9 +135,9 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     let bytes = match File::open(&file).read_to_end() {
         Err(e) => {
             cx.span_err(sp,
-                        format!("couldn't read {}: {}",
+                        format!("couldn't read {:?}: {}",
                                 file.display(),
-                                e)[]);
+                                e).index(&FullRange));
             return DummyResult::expr(sp);
         }
         Ok(bytes) => bytes,
@@ -146,16 +146,16 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         Ok(src) => {
             // Add this input file to the code map to make it available as
             // dependency information
-            let filename = file.display().to_string();
-            let interned = token::intern_and_get_ident(src[]);
+            let filename = format!("{:?}", file.display());
+            let interned = token::intern_and_get_ident(src.index(&FullRange));
             cx.codemap().new_filemap(filename, src);
 
             base::MacExpr::new(cx.expr_str(sp, interned))
         }
         Err(_) => {
             cx.span_err(sp,
-                        format!("{} wasn't a utf-8 file",
-                                file.display())[]);
+                        format!("{:?} wasn't a utf-8 file",
+                                file.display()).index(&FullRange));
             return DummyResult::expr(sp);
         }
     }
@@ -177,7 +177,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     match File::open(&file).read_to_end() {
         Err(e) => {
             cx.span_err(sp,
-                        format!("couldn't read {}: {}", file.display(), e)[]);
+                        format!("couldn't read {:?}: {}", file.display(), e).index(&FullRange));
             return DummyResult::expr(sp);
         }
         Ok(bytes) => {
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 1438d152554..d33d03bbfa9 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -153,7 +153,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint {
                 seq.num_captures
             }
             &TtDelimited(_, ref delim) => {
-                count_names(delim.tts[])
+                count_names(delim.tts.index(&FullRange))
             }
             &TtToken(_, MatchNt(..)) => {
                 1
@@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint {
 
 pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
                            -> Box<MatcherPos> {
-    let match_idx_hi = count_names(ms[]);
+    let match_idx_hi = count_names(ms.index(&FullRange));
     let matches: Vec<_> = range(0, match_idx_hi).map(|_| Vec::new()).collect();
     box MatcherPos {
         stack: vec![],
@@ -219,7 +219,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                 }
             }
             &TtToken(sp, MatchNt(bind_name, _, _, _)) => {
-                match ret_val.entry(&bind_name) {
+                match ret_val.entry(bind_name) {
                     Vacant(spot) => {
                         spot.insert(res[*idx].clone());
                         *idx += 1;
@@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                         p_s.span_diagnostic
                            .span_fatal(sp,
                                        format!("duplicated bind name: {}",
-                                               string.get())[])
+                                               string.get()).index(&FullRange))
                     }
                 }
             }
@@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess,
                      rdr: TtReader,
                      ms: Vec<TokenTree> )
                      -> HashMap<Ident, Rc<NamedMatch>> {
-    match parse(sess, cfg, rdr, ms[]) {
+    match parse(sess, cfg, rdr, ms.index(&FullRange)) {
         Success(m) => m,
         Failure(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, str[])
+            sess.span_diagnostic.span_fatal(sp, str.index(&FullRange))
         }
         Error(sp, str) => {
-            sess.span_diagnostic.span_fatal(sp, str[])
+            sess.span_diagnostic.span_fatal(sp, str.index(&FullRange))
         }
     }
 }
@@ -341,7 +341,7 @@ pub fn parse(sess: &ParseSess,
                         // Only touch the binders we have actually bound
                         for idx in range(ei.match_lo, ei.match_hi) {
                             let sub = (ei.matches[idx]).clone();
-                            new_pos.matches[idx]
+                            (&mut new_pos.matches[idx])
                                    .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
                                                                        sp.hi))));
                         }
@@ -386,7 +386,7 @@ pub fn parse(sess: &ParseSess,
                             new_ei.idx += 1u;
                             //we specifically matched zero repeats.
                             for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
-                                new_ei.matches[idx].push(Rc::new(MatchedSeq(Vec::new(), sp)));
+                                (&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
                             }
 
                             cur_eis.push(new_ei);
@@ -444,10 +444,10 @@ pub fn parse(sess: &ParseSess,
         if token_name_eq(&tok, &token::Eof) {
             if eof_eis.len() == 1u {
                 let mut v = Vec::new();
-                for dv in eof_eis[0].matches.iter_mut() {
+                for dv in (&mut eof_eis[0]).matches.iter_mut() {
                     v.push(dv.pop().unwrap());
                 }
-                return Success(nameize(sess, ms, v[]));
+                return Success(nameize(sess, ms, v.index(&FullRange)));
             } else if eof_eis.len() > 1u {
                 return Error(sp, "ambiguity: multiple successful parses".to_string());
             } else {
@@ -486,7 +486,7 @@ pub fn parse(sess: &ParseSess,
                   TtToken(_, MatchNt(_, name, _, _)) => {
                     let name_string = token::get_ident(name);
                     let match_cur = ei.match_cur;
-                    ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
+                    (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
                         parse_nt(&mut rust_parser, name_string.get()))));
                     ei.idx += 1u;
                     ei.match_cur += 1;
@@ -507,6 +507,17 @@ pub fn parse(sess: &ParseSess,
 
 pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
     match name {
+        "tt" => {
+            p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
+            let res = token::NtTT(P(p.parse_token_tree()));
+            p.quote_depth -= 1u;
+            return res;
+        }
+        _ => {}
+    }
+    // check at the beginning and the parser checks after each bump
+    p.check_unknown_macro_variable();
+    match name {
       "item" => match p.parse_item(Vec::new()) {
         Some(i) => token::NtItem(i),
         None => p.fatal("expected an item keyword")
@@ -522,21 +533,15 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
         _ => {
             let token_str = pprust::token_to_string(&p.token);
             p.fatal((format!("expected ident, found {}",
-                             token_str[]))[])
+                             token_str.index(&FullRange))).index(&FullRange))
         }
       },
       "path" => {
         token::NtPath(box p.parse_path(LifetimeAndTypesWithoutColons))
       }
       "meta" => token::NtMeta(p.parse_meta_item()),
-      "tt" => {
-        p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
-        let res = token::NtTT(P(p.parse_token_tree()));
-        p.quote_depth -= 1u;
-        res
-      }
       _ => {
-          p.fatal(format!("unsupported builtin nonterminal parser: {}", name)[])
+          p.fatal(format!("unsupported builtin nonterminal parser: {}", name).index(&FullRange))
       }
     }
 }
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 9837c8088fa..64c53e298ef 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -1,4 +1,4 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{Ident, TtDelimited, TtSequence, TtToken};
+use ast::{TokenTree, TtDelimited, TtSequence, TtToken};
 use ast;
 use codemap::{Span, DUMMY_SP};
 use ext::base::{ExtCtxt, MacResult, SyntaxExtension};
@@ -16,11 +16,11 @@ use ext::base::{NormalTT, TTMacroExpander};
 use ext::tt::macro_parser::{Success, Error, Failure};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::macro_parser::{parse, parse_or_else};
-use parse::lexer::new_tt_reader;
+use parse::lexer::{new_tt_reader, new_tt_reader_with_doc_flag};
 use parse::parser::Parser;
 use parse::attr::ParserAttr;
-use parse::token::{special_idents, gensym_ident};
-use parse::token::{MatchNt, NtTT};
+use parse::token::{special_idents, gensym_ident, NtTT, Token};
+use parse::token::Token::*;
 use parse::token;
 use print;
 use ptr::P;
@@ -52,7 +52,7 @@ impl<'a> ParserAnyMacro<'a> {
                                following",
                               token_str);
             let span = parser.span;
-            parser.span_err(span, msg[]);
+            parser.span_err(span, msg.index(&FullRange));
         }
     }
 }
@@ -109,8 +109,8 @@ impl<'a> MacResult for ParserAnyMacro<'a> {
 }
 
 struct MacroRulesMacroExpander {
-    name: Ident,
-    imported_from: Option<Ident>,
+    name: ast::Ident,
+    imported_from: Option<ast::Ident>,
     lhses: Vec<Rc<NamedMatch>>,
     rhses: Vec<Rc<NamedMatch>>,
 }
@@ -126,16 +126,16 @@ impl TTMacroExpander for MacroRulesMacroExpander {
                           self.name,
                           self.imported_from,
                           arg,
-                          self.lhses[],
-                          self.rhses[])
+                          self.lhses.index(&FullRange),
+                          self.rhses.index(&FullRange))
     }
 }
 
 /// Given `lhses` and `rhses`, this is the new macro we create
 fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           sp: Span,
-                          name: Ident,
-                          imported_from: Option<Ident>,
+                          name: ast::Ident,
+                          imported_from: Option<ast::Ident>,
                           arg: &[ast::TokenTree],
                           lhses: &[Rc<NamedMatch>],
                           rhses: &[Rc<NamedMatch>])
@@ -154,17 +154,17 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
         match **lhs {
           MatchedNonterminal(NtTT(ref lhs_tt)) => {
             let lhs_tt = match **lhs_tt {
-                TtDelimited(_, ref delim) => delim.tts[],
+                TtDelimited(_, ref delim) => delim.tts.index(&FullRange),
                 _ => cx.span_fatal(sp, "malformed macro lhs")
             };
             // `None` is because we're not interpolating
-            let mut arg_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
-                                            None,
-                                            None,
-                                            arg.iter()
-                                               .map(|x| (*x).clone())
-                                               .collect());
-            arg_rdr.desugar_doc_comments = true;
+            let arg_rdr = new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
+                                                      None,
+                                                      None,
+                                                      arg.iter()
+                                                         .map(|x| (*x).clone())
+                                                         .collect(),
+                                                      true);
             match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) {
               Success(named_matches) => {
                 let rhs = match *rhses[i] {
@@ -183,7 +183,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                                            Some(named_matches),
                                            imported_from,
                                            rhs);
-                let p = Parser::new(cx.parse_sess(), cx.cfg(), box trncbr);
+                let mut p = Parser::new(cx.parse_sess(), cx.cfg(), box trncbr);
+                p.check_unknown_macro_variable();
                 // Let the context choose how to interpret the result.
                 // Weird, but useful for X-macros.
                 return box ParserAnyMacro {
@@ -194,13 +195,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 best_fail_spot = sp;
                 best_fail_msg = (*msg).clone();
               },
-              Error(sp, ref msg) => cx.span_fatal(sp, msg[])
+              Error(sp, ref msg) => cx.span_fatal(sp, msg.index(&FullRange))
             }
           }
           _ => cx.bug("non-matcher found in parsed lhses")
         }
     }
-    cx.span_fatal(best_fail_spot, best_fail_msg[]);
+    cx.span_fatal(best_fail_spot, best_fail_msg.index(&FullRange));
 }
 
 // Note that macro-by-example's input is also matched against a token tree:
@@ -260,6 +261,10 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
         _ => cx.span_bug(def.span, "wrong-structured lhs")
     };
 
+    for lhs in lhses.iter() {
+        check_lhs_nt_follows(cx, &**lhs, def.span);
+    }
+
     let rhses = match *argument_map[rhs_nm] {
         MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
         _ => cx.span_bug(def.span, "wrong-structured rhs")
@@ -274,3 +279,181 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
 
     NormalTT(exp, Some(def.span))
 }
+
+fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
+    // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where
+    // the entire lhs is those tts.
+    // if ever we get box/deref patterns, this could turn into an `if let
+    // &MatchedNonterminal(NtTT(box TtDelimited(...))) = lhs`
+    let matcher = match lhs {
+        &MatchedNonterminal(NtTT(ref inner)) => match &**inner {
+            &TtDelimited(_, ref tts) => tts.tts.as_slice(),
+            _ => cx.span_bug(sp, "wrong-structured lhs for follow check")
+        },
+        _ => cx.span_bug(sp, "wrong-structured lhs for follow check")
+    };
+
+    check_matcher(cx, matcher.iter(), &Eof);
+    // we don't abort on errors on rejection, the driver will do that for us
+    // after parsing/expansion. we can report every error in every macro this way.
+}
+
+// Returns the last token that was checked; TtSequence handling uses this later on.
+fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
+-> Option<(Span, Token)> where I: Iterator<Item=&'a TokenTree> {
+    use print::pprust::token_to_string;
+
+    let mut last = None;
+
+    // 2. For each token T in M:
+    let mut tokens = matcher.peekable();
+    while let Some(token) = tokens.next() {
+        last = match *token {
+            TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
+                // ii. If T is a simple NT, look ahead to the next token T' in
+                // M.
+                let next_token = match tokens.peek() {
+                    // If T' closes a complex NT, replace T' with F
+                    Some(&&TtToken(_, CloseDelim(_))) => follow.clone(),
+                    Some(&&TtToken(_, ref tok)) => tok.clone(),
+                    Some(&&TtSequence(sp, _)) => {
+                        cx.span_err(sp,
+                                    format!("`${0}:{1}` is followed by a \
+                                             sequence repetition, which is not \
+                                             allowed for `{1}` fragments",
+                                            name.as_str(), frag_spec.as_str())
+                                        .as_slice());
+                        Eof
+                    },
+                    // die next iteration
+                    Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+                    // else, we're at the end of the macro or sequence
+                    None => follow.clone()
+                };
+
+                let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() };
+                // If T' is in the set FOLLOW(NT), continue. Else, reject.
+                match &next_token {
+                    &Eof => return Some((sp, tok.clone())),
+                    _ if is_in_follow(cx, &next_token, frag_spec.as_str()) => continue,
+                    next => {
+                        cx.span_err(sp, format!("`${0}:{1}` is followed by `{2}`, which \
+                                                 is not allowed for `{1}` fragments",
+                                                 name.as_str(), frag_spec.as_str(),
+                                                 token_to_string(next)).as_slice());
+                        continue
+                    },
+                }
+            },
+            TtSequence(sp, ref seq) => {
+                // iii. Else, T is a complex NT.
+                match seq.separator {
+                    // If T has the form $(...)U+ or $(...)U* for some token U,
+                    // run the algorithm on the contents with F set to U. If it
+                    // accepts, continue, else, reject.
+                    Some(ref u) => {
+                        let last = check_matcher(cx, seq.tts.iter(), u);
+                        match last {
+                            // Since the delimiter isn't required after the last
+                            // repetition, make sure that the *next* token is
+                            // sane. This doesn't actually compute the FIRST of
+                            // the rest of the matcher yet, it only considers
+                            // single tokens and simple NTs. This is imprecise,
+                            // but conservatively correct.
+                            Some((span, tok)) => {
+                                let fol = match tokens.peek() {
+                                    Some(&&TtToken(_, ref tok)) => tok.clone(),
+                                    Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+                                    Some(_) => {
+                                        cx.span_err(sp, "sequence repetition followed by \
+                                                another sequence repetition, which is not allowed");
+                                        Eof
+                                    },
+                                    None => Eof
+                                };
+                                check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(),
+                                              &fol)
+                            },
+                            None => last,
+                        }
+                    },
+                    // If T has the form $(...)+ or $(...)*, run the algorithm
+                    // on the contents with F set to the token following the
+                    // sequence. If it accepts, continue, else, reject.
+                    None => {
+                        let fol = match tokens.peek() {
+                            Some(&&TtToken(_, ref tok)) => tok.clone(),
+                            Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
+                            Some(_) => {
+                                cx.span_err(sp, "sequence repetition followed by another \
+                                             sequence repetition, which is not allowed");
+                                Eof
+                            },
+                            None => Eof
+                        };
+                        check_matcher(cx, seq.tts.iter(), &fol)
+                    }
+                }
+            },
+            TtToken(..) => {
+                // i. If T is not an NT, continue.
+                continue
+            },
+            TtDelimited(_, ref tts) => {
+                // if we don't pass in that close delimiter, we'll incorrectly consider the matcher
+                // `{ $foo:ty }` as having a follow that isn't `RBrace`
+                check_matcher(cx, tts.tts.iter(), &tts.close_token())
+            }
+        }
+    }
+    last
+}
+
+fn is_in_follow(cx: &ExtCtxt, tok: &Token, frag: &str) -> bool {
+    if let &CloseDelim(_) = tok {
+        return true;
+    }
+
+    match frag {
+        "item" => {
+            // since items *must* be followed by either a `;` or a `}`, we can
+            // accept anything after them
+            true
+        },
+        "block" => {
+            // anything can follow block, the braces provide an easy boundary to
+            // maintain
+            true
+        },
+        "stmt" | "expr"  => {
+            match *tok {
+                FatArrow | Comma | Semi => true,
+                _ => false
+            }
+        },
+        "pat" => {
+            match *tok {
+                FatArrow | Comma | Eq => true,
+                _ => false
+            }
+        },
+        "path" | "ty" => {
+            match *tok {
+                Comma | FatArrow | Colon | Eq | Gt => true,
+                Ident(i, _) if i.as_str() == "as" => true,
+                _ => false
+            }
+        },
+        "ident" => {
+            // being a single token, idents are harmless
+            true
+        },
+        "meta" | "tt" => {
+            // being either a single token or a delimited sequence, meta and
+            // tt are harmless
+            true
+        },
+        _ => cx.bug(format!("unrecognized builtin nonterminal {}",
+                            frag).as_slice()),
+    }
+}
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index e4e6f5ac6b0..bc07c7f6cae 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -53,13 +53,28 @@ pub struct TtReader<'a> {
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TtSequence`s and `TtNonterminal`s, `interp` can (and
-/// should) be none.
+/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// (and should) be None.
 pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
                          interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                          imported_from: Option<Ident>,
-                         src: Vec<ast::TokenTree> )
+                         src: Vec<ast::TokenTree>)
                          -> TtReader<'a> {
+    new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
+}
+
+/// The extra `desugar_doc_comments` flag enables reading doc comments
+/// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
+///
+/// This can do Macro-By-Example transcription. On the other hand, if
+/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// (and should) be None.
+pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
+                                       interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
+                                       imported_from: Option<Ident>,
+                                       src: Vec<ast::TokenTree>,
+                                       desugar_doc_comments: bool)
+                                       -> TtReader<'a> {
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
@@ -80,7 +95,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
         crate_name_next: None,
         repeat_idx: Vec::new(),
         repeat_len: Vec::new(),
-        desugar_doc_comments: false,
+        desugar_doc_comments: desugar_doc_comments,
         /* dummy values, never read: */
         cur_tok: token::Eof,
         cur_span: DUMMY_SP,
@@ -128,7 +143,7 @@ impl Add for LockstepIterSize {
                     let l_n = token::get_ident(l_id.clone());
                     let r_n = token::get_ident(r_id);
                     LisContradiction(format!("inconsistent lockstep iteration: \
-                                              '{}' has {} items, but '{}' has {}",
+                                              '{:?}' has {} items, but '{:?}' has {}",
                                               l_n, l_len, r_n, r_len).to_string())
                 }
             },
@@ -240,7 +255,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                     }
                     LisContradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        r.sp_diag.span_fatal(sp.clone(), msg[]);
+                        r.sp_diag.span_fatal(sp.clone(), msg.index(&FullRange));
                     }
                     LisConstraint(len, _) => {
                         if len == 0 {
@@ -266,18 +281,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             }
             // FIXME #2887: think about span stuff here
             TtToken(sp, SubstNt(ident, namep)) => {
+                r.stack.last_mut().unwrap().idx += 1;
                 match lookup_cur_matched(r, ident) {
                     None => {
-                        r.stack.push(TtFrame {
-                            forest: TtToken(sp, SubstNt(ident, namep)),
-                            idx: 0,
-                            dotdotdoted: false,
-                            sep: None
-                        });
+                        r.cur_span = sp;
+                        r.cur_tok = SubstNt(ident, namep);
+                        return ret_val;
                         // this can't be 0 length, just like TtDelimited
                     }
                     Some(cur_matched) => {
-                        r.stack.last_mut().unwrap().idx += 1;
                         match *cur_matched {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
@@ -296,8 +308,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                             MatchedSeq(..) => {
                                 r.sp_diag.span_fatal(
                                     r.cur_span, /* blame the macro writer */
-                                    format!("variable '{}' is still repeating at this depth",
-                                            token::get_ident(ident))[]);
+                                    format!("variable '{:?}' is still repeating at this depth",
+                                            token::get_ident(ident)).index(&FullRange));
                             }
                         }
                     }