about summary refs log tree commit diff
path: root/src/libsyntax/ext
diff options
context:
space:
mode:
author    mark <markm@cs.wisc.edu>  2018-07-02 19:44:01 -0500
committer mark <markm@cs.wisc.edu>  2018-07-23 21:54:43 -0500
commit    5d872727e02d9cc67813fa7f00763e355d39ae06 (patch)
tree      386e437a1eed0b3e95c16ed3f45a9885e07414e1 /src/libsyntax/ext
parent    63c2d06a0d777f78048963cc55630631505de83b (diff)
download  rust-5d872727e02d9cc67813fa7f00763e355d39ae06.tar.gz
          rust-5d872727e02d9cc67813fa7f00763e355d39ae06.zip
Fix test and errors
Diffstat (limited to 'src/libsyntax/ext')
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs | 447
-rw-r--r--  src/libsyntax/ext/tt/quoted.rs      |   2
2 files changed, 183 insertions, 266 deletions
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 05e59d6b47c..8912be5f69d 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -8,33 +8,28 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use {ast, attr};
+use syntax_pos::{Span, DUMMY_SP};
 use edition::Edition;
-use ext::{
-    base::{DummyResult, ExtCtxt, MacResult, NormalTT, SyntaxExtension, TTMacroExpander},
-    expand::{AstFragment, AstFragmentKind},
-    tt::{
-        macro_parser::{
-            parse, parse_failure_msg, Error, Failure, MatchedNonterminal, MatchedSeq, Success,
-        },
-        quoted,
-        transcribe::transcribe,
-    },
-};
+use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
+use ext::base::{NormalTT, TTMacroExpander};
+use ext::expand::{AstFragment, AstFragmentKind};
+use ext::tt::macro_parser::{Success, Error, Failure};
+use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
+use ext::tt::macro_parser::{parse, parse_failure_msg};
+use ext::tt::quoted;
+use ext::tt::transcribe::transcribe;
 use feature_gate::{self, emit_feature_err, Features, GateIssue};
-use parse::{
-    parser::Parser,
-    token::{self, NtTT, Token::*},
-    Directory, ParseSess,
-};
+use parse::{Directory, ParseSess};
+use parse::parser::Parser;
+use parse::token::{self, NtTT};
+use parse::token::Token::*;
 use symbol::Symbol;
-use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree};
-use {ast, attr};
 
-use std::{
-    borrow::Cow,
-    collections::{hash_map::Entry, HashMap},
-};
+use std::borrow::Cow;
+use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 
 use rustc_data_structures::sync::Lrc;
 
@@ -44,16 +39,12 @@ pub struct ParserAnyMacro<'a> {
     /// Span of the expansion site of the macro this parser is for
     site_span: Span,
     /// The ident of the macro we're parsing
-    macro_ident: ast::Ident,
+    macro_ident: ast::Ident
 }
 
 impl<'a> ParserAnyMacro<'a> {
     pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
-        let ParserAnyMacro {
-            site_span,
-            macro_ident,
-            ref mut parser,
-        } = *self;
+        let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self;
         let fragment = panictry!(parser.parse_ast_fragment(kind, true));
 
         // We allow semicolons at the end of expressions -- e.g. the semicolon in
@@ -86,16 +77,17 @@ impl TTMacroExpander for MacroRulesMacroExpander {
         if !self.valid {
             return DummyResult::any(sp);
         }
-        generic_extension(cx, sp, self.name, input, &self.lhses, &self.rhses)
+        generic_extension(cx,
+                          sp,
+                          self.name,
+                          input,
+                          &self.lhses,
+                          &self.rhses)
     }
 }
 
 fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
-    let sp = sp
-        .macro_backtrace()
-        .last()
-        .map(|trace| trace.call_site)
-        .unwrap_or(sp);
+    let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
     let values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
     values.push(message);
 }
@@ -116,11 +108,10 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
     let mut best_fail_spot = DUMMY_SP;
     let mut best_fail_tok = None;
 
-    for (i, lhs) in lhses.iter().enumerate() {
-        // try each arm's matchers
+    for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         let lhs_tt = match *lhs {
             quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
-            _ => cx.span_bug(sp, "malformed macro lhs"),
+            _ => cx.span_bug(sp, "malformed macro lhs")
         };
 
         match TokenTree::parse(cx, lhs_tt, arg.clone()) {
@@ -156,11 +147,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                     ownership: cx.current_expansion.directory_ownership,
                 };
                 let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false);
-                p.root_module_name = cx
-                    .current_expansion
-                    .module
-                    .mod_path
-                    .last()
+                p.root_module_name = cx.current_expansion.module.mod_path.last()
                     .map(|id| id.as_str().to_string());
 
                 p.process_potential_macro_variable();
@@ -173,14 +160,16 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                     // so we can print a useful error message if the parse of the expanded
                     // macro leaves unparsed tokens.
                     site_span: sp,
-                    macro_ident: name,
-                });
+                    macro_ident: name
+                })
             }
             Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() {
                 best_fail_spot = sp;
                 best_fail_tok = Some(tok);
             },
-            Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]),
+            Error(err_sp, ref msg) => {
+                cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
+            }
         }
     }
 
@@ -196,12 +185,8 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
 // Holy self-referential!
 
 /// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(
-    sess: &ParseSess,
-    features: &Features,
-    def: &ast::Item,
-    edition: Edition,
-) -> SyntaxExtension {
+pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: Edition)
+               -> SyntaxExtension {
     let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
     let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
 
@@ -217,47 +202,33 @@ pub fn compile(
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
     let argument_gram = vec![
-        quoted::TokenTree::Sequence(
-            DUMMY_SP,
-            Lrc::new(quoted::SequenceRepetition {
-                tts: vec![
-                    quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                    quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
-                    quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
-                ],
-                separator: Some(if body.legacy {
-                    token::Semi
-                } else {
-                    token::Comma
-                }),
-                op: quoted::KleeneOp::OneOrMore,
-                num_captures: 2,
-            }),
-        ),
+        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+            tts: vec![
+                quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
+            ],
+            separator: Some(if body.legacy { token::Semi } else { token::Comma }),
+            op: quoted::KleeneOp::OneOrMore,
+            num_captures: 2,
+        })),
         // to phase into semicolon-termination instead of semicolon-separation
-        quoted::TokenTree::Sequence(
-            DUMMY_SP,
-            Lrc::new(quoted::SequenceRepetition {
-                tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
-                separator: None,
-                op: quoted::KleeneOp::ZeroOrMore,
-                num_captures: 0,
-            }),
-        ),
+        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+            separator: None,
+            op: quoted::KleeneOp::ZeroOrMore,
+            num_captures: 0
+        })),
     ];
 
     let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
         Success(m) => m,
         Failure(sp, tok) => {
             let s = parse_failure_msg(tok);
-            sess.span_diagnostic
-                .span_fatal(sp.substitute_dummy(def.span), &s)
-                .raise();
+            sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
         }
         Error(sp, s) => {
-            sess.span_diagnostic
-                .span_fatal(sp.substitute_dummy(def.span), &s)
-                .raise();
+            sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
         }
     };
 
@@ -265,9 +236,8 @@ pub fn compile(
 
     // Extract the arguments:
     let lhses = match *argument_map[&lhs_nm] {
-        MatchedSeq(ref s, _) => s
-            .iter()
-            .map(|m| {
+        MatchedSeq(ref s, _) => {
+            s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
                         let tt = quoted::parse(
@@ -277,25 +247,22 @@ pub fn compile(
                             features,
                             &def.attrs,
                             edition,
-                        ).pop()
-                            .unwrap();
+                        )
+                        .pop()
+                        .unwrap();
                         valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt);
                         return tt;
                     }
                 }
-                sess.span_diagnostic
-                    .span_bug(def.span, "wrong-structured lhs")
-            })
-            .collect::<Vec<quoted::TokenTree>>(),
-        _ => sess
-            .span_diagnostic
-            .span_bug(def.span, "wrong-structured lhs"),
+                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+            }).collect::<Vec<quoted::TokenTree>>()
+        }
+        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
     };
 
     let rhses = match *argument_map[&rhs_nm] {
-        MatchedSeq(ref s, _) => s
-            .iter()
-            .map(|m| {
+        MatchedSeq(ref s, _) => {
+            s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
                         return quoted::parse(
@@ -306,16 +273,13 @@ pub fn compile(
                             &def.attrs,
                             edition,
                         ).pop()
-                            .unwrap();
+                         .unwrap();
                     }
                 }
-                sess.span_diagnostic
-                    .span_bug(def.span, "wrong-structured lhs")
-            })
-            .collect::<Vec<quoted::TokenTree>>(),
-        _ => sess
-            .span_diagnostic
-            .span_bug(def.span, "wrong-structured rhs"),
+                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+            }).collect::<Vec<quoted::TokenTree>>()
+        }
+        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
     };
 
     for rhs in &rhses {
@@ -344,14 +308,14 @@ pub fn compile(
             }
         }
 
-        let unstable_feature = attr::find_stability(&sess.span_diagnostic, &def.attrs, def.span)
-            .and_then(|stability| {
-                if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
-                    Some((stability.feature, issue))
-                } else {
-                    None
-                }
-            });
+        let unstable_feature = attr::find_stability(&sess.span_diagnostic,
+                                                    &def.attrs, def.span).and_then(|stability| {
+            if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
+                Some((stability.feature, issue))
+            } else {
+                None
+            }
+        });
 
         NormalTT {
             expander,
@@ -374,12 +338,10 @@ pub fn compile(
     }
 }
 
-fn check_lhs_nt_follows(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    lhs: &quoted::TokenTree,
-) -> bool {
+fn check_lhs_nt_follows(sess: &ParseSess,
+                        features: &Features,
+                        attrs: &[ast::Attribute],
+                        lhs: &quoted::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
     if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
@@ -404,15 +366,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                 return false;
             },
             TokenTree::Sequence(span, ref seq) => {
-                if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| match *seq_tt {
-                    TokenTree::MetaVarDecl(_, _, id) => id.name == "vis",
-                    TokenTree::Sequence(_, ref sub_seq) => {
-                        sub_seq.op == quoted::KleeneOp::ZeroOrMore
+                if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
+                    match *seq_tt {
+                        TokenTree::MetaVarDecl(_, _, id) => id.name == "vis",
+                        TokenTree::Sequence(_, ref sub_seq) =>
+                            sub_seq.op == quoted::KleeneOp::ZeroOrMore,
+                        _ => false,
                     }
-                    _ => false,
                 }) {
-                    sess.span_diagnostic
-                        .span_err(span, "repetition matches empty token tree");
+                    sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
                     return false;
                 }
                 if !check_lhs_no_empty_seq(sess, &seq.tts) {
@@ -428,19 +390,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
 fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
     match *rhs {
         quoted::TokenTree::Delimited(..) => return true,
-        _ => sess
-            .span_diagnostic
-            .span_err(rhs.span(), "macro rhs must be delimited"),
+        _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited")
     }
     false
 }
 
-fn check_matcher(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    matcher: &[quoted::TokenTree],
-) -> bool {
+fn check_matcher(sess: &ParseSess,
+                 features: &Features,
+                 attrs: &[ast::Attribute],
+                 matcher: &[quoted::TokenTree]) -> bool {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
     let err = sess.span_diagnostic.err_count();
@@ -474,9 +432,7 @@ impl FirstSets {
     fn new(tts: &[quoted::TokenTree]) -> FirstSets {
         use self::quoted::TokenTree;
 
-        let mut sets = FirstSets {
-            first: HashMap::new(),
-        };
+        let mut sets = FirstSets { first: HashMap::new() };
         build_recur(&mut sets, tts);
         return sets;
 
@@ -515,9 +471,8 @@ impl FirstSets {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
 
-                        if let (Some(ref sep), true) =
-                            (seq_rep.separator.clone(), subfirst.maybe_empty)
-                        {
+                        if let (Some(ref sep), true) = (seq_rep.separator.clone(),
+                                                        subfirst.maybe_empty) {
                             first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                         }
 
@@ -525,10 +480,7 @@ impl FirstSets {
                         if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                             // If sequence is potentially empty, then
                             // union them (preserving first emptiness).
-                            first.add_all(&TokenSet {
-                                maybe_empty: true,
-                                ..subfirst
-                            });
+                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                         } else {
                             // Otherwise, sequence guaranteed
                             // non-empty; replace first.
@@ -562,18 +514,19 @@ impl FirstSets {
                 TokenTree::Sequence(sp, ref seq_rep) => {
                     match self.first.get(&sp) {
                         Some(&Some(ref subfirst)) => {
+
                             // If the sequence contents can be empty, then the first
                             // token could be the separator token itself.
 
-                            if let (Some(ref sep), true) =
-                                (seq_rep.separator.clone(), subfirst.maybe_empty)
-                            {
+                            if let (Some(ref sep), true) = (seq_rep.separator.clone(),
+                                                            subfirst.maybe_empty) {
                                 first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                             }
 
                             assert!(first.maybe_empty);
                             first.add_all(subfirst);
-                            if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore {
+                            if subfirst.maybe_empty ||
+                               seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                                 // continue scanning for more first
                                 // tokens, but also make sure we
                                 // restore empty-tracking state
@@ -621,20 +574,12 @@ struct TokenSet {
 
 impl TokenSet {
     // Returns a set for the empty sequence.
-    fn empty() -> Self {
-        TokenSet {
-            tokens: Vec::new(),
-            maybe_empty: true,
-        }
-    }
+    fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } }
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
     fn singleton(tok: quoted::TokenTree) -> Self {
-        TokenSet {
-            tokens: vec![tok],
-            maybe_empty: false,
-        }
+        TokenSet { tokens: vec![tok], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
@@ -698,14 +643,12 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    first_sets: &FirstSets,
-    matcher: &[quoted::TokenTree],
-    follow: &TokenSet,
-) -> TokenSet {
+fn check_matcher_core(sess: &ParseSess,
+                      features: &Features,
+                      attrs: &[ast::Attribute],
+                      first_sets: &FirstSets,
+                      matcher: &[quoted::TokenTree],
+                      follow: &TokenSet) -> TokenSet {
     use self::quoted::TokenTree;
 
     let mut last = TokenSet::empty();
@@ -715,13 +658,11 @@ fn check_matcher_core(
     // then ensure T can also be followed by any element of FOLLOW.
     'each_token: for i in 0..matcher.len() {
         let token = &matcher[i];
-        let suffix = &matcher[i + 1..];
+        let suffix = &matcher[i+1..];
 
         let build_suffix_first = || {
             let mut s = first_sets.first(suffix);
-            if s.maybe_empty {
-                s.add_all(follow);
-            }
+            if s.maybe_empty { s.add_all(follow); }
             s
         };
 
@@ -737,12 +678,9 @@ fn check_matcher_core(
                 let can_be_followed_by_any;
                 if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) {
                     let msg = format!("invalid fragment specifier `{}`", bad_frag);
-                    sess.span_diagnostic
-                        .struct_span_err(token.span(), &msg)
-                        .help(
-                            "valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \
-                             `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`",
-                        )
+                    sess.span_diagnostic.struct_span_err(token.span(), &msg)
+                        .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \
+                              `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`")
                         .emit();
                     // (This eliminates false positives and duplicates
                     // from error messages.)
@@ -796,8 +734,12 @@ fn check_matcher_core(
                 // At this point, `suffix_first` is built, and
                 // `my_suffix` is some TokenSet that we can use
                 // for checking the interior of `seq_rep`.
-                let next =
-                    check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix);
+                let next = check_matcher_core(sess,
+                                              features,
+                                              attrs,
+                                              first_sets,
+                                              &seq_rep.tts,
+                                              my_suffix);
                 if next.maybe_empty {
                     last.add_all(&next);
                 } else {
@@ -819,17 +761,16 @@ fn check_matcher_core(
                 for next_token in &suffix_first.tokens {
                     match is_in_follow(next_token, &frag_spec.as_str()) {
                         Err((msg, help)) => {
-                            sess.span_diagnostic
-                                .struct_span_err(next_token.span(), &msg)
-                                .help(help)
-                                .emit();
+                            sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
+                                .help(help).emit();
                             // don't bother reporting every source of
                             // conflict for a particular element of `last`.
                             continue 'each_last;
                         }
                         Ok(true) => {}
                         Ok(false) => {
-                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
+                            let may_be = if last.tokens.len() == 1 &&
+                                suffix_first.tokens.len() == 1
                             {
                                 "is"
                             } else {
@@ -838,14 +779,12 @@ fn check_matcher_core(
 
                             sess.span_diagnostic.span_err(
                                 next_token.span(),
-                                &format!(
-                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
-                                     is not allowed for `{frag}` fragments",
-                                    name = name,
-                                    frag = frag_spec,
-                                    next = quoted_tt_to_string(next_token),
-                                    may_be = may_be
-                                ),
+                                &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
+                                          is not allowed for `{frag}` fragments",
+                                         name=name,
+                                         frag=frag_spec,
+                                         next=quoted_tt_to_string(next_token),
+                                         may_be=may_be)
                             );
                         }
                     }
@@ -910,16 +849,16 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
                 // since items *must* be followed by either a `;` or a `}`, we can
                 // accept anything after them
                 Ok(true)
-            }
+            },
             "block" => {
                 // anything can follow block, the braces provide an easy boundary to
                 // maintain
                 Ok(true)
-            }
-            "stmt" | "expr" => match *tok {
+            },
+            "stmt" | "expr"  => match *tok {
                 TokenTree::Token(_, ref tok) => match *tok {
                     FatArrow | Comma | Semi => Ok(true),
-                    _ => Ok(false),
+                    _ => Ok(false)
                 },
                 _ => Ok(false),
             },
@@ -927,23 +866,16 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
                 TokenTree::Token(_, ref tok) => match *tok {
                     FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
                     Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true),
-                    _ => Ok(false),
+                    _ => Ok(false)
                 },
                 _ => Ok(false),
             },
             "path" | "ty" => match *tok {
                 TokenTree::Token(_, ref tok) => match *tok {
-                    OpenDelim(token::DelimToken::Brace)
-                    | OpenDelim(token::DelimToken::Bracket)
-                    | Comma
-                    | FatArrow
-                    | Colon
-                    | Eq
-                    | Gt
-                    | Semi
-                    | BinOp(token::Or) => Ok(true),
+                    OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
+                    Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
                     Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true),
-                    _ => Ok(false),
+                    _ => Ok(false)
                 },
                 TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
                 _ => Ok(false),
@@ -951,49 +883,43 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
             "ident" | "lifetime" => {
                 // being a single token, idents and lifetimes are harmless
                 Ok(true)
-            }
+            },
             "literal" => {
                 // literals may be of a single token, or two tokens (negative numbers)
                 Ok(true)
-            }
+            },
             "meta" | "tt" => {
                 // being either a single token or a delimited sequence, tt is
                 // harmless
                 Ok(true)
-            }
+            },
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
                 match *tok {
                     TokenTree::Token(_, ref tok) => match *tok {
                         Comma => Ok(true),
                         Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true),
-                        ref tok => Ok(tok.can_begin_type()),
+                        ref tok => Ok(tok.can_begin_type())
                     },
-                    TokenTree::MetaVarDecl(_, _, frag)
-                        if frag.name == "ident" || frag.name == "ty" || frag.name == "path" =>
-                    {
-                        Ok(true)
-                    }
-                    _ => Ok(false),
+                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident"
+                                                       || frag.name == "ty"
+                                                       || frag.name == "path" => Ok(true),
+                    _ => Ok(false)
                 }
-            }
+            },
             "" => Ok(true), // keywords::Invalid
-            _ => Err((
-                format!("invalid fragment specifier `{}`", frag),
-                "valid fragment specifiers are `ident`, `block`, \
-                 `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \
-                 `literal`, `item` and `vis`",
-            )),
+            _ => Err((format!("invalid fragment specifier `{}`", frag),
+                     "valid fragment specifiers are `ident`, `block`, \
+                      `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \
+                      `literal`, `item` and `vis`"))
         }
     }
 }
 
-fn has_legal_fragment_specifier(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    tok: &quoted::TokenTree,
-) -> Result<(), String> {
+fn has_legal_fragment_specifier(sess: &ParseSess,
+                                features: &Features,
+                                attrs: &[ast::Attribute],
+                                tok: &quoted::TokenTree) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
     if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
         let frag_name = frag_spec.as_str();
@@ -1005,45 +931,38 @@ fn has_legal_fragment_specifier(
     Ok(())
 }
 
-fn is_legal_fragment_specifier(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    frag_name: &str,
-    frag_span: Span,
-) -> bool {
+fn is_legal_fragment_specifier(sess: &ParseSess,
+                               features: &Features,
+                               attrs: &[ast::Attribute],
+                               frag_name: &str,
+                               frag_span: Span) -> bool {
     match frag_name {
-        "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" | "path" | "ty" | "ident"
-        | "meta" | "tt" | "" => true,
+        "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" |
+        "path" | "ty" | "ident" | "meta" | "tt" | "" => true,
         "literal" => {
-            if !features.macro_literal_matcher
-                && !attr::contains_name(attrs, "allow_internal_unstable")
-            {
+            if !features.macro_literal_matcher &&
+               !attr::contains_name(attrs, "allow_internal_unstable") {
                 let explain = feature_gate::EXPLAIN_LITERAL_MATCHER;
-                emit_feature_err(
-                    sess,
-                    "macro_literal_matcher",
-                    frag_span,
-                    GateIssue::Language,
-                    explain,
-                );
+                emit_feature_err(sess,
+                                 "macro_literal_matcher",
+                                 frag_span,
+                                 GateIssue::Language,
+                                 explain);
             }
             true
-        }
+        },
         "vis" => {
-            if !features.macro_vis_matcher && !attr::contains_name(attrs, "allow_internal_unstable")
-            {
+            if !features.macro_vis_matcher &&
+               !attr::contains_name(attrs, "allow_internal_unstable") {
                 let explain = feature_gate::EXPLAIN_VIS_MATCHER;
-                emit_feature_err(
-                    sess,
-                    "macro_vis_matcher",
-                    frag_span,
-                    GateIssue::Language,
-                    explain,
-                );
+                emit_feature_err(sess,
+                                 "macro_vis_matcher",
+                                 frag_span,
+                                 GateIssue::Language,
+                                 explain);
             }
             true
-        }
+        },
         _ => false,
     }
 }
@@ -1053,9 +972,7 @@ fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
         quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
-        _ => panic!(
-            "unexpected quoted::TokenTree::{{Sequence or Delimited}} \
-             in follow set checker"
-        ),
+        _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+                     in follow set checker"),
     }
 }
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 1bca6dd0653..e209e077bf4 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -13,7 +13,7 @@ use feature_gate::{self, emit_feature_err, Features, GateIssue};
 use parse::{token, ParseSess};
 use print::pprust;
 use symbol::keywords;
-use syntax_pos::{edition::Edition, BytePos, Span, DUMMY_SP};
+use syntax_pos::{edition::Edition, BytePos, Span};
 use tokenstream;
 use {ast, attr};