diff options
| author | Andre Bogus <bogusandre@gmail.com> | 2017-05-12 20:05:39 +0200 |
|---|---|---|
| committer | Andre Bogus <bogusandre@gmail.com> | 2017-05-12 20:05:39 +0200 |
| commit | a9c163ebe9deeaf74699fc8642d919cdb2b5e617 (patch) | |
| tree | a964a99f5353d47f5468e7c9b55ba658c549bd79 /src/libsyntax/ext | |
| parent | e19ccb71c8427135a69d874623af68422aeeb9e9 (diff) | |
| download | rust-a9c163ebe9deeaf74699fc8642d919cdb2b5e617.tar.gz rust-a9c163ebe9deeaf74699fc8642d919cdb2b5e617.zip | |
Fix some clippy warnings in libsyntax
This is mostly removing stray ampersands, needless returns and lifetimes.
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/base.rs | 10 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 26 | ||||
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 49 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 45 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/quoted.rs | 19 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 6 |
8 files changed, 82 insertions, 81 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index f731c5abdd6..f78089aaa75 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -636,7 +636,7 @@ pub struct ExpansionData { /// One of these is made during expansion and incrementally updated as we go; /// when a macro expansion occurs, the resulting nodes have the backtrace() -/// -> expn_info of their expansion context stored into their span. +/// -> `expn_info` of their expansion context stored into their span. pub struct ExtCtxt<'a> { pub parse_sess: &'a parse::ParseSess, pub ecfg: expand::ExpansionConfig<'a>, @@ -709,7 +709,7 @@ impl<'a> ExtCtxt<'a> { } ctxt = info.call_site.ctxt; last_macro = Some(info.call_site); - return Some(()); + Some(()) }).is_none() { break } @@ -770,9 +770,9 @@ impl<'a> ExtCtxt<'a> { } pub fn trace_macros_diag(&self) { for (sp, notes) in self.expansions.iter() { - let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro"); + let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro"); for note in notes { - db.note(¬e); + db.note(note); } db.emit(); } @@ -795,7 +795,7 @@ impl<'a> ExtCtxt<'a> { v.push(self.ident_of(s)); } v.extend(components.iter().map(|s| self.ident_of(s))); - return v + v } pub fn name_of(&self, st: &str) -> ast::Name { Symbol::intern(st) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index a8aa103f80a..a5633679539 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -415,19 +415,19 @@ impl<'a, 'b> MacroExpander<'a, 'b> { match *ext { MultiModifier(ref mac) => { - let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + let meta = panictry!(attr.parse_meta(self.cx.parse_sess)); let item = mac.expand(self.cx, attr.span, &meta, item); kind.expect_from_annotatables(item) } MultiDecorator(ref mac) => { let mut items = Vec::new(); - let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + let meta = 
panictry!(attr.parse_meta(self.cx.parse_sess)); mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item)); items.push(item); kind.expect_from_annotatables(items) } SyntaxExtension::AttrProcMacro(ref mac) => { - let item_toks = stream_for_item(&item, &self.cx.parse_sess); + let item_toks = stream_for_item(&item, self.cx.parse_sess); let span = Span { ctxt: self.cx.backtrace(), ..attr.span }; let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks); @@ -439,7 +439,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } _ => { let msg = &format!("macro `{}` may not be used in attributes", attr.path); - self.cx.span_err(attr.span, &msg); + self.cx.span_err(attr.span, msg); kind.dummy(attr.span) } } @@ -454,7 +454,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; let path = &mac.node.path; - let ident = ident.unwrap_or(keywords::Invalid.ident()); + let ident = ident.unwrap_or_else(|| keywords::Invalid.ident()); let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark)); let opt_expanded = match *ext { NormalTT(ref expandfun, exp_span, allow_internal_unstable) => { @@ -591,7 +591,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } _ => { let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); - self.cx.span_err(span, &msg); + self.cx.span_err(span, msg); kind.dummy(span) } } @@ -749,19 +749,15 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { fn check_attributes(&mut self, attrs: &[ast::Attribute]) { let features = self.cx.ecfg.features.unwrap(); for attr in attrs.iter() { - feature_gate::check_attribute(&attr, &self.cx.parse_sess, features); + feature_gate::check_attribute(attr, self.cx.parse_sess, features); } } } pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> { - for i in 0 .. 
attrs.len() { - if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) { - return Some(attrs.remove(i)); - } - } - - None + attrs.iter() + .position(|a| !attr::is_known(a) && !is_builtin_attr(a)) + .map(|i| attrs.remove(i)) } // These are pretty nasty. Ideally, we would keep the tokens around, linked from @@ -923,7 +919,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { let result = noop_fold_item(item, self); self.cx.current_expansion.module = orig_module; self.cx.current_expansion.directory_ownership = orig_directory_ownership; - return result; + result } // Ensure that test functions are accessible from the test harness. ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index d7a85baa3ff..85ae65e6b79 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -23,7 +23,7 @@ use tokenstream::{TokenStream, TokenTree}; /// /// This is registered as a set of expression syntax extension called quote! /// that lifts its argument token-tree to an AST representing the -/// construction of the same token tree, with token::SubstNt interpreted +/// construction of the same token tree, with `token::SubstNt` interpreted /// as antiquotes (splices). 
pub mod rt { @@ -389,7 +389,7 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> { result = results.pop().unwrap(); result.push(tree); } - tree @ _ => result.push(tree), + tree => result.push(tree), } } result diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 22a5776315a..4183583d66f 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -150,7 +150,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); - return DummyResult::expr(sp); + DummyResult::expr(sp) } } } @@ -167,7 +167,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - return DummyResult::expr(sp); + DummyResult::expr(sp) } Ok(..) => { // Add this input file to the code map to make it available as diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index eb0b7c29f8d..8e28135b11d 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -39,40 +39,40 @@ //! Example: Start parsing `a a a a b` against [· a $( a )* a b]. //! //! Remaining input: `a a a a b` -//! next_eis: [· a $( a )* a b] +//! `next_eis`: `[· a $( a )* a b]` //! //! - - - Advance over an `a`. - - - //! //! Remaining input: `a a a b` -//! cur: [a · $( a )* a b] +//! cur: `[a · $( a )* a b]` //! Descend/Skip (first item). -//! next: [a $( · a )* a b] [a $( a )* · a b]. +//! next: `[a $( · a )* a b] [a $( a )* · a b]`. //! //! - - - Advance over an `a`. - - - //! //! Remaining input: `a a b` -//! cur: [a $( a · )* a b] next: [a $( a )* a · b] +//! cur: `[a $( a · )* a b]` next: `[a $( a )* a · b]` //! Finish/Repeat (first item) -//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b] +//! next: `[a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]` //! //! 
- - - Advance over an `a`. - - - (this looks exactly like the last step) //! //! Remaining input: `a b` -//! cur: [a $( a · )* a b] next: [a $( a )* a · b] +//! cur: `[a $( a · )* a b]` next: `[a $( a )* a · b]` //! Finish/Repeat (first item) -//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b] +//! next: `[a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]` //! //! - - - Advance over an `a`. - - - (this looks exactly like the last step) //! //! Remaining input: `b` -//! cur: [a $( a · )* a b] next: [a $( a )* a · b] +//! cur: `[a $( a · )* a b]` next: `[a $( a )* a · b]` //! Finish/Repeat (first item) -//! next: [a $( a )* · a b] [a $( · a )* a b] +//! next: `[a $( a )* · a b] [a $( · a )* a b]` //! //! - - - Advance over a `b`. - - - //! //! Remaining input: `` -//! eof: [a $( a )* a b ·] +//! eof: `[a $( a )* a b ·]` pub use self::NamedMatch::*; pub use self::ParseResult::*; @@ -178,20 +178,20 @@ fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> { }) } -/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL: +/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`: /// so it is associated with a single ident in a parse, and all -/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type -/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a -/// single token::MATCH_NONTERMINAL in the TokenTree that produced it. +/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type +/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a +/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it. /// -/// The in-memory structure of a particular NamedMatch represents the match +/// The in-memory structure of a particular `NamedMatch` represents the match /// that occurred when a particular subset of a matcher was applied to a /// particular token tree. 
/// -/// The width of each MatchedSeq in the NamedMatch, and the identity of the -/// `MatchedNonterminal`s, will depend on the token tree it was applied to: -/// each MatchedSeq corresponds to a single TTSeq in the originating -/// token tree. The depth of the NamedMatch structure will therefore depend +/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of +/// the `MatchedNonterminal`s, will depend on the token tree it was applied +/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating +/// token tree. The depth of the `NamedMatch` structure will therefore depend /// only on the nesting depth of `ast::TTSeq`s in the originating /// token tree it was derived from. @@ -334,7 +334,7 @@ fn inner_parse_loop(sess: &ParseSess, // Check if we need a separator if idx == len && ei.sep.is_some() { // We have a separator, and it is the current token. - if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) { + if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) { ei.idx += 1; next_eis.push(ei); } @@ -401,7 +401,7 @@ fn inner_parse_loop(sess: &ParseSess, cur_eis.push(ei); } TokenTree::Token(_, ref t) => { - if token_name_eq(t, &token) { + if token_name_eq(t, token) { ei.idx += 1; next_eis.push(ei); } @@ -485,11 +485,8 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op } fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { - match name { - "tt" => { - return token::NtTT(p.parse_token_tree()); - } - _ => {} + if let "tt" = name { + return token::NtTT(p.parse_token_tree()); } // check at the beginning and the parser checks after each bump p.process_potential_macro_variable(); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index f959ccc989e..9e3fe30e7bf 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -94,7 +94,7 @@ fn generic_extension<'cx>(cx: &'cx mut 
ExtCtxt, -> Box<MacResult+'cx> { if cx.trace_macros() { let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp); - let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert(vec![]); + let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new); values.push(format!("expands to `{}! {{ {} }}`", name, arg)); } @@ -206,7 +206,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item) let mut valid = true; // Extract the arguments: - let lhses = match **argument_map.get(&lhs_nm).unwrap() { + let lhses = match *argument_map[&lhs_nm] { MatchedSeq(ref s, _) => { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { @@ -222,7 +222,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item) _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }; - let rhses = match **argument_map.get(&rhs_nm).unwrap() { + let rhses = match *argument_map[&rhs_nm] { MatchedSeq(ref s, _) => { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { @@ -260,13 +260,12 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. - match lhs { - &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts), - _ => { - let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; - sess.span_diagnostic.span_err(lhs.span(), msg); - false - } + if let quoted::TokenTree::Delimited(_, ref tts) = *lhs { + check_matcher(sess, features, &tts.tts) + } else { + let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; + sess.span_diagnostic.span_err(lhs.span(), msg); + false } // we don't abort on errors on rejection, the driver will do that for us // after parsing/expansion. we can report every error in every macro this way. 
@@ -283,17 +282,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { return false; }, TokenTree::Sequence(span, ref seq) => { - if seq.separator.is_none() { - if seq.tts.iter().all(|seq_tt| { - match *seq_tt { - TokenTree::Sequence(_, ref sub_seq) => - sub_seq.op == quoted::KleeneOp::ZeroOrMore, - _ => false, - } - }) { - sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); - return false; + if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| { + match *seq_tt { + TokenTree::Sequence(_, ref sub_seq) => + sub_seq.op == quoted::KleeneOp::ZeroOrMore, + _ => false, } + }) { + sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); + return false; } if !check_lhs_no_empty_seq(sess, &seq.tts) { return false; @@ -407,7 +404,7 @@ impl FirstSets { } } - return first; + first } } @@ -469,7 +466,7 @@ impl FirstSets { // we only exit the loop if `tts` was empty or if every // element of `tts` matches the empty sequence. 
assert!(first.maybe_empty); - return first; + first } } @@ -579,7 +576,7 @@ fn check_matcher_core(sess: &ParseSess, let build_suffix_first = || { let mut s = first_sets.first(suffix); if s.maybe_empty { s.add_all(follow); } - return s; + s }; // (we build `suffix_first` on demand below; you can tell @@ -861,6 +858,6 @@ fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { match *tt { quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), - _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"), + _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} in follow set checker"), } } diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index d216effbd45..fa65e9501c2 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -96,6 +96,17 @@ impl TokenTree { } } + pub fn is_empty(&self) -> bool { + match *self { + TokenTree::Delimited(_, ref delimed) => match delimed.delim { + token::NoDelim => delimed.tts.is_empty(), + _ => false, + }, + TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(), + _ => true, + } + } + pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { @@ -144,9 +155,9 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars } _ => end_sp, }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), + tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), }; sess.missing_fragment_specifiers.borrow_mut().insert(span); result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident())); @@ -228,10 +239,10 @@ fn 
parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess) Some(op) => return (Some(tok), op), None => span, }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), } }, - tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), }; sess.span_diagnostic.span_err(span, "expected `*` or `+`"); diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 947089b0b9a..2a435bdea10 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -121,20 +121,20 @@ pub fn transcribe(sp_diag: &Handler, &repeats) { LockstepIterSize::Unconstrained => { panic!(sp_diag.span_fatal( - sp.clone(), /* blame macro writer */ + sp, /* blame macro writer */ "attempted to repeat an expression \ containing no syntax \ variables matched as repeating at this depth")); } LockstepIterSize::Contradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - panic!(sp_diag.span_fatal(sp.clone(), &msg[..])); + panic!(sp_diag.span_fatal(sp, &msg[..])); } LockstepIterSize::Constraint(len, _) => { if len == 0 { if seq.op == quoted::KleeneOp::OneOrMore { // FIXME #2887 blame invoker - panic!(sp_diag.span_fatal(sp.clone(), + panic!(sp_diag.span_fatal(sp, "this must repeat at least once")); } } else { |
