author     Andre Bogus <bogusandre@gmail.com>  2017-05-12 20:05:39 +0200
committer  Andre Bogus <bogusandre@gmail.com>  2017-05-12 20:05:39 +0200
commit     a9c163ebe9deeaf74699fc8642d919cdb2b5e617
tree       a964a99f5353d47f5468e7c9b55ba658c549bd79
parent     e19ccb71c8427135a69d874623af68422aeeb9e9
Fix some clippy warnings in libsyntax
This mostly removes stray ampersands, needless returns, and redundant lifetimes.
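As context for the long patch below, here is a minimal, illustrative sketch of the three main clippy patterns the message refers to. None of this code is from the commit; the function names are made up for illustration.

    // Illustrative sketch only -- not code from this patch.

    // clippy::needless_borrow -- a stray `&` on a value that is already a
    // reference; the fixed form simply passes the reference through.
    fn name_len(name: &str) -> usize {
        // warned form:   str::len(&name)
        str::len(name)
    }

    // clippy::needless_return -- an explicit `return` on the final
    // expression of a function body.
    fn double(x: u32) -> u32 {
        // warned form:   return x * 2;
        x * 2
    }

    // clippy::needless_lifetimes -- a lifetime parameter that elision
    // already supplies.
    // warned form:   fn first<'a>(v: &'a [u32]) -> Option<&'a u32>
    fn first(v: &[u32]) -> Option<&u32> {
        v.first()
    }

    fn main() {
        assert_eq!(name_len("clippy"), 6);
        assert_eq!(double(21), 42);
        assert_eq!(first(&[1, 2, 3]), Some(&1));
    }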
 src/libsyntax/ast.rs                  |  18
 src/libsyntax/attr.rs                 |  17
 src/libsyntax/codemap.rs              |   4
 src/libsyntax/config.rs               |   8
 src/libsyntax/diagnostics/plugin.rs   |   6
 src/libsyntax/ext/base.rs             |  10
 src/libsyntax/ext/expand.rs           |  26
 src/libsyntax/ext/quote.rs            |   4
 src/libsyntax/ext/source_util.rs      |   4
 src/libsyntax/ext/tt/macro_parser.rs  |  49
 src/libsyntax/ext/tt/macro_rules.rs   |  45
 src/libsyntax/ext/tt/quoted.rs        |  19
 src/libsyntax/ext/tt/transcribe.rs    |   6
 src/libsyntax/feature_gate.rs         |  47
 src/libsyntax/json.rs                 |   2
 src/libsyntax/parse/attr.rs           |   4
 src/libsyntax/parse/classify.rs       |   4
 src/libsyntax/parse/common.rs         |   2
 src/libsyntax/parse/lexer/comments.rs |   2
 src/libsyntax/parse/lexer/mod.rs      | 122
 src/libsyntax/parse/mod.rs            | 146
 src/libsyntax/parse/obsolete.rs       |   2
 src/libsyntax/parse/parser.rs         |  73
 src/libsyntax/parse/token.rs          |  46
 src/libsyntax/print/pp.rs             |  12
 src/libsyntax/print/pprust.rs         | 297
 src/libsyntax/std_inject.rs           |   4
 src/libsyntax/test.rs                 |  19
 src/libsyntax/tokenstream.rs          |  14
 src/libsyntax/util/lev_distance.rs    |   7
 src/libsyntax/util/move_map.rs        |   8
 src/libsyntax/visit.rs                |   8
 32 files changed, 498 insertions(+), 537 deletions(-)
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index e5bb02fe082..24ce99208ed 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -715,7 +715,7 @@ impl Stmt {
             StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| {
                 (mac, MacStmtStyle::Semicolon, attrs)
             })),
-            node @ _ => node,
+            node => node,
         };
         self
     }
@@ -1076,16 +1076,16 @@ impl LitKind {
     pub fn is_unsuffixed(&self) -> bool {
         match *self {
             // unsuffixed variants
-            LitKind::Str(..) => true,
-            LitKind::ByteStr(..) => true,
-            LitKind::Byte(..) => true,
-            LitKind::Char(..) => true,
-            LitKind::Int(_, LitIntType::Unsuffixed) => true,
-            LitKind::FloatUnsuffixed(..) => true,
+            LitKind::Str(..) |
+            LitKind::ByteStr(..) |
+            LitKind::Byte(..) |
+            LitKind::Char(..) |
+            LitKind::Int(_, LitIntType::Unsuffixed) |
+            LitKind::FloatUnsuffixed(..) |
             LitKind::Bool(..) => true,
             // suffixed variants
-            LitKind::Int(_, LitIntType::Signed(..)) => false,
-            LitKind::Int(_, LitIntType::Unsigned(..)) => false,
+            LitKind::Int(_, LitIntType::Signed(..)) |
+            LitKind::Int(_, LitIntType::Unsigned(..)) |
             LitKind::Float(..) => false,
         }
     }
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 0980b73e80c..45f891d8dc5 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -112,7 +112,7 @@ impl NestedMetaItem {
     /// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem.
     pub fn meta_item(&self) -> Option<&MetaItem> {
         match self.node {
-            NestedMetaItemKind::MetaItem(ref item) => Some(&item),
+            NestedMetaItemKind::MetaItem(ref item) => Some(item),
             _ => None
         }
     }
@@ -120,7 +120,7 @@ impl NestedMetaItem {
     /// Returns the Lit if self is a NestedMetaItemKind::Literal.
     pub fn literal(&self) -> Option<&Lit> {
         match self.node {
-            NestedMetaItemKind::Literal(ref lit) => Some(&lit),
+            NestedMetaItemKind::Literal(ref lit) => Some(lit),
             _ => None
         }
     }
@@ -259,7 +259,7 @@ impl MetaItem {
         match self.node {
             MetaItemKind::NameValue(ref v) => {
                 match v.node {
-                    LitKind::Str(ref s, _) => Some((*s).clone()),
+                    LitKind::Str(ref s, _) => Some(*s),
                     _ => None,
                 }
             },
@@ -1217,9 +1217,10 @@ impl LitKind {
                 Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
             }
             LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
-            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
-                true => "true",
-                false => "false",
+            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
+                "true"
+            } else {
+                "false"
             }))),
         }
     }
@@ -1261,7 +1262,7 @@ impl<T: HasAttrs> HasAttrs for Spanned<T> {
 
 impl HasAttrs for Vec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
-        &self
+        self
     }
     fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
         f(self)
@@ -1270,7 +1271,7 @@ impl HasAttrs for Vec<Attribute> {
 
 impl HasAttrs for ThinVec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
-        &self
+        self
     }
     fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
         f(self.into()).into()
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 0c8be1d4f24..d32c3ec5f46 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -485,7 +485,7 @@ impl CodeMap {
         match self.span_to_snippet(sp) {
             Ok(snippet) => {
                 let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
-                if snippet.len() > 0 && !snippet.contains('\n') {
+                if !snippet.is_empty() && !snippet.contains('\n') {
                     Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
                 } else {
                     sp
@@ -502,7 +502,7 @@ impl CodeMap {
     pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
         for fm in self.files.borrow().iter() {
             if filename == fm.name {
-               (self.dep_tracking_callback.borrow())(&fm);
+               (self.dep_tracking_callback.borrow())(fm);
                 return Some(fm.clone());
             }
         }
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index ede8a33df65..2e98c7d9626 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -123,7 +123,7 @@ impl<'a> StripUnconfigured<'a> {
                 return false;
             }
 
-            let mis = if !is_cfg(&attr) {
+            let mis = if !is_cfg(attr) {
                 return true;
             } else if let Some(mis) = attr.meta_item_list() {
                 mis
@@ -150,7 +150,7 @@ impl<'a> StripUnconfigured<'a> {
         // flag the offending attributes
         for attr in attrs.iter() {
             if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
-                let mut err = feature_err(&self.sess,
+                let mut err = feature_err(self.sess,
                                           "stmt_expr_attributes",
                                           attr.span,
                                           GateIssue::Language,
@@ -258,7 +258,7 @@ impl<'a> StripUnconfigured<'a> {
     pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> {
         if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
             if !field.attrs.is_empty() {
-                let mut err = feature_err(&self.sess,
+                let mut err = feature_err(self.sess,
                                           "struct_field_attributes",
                                           field.span,
                                           GateIssue::Language,
@@ -290,7 +290,7 @@ impl<'a> StripUnconfigured<'a> {
         for attr in attrs.iter() {
             if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
                 let mut err = feature_err(
-                    &self.sess,
+                    self.sess,
                     "struct_field_attributes",
                     attr.span,
                     GateIssue::Language,
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index fe5cb87ad59..ca89a80fdee 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -111,7 +111,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
     // overflow the maximum line width.
     description.map(|raw_msg| {
         let msg = raw_msg.as_str();
-        if !msg.starts_with("\n") || !msg.ends_with("\n") {
+        if !msg.starts_with('\n') || !msg.ends_with('\n') {
             ecx.span_err(span, &format!(
                 "description for error code {} doesn't start and end with a newline",
                 code
@@ -177,7 +177,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
             if let Err(e) = output_metadata(ecx,
                                             &target_triple,
                                             &crate_name.name.as_str(),
-                                            &diagnostics) {
+                                            diagnostics) {
                 ecx.span_bug(span, &format!(
                     "error writing metadata for triple `{}` and crate `{}`, error: {}, \
                      cause: {:?}",
@@ -227,7 +227,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
 
     MacEager::items(SmallVector::many(vec![
         P(ast::Item {
-            ident: name.clone(),
+            ident: *name,
             attrs: Vec::new(),
             id: ast::DUMMY_NODE_ID,
             node: ast::ItemKind::Const(
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index f731c5abdd6..f78089aaa75 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -636,7 +636,7 @@ pub struct ExpansionData {
 
 /// One of these is made during expansion and incrementally updated as we go;
 /// when a macro expansion occurs, the resulting nodes have the backtrace()
-/// -> expn_info of their expansion context stored into their span.
+/// -> `expn_info` of their expansion context stored into their span.
 pub struct ExtCtxt<'a> {
     pub parse_sess: &'a parse::ParseSess,
     pub ecfg: expand::ExpansionConfig<'a>,
@@ -709,7 +709,7 @@ impl<'a> ExtCtxt<'a> {
                 }
                 ctxt = info.call_site.ctxt;
                 last_macro = Some(info.call_site);
-                return Some(());
+                Some(())
             }).is_none() {
                 break
             }
@@ -770,9 +770,9 @@ impl<'a> ExtCtxt<'a> {
     }
     pub fn trace_macros_diag(&self) {
         for (sp, notes) in self.expansions.iter() {
-            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro");
+            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
             for note in notes {
-                db.note(&note);
+                db.note(note);
             }
             db.emit();
         }
@@ -795,7 +795,7 @@ impl<'a> ExtCtxt<'a> {
             v.push(self.ident_of(s));
         }
         v.extend(components.iter().map(|s| self.ident_of(s)));
-        return v
+        v
     }
     pub fn name_of(&self, st: &str) -> ast::Name {
         Symbol::intern(st)
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index a8aa103f80a..a5633679539 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -415,19 +415,19 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 
         match *ext {
             MultiModifier(ref mac) => {
-                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
                 let item = mac.expand(self.cx, attr.span, &meta, item);
                 kind.expect_from_annotatables(item)
             }
             MultiDecorator(ref mac) => {
                 let mut items = Vec::new();
-                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
                 mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
                 items.push(item);
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+                let item_toks = stream_for_item(&item, self.cx.parse_sess);
 
                 let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
                 let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
@@ -439,7 +439,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             _ => {
                 let msg = &format!("macro `{}` may not be used in attributes", attr.path);
-                self.cx.span_err(attr.span, &msg);
+                self.cx.span_err(attr.span, msg);
                 kind.dummy(attr.span)
             }
         }
@@ -454,7 +454,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         };
         let path = &mac.node.path;
 
-        let ident = ident.unwrap_or(keywords::Invalid.ident());
+        let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
         let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
         let opt_expanded = match *ext {
             NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
@@ -591,7 +591,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             _ => {
                 let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
-                self.cx.span_err(span, &msg);
+                self.cx.span_err(span, msg);
                 kind.dummy(span)
             }
         }
@@ -749,19 +749,15 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
     fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
         let features = self.cx.ecfg.features.unwrap();
         for attr in attrs.iter() {
-            feature_gate::check_attribute(&attr, &self.cx.parse_sess, features);
+            feature_gate::check_attribute(attr, self.cx.parse_sess, features);
         }
     }
 }
 
 pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
-    for i in 0 .. attrs.len() {
-        if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
-             return Some(attrs.remove(i));
-        }
-    }
-
-    None
+    attrs.iter()
+         .position(|a| !attr::is_known(a) && !is_builtin_attr(a))
+         .map(|i| attrs.remove(i))
 }
 
 // These are pretty nasty. Ideally, we would keep the tokens around, linked from
@@ -923,7 +919,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                 let result = noop_fold_item(item, self);
                 self.cx.current_expansion.module = orig_module;
                 self.cx.current_expansion.directory_ownership = orig_directory_ownership;
-                return result;
+                result
             }
             // Ensure that test functions are accessible from the test harness.
             ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => {
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index d7a85baa3ff..85ae65e6b79 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -23,7 +23,7 @@ use tokenstream::{TokenStream, TokenTree};
 ///
 /// This is registered as a set of expression syntax extension called quote!
 /// that lifts its argument token-tree to an AST representing the
-/// construction of the same token tree, with token::SubstNt interpreted
+/// construction of the same token tree, with `token::SubstNt` interpreted
 /// as antiquotes (splices).
 
 pub mod rt {
@@ -389,7 +389,7 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
                 result = results.pop().unwrap();
                 result.push(tree);
             }
-            tree @ _ => result.push(tree),
+            tree => result.push(tree),
         }
     }
     result
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 22a5776315a..4183583d66f 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -150,7 +150,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
             cx.span_err(sp,
                         &format!("{} wasn't a utf-8 file",
                                 file.display()));
-            return DummyResult::expr(sp);
+            DummyResult::expr(sp)
         }
     }
 }
@@ -167,7 +167,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
         Err(e) => {
             cx.span_err(sp,
                         &format!("couldn't read {}: {}", file.display(), e));
-            return DummyResult::expr(sp);
+            DummyResult::expr(sp)
         }
         Ok(..) => {
             // Add this input file to the code map to make it available as
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index eb0b7c29f8d..8e28135b11d 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -39,40 +39,40 @@
 //! Example: Start parsing `a a a a b` against [· a $( a )* a b].
 //!
 //! Remaining input: `a a a a b`
-//! next_eis: [· a $( a )* a b]
+//! `next_eis`: `[· a $( a )* a b]`
 //!
 //! - - - Advance over an `a`. - - -
 //!
 //! Remaining input: `a a a b`
-//! cur: [a · $( a )* a b]
+//! cur: `[a · $( a )* a b]`
 //! Descend/Skip (first item).
-//! next: [a $( · a )* a b]  [a $( a )* · a b].
+//! next: `[a $( · a )* a b]  [a $( a )* · a b]`.
 //!
 //! - - - Advance over an `a`. - - -
 //!
 //! Remaining input: `a a b`
-//! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
+//! cur: `[a $( a · )* a b]`  next: `[a $( a )* a · b]`
 //! Finish/Repeat (first item)
-//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
+//! next: `[a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]`
 //!
 //! - - - Advance over an `a`. - - - (this looks exactly like the last step)
 //!
 //! Remaining input: `a b`
-//! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
+//! cur: `[a $( a · )* a b]`  next: `[a $( a )* a · b]`
 //! Finish/Repeat (first item)
-//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
+//! next: `[a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]`
 //!
 //! - - - Advance over an `a`. - - - (this looks exactly like the last step)
 //!
 //! Remaining input: `b`
-//! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
+//! cur: `[a $( a · )* a b]`  next: `[a $( a )* a · b]`
 //! Finish/Repeat (first item)
-//! next: [a $( a )* · a b]  [a $( · a )* a b]
+//! next: `[a $( a )* · a b]  [a $( · a )* a b]`
 //!
 //! - - - Advance over a `b`. - - -
 //!
 //! Remaining input: ``
-//! eof: [a $( a )* a b ·]
+//! eof: `[a $( a )* a b ·]`
 
 pub use self::NamedMatch::*;
 pub use self::ParseResult::*;
@@ -178,20 +178,20 @@ fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     })
 }
 
-/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
+/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
 /// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
-/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
-/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
+/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
+/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
 ///
-/// The in-memory structure of a particular NamedMatch represents the match
+/// The in-memory structure of a particular `NamedMatch` represents the match
 /// that occurred when a particular subset of a matcher was applied to a
 /// particular token tree.
 ///
-/// The width of each MatchedSeq in the NamedMatch, and the identity of the
-/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
-/// each MatchedSeq corresponds to a single TTSeq in the originating
-/// token tree. The depth of the NamedMatch structure will therefore depend
+/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
+/// the `MatchedNonterminal`s, will depend on the token tree it was applied
+/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
+/// token tree. The depth of the `NamedMatch` structure will therefore depend
 /// only on the nesting depth of `ast::TTSeq`s in the originating
 /// token tree it was derived from.
 
@@ -334,7 +334,7 @@ fn inner_parse_loop(sess: &ParseSess,
                 // Check if we need a separator
                 if idx == len && ei.sep.is_some() {
                     // We have a separator, and it is the current token.
-                    if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) {
+                    if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
                         ei.idx += 1;
                         next_eis.push(ei);
                     }
@@ -401,7 +401,7 @@ fn inner_parse_loop(sess: &ParseSess,
                     cur_eis.push(ei);
                 }
                 TokenTree::Token(_, ref t) => {
-                    if token_name_eq(t, &token) {
+                    if token_name_eq(t, token) {
                         ei.idx += 1;
                         next_eis.push(ei);
                     }
@@ -485,11 +485,8 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op
 }
 
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
-    match name {
-        "tt" => {
-            return token::NtTT(p.parse_token_tree());
-        }
-        _ => {}
+    if let "tt" = name {
+        return token::NtTT(p.parse_token_tree());
     }
     // check at the beginning and the parser checks after each bump
     p.process_potential_macro_variable();
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index f959ccc989e..9e3fe30e7bf 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -94,7 +94,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
         let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
-        let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert(vec![]);
+        let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
         values.push(format!("expands to `{}! {{ {} }}`", name, arg));
     }
 
@@ -206,7 +206,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
     let mut valid = true;
 
     // Extract the arguments:
-    let lhses = match **argument_map.get(&lhs_nm).unwrap() {
+    let lhses = match *argument_map[&lhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
@@ -222,7 +222,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
     };
 
-    let rhses = match **argument_map.get(&rhs_nm).unwrap() {
+    let rhses = match *argument_map[&rhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
@@ -260,13 +260,12 @@ fn check_lhs_nt_follows(sess: &ParseSess,
                         lhs: &quoted::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    match lhs {
-        &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts),
-        _ => {
-            let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
-            sess.span_diagnostic.span_err(lhs.span(), msg);
-            false
-        }
+    if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+        check_matcher(sess, features, &tts.tts)
+    } else {
+        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+        sess.span_diagnostic.span_err(lhs.span(), msg);
+        false
     }
     // we don't abort on errors on rejection, the driver will do that for us
     // after parsing/expansion. we can report every error in every macro this way.
@@ -283,17 +282,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                 return false;
             },
             TokenTree::Sequence(span, ref seq) => {
-                if seq.separator.is_none() {
-                    if seq.tts.iter().all(|seq_tt| {
-                        match *seq_tt {
-                            TokenTree::Sequence(_, ref sub_seq) =>
-                                sub_seq.op == quoted::KleeneOp::ZeroOrMore,
-                            _ => false,
-                        }
-                    }) {
-                        sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
-                        return false;
+                if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
+                    match *seq_tt {
+                        TokenTree::Sequence(_, ref sub_seq) =>
+                            sub_seq.op == quoted::KleeneOp::ZeroOrMore,
+                        _ => false,
                     }
+                }) {
+                    sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+                    return false;
                 }
                 if !check_lhs_no_empty_seq(sess, &seq.tts) {
                     return false;
@@ -407,7 +404,7 @@ impl FirstSets {
                 }
             }
 
-            return first;
+            first
         }
     }
 
@@ -469,7 +466,7 @@ impl FirstSets {
         // we only exit the loop if `tts` was empty or if every
         // element of `tts` matches the empty sequence.
         assert!(first.maybe_empty);
-        return first;
+        first
     }
 }
 
@@ -579,7 +576,7 @@ fn check_matcher_core(sess: &ParseSess,
         let build_suffix_first = || {
             let mut s = first_sets.first(suffix);
             if s.maybe_empty { s.add_all(follow); }
-            return s;
+            s
         };
 
         // (we build `suffix_first` on demand below; you can tell
@@ -861,6 +858,6 @@ fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
         quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
-        _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
+        _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} in follow set checker"),
     }
 }
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index d216effbd45..fa65e9501c2 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -96,6 +96,17 @@ impl TokenTree {
         }
     }
 
+    pub fn is_empty(&self) -> bool {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.is_empty(),
+                _ => false,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
+            _ => true,
+        }
+    }
+
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
             (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@@ -144,9 +155,9 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
                             }
                             _ => end_sp,
                         },
-                        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                     },
-                    tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+                    tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                 };
                 sess.missing_fragment_specifiers.borrow_mut().insert(span);
                 result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
@@ -228,10 +239,10 @@ fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess)
                     Some(op) => return (Some(tok), op),
                     None => span,
                 },
-                tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
             }
         },
-        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
     };
 
     sess.span_diagnostic.span_err(span, "expected `*` or `+`");
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 947089b0b9a..2a435bdea10 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -121,20 +121,20 @@ pub fn transcribe(sp_diag: &Handler,
                                          &repeats) {
                     LockstepIterSize::Unconstrained => {
                         panic!(sp_diag.span_fatal(
-                            sp.clone(), /* blame macro writer */
+                            sp, /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
                              variables matched as repeating at this depth"));
                     }
                     LockstepIterSize::Contradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
+                        panic!(sp_diag.span_fatal(sp, &msg[..]));
                     }
                     LockstepIterSize::Constraint(len, _) => {
                         if len == 0 {
                             if seq.op == quoted::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                panic!(sp_diag.span_fatal(sp.clone(),
+                                panic!(sp_diag.span_fatal(sp,
                                                           "this must repeat at least once"));
                             }
                         } else {
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index b6a2c983fd4..f95693f6820 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -472,7 +472,7 @@ pub enum Stability {
 impl ::std::fmt::Debug for AttributeGate {
     fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
         match *self {
-            Gated(ref stab, ref name, ref expl, _) =>
+            Gated(ref stab, name, expl, _) =>
                 write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
             Ungated => write!(fmt, "Ungated")
         }
@@ -816,7 +816,7 @@ pub const BUILTIN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeG
 ];
 
 // cfg(...)'s that are feature gated
-const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[
+const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
     // (name in cfg, feature, function to check if the feature is enabled)
     ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
     ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
@@ -881,7 +881,7 @@ impl<'a> Context<'a> {
         let name = unwrap_or!(attr.name(), return).as_str();
         for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
             if name == n {
-                if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
+                if let Gated(_, name, desc, ref has_feature) = *gateage {
                     gate_feature_fn!(self, has_feature, attr.span, name, desc);
                 }
                 debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
@@ -1098,7 +1098,7 @@ fn contains_novel_literal(item: &ast::MetaItem) -> bool {
         NameValue(ref lit) => !lit.node.is_str(),
         List(ref list) => list.iter().any(|li| {
             match li.node {
-                MetaItem(ref mi) => contains_novel_literal(&mi),
+                MetaItem(ref mi) => contains_novel_literal(mi),
                 Literal(_) => true,
             }
         }),
@@ -1120,7 +1120,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             return
         }
 
-        let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+        let meta = panictry!(attr.parse_meta(self.context.parse_sess));
         if contains_novel_literal(&meta) {
             gate_feature_post!(&self, attr_literals, attr.span,
                                "non-string literals in attributes, or string \
@@ -1216,14 +1216,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
             }
 
             ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
-                match polarity {
-                    ast::ImplPolarity::Negative => {
-                        gate_feature_post!(&self, optin_builtin_traits,
-                                           i.span,
-                                           "negative trait bounds are not yet fully implemented; \
-                                            use marker types for now");
-                    },
-                    _ => {}
+                if let ast::ImplPolarity::Negative = polarity {
+                    gate_feature_post!(&self, optin_builtin_traits,
+                                       i.span,
+                                       "negative trait bounds are not yet fully implemented; \
+                                        use marker types for now");
                 }
 
                 if let ast::Defaultness::Default = defaultness {
@@ -1272,10 +1269,9 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
 
     fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) {
         if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty {
-            match output_ty.node {
-                ast::TyKind::Never => return,
-                _ => (),
-            };
+            if let ast::TyKind::Never = output_ty.node {
+                return
+            }
             self.visit_ty(output_ty)
         }
     }
@@ -1373,17 +1369,14 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                 span: Span,
                 _node_id: NodeId) {
         // check for const fn declarations
-        match fn_kind {
-            FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) => {
-                gate_feature_post!(&self, const_fn, span, "const fn is unstable");
-            }
-            _ => {
-                // stability of const fn methods are covered in
-                // visit_trait_item and visit_impl_item below; this is
-                // because default methods don't pass through this
-                // point.
-            }
+        if let FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) =
+            fn_kind {
+            gate_feature_post!(&self, const_fn, span, "const fn is unstable");
         }
+        // stability of const fn methods are covered in
+        // visit_trait_item and visit_impl_item below; this is
+        // because default methods don't pass through this
+        // point.
 
         match fn_kind {
             FnKind::ItemFn(_, _, _, _, abi, _, _) |
diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs
index 0271ddbccbf..ccc2a2aef31 100644
--- a/src/libsyntax/json.rs
+++ b/src/libsyntax/json.rs
@@ -330,7 +330,7 @@ impl DiagnosticSpanLine {
                       })
                      .collect()
              })
-            .unwrap_or(vec![])
+            .unwrap_or_else(|_| vec![])
     }
 }
 
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 92cec462ffb..082930777e5 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -62,7 +62,7 @@ impl<'a> Parser<'a> {
                 _ => break,
             }
         }
-        return Ok(attrs);
+        Ok(attrs)
     }
 
     /// Matches `attribute = # ! [ meta_item ]`
@@ -182,7 +182,7 @@ impl<'a> Parser<'a> {
                     }
 
                     let attr = self.parse_attribute(true)?;
-                    assert!(attr.style == ast::AttrStyle::Inner);
+                    assert_eq!(attr.style, ast::AttrStyle::Inner);
                     attrs.push(attr);
                 }
                 token::DocComment(s) => {
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index 4fe4ec7e4c0..c2755cf0591 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -43,14 +43,14 @@ pub fn expr_is_simple_block(e: &ast::Expr) -> bool {
 }
 
 /// this statement requires a semicolon after it.
-/// note that in one case (stmt_semi), we've already
+/// note that in one case (`stmt_semi`), we've already
 /// seen the semicolon, and thus don't need another.
 pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
     match *stmt {
         ast::StmtKind::Local(_) => true,
         ast::StmtKind::Item(_) => false,
         ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
-        ast::StmtKind::Semi(..) => false,
+        ast::StmtKind::Semi(..) |
         ast::StmtKind::Mac(..) => false,
     }
 }
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index b57708f9193..fe931f7cf6a 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -12,7 +12,7 @@
 
 use parse::token;
 
-/// SeqSep : a sequence separator (token)
+/// `SeqSep` : a sequence separator (token)
 /// and whether a trailing separator is allowed.
 pub struct SeqSep {
     pub sep: Option<token::Token>,
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 7ac322b144c..8b545d3b909 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -77,7 +77,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
-        lines[i..j].iter().cloned().collect()
+        lines[i..j].to_vec()
     }
 
     /// remove a "[ \t]*\*" block from each line, if possible
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index ded1f0b599a..454167695e1 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -144,7 +144,7 @@ impl<'a> StringReader<'a> {
 
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
-    pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, filemap);
         sr.bump();
         sr
@@ -180,7 +180,7 @@ impl<'a> StringReader<'a> {
 
     pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw(sess, filemap);
-        if let Err(_) = sr.advance_token() {
+        if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
             panic!(FatalError);
         }
@@ -205,7 +205,7 @@ impl<'a> StringReader<'a> {
 
         sr.bump();
 
-        if let Err(_) = sr.advance_token() {
+        if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
             panic!(FatalError);
         }
@@ -525,7 +525,7 @@ impl<'a> StringReader<'a> {
                         self.bump();
                     }
 
-                    return if doc_comment {
+                    if doc_comment {
                         self.with_str_from(start_bpos, |string| {
                             // comments with only more "/"s are not doc comments
                             let tok = if is_doc_comment(string) {
@@ -544,7 +544,7 @@ impl<'a> StringReader<'a> {
                             tok: token::Comment,
                             sp: mk_sp(start_bpos, self.pos),
                         })
-                    };
+                    }
                 }
                 Some('*') => {
                     self.bump();
@@ -766,7 +766,7 @@ impl<'a> StringReader<'a> {
             }
             let pos = self.pos;
             self.check_float_base(start_bpos, pos, base);
-            return token::Float(self.name_from(start_bpos));
+            token::Float(self.name_from(start_bpos))
         } else {
             // it might be a float if it has an exponent
             if self.ch_is('e') || self.ch_is('E') {
@@ -776,7 +776,7 @@ impl<'a> StringReader<'a> {
                 return token::Float(self.name_from(start_bpos));
             }
             // but we certainly have an integer!
-            return token::Integer(self.name_from(start_bpos));
+            token::Integer(self.name_from(start_bpos))
         }
     }
 
@@ -1053,9 +1053,9 @@ impl<'a> StringReader<'a> {
         self.bump();
         if self.ch_is('=') {
             self.bump();
-            return token::BinOpEq(op);
+            token::BinOpEq(op)
         } else {
-            return token::BinOp(op);
+            token::BinOp(op)
         }
     }
 
@@ -1102,15 +1102,15 @@ impl<'a> StringReader<'a> {
             // One-byte tokens.
             ';' => {
                 self.bump();
-                return Ok(token::Semi);
+                Ok(token::Semi)
             }
             ',' => {
                 self.bump();
-                return Ok(token::Comma);
+                Ok(token::Comma)
             }
             '.' => {
                 self.bump();
-                return if self.ch_is('.') {
+                if self.ch_is('.') {
                     self.bump();
                     if self.ch_is('.') {
                         self.bump();
@@ -1120,61 +1120,61 @@ impl<'a> StringReader<'a> {
                     }
                 } else {
                     Ok(token::Dot)
-                };
+                }
             }
             '(' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Paren));
+                Ok(token::OpenDelim(token::Paren))
             }
             ')' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Paren));
+                Ok(token::CloseDelim(token::Paren))
             }
             '{' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Brace));
+                Ok(token::OpenDelim(token::Brace))
             }
             '}' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Brace));
+                Ok(token::CloseDelim(token::Brace))
             }
             '[' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Bracket));
+                Ok(token::OpenDelim(token::Bracket))
             }
             ']' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Bracket));
+                Ok(token::CloseDelim(token::Bracket))
             }
             '@' => {
                 self.bump();
-                return Ok(token::At);
+                Ok(token::At)
             }
             '#' => {
                 self.bump();
-                return Ok(token::Pound);
+                Ok(token::Pound)
             }
             '~' => {
                 self.bump();
-                return Ok(token::Tilde);
+                Ok(token::Tilde)
             }
             '?' => {
                 self.bump();
-                return Ok(token::Question);
+                Ok(token::Question)
             }
             ':' => {
                 self.bump();
                 if self.ch_is(':') {
                     self.bump();
-                    return Ok(token::ModSep);
+                    Ok(token::ModSep)
                 } else {
-                    return Ok(token::Colon);
+                    Ok(token::Colon)
                 }
             }
 
             '$' => {
                 self.bump();
-                return Ok(token::Dollar);
+                Ok(token::Dollar)
             }
 
             // Multi-byte tokens.
@@ -1182,21 +1182,21 @@ impl<'a> StringReader<'a> {
                 self.bump();
                 if self.ch_is('=') {
                     self.bump();
-                    return Ok(token::EqEq);
+                    Ok(token::EqEq)
                 } else if self.ch_is('>') {
                     self.bump();
-                    return Ok(token::FatArrow);
+                    Ok(token::FatArrow)
                 } else {
-                    return Ok(token::Eq);
+                    Ok(token::Eq)
                 }
             }
             '!' => {
                 self.bump();
                 if self.ch_is('=') {
                     self.bump();
-                    return Ok(token::Ne);
+                    Ok(token::Ne)
                 } else {
-                    return Ok(token::Not);
+                    Ok(token::Not)
                 }
             }
             '<' => {
@@ -1204,21 +1204,21 @@ impl<'a> StringReader<'a> {
                 match self.ch.unwrap_or('\x00') {
                     '=' => {
                         self.bump();
-                        return Ok(token::Le);
+                        Ok(token::Le)
                     }
                     '<' => {
-                        return Ok(self.binop(token::Shl));
+                        Ok(self.binop(token::Shl))
                     }
                     '-' => {
                         self.bump();
                         match self.ch.unwrap_or('\x00') {
                             _ => {
-                                return Ok(token::LArrow);
+                                Ok(token::LArrow)
                             }
                         }
                     }
                     _ => {
-                        return Ok(token::Lt);
+                        Ok(token::Lt)
                     }
                 }
             }
@@ -1227,13 +1227,13 @@ impl<'a> StringReader<'a> {
                 match self.ch.unwrap_or('\x00') {
                     '=' => {
                         self.bump();
-                        return Ok(token::Ge);
+                        Ok(token::Ge)
                     }
                     '>' => {
-                        return Ok(self.binop(token::Shr));
+                        Ok(self.binop(token::Shr))
                     }
                     _ => {
-                        return Ok(token::Gt);
+                        Ok(token::Gt)
                     }
                 }
             }
@@ -1303,7 +1303,7 @@ impl<'a> StringReader<'a> {
                 };
                 self.bump(); // advance ch past token
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::Char(id), suffix));
+                Ok(token::Literal(token::Char(id), suffix))
             }
             'b' => {
                 self.bump();
@@ -1314,7 +1314,7 @@ impl<'a> StringReader<'a> {
                     _ => unreachable!(),  // Should have been a token::Ident above.
                 };
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(lit, suffix));
+                Ok(token::Literal(lit, suffix))
             }
             '"' => {
                 let start_bpos = self.pos;
@@ -1345,7 +1345,7 @@ impl<'a> StringReader<'a> {
                 };
                 self.bump();
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::Str_(id), suffix));
+                Ok(token::Literal(token::Str_(id), suffix))
             }
             'r' => {
                 let start_bpos = self.pos;
@@ -1416,24 +1416,24 @@ impl<'a> StringReader<'a> {
                     Symbol::intern("??")
                 };
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
+                Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
             }
             '-' => {
                 if self.nextch_is('>') {
                     self.bump();
                     self.bump();
-                    return Ok(token::RArrow);
+                    Ok(token::RArrow)
                 } else {
-                    return Ok(self.binop(token::Minus));
+                    Ok(self.binop(token::Minus))
                 }
             }
             '&' => {
                 if self.nextch_is('&') {
                     self.bump();
                     self.bump();
-                    return Ok(token::AndAnd);
+                    Ok(token::AndAnd)
                 } else {
-                    return Ok(self.binop(token::And));
+                    Ok(self.binop(token::And))
                 }
             }
             '|' => {
@@ -1441,27 +1441,27 @@ impl<'a> StringReader<'a> {
                     Some('|') => {
                         self.bump();
                         self.bump();
-                        return Ok(token::OrOr);
+                        Ok(token::OrOr)
                     }
                     _ => {
-                        return Ok(self.binop(token::Or));
+                        Ok(self.binop(token::Or))
                     }
                 }
             }
             '+' => {
-                return Ok(self.binop(token::Plus));
+                Ok(self.binop(token::Plus))
             }
             '*' => {
-                return Ok(self.binop(token::Star));
+                Ok(self.binop(token::Star))
             }
             '/' => {
-                return Ok(self.binop(token::Slash));
+                Ok(self.binop(token::Slash))
             }
             '^' => {
-                return Ok(self.binop(token::Caret));
+                Ok(self.binop(token::Caret))
             }
             '%' => {
-                return Ok(self.binop(token::Percent));
+                Ok(self.binop(token::Percent))
             }
             c => {
                 let last_bpos = self.pos;
@@ -1470,7 +1470,7 @@ impl<'a> StringReader<'a> {
                                                           bpos,
                                                           "unknown start of token",
                                                           c);
-                unicode_chars::check_for_substitution(&self, c, &mut err);
+                unicode_chars::check_for_substitution(self, c, &mut err);
                 self.fatal_errs.push(err);
                 Err(())
             }
@@ -1492,14 +1492,14 @@ impl<'a> StringReader<'a> {
         if self.ch_is('\n') {
             self.bump();
         }
-        return val;
+        val
     }
 
     fn read_one_line_comment(&mut self) -> String {
         let val = self.read_to_eol();
         assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') ||
                 (val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!'));
-        return val;
+        val
     }
 
     fn consume_non_eol_whitespace(&mut self) {
@@ -1543,7 +1543,7 @@ impl<'a> StringReader<'a> {
             Symbol::intern("?")
         };
         self.bump(); // advance ch past token
-        return token::Byte(id);
+        token::Byte(id)
     }
 
     fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool {
@@ -1576,7 +1576,7 @@ impl<'a> StringReader<'a> {
             Symbol::intern("??")
         };
         self.bump();
-        return token::ByteStr(id);
+        token::ByteStr(id)
     }
 
     fn scan_raw_byte_string(&mut self) -> token::Lit {
@@ -1629,8 +1629,8 @@ impl<'a> StringReader<'a> {
             self.bump();
         }
         self.bump();
-        return token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
-                                 hash_count);
+        token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
+                                 hash_count)
     }
 }
 
@@ -1648,7 +1648,7 @@ fn in_range(c: Option<char>, lo: char, hi: char) -> bool {
 }
 
 fn is_dec_digit(c: Option<char>) -> bool {
-    return in_range(c, '0', '9');
+    in_range(c, '0', '9')
 }
 
 pub fn is_doc_comment(s: &str) -> bool {
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index fe3ca1cf230..4fcf7614622 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -107,18 +107,18 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
     parser.parse_inner_attributes()
 }
 
-pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                       -> PResult<'a, ast::Crate> {
+pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
+                                       -> PResult<ast::Crate> {
     new_parser_from_source_str(sess, name, source).parse_crate_mod()
 }
 
-pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                             -> PResult<'a, Vec<ast::Attribute>> {
+pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
+                                             -> PResult<Vec<ast::Attribute>> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, P<ast::Expr>> {
+pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<P<ast::Expr>> {
     new_parser_from_source_str(sess, name, source).parse_expr()
 }
 
@@ -126,29 +126,29 @@ pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a Pa
 ///
 /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err`
 /// when a syntax error occurred.
-pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, Option<P<ast::Item>>> {
+pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<Option<P<ast::Item>>> {
     new_parser_from_source_str(sess, name, source).parse_item()
 }
 
-pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, ast::MetaItem> {
+pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<ast::MetaItem> {
     new_parser_from_source_str(sess, name, source).parse_meta_item()
 }
 
-pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, Option<ast::Stmt>> {
+pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<Option<ast::Stmt>> {
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
                                         -> TokenStream {
     filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
 }
 
 // Create a new parser from a source string
-pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
-                                      -> Parser<'a> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
+                                      -> Parser {
     filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
 }
 
@@ -173,7 +173,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 }
 
 /// Given a filemap and config, return a parser
-pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
+pub fn filemap_to_parser(sess: &ParseSess, filemap: Rc<FileMap>) -> Parser {
     let end_pos = filemap.end_pos;
     let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
 
@@ -186,7 +186,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
     stream_to_parser(sess, tts.into_iter().collect())
 }
 
@@ -216,8 +216,8 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream
     panictry!(srdr.parse_all_token_trees())
 }
 
-/// Given stream and the ParseSess, produce a parser
-pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+/// Given a stream and the `ParseSess`, produce a parser
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
     Parser::new(sess, stream, None, false)
 }
 
@@ -251,7 +251,7 @@ pub fn char_lit(lit: &str) -> (char, isize) {
             (c, 4)
         }
         'u' => {
-            assert!(lit.as_bytes()[2] == b'{');
+            assert_eq!(lit.as_bytes()[2], b'{');
             let idx = lit.find('}').unwrap();
             let v = u32::from_str_radix(&lit[3..idx], 16).unwrap();
             let c = char::from_u32(v).unwrap();
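
The `assert!(a == b)` to `assert_eq!(a, b)` swaps (here and in pp.rs further down) are purely about diagnostics: on failure the `_eq`/`_ne` forms print both operands rather than just the stringified condition. Tiny illustration:

    fn main() {
        let lit = "u{7FFF}";
        // On failure this would report `left: ..., right: ...`,
        // which is far more useful than "assertion failed: ...".
        assert_eq!(lit.as_bytes()[0], b'u');
        assert_ne!(lit.len(), 0);
    }
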
@@ -287,51 +287,46 @@ pub fn str_lit(lit: &str) -> String {
     }
 
     let mut chars = lit.char_indices().peekable();
-    loop {
-        match chars.next() {
-            Some((i, c)) => {
-                match c {
-                    '\\' => {
-                        let ch = chars.peek().unwrap_or_else(|| {
-                            panic!("{}", error(i))
-                        }).1;
-
-                        if ch == '\n' {
-                            eat(&mut chars);
-                        } else if ch == '\r' {
-                            chars.next();
-                            let ch = chars.peek().unwrap_or_else(|| {
-                                panic!("{}", error(i))
-                            }).1;
-
-                            if ch != '\n' {
-                                panic!("lexer accepted bare CR");
-                            }
-                            eat(&mut chars);
-                        } else {
-                            // otherwise, a normal escape
-                            let (c, n) = char_lit(&lit[i..]);
-                            for _ in 0..n - 1 { // we don't need to move past the first \
-                                chars.next();
-                            }
-                            res.push(c);
-                        }
-                    },
-                    '\r' => {
-                        let ch = chars.peek().unwrap_or_else(|| {
-                            panic!("{}", error(i))
-                        }).1;
+    while let Some((i, c)) = chars.next() {
+        match c {
+            '\\' => {
+                let ch = chars.peek().unwrap_or_else(|| {
+                    panic!("{}", error(i))
+                }).1;
+
+                if ch == '\n' {
+                    eat(&mut chars);
+                } else if ch == '\r' {
+                    chars.next();
+                    let ch = chars.peek().unwrap_or_else(|| {
+                        panic!("{}", error(i))
+                    }).1;
 
-                        if ch != '\n' {
-                            panic!("lexer accepted bare CR");
-                        }
+                    if ch != '\n' {
+                        panic!("lexer accepted bare CR");
+                    }
+                    eat(&mut chars);
+                } else {
+                    // otherwise, a normal escape
+                    let (c, n) = char_lit(&lit[i..]);
+                    for _ in 0..n - 1 { // we don't need to move past the first \
                         chars.next();
-                        res.push('\n');
                     }
-                    c => res.push(c),
+                    res.push(c);
                 }
             },
-            None => break
+            '\r' => {
+                let ch = chars.peek().unwrap_or_else(|| {
+                    panic!("{}", error(i))
+                }).1;
+
+                if ch != '\n' {
+                    panic!("lexer accepted bare CR");
+                }
+                chars.next();
+                res.push('\n');
+            }
+            c => res.push(c),
         }
     }
 
@@ -348,20 +343,15 @@ pub fn raw_str_lit(lit: &str) -> String {
 
     // FIXME #8372: This could be a for-loop if it didn't borrow the iterator
     let mut chars = lit.chars().peekable();
-    loop {
-        match chars.next() {
-            Some(c) => {
-                if c == '\r' {
-                    if *chars.peek().unwrap() != '\n' {
-                        panic!("lexer accepted bare CR");
-                    }
-                    chars.next();
-                    res.push('\n');
-                } else {
-                    res.push(c);
-                }
-            },
-            None => break
+    while let Some(c) = chars.next() {
+        if c == '\r' {
+            if *chars.peek().unwrap() != '\n' {
+                panic!("lexer accepted bare CR");
+            }
+            chars.next();
+            res.push('\n');
+        } else {
+            res.push(c);
         }
     }
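
Both string-literal loops collapse `loop { match chars.next() { Some(c) => ..., None => break } }` into `while let Some(c) = chars.next()`: the same control flow minus the explicit `break` arm (clippy's `while_let_loop`). A self-contained sketch of the shape, using a hypothetical `normalize_crlf` helper rather than the libsyntax code:

    fn normalize_crlf(input: &str) -> String {
        let mut out = String::new();
        let mut chars = input.chars().peekable();
        // Old shape:
        //     loop {
        //         match chars.next() {
        //             Some(c) => { ... }
        //             None => break,
        //         }
        //     }
        // New shape: `while let` supplies the `None => break` arm for free.
        while let Some(c) = chars.next() {
            if c == '\r' && chars.peek() == Some(&'\n') {
                chars.next();
                out.push('\n');
            } else {
                out.push(c);
            }
        }
        out
    }

    fn main() {
        assert_eq!(normalize_crlf("a\r\nb"), "a\nb");
    }

A plain `for` loop would trip over the peeking done inside the body (the iterator is borrowed), which is why the code keeps the explicit `next()` call, as the FIXME above notes.
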
 
@@ -459,7 +449,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
     if lit.len() == 1 {
         (lit.as_bytes()[0], 1)
     } else {
-        assert!(lit.as_bytes()[0] == b'\\', err(0));
+        assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0));
         let b = match lit.as_bytes()[1] {
             b'"' => b'"',
             b'n' => b'\n',
@@ -480,7 +470,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
                 }
             }
         };
-        return (b, 2);
+        (b, 2)
     }
 }
 
@@ -491,7 +481,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
+    fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
@@ -578,7 +568,7 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler
             if let Some(err) = err {
                 err!(diag, |span, diag| diag.span_err(span, err));
             }
-            return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
+            return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
         }
     }
 
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index d5baec675e4..078e86aa294 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -59,7 +59,7 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
 
         if !self.obsolete_set.contains(&kind) &&
             (error || self.sess.span_diagnostic.can_emit_warnings) {
-            err.note(&format!("{}", desc));
+            err.note(desc);
             self.obsolete_set.insert(kind);
         }
         err.emit();
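
The obsolete.rs change drops a `format!("{}", desc)` whose only effect was to allocate a `String` that is immediately re-borrowed (clippy's `useless_format`); when the value is already a `&str` it can be passed directly. Sketch with a made-up `note` helper:

    fn note(msg: &str) {
        println!("note: {}", msg);
    }

    fn main() {
        let desc = "obsolete syntax";
        // Before: note(&format!("{}", desc)); -- a pointless allocation.
        note(desc);
    }
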
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index ca1351e3b41..28c57e0855f 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -248,7 +248,7 @@ impl TokenCursor {
     fn next_desugared(&mut self) -> TokenAndSpan {
         let (sp, name) = match self.next() {
             TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
-            tok @ _ => return tok,
+            tok => return tok,
         };
 
         let stripped = strip_doc_comment_decoration(&name.as_str());
@@ -354,7 +354,7 @@ pub enum Error {
 }
 
 impl Error {
-    pub fn span_err<'a>(self, sp: Span, handler: &'a errors::Handler) -> DiagnosticBuilder<'a> {
+    pub fn span_err(self, sp: Span, handler: &errors::Handler) -> DiagnosticBuilder {
         match self {
             Error::FileNotFoundForModule { ref mod_name,
                                            ref default_path,
@@ -478,9 +478,10 @@ impl<'a> Parser<'a> {
     }
 
     fn next_tok(&mut self) -> TokenAndSpan {
-        let mut next = match self.desugar_doc_comments {
-            true => self.token_cursor.next_desugared(),
-            false => self.token_cursor.next(),
+        let mut next = if self.desugar_doc_comments {
+            self.token_cursor.next_desugared()
+        } else {
+            self.token_cursor.next()
         };
         if next.sp == syntax_pos::DUMMY_SP {
             next.sp = self.prev_span;
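
Matching on a plain `bool`, as the old `match self.desugar_doc_comments { true => ..., false => ... }` did, is just a more verbose `if`/`else` (clippy's `match_bool`). A tiny sketch of the rewritten shape:

    fn next_kind(desugar_doc_comments: bool) -> &'static str {
        // Before: match desugar_doc_comments { true => "desugared", false => "plain" }
        if desugar_doc_comments { "desugared" } else { "plain" }
    }

    fn main() {
        assert_eq!(next_kind(true), "desugared");
        assert_eq!(next_kind(false), "plain");
    }
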
@@ -551,7 +552,7 @@ impl<'a> Parser<'a> {
             // This might be a sign we need a connect method on Iterator.
             let b = i.next()
                      .map_or("".to_string(), |t| t.to_string());
-            i.enumerate().fold(b, |mut b, (i, ref a)| {
+            i.enumerate().fold(b, |mut b, (i, a)| {
                 if tokens.len() > 2 && i == tokens.len() - 2 {
                     b.push_str(", or ");
                 } else if tokens.len() == 2 && i == tokens.len() - 2 {
@@ -985,18 +986,15 @@ impl<'a> Parser<'a> {
                 token::CloseDelim(..) | token::Eof => break,
                 _ => {}
             };
-            match sep.sep {
-                Some(ref t) => {
-                    if first {
-                        first = false;
-                    } else {
-                        if let Err(e) = self.expect(t) {
-                            fe(e);
-                            break;
-                        }
+            if let Some(ref t) = sep.sep {
+                if first {
+                    first = false;
+                } else {
+                    if let Err(e) = self.expect(t) {
+                        fe(e);
+                        break;
                     }
                 }
-                _ => ()
             }
             if sep.trailing_sep_allowed && kets.iter().any(|k| self.check(k)) {
                 break;
@@ -1493,7 +1491,7 @@ impl<'a> Parser<'a> {
         let sum_span = ty.span.to(self.prev_span);
 
         let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
-            "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty));
+            "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
 
         match ty.node {
             TyKind::Rptr(ref lifetime, ref mut_ty) => {
@@ -1547,8 +1545,8 @@ impl<'a> Parser<'a> {
 
     pub fn is_named_argument(&mut self) -> bool {
         let offset = match self.token {
-            token::BinOp(token::And) => 1,
-            token::AndAnd => 1,
+            token::BinOp(token::And) |
+            token::AndAnd |
             _ if self.token.is_keyword(keywords::Mut) => 1,
             _ => 0
         };
@@ -2571,7 +2569,7 @@ impl<'a> Parser<'a> {
                             s.print_usize(float.trunc() as usize)?;
                             s.pclose()?;
                             word(&mut s.s, ".")?;
-                            word(&mut s.s, fstr.splitn(2, ".").last().unwrap())
+                            word(&mut s.s, fstr.splitn(2, '.').last().unwrap())
                         });
                         err.span_suggestion(
                             lo.to(self.prev_span),
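
The `splitn(2, ".")` to `splitn(2, '.')` change (and the later `contains('-')` one in pprust.rs) swaps a one-byte string pattern for a `char` pattern, which states the intent more directly (clippy's `single_char_pattern`). Sketch:

    fn main() {
        let fstr = "1.23";
        // A one-character pattern reads better as a `char` than as a &str.
        let frac = fstr.splitn(2, '.').last().unwrap(); // was: splitn(2, ".")
        assert_eq!(frac, "23");
        assert!("extern-crate".contains('-'));          // same idea in pprust.rs
    }
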
@@ -3154,10 +3152,11 @@ impl<'a> Parser<'a> {
 
         let attrs = self.parse_outer_attributes()?;
         let pats = self.parse_pats()?;
-        let mut guard = None;
-        if self.eat_keyword(keywords::If) {
-            guard = Some(self.parse_expr()?);
-        }
+        let guard = if self.eat_keyword(keywords::If) {
+            Some(self.parse_expr()?)
+        } else {
+            None
+        };
         self.expect(&token::FatArrow)?;
         let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?;
 
@@ -3600,10 +3599,11 @@ impl<'a> Parser<'a> {
         let lo = self.span;
         let pat = self.parse_pat()?;
 
-        let mut ty = None;
-        if self.eat(&token::Colon) {
-            ty = Some(self.parse_ty()?);
-        }
+        let ty = if self.eat(&token::Colon) {
+            Some(self.parse_ty()?)
+        } else {
+            None
+        };
         let init = self.parse_initializer()?;
         Ok(P(ast::Local {
             ty: ty,
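
Two hunks here (the match-arm guard and the local's optional type) replace "declare a mutable `None`, maybe overwrite it" with a single `if`/`else` initializer, which also lets the binding stay immutable (clippy's `useless_let_if_seq`). Standalone sketch with a hypothetical `describe` helper:

    fn describe(eat_colon: bool) -> String {
        // Old shape:
        //     let mut ty = None;
        //     if eat_colon {
        //         ty = Some("i32".to_string());
        //     }
        // New shape: one immutable binding, initialized by a single expression.
        let ty = if eat_colon {
            Some("i32".to_string())
        } else {
            None
        };
        format!("ty = {:?}", ty)
    }

    fn main() {
        assert_eq!(describe(true), "ty = Some(\"i32\")");
        assert_eq!(describe(false), "ty = None");
    }
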
@@ -3929,7 +3929,7 @@ impl<'a> Parser<'a> {
                 },
                 None => {
                     let unused_attrs = |attrs: &[_], s: &mut Self| {
-                        if attrs.len() > 0 {
+                        if !attrs.is_empty() {
                             if s.prev_token_kind == PrevTokenKind::DocComment {
                                 s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
                             } else {
@@ -4815,7 +4815,7 @@ impl<'a> Parser<'a> {
                 self.expect(&token::Not)?;
             }
 
-            self.complain_if_pub_macro(&vis, prev_span);
+            self.complain_if_pub_macro(vis, prev_span);
 
             // eat a matched-delimiter token tree:
             *at_end = true;
@@ -4917,13 +4917,10 @@ impl<'a> Parser<'a> {
                 }
             }
         } else {
-            match polarity {
-                ast::ImplPolarity::Negative => {
-                    // This is a negated type implementation
-                    // `impl !MyType {}`, which is not allowed.
-                    self.span_err(neg_span, "inherent implementation can't be negated");
-                },
-                _ => {}
+            if let ast::ImplPolarity::Negative = polarity {
+                // This is a negated type implementation
+                // `impl !MyType {}`, which is not allowed.
+                self.span_err(neg_span, "inherent implementation can't be negated");
             }
             None
         };
@@ -5185,7 +5182,7 @@ impl<'a> Parser<'a> {
                 let path_span = self.prev_span;
                 let help_msg = format!("make this visible only to module `{}` with `in`:", path);
                 self.expect(&token::CloseDelim(token::Paren))?;  // `)`
-                let mut err = self.span_fatal_help(path_span, &msg, &suggestion);
+                let mut err = self.span_fatal_help(path_span, msg, suggestion);
                 err.span_suggestion(path_span, &help_msg, format!("in {}", path));
                 err.emit();  // emit diagnostic, but continue with public visibility
             }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 25cabef70c1..77db604c56e 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -53,6 +53,10 @@ impl DelimToken {
     pub fn len(self) -> usize {
         if self == NoDelim { 0 } else { 1 }
     }
+
+    pub fn is_empty(self) -> bool {
+        self == NoDelim
+    }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
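
The new `DelimToken::is_empty` pairs the existing `len` with a direct emptiness check, the convention clippy's `len_without_is_empty` lint asks of public APIs. A minimal illustration with a toy `Delims` type:

    struct Delims(usize);

    impl Delims {
        fn len(&self) -> usize {
            self.0
        }

        // Anything exposing `len()` should also expose `is_empty()`,
        // so callers never have to write `x.len() == 0`.
        fn is_empty(&self) -> bool {
            self.0 == 0
        }
    }

    fn main() {
        let d = Delims(0);
        assert!(d.is_empty());
        assert_eq!(d.len(), 0);
    }
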
@@ -198,17 +202,17 @@ impl Token {
     pub fn can_begin_expr(&self) -> bool {
         match *self {
             Ident(ident)                => ident_can_begin_expr(ident), // value name or keyword
-            OpenDelim(..)               => true, // tuple, array or block
-            Literal(..)                 => true, // literal
-            Not                         => true, // operator not
-            BinOp(Minus)                => true, // unary minus
-            BinOp(Star)                 => true, // dereference
-            BinOp(Or) | OrOr            => true, // closure
-            BinOp(And)                  => true, // reference
-            AndAnd                      => true, // double reference
-            DotDot | DotDotDot          => true, // range notation
-            Lt | BinOp(Shl)             => true, // associated path
-            ModSep                      => true, // global path
+            OpenDelim(..)               | // tuple, array or block
+            Literal(..)                 | // literal
+            Not                         | // operator not
+            BinOp(Minus)                | // unary minus
+            BinOp(Star)                 | // dereference
+            BinOp(Or) | OrOr            | // closure
+            BinOp(And)                  | // reference
+            AndAnd                      | // double reference
+            DotDot | DotDotDot          | // range notation
+            Lt | BinOp(Shl)             | // associated path
+            ModSep                      | // global path
             Pound                       => true, // expression attributes
             Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
@@ -222,16 +226,16 @@ impl Token {
     pub fn can_begin_type(&self) -> bool {
         match *self {
             Ident(ident)                => ident_can_begin_type(ident), // type name or keyword
-            OpenDelim(Paren)            => true, // tuple
-            OpenDelim(Bracket)          => true, // array
-            Underscore                  => true, // placeholder
-            Not                         => true, // never
-            BinOp(Star)                 => true, // raw pointer
-            BinOp(And)                  => true, // reference
-            AndAnd                      => true, // double reference
-            Question                    => true, // maybe bound in trait object
-            Lifetime(..)                => true, // lifetime bound in trait object
-            Lt | BinOp(Shl)             => true, // associated path
+            OpenDelim(Paren)            | // tuple
+            OpenDelim(Bracket)          | // array
+            Underscore                  | // placeholder
+            Not                         | // never
+            BinOp(Star)                 | // raw pointer
+            BinOp(And)                  | // reference
+            AndAnd                      | // double reference
+            Question                    | // maybe bound in trait object
+            Lifetime(..)                | // lifetime bound in trait object
+            Lt | BinOp(Shl)             | // associated path
             ModSep                      => true, // global path
             Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtTy(..) | NtPath(..) => true,
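
The `can_begin_expr`/`can_begin_type` rewrites fold a run of arms that all returned `true` into one arm with `|`-joined patterns, keeping the per-pattern comments. A compact standalone example with a made-up `Tok` enum:

    #[allow(dead_code)]
    enum Tok {
        Ident,
        OpenParen,
        Star,
        And,
        Semi,
    }

    fn can_begin_type(tok: &Tok) -> bool {
        match *tok {
            // One arm, several `|`-joined patterns, one shared body,
            // instead of repeating `=> true` once per pattern.
            Tok::Ident     |  // type name
            Tok::OpenParen |  // tuple
            Tok::Star      |  // raw pointer
            Tok::And          // reference
                => true,
            Tok::Semi => false,
        }
    }

    fn main() {
        assert!(can_begin_type(&Tok::Star));
        assert!(!can_begin_type(&Tok::Semi));
    }
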
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 1d67c2a2c2b..e893c859247 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -113,22 +113,22 @@
 //! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
 //! and point-in-infinite-stream senses freely.
 //!
-//! There is a parallel ring buffer, 'size', that holds the calculated size of
+//! There is a parallel ring buffer, `size`, that holds the calculated size of
 //! each token. Why calculated? Because for Begin/End pairs, the "size"
 //! includes everything between the pair. That is, the "size" of Begin is
 //! actually the sum of the sizes of everything between Begin and the paired
-//! End that follows. Since that is arbitrarily far in the future, 'size' is
+//! End that follows. Since that is arbitrarily far in the future, `size` is
 //! being rewritten regularly while the printer runs; in fact most of the
-//! machinery is here to work out 'size' entries on the fly (and give up when
+//! machinery is here to work out `size` entries on the fly (and give up when
 //! they're so obviously over-long that "infinity" is a good enough
 //! approximation for purposes of line breaking).
 //!
 //! The "input side" of the printer is managed as an abstract process called
-//! SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in
+//! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in
 //! other words, the process of calculating 'size' entries.
 //!
 //! The "output side" of the printer is managed by an abstract process called
-//! PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
+//! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to
 //! do with each token/size pair it consumes as it goes. It's trying to consume
 //! the entire buffered window, but can't output anything until the size is >=
 //! 0 (sizes are set to negative while they're pending calculation).
@@ -409,7 +409,7 @@ impl<'a> Printer<'a> {
     pub fn advance_right(&mut self) {
         self.right += 1;
         self.right %= self.buf_len;
-        assert!(self.right != self.left);
+        assert_ne!(self.right, self.left);
     }
     pub fn advance_left(&mut self) -> io::Result<()> {
         debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right,
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 0c7e8fda837..6114db25fe8 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -233,7 +233,7 @@ pub fn token_to_string(tok: &Token) -> String {
         token::CloseDelim(token::Bracket) => "]".to_string(),
         token::OpenDelim(token::Brace) => "{".to_string(),
         token::CloseDelim(token::Brace) => "}".to_string(),
-        token::OpenDelim(token::NoDelim) => " ".to_string(),
+        token::OpenDelim(token::NoDelim) |
         token::CloseDelim(token::NoDelim) => " ".to_string(),
         token::Pound                => "#".to_string(),
         token::Dollar               => "$".to_string(),
@@ -244,7 +244,7 @@ pub fn token_to_string(tok: &Token) -> String {
             let mut out = match lit {
                 token::Byte(b)           => format!("b'{}'", b),
                 token::Char(c)           => format!("'{}'", c),
-                token::Float(c)          => c.to_string(),
+                token::Float(c)          |
                 token::Integer(c)        => c.to_string(),
                 token::Str_(s)           => format!("\"{}\"", s),
                 token::StrRaw(s, n)      => format!("r{delim}\"{string}\"{delim}",
@@ -277,23 +277,23 @@ pub fn token_to_string(tok: &Token) -> String {
         token::Shebang(s)           => format!("/* shebang: {}*/", s),
 
         token::Interpolated(ref nt) => match **nt {
-            token::NtExpr(ref e)        => expr_to_string(&e),
-            token::NtMeta(ref e)        => meta_item_to_string(&e),
-            token::NtTy(ref e)          => ty_to_string(&e),
-            token::NtPath(ref e)        => path_to_string(&e),
-            token::NtItem(ref e)        => item_to_string(&e),
-            token::NtBlock(ref e)       => block_to_string(&e),
-            token::NtStmt(ref e)        => stmt_to_string(&e),
-            token::NtPat(ref e)         => pat_to_string(&e),
+            token::NtExpr(ref e)        => expr_to_string(e),
+            token::NtMeta(ref e)        => meta_item_to_string(e),
+            token::NtTy(ref e)          => ty_to_string(e),
+            token::NtPath(ref e)        => path_to_string(e),
+            token::NtItem(ref e)        => item_to_string(e),
+            token::NtBlock(ref e)       => block_to_string(e),
+            token::NtStmt(ref e)        => stmt_to_string(e),
+            token::NtPat(ref e)         => pat_to_string(e),
             token::NtIdent(ref e)       => ident_to_string(e.node),
             token::NtTT(ref tree)       => tt_to_string(tree.clone()),
-            token::NtArm(ref e)         => arm_to_string(&e),
-            token::NtImplItem(ref e)    => impl_item_to_string(&e),
-            token::NtTraitItem(ref e)   => trait_item_to_string(&e),
-            token::NtGenerics(ref e)    => generics_to_string(&e),
-            token::NtWhereClause(ref e) => where_clause_to_string(&e),
-            token::NtArg(ref e)         => arg_to_string(&e),
-            token::NtVis(ref e)         => vis_to_string(&e),
+            token::NtArm(ref e)         => arm_to_string(e),
+            token::NtImplItem(ref e)    => impl_item_to_string(e),
+            token::NtTraitItem(ref e)   => trait_item_to_string(e),
+            token::NtGenerics(ref e)    => generics_to_string(e),
+            token::NtWhereClause(ref e) => where_clause_to_string(e),
+            token::NtArg(ref e)         => arg_to_string(e),
+            token::NtVis(ref e)         => vis_to_string(e),
         }
     }
 }
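
The rest of this file's hunks are dominated by the "stray ampersand" cleanups from the commit message: `ref`-bound and iterator-produced values are already references, so the extra `&` in calls like `print_expr(&e)` is a needless borrow that the compiler immediately peels off again. A minimal sketch with a toy `print_name` helper, not the printer API:

    fn print_name(out: &mut String, name: &str) {
        out.push_str(name);
        out.push(' ');
    }

    fn main() {
        let names = vec!["Begin".to_string(), "End".to_string()];
        let mut out = String::new();
        for name in &names {
            // `name` is already `&String` and deref-coerces to `&str`;
            // writing `&name` would only add another layer of borrowing.
            print_name(&mut out, name);
        }
        if let Some(first) = names.first() {
            // `first` is already a reference too, so again no extra `&`.
            print_name(&mut out, first);
        }
        assert_eq!(out, "Begin End Begin ");
    }
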
@@ -520,8 +520,7 @@ pub trait PrintState<'a> {
 
         let mut result = None;
 
-        if let &Some(ref lits) = self.literals()
-        {
+        if let Some(ref lits) = *self.literals() {
             while cur_lit < lits.len() {
                 let ltrl = (*lits)[cur_lit].clone();
                 if ltrl.pos > pos { break; }
@@ -618,11 +617,8 @@ pub trait PrintState<'a> {
 
     fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
         self.maybe_print_comment(lit.span.lo)?;
-        match self.next_lit(lit.span.lo) {
-            Some(ref ltrl) => {
-                return word(self.writer(), &(*ltrl).lit);
-            }
-            _ => ()
+        if let Some(ref ltrl) = self.next_lit(lit.span.lo) {
+            return word(self.writer(), &(*ltrl).lit);
         }
         match lit.node {
             ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
@@ -799,7 +795,7 @@ pub trait PrintState<'a> {
                 self.popen()?;
                 self.commasep(Consistent,
                               &items[..],
-                              |s, i| s.print_meta_list_item(&i))?;
+                              |s, i| s.print_meta_list_item(i))?;
                 self.pclose()?;
             }
         }
@@ -965,11 +961,9 @@ impl<'a> State<'a> {
     {
         self.rbox(0, b)?;
         let len = elts.len();
-        let mut i = 0;
-        for elt in elts {
+        for (i, elt) in elts.iter().enumerate() {
             self.maybe_print_comment(get_span(elt).hi)?;
             op(self, elt)?;
-            i += 1;
             if i < len {
                 word(&mut self.s, ",")?;
                 self.maybe_print_trailing_comment(get_span(elt),
@@ -982,14 +976,14 @@ impl<'a> State<'a> {
 
     pub fn commasep_exprs(&mut self, b: Breaks,
                           exprs: &[P<ast::Expr>]) -> io::Result<()> {
-        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span)
+        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
     }
 
     pub fn print_mod(&mut self, _mod: &ast::Mod,
                      attrs: &[ast::Attribute]) -> io::Result<()> {
         self.print_inner_attributes(attrs)?;
         for item in &_mod.items {
-            self.print_item(&item)?;
+            self.print_item(item)?;
         }
         Ok(())
     }
@@ -1018,7 +1012,7 @@ impl<'a> State<'a> {
         match ty.node {
             ast::TyKind::Slice(ref ty) => {
                 word(&mut self.s, "[")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, "]")?;
             }
             ast::TyKind::Ptr(ref mt) => {
@@ -1040,7 +1034,7 @@ impl<'a> State<'a> {
             ast::TyKind::Tup(ref elts) => {
                 self.popen()?;
                 self.commasep(Inconsistent, &elts[..],
-                              |s, ty| s.print_type(&ty))?;
+                              |s, ty| s.print_type(ty))?;
                 if elts.len() == 1 {
                     word(&mut self.s, ",")?;
                 }
@@ -1048,7 +1042,7 @@ impl<'a> State<'a> {
             }
             ast::TyKind::Paren(ref typ) => {
                 self.popen()?;
-                self.print_type(&typ)?;
+                self.print_type(typ)?;
                 self.pclose()?;
             }
             ast::TyKind::BareFn(ref f) => {
@@ -1081,14 +1075,14 @@ impl<'a> State<'a> {
             }
             ast::TyKind::Array(ref ty, ref v) => {
                 word(&mut self.s, "[")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, "; ")?;
-                self.print_expr(&v)?;
+                self.print_expr(v)?;
                 word(&mut self.s, "]")?;
             }
             ast::TyKind::Typeof(ref e) => {
                 word(&mut self.s, "typeof(")?;
-                self.print_expr(&e)?;
+                self.print_expr(e)?;
                 word(&mut self.s, ")")?;
             }
             ast::TyKind::Infer => {
@@ -1130,7 +1124,7 @@ impl<'a> State<'a> {
                 }
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&t)?;
+                self.print_type(t)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the head-ibox
                 self.end() // end the outer cbox
@@ -1187,7 +1181,7 @@ impl<'a> State<'a> {
                 self.head(&visibility_qualified(&item.vis, "extern crate"))?;
                 if let Some(p) = *optional_path {
                     let val = p.as_str();
-                    if val.contains("-") {
+                    if val.contains('-') {
                         self.print_string(&val, ast::StrStyle::Cooked)?;
                     } else {
                         self.print_name(p)?;
@@ -1203,7 +1197,7 @@ impl<'a> State<'a> {
             }
             ast::ItemKind::Use(ref vp) => {
                 self.head(&visibility_qualified(&item.vis, "use"))?;
-                self.print_view_path(&vp)?;
+                self.print_view_path(vp)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end inner head-block
                 self.end()?; // end outer head-block
@@ -1215,12 +1209,12 @@ impl<'a> State<'a> {
                 }
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 space(&mut self.s)?;
                 self.end()?; // end the head-ibox
 
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer cbox
             }
@@ -1228,12 +1222,12 @@ impl<'a> State<'a> {
                 self.head(&visibility_qualified(&item.vis, "const"))?;
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 space(&mut self.s)?;
                 self.end()?; // end the head-ibox
 
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer cbox
             }
@@ -1249,7 +1243,7 @@ impl<'a> State<'a> {
                     &item.vis
                 )?;
                 word(&mut self.s, " ")?;
-                self.print_block_with_attrs(&body, &item.attrs)?;
+                self.print_block_with_attrs(body, &item.attrs)?;
             }
             ast::ItemKind::Mod(ref _mod) => {
                 self.head(&visibility_qualified(&item.vis, "mod"))?;
@@ -1282,7 +1276,7 @@ impl<'a> State<'a> {
                 self.print_where_clause(&params.where_clause)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer ibox
             }
@@ -1297,11 +1291,11 @@ impl<'a> State<'a> {
             }
             ast::ItemKind::Struct(ref struct_def, ref generics) => {
                 self.head(&visibility_qualified(&item.vis, "struct"))?;
-                self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+                self.print_struct(struct_def, generics, item.ident, item.span, true)?;
             }
             ast::ItemKind::Union(ref struct_def, ref generics) => {
                 self.head(&visibility_qualified(&item.vis, "union"))?;
-                self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+                self.print_struct(struct_def, generics, item.ident, item.span, true)?;
             }
             ast::ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
                 self.head("")?;
@@ -1333,11 +1327,8 @@ impl<'a> State<'a> {
                     space(&mut self.s)?;
                 }
 
-                match polarity {
-                    ast::ImplPolarity::Negative => {
-                        word(&mut self.s, "!")?;
-                    },
-                    _ => {}
+                if let ast::ImplPolarity::Negative = polarity {
+                    word(&mut self.s, "!")?;
                 }
 
                 if let Some(ref t) = *opt_trait {
@@ -1346,7 +1337,7 @@ impl<'a> State<'a> {
                     self.word_space("for")?;
                 }
 
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 self.print_where_clause(&generics.where_clause)?;
 
                 space(&mut self.s)?;
@@ -1543,7 +1534,7 @@ impl<'a> State<'a> {
             Some(ref d) => {
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&d)
+                self.print_expr(d)
             }
             _ => Ok(())
         }
@@ -1571,7 +1562,7 @@ impl<'a> State<'a> {
         self.print_outer_attributes(&ti.attrs)?;
         match ti.node {
             ast::TraitItemKind::Const(ref ty, ref default) => {
-                self.print_associated_const(ti.ident, &ty,
+                self.print_associated_const(ti.ident, ty,
                                             default.as_ref().map(|expr| &**expr),
                                             &ast::Visibility::Inherited)?;
             }
@@ -1614,7 +1605,7 @@ impl<'a> State<'a> {
         self.print_defaultness(ii.defaultness)?;
         match ii.node {
             ast::ImplItemKind::Const(ref ty, ref expr) => {
-                self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?;
+                self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?;
             }
             ast::ImplItemKind::Method(ref sig, ref body) => {
                 self.head("")?;
@@ -1650,38 +1641,38 @@ impl<'a> State<'a> {
                 self.word_nbsp("let")?;
 
                 self.ibox(INDENT_UNIT)?;
-                self.print_local_decl(&loc)?;
+                self.print_local_decl(loc)?;
                 self.end()?;
                 if let Some(ref init) = loc.init {
                     self.nbsp()?;
                     self.word_space("=")?;
-                    self.print_expr(&init)?;
+                    self.print_expr(init)?;
                 }
                 word(&mut self.s, ";")?;
                 self.end()?;
             }
-            ast::StmtKind::Item(ref item) => self.print_item(&item)?,
+            ast::StmtKind::Item(ref item) => self.print_item(item)?,
             ast::StmtKind::Expr(ref expr) => {
                 self.space_if_not_bol()?;
-                self.print_expr_outer_attr_style(&expr, false)?;
+                self.print_expr_outer_attr_style(expr, false)?;
                 if parse::classify::expr_requires_semi_to_be_stmt(expr) {
                     word(&mut self.s, ";")?;
                 }
             }
             ast::StmtKind::Semi(ref expr) => {
                 self.space_if_not_bol()?;
-                self.print_expr_outer_attr_style(&expr, false)?;
+                self.print_expr_outer_attr_style(expr, false)?;
                 word(&mut self.s, ";")?;
             }
             ast::StmtKind::Mac(ref mac) => {
                 let (ref mac, style, ref attrs) = **mac;
                 self.space_if_not_bol()?;
-                self.print_outer_attributes(&attrs)?;
+                self.print_outer_attributes(attrs)?;
                 let delim = match style {
                     ast::MacStmtStyle::Braces => token::Brace,
                     _ => token::Paren
                 };
-                self.print_mac(&mac, delim)?;
+                self.print_mac(mac, delim)?;
                 if style == ast::MacStmtStyle::Semicolon {
                     word(&mut self.s, ";")?;
                 }
@@ -1735,7 +1726,7 @@ impl<'a> State<'a> {
                 ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
                     self.maybe_print_comment(st.span.lo)?;
                     self.space_if_not_bol()?;
-                    self.print_expr_outer_attr_style(&expr, false)?;
+                    self.print_expr_outer_attr_style(expr, false)?;
                     self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
                 }
                 _ => self.print_stmt(st)?,
@@ -1755,9 +1746,9 @@ impl<'a> State<'a> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else if ")?;
-                        self.print_expr(&i)?;
+                        self.print_expr(i)?;
                         space(&mut self.s)?;
-                        self.print_block(&then)?;
+                        self.print_block(then)?;
                         self.print_else(e.as_ref().map(|e| &**e))
                     }
                     // "another else-if-let"
@@ -1765,12 +1756,12 @@ impl<'a> State<'a> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else if let ")?;
-                        self.print_pat(&pat)?;
+                        self.print_pat(pat)?;
                         space(&mut self.s)?;
                         self.word_space("=")?;
-                        self.print_expr(&expr)?;
+                        self.print_expr(expr)?;
                         space(&mut self.s)?;
-                        self.print_block(&then)?;
+                        self.print_block(then)?;
                         self.print_else(e.as_ref().map(|e| &**e))
                     }
                     // "final else"
@@ -1778,7 +1769,7 @@ impl<'a> State<'a> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else ")?;
-                        self.print_block(&b)
+                        self.print_block(b)
                     }
                     // BLEAH, constraints would be great here
                     _ => {
@@ -1844,12 +1835,8 @@ impl<'a> State<'a> {
                                       binop: ast::BinOp) -> bool {
         match sub_expr.node {
             ast::ExprKind::Binary(ref sub_op, _, _) => {
-                if AssocOp::from_ast_binop(sub_op.node).precedence() <
-                    AssocOp::from_ast_binop(binop.node).precedence() {
-                    true
-                } else {
-                    false
-                }
+                AssocOp::from_ast_binop(sub_op.node).precedence() <
+                    AssocOp::from_ast_binop(binop.node).precedence()
             }
             _ => true
         }
@@ -1929,7 +1916,7 @@ impl<'a> State<'a> {
                     space(&mut self.s)?;
                 }
                 word(&mut self.s, "..")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 self.end()?;
             }
             _ => if !fields.is_empty() {
@@ -1969,7 +1956,7 @@ impl<'a> State<'a> {
         if !tys.is_empty() {
             word(&mut self.s, "::<")?;
             self.commasep(Inconsistent, tys,
-                          |s, ty| s.print_type(&ty))?;
+                          |s, ty| s.print_type(ty))?;
             word(&mut self.s, ">")?;
         }
         self.print_call_post(base_args)
@@ -2038,7 +2025,7 @@ impl<'a> State<'a> {
                 self.print_expr_vec(&exprs[..], attrs)?;
             }
             ast::ExprKind::Repeat(ref element, ref count) => {
-                self.print_expr_repeat(&element, &count, attrs)?;
+                self.print_expr_repeat(element, count, attrs)?;
             }
             ast::ExprKind::Struct(ref path, ref fields, ref wth) => {
                 self.print_expr_struct(path, &fields[..], wth, attrs)?;
@@ -2047,43 +2034,43 @@ impl<'a> State<'a> {
                 self.print_expr_tup(&exprs[..], attrs)?;
             }
             ast::ExprKind::Call(ref func, ref args) => {
-                self.print_expr_call(&func, &args[..])?;
+                self.print_expr_call(func, &args[..])?;
             }
             ast::ExprKind::MethodCall(ident, ref tys, ref args) => {
                 self.print_expr_method_call(ident, &tys[..], &args[..])?;
             }
             ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
-                self.print_expr_binary(op, &lhs, &rhs)?;
+                self.print_expr_binary(op, lhs, rhs)?;
             }
             ast::ExprKind::Unary(op, ref expr) => {
-                self.print_expr_unary(op, &expr)?;
+                self.print_expr_unary(op, expr)?;
             }
             ast::ExprKind::AddrOf(m, ref expr) => {
-                self.print_expr_addr_of(m, &expr)?;
+                self.print_expr_addr_of(m, expr)?;
             }
             ast::ExprKind::Lit(ref lit) => {
-                self.print_literal(&lit)?;
+                self.print_literal(lit)?;
             }
             ast::ExprKind::Cast(ref expr, ref ty) => {
                 if let ast::ExprKind::Cast(..) = expr.node {
-                    self.print_expr(&expr)?;
+                    self.print_expr(expr)?;
                 } else {
-                    self.print_expr_maybe_paren(&expr)?;
+                    self.print_expr_maybe_paren(expr)?;
                 }
                 space(&mut self.s)?;
                 self.word_space("as")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
             }
             ast::ExprKind::Type(ref expr, ref ty) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
             }
             ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
-                self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
+                self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?;
             }
             ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
-                self.print_if_let(&pat, &expr, &blk, elseopt.as_ref().map(|e| &**e))?;
+                self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
             }
             ast::ExprKind::While(ref test, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2091,9 +2078,9 @@ impl<'a> State<'a> {
                     self.word_space(":")?;
                 }
                 self.head("while")?;
-                self.print_expr(&test)?;
+                self.print_expr(test)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2101,12 +2088,12 @@ impl<'a> State<'a> {
                     self.word_space(":")?;
                 }
                 self.head("while let")?;
-                self.print_pat(&pat)?;
+                self.print_pat(pat)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2114,12 +2101,12 @@ impl<'a> State<'a> {
                     self.word_space(":")?;
                 }
                 self.head("for")?;
-                self.print_pat(&pat)?;
+                self.print_pat(pat)?;
                 space(&mut self.s)?;
                 self.word_space("in")?;
-                self.print_expr(&iter)?;
+                self.print_expr(iter)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Loop(ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2128,13 +2115,13 @@ impl<'a> State<'a> {
                 }
                 self.head("loop")?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Match(ref expr, ref arms) => {
                 self.cbox(INDENT_UNIT)?;
                 self.ibox(4)?;
                 self.word_nbsp("match")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 space(&mut self.s)?;
                 self.bopen()?;
                 self.print_inner_attributes_no_trailing_hardbreak(attrs)?;
@@ -2146,7 +2133,7 @@ impl<'a> State<'a> {
             ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
                 self.print_capture_clause(capture_clause)?;
 
-                self.print_fn_block_args(&decl)?;
+                self.print_fn_block_args(decl)?;
                 space(&mut self.s)?;
                 self.print_expr(body)?;
                 self.end()?; // need to close a box
@@ -2161,48 +2148,48 @@ impl<'a> State<'a> {
                 self.cbox(INDENT_UNIT)?;
                 // head-box, will be closed by print-block after {
                 self.ibox(0)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Assign(ref lhs, ref rhs) => {
-                self.print_expr(&lhs)?;
+                self.print_expr(lhs)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&rhs)?;
+                self.print_expr(rhs)?;
             }
             ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
-                self.print_expr(&lhs)?;
+                self.print_expr(lhs)?;
                 space(&mut self.s)?;
                 word(&mut self.s, op.node.to_string())?;
                 self.word_space("=")?;
-                self.print_expr(&rhs)?;
+                self.print_expr(rhs)?;
             }
             ast::ExprKind::Field(ref expr, id) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ".")?;
                 self.print_ident(id.node)?;
             }
             ast::ExprKind::TupField(ref expr, id) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ".")?;
                 self.print_usize(id.node)?;
             }
             ast::ExprKind::Index(ref expr, ref index) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, "[")?;
-                self.print_expr(&index)?;
+                self.print_expr(index)?;
                 word(&mut self.s, "]")?;
             }
             ast::ExprKind::Range(ref start, ref end, limits) => {
-                if let &Some(ref e) = start {
-                    self.print_expr(&e)?;
+                if let Some(ref e) = *start {
+                    self.print_expr(e)?;
                 }
                 if limits == ast::RangeLimits::HalfOpen {
                     word(&mut self.s, "..")?;
                 } else {
                     word(&mut self.s, "...")?;
                 }
-                if let &Some(ref e) = end {
-                    self.print_expr(&e)?;
+                if let Some(ref e) = *end {
+                    self.print_expr(e)?;
                 }
             }
             ast::ExprKind::Path(None, ref path) => {
@@ -2233,12 +2220,9 @@ impl<'a> State<'a> {
             }
             ast::ExprKind::Ret(ref result) => {
                 word(&mut self.s, "return")?;
-                match *result {
-                    Some(ref expr) => {
-                        word(&mut self.s, " ")?;
-                        self.print_expr(&expr)?;
-                    }
-                    _ => ()
+                if let Some(ref expr) = *result {
+                    word(&mut self.s, " ")?;
+                    self.print_expr(expr)?;
                 }
             }
             ast::ExprKind::InlineAsm(ref a) => {
@@ -2268,7 +2252,7 @@ impl<'a> State<'a> {
                 self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
                     s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
                     s.popen()?;
-                    s.print_expr(&o)?;
+                    s.print_expr(o)?;
                     s.pclose()?;
                     Ok(())
                 })?;
@@ -2308,7 +2292,7 @@ impl<'a> State<'a> {
             ast::ExprKind::Paren(ref e) => {
                 self.popen()?;
                 self.print_inner_attributes_inline(attrs)?;
-                self.print_expr(&e)?;
+                self.print_expr(e)?;
                 self.pclose()?;
             },
             ast::ExprKind::Try(ref e) => {
@@ -2318,7 +2302,7 @@ impl<'a> State<'a> {
             ast::ExprKind::Catch(ref blk) => {
                 self.head("do catch")?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?
+                self.print_block_with_attrs(blk, attrs)?
             }
         }
         self.ann.post(self, NodeExpr(expr))?;
@@ -2329,7 +2313,7 @@ impl<'a> State<'a> {
         self.print_pat(&loc.pat)?;
         if let Some(ref ty) = loc.ty {
             self.word_space(":")?;
-            self.print_type(&ty)?;
+            self.print_type(ty)?;
         }
         Ok(())
     }
@@ -2397,7 +2381,7 @@ impl<'a> State<'a> {
             space(&mut self.s)?;
             self.word_space("as")?;
             let depth = path.segments.len() - qself.position;
-            self.print_path(&path, false, depth, false)?;
+            self.print_path(path, false, depth, false)?;
         }
         word(&mut self.s, ">")?;
         word(&mut self.s, "::")?;
@@ -2438,7 +2422,7 @@ impl<'a> State<'a> {
                     self.commasep(
                         Inconsistent,
                         &data.types,
-                        |s, ty| s.print_type(&ty))?;
+                        |s, ty| s.print_type(ty))?;
                         comma = true;
                 }
 
@@ -2461,13 +2445,13 @@ impl<'a> State<'a> {
                 self.commasep(
                     Inconsistent,
                     &data.inputs,
-                    |s, ty| s.print_type(&ty))?;
+                    |s, ty| s.print_type(ty))?;
                 word(&mut self.s, ")")?;
 
                 if let Some(ref ty) = data.output {
                     self.space_if_not_bol()?;
                     self.word_space("->")?;
-                    self.print_type(&ty)?;
+                    self.print_type(ty)?;
                 }
             }
         }
@@ -2496,24 +2480,24 @@ impl<'a> State<'a> {
                 self.print_ident(path1.node)?;
                 if let Some(ref p) = *sub {
                     word(&mut self.s, "@")?;
-                    self.print_pat(&p)?;
+                    self.print_pat(p)?;
                 }
             }
             PatKind::TupleStruct(ref path, ref elts, ddpos) => {
                 self.print_path(path, true, 0, false)?;
                 self.popen()?;
                 if let Some(ddpos) = ddpos {
-                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
                     if ddpos != 0 {
                         self.word_space(",")?;
                     }
                     word(&mut self.s, "..")?;
                     if ddpos != elts.len() {
                         word(&mut self.s, ",")?;
-                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
                     }
                 } else {
-                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
                 }
                 self.pclose()?;
             }
@@ -2549,17 +2533,17 @@ impl<'a> State<'a> {
             PatKind::Tuple(ref elts, ddpos) => {
                 self.popen()?;
                 if let Some(ddpos) = ddpos {
-                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
                     if ddpos != 0 {
                         self.word_space(",")?;
                     }
                     word(&mut self.s, "..")?;
                     if ddpos != elts.len() {
                         word(&mut self.s, ",")?;
-                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
                     }
                 } else {
-                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
                     if elts.len() == 1 {
                         word(&mut self.s, ",")?;
                     }
@@ -2568,41 +2552,41 @@ impl<'a> State<'a> {
             }
             PatKind::Box(ref inner) => {
                 word(&mut self.s, "box ")?;
-                self.print_pat(&inner)?;
+                self.print_pat(inner)?;
             }
             PatKind::Ref(ref inner, mutbl) => {
                 word(&mut self.s, "&")?;
                 if mutbl == ast::Mutability::Mutable {
                     word(&mut self.s, "mut ")?;
                 }
-                self.print_pat(&inner)?;
+                self.print_pat(inner)?;
             }
             PatKind::Lit(ref e) => self.print_expr(&**e)?,
             PatKind::Range(ref begin, ref end, ref end_kind) => {
-                self.print_expr(&begin)?;
+                self.print_expr(begin)?;
                 space(&mut self.s)?;
                 match *end_kind {
                     RangeEnd::Included => word(&mut self.s, "...")?,
                     RangeEnd::Excluded => word(&mut self.s, "..")?,
                 }
-                self.print_expr(&end)?;
+                self.print_expr(end)?;
             }
             PatKind::Slice(ref before, ref slice, ref after) => {
                 word(&mut self.s, "[")?;
                 self.commasep(Inconsistent,
                                    &before[..],
-                                   |s, p| s.print_pat(&p))?;
+                                   |s, p| s.print_pat(p))?;
                 if let Some(ref p) = *slice {
                     if !before.is_empty() { self.word_space(",")?; }
                     if p.node != PatKind::Wild {
-                        self.print_pat(&p)?;
+                        self.print_pat(p)?;
                     }
                     word(&mut self.s, "..")?;
                     if !after.is_empty() { self.word_space(",")?; }
                 }
                 self.commasep(Inconsistent,
                                    &after[..],
-                                   |s, p| s.print_pat(&p))?;
+                                   |s, p| s.print_pat(p))?;
                 word(&mut self.s, "]")?;
             }
             PatKind::Mac(ref m) => self.print_mac(m, token::Paren)?,
@@ -2628,12 +2612,12 @@ impl<'a> State<'a> {
                 space(&mut self.s)?;
                 self.word_space("|")?;
             }
-            self.print_pat(&p)?;
+            self.print_pat(p)?;
         }
         space(&mut self.s)?;
         if let Some(ref e) = arm.guard {
             self.word_space("if")?;
-            self.print_expr(&e)?;
+            self.print_expr(e)?;
             space(&mut self.s)?;
         }
         self.word_space("=>")?;
@@ -2641,7 +2625,7 @@ impl<'a> State<'a> {
         match arm.body.node {
             ast::ExprKind::Block(ref blk) => {
                 // the block will close the pattern's ibox
-                self.print_block_unclosed_indent(&blk, INDENT_UNIT)?;
+                self.print_block_unclosed_indent(blk, INDENT_UNIT)?;
 
                 // If it is a user-provided unsafe block, print a comma after it
                 if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
@@ -2673,7 +2657,7 @@ impl<'a> State<'a> {
                 self.print_mutability(m)?;
                 word(&mut self.s, "self")?;
                 self.word_space(":")?;
-                self.print_type(&typ)
+                self.print_type(typ)
             }
         }
     }
@@ -2725,7 +2709,7 @@ impl<'a> State<'a> {
         self.word_space("->")?;
         match decl.output {
             ast::FunctionRetTy::Ty(ref ty) => {
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 self.maybe_print_comment(ty.span.lo)
             }
             ast::FunctionRetTy::Default(..) => unreachable!(),
@@ -2839,7 +2823,7 @@ impl<'a> State<'a> {
             Some(ref default) => {
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_type(&default)
+                self.print_type(default)
             }
             _ => Ok(())
         }
@@ -2865,7 +2849,7 @@ impl<'a> State<'a> {
                                                                              ref bounds,
                                                                              ..}) => {
                     self.print_formal_lifetime_list(bound_lifetimes)?;
-                    self.print_type(&bounded_ty)?;
+                    self.print_type(bounded_ty)?;
                     self.print_bounds(":", bounds)?;
                 }
                 ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
@@ -2977,7 +2961,7 @@ impl<'a> State<'a> {
         match decl.output {
             ast::FunctionRetTy::Default(..) => unreachable!(),
             ast::FunctionRetTy::Ty(ref ty) =>
-                self.print_type(&ty)?
+                self.print_type(ty)?
         }
         self.end()?;
 
@@ -3044,14 +3028,9 @@ impl<'a> State<'a> {
         if self.next_comment().is_none() {
             hardbreak(&mut self.s)?;
         }
-        loop {
-            match self.next_comment() {
-                Some(ref cmnt) => {
-                    self.print_comment(cmnt)?;
-                    self.cur_cmnt_and_lit.cur_cmnt += 1;
-                }
-                _ => break
-            }
+        while let Some(ref cmnt) = self.next_comment() {
+            self.print_comment(cmnt)?;
+            self.cur_cmnt_and_lit.cur_cmnt += 1;
         }
         Ok(())
     }
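
Most of the pprust.rs hunks above are two recurring clippy-driven rewrites: dropping a redundant `&` when the value being passed is already a reference (the `needless_borrow` pattern in the `print_pat(&p)` calls), and collapsing a `loop`/`match`/`break` over an `Option`-returning call into `while let` (as in `print_remaining_comments`). A minimal standalone sketch of both patterns follows; `print_len` and the sample data are invented for illustration and are not code from this patch.

fn print_len(s: &str) {
    println!("{}", s.len());
}

fn main() {
    let words = vec!["a".to_string(), "bc".to_string()];
    for w in &words {
        // `w` is already a reference, so `print_len(&w)` would add a redundant
        // borrow, the same `needless_borrow` shape as `print_pat(&p)` above.
        print_len(w);
    }

    let mut pending = vec!["first comment", "second comment"];
    // Before: loop { match pending.pop() { Some(c) => ..., _ => break } }
    // `while let` folds the match and the break into the loop header.
    while let Some(c) = pending.pop() {
        println!("{}", c);
    }
}
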
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index c7820a15fb3..8e257102e1c 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -18,7 +18,7 @@ use ptr::P;
 use tokenstream::TokenStream;
 
 /// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
 /// The expanded code uses the unstable `#[prelude_import]` attribute.
 fn ignored_span(sp: Span) -> Span {
     let mark = Mark::fresh();
@@ -49,7 +49,7 @@ pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option<Strin
         None => return krate,
     };
 
-    let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
+    let crate_name = Symbol::intern(&alt_std_name.unwrap_or_else(|| name.to_string()));
 
     krate.module.items.insert(0, P(ast::Item {
         attrs: vec![attr::mk_attr_outer(DUMMY_SP,
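
The std_inject.rs change swaps `unwrap_or` for `unwrap_or_else` so the fallback `String` is only built when it is actually needed (clippy's `or_fun_call` lint). A small sketch of the difference, with `alt_std_name` and `name` as hypothetical stand-ins for the values used above:

fn main() {
    let alt_std_name: Option<String> = None;
    let name = "std";

    // `unwrap_or(name.to_string())` would allocate the fallback even when
    // `alt_std_name` is `Some`; `unwrap_or_else` defers the allocation to
    // the `None` case.
    let crate_name = alt_std_name.unwrap_or_else(|| name.to_string());
    println!("{}", crate_name);
}
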
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 91746a2edd9..bb1a6ff65a5 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -106,9 +106,8 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
         // Add a special __test module to the crate that will contain code
         // generated for the test harness
         let (mod_, reexport) = mk_test_module(&mut self.cx);
-        match reexport {
-            Some(re) => folded.module.items.push(re),
-            None => {}
+        if let Some(re) = reexport {
+            folded.module.items.push(re)
         }
         folded.module.items.push(mod_);
         folded
@@ -257,7 +256,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt,
     let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
     cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
     let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
-        ident: sym.clone(),
+        ident: sym,
         attrs: Vec::new(),
         id: ast::DUMMY_NODE_ID,
         node: ast::ItemKind::Mod(reexport_mod),
@@ -308,7 +307,7 @@ fn generate_test_harness(sess: &ParseSess,
 }
 
 /// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
 /// The expanded code calls some unstable functions in the test crate.
 fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
     Span { ctxt: cx.ctxt, ..sp }
@@ -354,7 +353,7 @@ fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
         }
     }
 
-    return has_test_attr && has_test_signature(i) == Yes;
+    has_test_attr && has_test_signature(i) == Yes
 }
 
 fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
@@ -385,7 +384,7 @@ fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
                       `fn(&mut Bencher) -> ()`");
     }
 
-    return has_bench_attr && has_test_signature(i);
+    has_bench_attr && has_test_signature(i)
 }
 
 fn is_ignored(i: &ast::Item) -> bool {
@@ -504,16 +503,14 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
                            ast::Unsafety::Normal,
                            dummy_spanned(ast::Constness::NotConst),
                            ::abi::Abi::Rust, ast::Generics::default(), main_body);
-    let main = P(ast::Item {
+    P(ast::Item {
         ident: Ident::from_str("main"),
         attrs: vec![main_attr],
         id: ast::DUMMY_NODE_ID,
         node: main,
         vis: ast::Visibility::Public,
         span: sp
-    });
-
-    return main;
+    })
 }
 
 fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
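
Two of the test.rs rewrites above are worth spelling out: a `match` with one interesting arm and one empty arm becomes `if let`, and a value that was bound to a local only to be `return`ed becomes the function's tail expression. The sketch below uses invented items (`push_if_some`, `double`); it is not code from the test harness itself.

fn push_if_some(items: &mut Vec<i32>, extra: Option<i32>) {
    // Before: match extra { Some(x) => items.push(x), None => {} }
    if let Some(x) = extra {
        items.push(x);
    }
}

fn double(x: i32) -> i32 {
    // Before: let doubled = x * 2; return doubled;
    // The binding and the explicit `return` are replaced by a tail expression.
    x * 2
}

fn main() {
    let mut items = vec![1];
    push_if_some(&mut items, Some(2));
    println!("{:?} {}", items, double(21));
}
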
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 86bfdebe42b..9c1371a31fe 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -10,16 +10,16 @@
 
 //! # Token Streams
 //!
-//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
 //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
 //! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
 //!
 //! ## Ownership
-//! TokenStreams are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a TokenStream
-//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
-//! the original. This essentially coerces TokenStreams into 'views' of their subparts,
-//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
+//! `TokenStream`s are persistent data structures constructed as ropes with reference
+//! counted-children. In general, this means that calling an operation on a `TokenStream`
+//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
+//! the original. This essentially coerces `TokenStream`s into 'views' of their subparts,
+//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
 
 use syntax_pos::{BytePos, Span, DUMMY_SP};
@@ -88,7 +88,7 @@ impl Delimited {
 /// If the syntax extension is an MBE macro, it will attempt to match its
 /// LHS token tree against the provided token tree, and if it finds a
 /// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
+/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
 ///
 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
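
The tokenstream.rs hunks only touch documentation: clippy's `doc_markdown` lint wants code-like identifiers in doc comments wrapped in backticks so rustdoc renders them as inline code. A tiny illustration with an invented item (`split_demo` is not part of libsyntax):

/// Splits a `TokenStream` into its top-level `TokenTree`s.
///
/// Without the backticks around the type names, `doc_markdown` warns that
/// "TokenStream" and "TokenTree" look like code but are not marked as such.
fn split_demo() {}

fn main() {
    split_demo();
}
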
diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs
index a6fff2d7074..9307f3c58d4 100644
--- a/src/libsyntax/util/lev_distance.rs
+++ b/src/libsyntax/util/lev_distance.rs
@@ -53,9 +53,10 @@ pub fn find_best_match_for_name<'a, T>(iter_names: T,
     iter_names
     .filter_map(|&name| {
         let dist = lev_distance(lookup, &name.as_str());
-        match dist <= max_dist {    // filter the unwanted cases
-            true => Some((name, dist)),
-            false => None,
+        if dist <= max_dist {    // filter the unwanted cases
+            Some((name, dist))
+        } else {
+            None
         }
     })
     .min_by_key(|&(_, val)| val)    // extract the tuple containing the minimum edit distance
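
The lev_distance.rs change removes a `match` on a plain `bool` (clippy's `match_bool` lint); an `if`/`else` expresses the same filter directly. A minimal sketch with made-up values standing in for `dist` and `max_dist`:

fn main() {
    let dist = 2usize;
    let max_dist = 3usize;

    // Before: match dist <= max_dist { true => Some(dist), false => None }
    let kept = if dist <= max_dist { Some(dist) } else { None };
    println!("{:?}", kept);
}
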
diff --git a/src/libsyntax/util/move_map.rs b/src/libsyntax/util/move_map.rs
index fe05e2958b3..8cc37afa354 100644
--- a/src/libsyntax/util/move_map.rs
+++ b/src/libsyntax/util/move_map.rs
@@ -37,10 +37,10 @@ impl<T> MoveMap<T> for Vec<T> {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
                 let e = ptr::read(self.get_unchecked(read_i));
-                let mut iter = f(e).into_iter();
+                let iter = f(e).into_iter();
                 read_i += 1;
 
-                while let Some(e) = iter.next() {
+                for e in iter {
                     if write_i < read_i {
                         ptr::write(self.get_unchecked_mut(write_i), e);
                         write_i += 1;
@@ -93,10 +93,10 @@ impl<T> MoveMap<T> for SmallVector<T> {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
                 let e = ptr::read(self.get_unchecked(read_i));
-                let mut iter = f(e).into_iter();
+                let iter = f(e).into_iter();
                 read_i += 1;
 
-                while let Some(e) = iter.next() {
+                for e in iter {
                     if write_i < read_i {
                         ptr::write(self.get_unchecked_mut(write_i), e);
                         write_i += 1;
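
Both move_map.rs hunks replace a manual `while let Some(..) = iter.next()` loop with a `for` loop, which also lets the iterator binding drop its `mut` (clippy's `while_let_on_iterator` lint). Below is a safe, standalone sketch of the same shape; the unsafe pointer juggling from `MoveMap` is deliberately left out.

fn main() {
    let nested = vec![vec![1, 2], vec![3]];
    let mut flat = Vec::new();

    for inner in nested {
        // Before: let mut iter = inner.into_iter();
        //         while let Some(x) = iter.next() { ... }
        // A `for` loop drives the iterator itself, so no `mut` binding is needed.
        let iter = inner.into_iter();
        for x in iter {
            flat.push(x);
        }
    }
    println!("{:?}", flat);
}
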
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 2e42c6986e6..6e613d1eee7 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -343,9 +343,7 @@ pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) {
             visitor.visit_ty(ty);
             visitor.visit_expr(expression)
         }
-        TyKind::TraitObject(ref bounds) => {
-            walk_list!(visitor, visit_ty_param_bound, bounds);
-        }
+        TyKind::TraitObject(ref bounds) |
         TyKind::ImplTrait(ref bounds) => {
             walk_list!(visitor, visit_ty_param_bound, bounds);
         }
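
The `walk_ty` hunk above merges two arms with identical bodies into one arm using `|`. A standalone sketch of that pattern on an invented enum (`Token` and `text_len` are illustrative, not libsyntax types):

enum Token {
    Word(String),
    Number(String),
    Punct(char),
}

fn text_len(t: &Token) -> usize {
    match *t {
        // Two arms with the same body share a single arm via `|`, mirroring
        // the TraitObject/ImplTrait merge above.
        Token::Word(ref s) |
        Token::Number(ref s) => s.len(),
        Token::Punct(_) => 1,
    }
}

fn main() {
    println!("{}", text_len(&Token::Word("hello".to_string())));
    println!("{}", text_len(&Token::Number("42".to_string())));
    println!("{}", text_len(&Token::Punct(',')));
}
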
@@ -540,7 +538,7 @@ pub fn walk_fn<'a, V>(visitor: &mut V, kind: FnKind<'a>, declaration: &'a FnDecl
             walk_fn_decl(visitor, declaration);
             visitor.visit_block(body);
         }
-        FnKind::Method(_, ref sig, _, body) => {
+        FnKind::Method(_, sig, _, body) => {
             visitor.visit_generics(&sig.generics);
             walk_fn_decl(visitor, declaration);
             visitor.visit_block(body);
@@ -776,7 +774,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
         }
         ExprKind::InlineAsm(ref ia) => {
             for &(_, ref input) in &ia.inputs {
-                visitor.visit_expr(&input)
+                visitor.visit_expr(input)
             }
             for output in &ia.outputs {
                 visitor.visit_expr(&output.expr)