Diffstat (limited to 'src/libsyntax/parse')

 -rw-r--r--  src/libsyntax/parse/diagnostics.rs          |  11
 -rw-r--r--  src/libsyntax/parse/lexer/mod.rs            | 110
 -rw-r--r--  src/libsyntax/parse/lexer/tests.rs          |  68
 -rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs     |  40
 -rw-r--r--  src/libsyntax/parse/lexer/unicode_chars.rs  |   7
 -rw-r--r--  src/libsyntax/parse/mod.rs                  |   9
 -rw-r--r--  src/libsyntax/parse/parser.rs               |   5
 -rw-r--r--  src/libsyntax/parse/parser/expr.rs          |  34
 -rw-r--r--  src/libsyntax/parse/parser/item.rs          |  17
 -rw-r--r--  src/libsyntax/parse/parser/module.rs        |   2
 -rw-r--r--  src/libsyntax/parse/parser/pat.rs           | 486
 -rw-r--r--  src/libsyntax/parse/parser/stmt.rs          |  24
 -rw-r--r--  src/libsyntax/parse/parser/ty.rs            |   9
 -rw-r--r--  src/libsyntax/parse/tests.rs                |  10
 -rw-r--r--  src/libsyntax/parse/token.rs                |   2
 15 files changed, 444 insertions, 390 deletions
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 730efb5ef01..1fbf28fb830 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -8,7 +8,6 @@ use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, Token
 use crate::parse::token::{self, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
-use crate::source_map::Spanned;
 use crate::symbol::{kw, sym};
 use crate::ThinVec;
 use crate::util::parser::AssocOp;
@@ -592,18 +591,18 @@ impl<'a> Parser<'a> {
 
     crate fn maybe_report_invalid_custom_discriminants(
         sess: &ParseSess,
-        variants: &[Spanned<ast::Variant_>],
+        variants: &[ast::Variant],
     ) {
-        let has_fields = variants.iter().any(|variant| match variant.node.data {
+        let has_fields = variants.iter().any(|variant| match variant.data {
             VariantData::Tuple(..) | VariantData::Struct(..) => true,
             VariantData::Unit(..) => false,
         });
 
-        let discriminant_spans = variants.iter().filter(|variant| match variant.node.data {
+        let discriminant_spans = variants.iter().filter(|variant| match variant.data {
             VariantData::Tuple(..) | VariantData::Struct(..) => false,
             VariantData::Unit(..) => true,
         })
-        .filter_map(|variant| variant.node.disr_expr.as_ref().map(|c| c.value.span))
+        .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span))
         .collect::<Vec<_>>();
 
         if !discriminant_spans.is_empty() && has_fields {
@@ -618,7 +617,7 @@ impl<'a> Parser<'a> {
                 err.span_label(sp, "disallowed custom discriminant");
             }
             for variant in variants.iter() {
-                match &variant.node.data {
+                match &variant.data {
                     VariantData::Struct(..) => {
                         err.span_label(
                             variant.span,
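
The hunk above is the first of several in this diff that follow the same AST refactor: span-carrying wrappers such as `Spanned<Variant_>` are flattened into plain structs (`Variant`, and later `FieldPat` and `Mac`) that store their `span` inline, so accesses change from `variant.node.data` to `variant.data`. A minimal before/after sketch with simplified stand-in types (not the real `ast` definitions):

#![allow(dead_code)]

struct Span { lo: u32, hi: u32 }

// Before: `type Variant = Spanned<Variant_>`; callers wrote `variant.node.data`.
struct Spanned<T> { node: T, span: Span }

// After: the span lives on the node itself; callers write `variant.data`.
struct Variant { data: VariantData, span: Span }

enum VariantData { Unit, Tuple(u32), Struct(u32) }

fn main() {
    let variant = Variant { data: VariantData::Unit, span: Span { lo: 0, hi: 4 } };
    // Mirrors the `has_fields` check in the hunk above.
    let has_fields = match variant.data {
        VariantData::Tuple(..) | VariantData::Struct(..) => true,
        VariantData::Unit => false,
    };
    assert!(!has_fields);
}
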
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index e86d4c7fde6..66add869359 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -4,13 +4,11 @@ use crate::symbol::{sym, Symbol};
 use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
 
 use errors::{FatalError, DiagnosticBuilder};
-use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION};
+use syntax_pos::{BytePos, Pos, Span};
 use rustc_lexer::Base;
 use rustc_lexer::unescape;
 
-use std::borrow::Cow;
 use std::char;
-use std::iter;
 use std::convert::TryInto;
 use rustc_data_structures::sync::Lrc;
 use log::debug;
@@ -84,7 +82,7 @@ impl<'a> StringReader<'a> {
 
 
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
-        self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION))
+        self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
     }
 
     /// Returns the next token, including trivia like whitespace or comments.
@@ -181,18 +179,7 @@ impl<'a> StringReader<'a> {
                 let string = self.str_from(start);
                 // comments with only more "/"s are not doc comments
                 let tok = if is_doc_comment(string) {
-                    let mut idx = 0;
-                    loop {
-                        idx = match string[idx..].find('\r') {
-                            None => break,
-                            Some(it) => idx + it + 1
-                        };
-                        if string[idx..].chars().next() != Some('\n') {
-                            self.err_span_(start + BytePos(idx as u32 - 1),
-                                            start + BytePos(idx as u32),
-                                            "bare CR not allowed in doc-comment");
-                        }
-                    }
+                    self.forbid_bare_cr(start, string, "bare CR not allowed in doc-comment");
                     token::DocComment(Symbol::intern(string))
                 } else {
                     token::Comment
@@ -217,15 +204,10 @@ impl<'a> StringReader<'a> {
                 }
 
                 let tok = if is_doc_comment {
-                    let has_cr = string.contains('\r');
-                    let string = if has_cr {
-                        self.translate_crlf(start,
-                                            string,
-                                            "bare CR not allowed in block doc-comment")
-                    } else {
-                        string.into()
-                    };
-                    token::DocComment(Symbol::intern(&string[..]))
+                    self.forbid_bare_cr(start,
+                                        string,
+                                        "bare CR not allowed in block doc-comment");
+                    token::DocComment(Symbol::intern(string))
                 } else {
                     token::Comment
                 };
@@ -291,9 +273,6 @@ impl<'a> StringReader<'a> {
             }
             rustc_lexer::TokenKind::Semi => token::Semi,
             rustc_lexer::TokenKind::Comma => token::Comma,
-            rustc_lexer::TokenKind::DotDotDot => token::DotDotDot,
-            rustc_lexer::TokenKind::DotDotEq => token::DotDotEq,
-            rustc_lexer::TokenKind::DotDot => token::DotDot,
             rustc_lexer::TokenKind::Dot => token::Dot,
             rustc_lexer::TokenKind::OpenParen => token::OpenDelim(token::Paren),
             rustc_lexer::TokenKind::CloseParen => token::CloseDelim(token::Paren),
@@ -305,42 +284,20 @@ impl<'a> StringReader<'a> {
             rustc_lexer::TokenKind::Pound => token::Pound,
             rustc_lexer::TokenKind::Tilde => token::Tilde,
             rustc_lexer::TokenKind::Question => token::Question,
-            rustc_lexer::TokenKind::ColonColon => token::ModSep,
             rustc_lexer::TokenKind::Colon => token::Colon,
             rustc_lexer::TokenKind::Dollar => token::Dollar,
-            rustc_lexer::TokenKind::EqEq => token::EqEq,
             rustc_lexer::TokenKind::Eq => token::Eq,
-            rustc_lexer::TokenKind::FatArrow => token::FatArrow,
-            rustc_lexer::TokenKind::Ne => token::Ne,
             rustc_lexer::TokenKind::Not => token::Not,
-            rustc_lexer::TokenKind::Le => token::Le,
-            rustc_lexer::TokenKind::LArrow => token::LArrow,
             rustc_lexer::TokenKind::Lt => token::Lt,
-            rustc_lexer::TokenKind::ShlEq => token::BinOpEq(token::Shl),
-            rustc_lexer::TokenKind::Shl => token::BinOp(token::Shl),
-            rustc_lexer::TokenKind::Ge => token::Ge,
             rustc_lexer::TokenKind::Gt => token::Gt,
-            rustc_lexer::TokenKind::ShrEq => token::BinOpEq(token::Shr),
-            rustc_lexer::TokenKind::Shr => token::BinOp(token::Shr),
-            rustc_lexer::TokenKind::RArrow => token::RArrow,
             rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
-            rustc_lexer::TokenKind::MinusEq => token::BinOpEq(token::Minus),
             rustc_lexer::TokenKind::And => token::BinOp(token::And),
-            rustc_lexer::TokenKind::AndEq => token::BinOpEq(token::And),
-            rustc_lexer::TokenKind::AndAnd => token::AndAnd,
             rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
-            rustc_lexer::TokenKind::OrEq => token::BinOpEq(token::Or),
-            rustc_lexer::TokenKind::OrOr => token::OrOr,
             rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
-            rustc_lexer::TokenKind::PlusEq => token::BinOpEq(token::Plus),
             rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
-            rustc_lexer::TokenKind::StarEq => token::BinOpEq(token::Star),
             rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
-            rustc_lexer::TokenKind::SlashEq => token::BinOpEq(token::Slash),
             rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
-            rustc_lexer::TokenKind::CaretEq => token::BinOpEq(token::Caret),
             rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
-            rustc_lexer::TokenKind::PercentEq => token::BinOpEq(token::Percent),
 
             rustc_lexer::TokenKind::Unknown => {
                 let c = self.str_from(start).chars().next().unwrap();
@@ -516,49 +473,16 @@ impl<'a> StringReader<'a> {
         &self.src[self.src_index(start)..self.src_index(end)]
     }
 
-    /// Converts CRLF to LF in the given string, raising an error on bare CR.
-    fn translate_crlf<'b>(&self, start: BytePos, s: &'b str, errmsg: &'b str) -> Cow<'b, str> {
-        let mut chars = s.char_indices().peekable();
-        while let Some((i, ch)) = chars.next() {
-            if ch == '\r' {
-                if let Some((lf_idx, '\n')) = chars.peek() {
-                    return translate_crlf_(self, start, s, *lf_idx, chars, errmsg).into();
-                }
-                let pos = start + BytePos(i as u32);
-                let end_pos = start + BytePos((i + ch.len_utf8()) as u32);
-                self.err_span_(pos, end_pos, errmsg);
-            }
-        }
-        return s.into();
-
-        fn translate_crlf_(rdr: &StringReader<'_>,
-                           start: BytePos,
-                           s: &str,
-                           mut j: usize,
-                           mut chars: iter::Peekable<impl Iterator<Item = (usize, char)>>,
-                           errmsg: &str)
-                           -> String {
-            let mut buf = String::with_capacity(s.len());
-            // Skip first CR
-            buf.push_str(&s[.. j - 1]);
-            while let Some((i, ch)) = chars.next() {
-                if ch == '\r' {
-                    if j < i {
-                        buf.push_str(&s[j..i]);
-                    }
-                    let next = i + ch.len_utf8();
-                    j = next;
-                    if chars.peek().map(|(_, ch)| *ch) != Some('\n') {
-                        let pos = start + BytePos(i as u32);
-                        let end_pos = start + BytePos(next as u32);
-                        rdr.err_span_(pos, end_pos, errmsg);
-                    }
-                }
-            }
-            if j < s.len() {
-                buf.push_str(&s[j..]);
-            }
-            buf
+    fn forbid_bare_cr(&self, start: BytePos, s: &str, errmsg: &str) {
+        let mut idx = 0;
+        loop {
+            idx = match s[idx..].find('\r') {
+                None => break,
+                Some(it) => idx + it + 1
+            };
+            self.err_span_(start + BytePos(idx as u32 - 1),
+                           start + BytePos(idx as u32),
+                           errmsg);
         }
     }
 
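
A small standalone sketch (not part of the patch) of the scan the new `forbid_bare_cr` helper above performs: find every remaining `\r` in a doc comment and report its one-byte span. The `report` callback stands in for `err_span_`; the helper can be this simple presumably because CRLF sequences are already normalized away when the source file is loaded, which is also why the old `translate_crlf` path could be dropped.

fn scan_bare_cr(s: &str, mut report: impl FnMut(usize, usize)) {
    let mut idx = 0;
    loop {
        // Find the next carriage return, if any, and report its byte range.
        idx = match s[idx..].find('\r') {
            None => break,
            Some(it) => idx + it + 1,
        };
        report(idx - 1, idx);
    }
}

fn main() {
    // "a\rb" contains a bare CR at bytes 1..2.
    scan_bare_cr("a\rb", |lo, hi| println!("bare CR at bytes {}..{}", lo, hi));
}
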
diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs
index fc47e4f0b18..a915aa42fd1 100644
--- a/src/libsyntax/parse/lexer/tests.rs
+++ b/src/libsyntax/parse/lexer/tests.rs
@@ -1,41 +1,17 @@
 use super::*;
 
-use crate::ast::CrateConfig;
 use crate::symbol::Symbol;
 use crate::source_map::{SourceMap, FilePathMapping};
-use crate::feature_gate::UnstableFeatures;
 use crate::parse::token;
-use crate::diagnostics::plugin::ErrorMap;
 use crate::with_default_globals;
 use std::io;
 use std::path::PathBuf;
-use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition};
-use rustc_data_structures::fx::{FxHashSet, FxHashMap};
-use rustc_data_structures::sync::{Lock, Once};
+use errors::{Handler, emitter::EmitterWriter};
+use syntax_pos::{BytePos, Span};
 
 fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
-    let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
-                                                        Some(sm.clone()),
-                                                        false,
-                                                        false,
-                                                        false);
-    ParseSess {
-        span_diagnostic: errors::Handler::with_emitter(true, None, Box::new(emitter)),
-        unstable_features: UnstableFeatures::from_environment(),
-        config: CrateConfig::default(),
-        included_mod_stack: Lock::new(Vec::new()),
-        source_map: sm,
-        missing_fragment_specifiers: Lock::new(FxHashSet::default()),
-        raw_identifier_spans: Lock::new(Vec::new()),
-        registered_diagnostics: Lock::new(ErrorMap::new()),
-        buffered_lints: Lock::new(vec![]),
-        edition: Edition::from_session(),
-        ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
-        param_attr_spans: Lock::new(Vec::new()),
-        let_chains_spans: Lock::new(Vec::new()),
-        async_closure_spans: Lock::new(Vec::new()),
-        injected_crate_name: Once::new(),
-    }
+    let emitter = EmitterWriter::new(Box::new(io::sink()), Some(sm.clone()), false, false, false);
+    ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
 }
 
 // open a string reader for the given string
@@ -61,7 +37,7 @@ fn t1() {
         let tok1 = string_reader.next_token();
         let tok2 = Token::new(
             mk_ident("fn"),
-            Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
+            Span::with_root_ctxt(BytePos(21), BytePos(23)),
         );
         assert_eq!(tok1.kind, tok2.kind);
         assert_eq!(tok1.span, tok2.span);
@@ -71,7 +47,7 @@ fn t1() {
         assert_eq!(string_reader.pos.clone(), BytePos(28));
         let tok4 = Token::new(
             mk_ident("main"),
-            Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
+            Span::with_root_ctxt(BytePos(24), BytePos(28)),
         );
         assert_eq!(tok3.kind, tok4.kind);
         assert_eq!(tok3.span, tok4.span);
@@ -99,42 +75,50 @@ fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind
 }
 
 #[test]
-fn doublecolonparsing() {
+fn doublecolon_parsing() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        check_tokenization(setup(&sm, &sh, "a b".to_string()),
-                        vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
+        check_tokenization(
+            setup(&sm, &sh, "a b".to_string()),
+            vec![mk_ident("a"), token::Whitespace, mk_ident("b")],
+        );
     })
 }
 
 #[test]
-fn dcparsing_2() {
+fn doublecolon_parsing_2() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        check_tokenization(setup(&sm, &sh, "a::b".to_string()),
-                        vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
+        check_tokenization(
+            setup(&sm, &sh, "a::b".to_string()),
+            vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")],
+        );
     })
 }
 
 #[test]
-fn dcparsing_3() {
+fn doublecolon_parsing_3() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        check_tokenization(setup(&sm, &sh, "a ::b".to_string()),
-                        vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
+        check_tokenization(
+            setup(&sm, &sh, "a ::b".to_string()),
+            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")],
+        );
     })
 }
 
 #[test]
-fn dcparsing_4() {
+fn doublecolon_parsing_4() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        check_tokenization(setup(&sm, &sh, "a:: b".to_string()),
-                        vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
+        check_tokenization(
+            setup(&sm, &sh, "a:: b".to_string()),
+            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")],
+        );
     })
 }
 
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 37e67a2729e..e5ba7e45309 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -39,29 +39,29 @@ struct TokenTreesReader<'a> {
 impl<'a> TokenTreesReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
     fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
-        let mut tts = Vec::new();
+        let mut buf = TokenStreamBuilder::default();
 
         self.real_token();
         while self.token != token::Eof {
-            tts.push(self.parse_token_tree()?);
+            buf.push(self.parse_token_tree()?);
         }
 
-        Ok(TokenStream::new(tts))
+        Ok(buf.into_token_stream())
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
     fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
-        let mut tts = vec![];
+        let mut buf = TokenStreamBuilder::default();
         loop {
             if let token::CloseDelim(..) = self.token.kind {
-                return TokenStream::new(tts);
+                return buf.into_token_stream();
             }
 
             match self.parse_token_tree() {
-                Ok(tree) => tts.push(tree),
+                Ok(tree) => buf.push(tree),
                 Err(mut e) => {
                     e.emit();
-                    return TokenStream::new(tts);
+                    return buf.into_token_stream();
                 }
             }
         }
@@ -223,8 +223,32 @@ impl<'a> TokenTreesReader<'a> {
                 _ => {
                     self.token = token;
                     return;
-                },
+                }
+            }
+        }
+    }
+}
+
+#[derive(Default)]
+struct TokenStreamBuilder {
+    buf: Vec<TreeAndJoint>,
+}
+
+impl TokenStreamBuilder {
+    fn push(&mut self, (tree, joint): TreeAndJoint) {
+        if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last() {
+            if let TokenTree::Token(token) = &tree {
+                if let Some(glued) = prev_token.glue(token) {
+                    self.buf.pop();
+                    self.buf.push((TokenTree::Token(glued), joint));
+                    return;
+                }
             }
         }
+        self.buf.push((tree, joint))
+    }
+
+    fn into_token_stream(self) -> TokenStream {
+        TokenStream::new(self.buf)
     }
 }
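
A toy model (not rustc's API) of the token gluing that the new `TokenStreamBuilder` above relies on: `rustc_lexer` now emits only single-character punctuation (hence the removed `ColonColon`, `Shl`, `EqEq`, etc. arms earlier in this diff, and the lexer tests that now expect two `Colon` tokens for `::`), and compound operators are reassembled from adjacent "joint" tokens while token trees are built. `Tok` and `glue` below are illustrative stand-ins for `TokenKind` and `Token::glue`:

#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum Tok { Colon, ModSep, Lt, Shl, Eq, EqEq }

// Glue two adjacent single-character tokens into a compound one, if possible.
fn glue(a: Tok, b: Tok) -> Option<Tok> {
    match (a, b) {
        (Tok::Colon, Tok::Colon) => Some(Tok::ModSep), // `:` `:` => `::`
        (Tok::Lt, Tok::Lt) => Some(Tok::Shl),          // `<` `<` => `<<`
        (Tok::Eq, Tok::Eq) => Some(Tok::EqEq),         // `=` `=` => `==`
        _ => None,
    }
}

// Push tokens one by one, gluing onto the previous token when it was "joint"
// (i.e. not separated from the next token by whitespace).
fn build(tokens: &[(Tok, bool)]) -> Vec<Tok> {
    let mut buf: Vec<(Tok, bool)> = Vec::new();
    for &(tok, joint) in tokens {
        if let Some(&(prev, true)) = buf.last() {
            if let Some(glued) = glue(prev, tok) {
                buf.pop();
                buf.push((glued, joint));
                continue;
            }
        }
        buf.push((tok, joint));
    }
    buf.into_iter().map(|(tok, _)| tok).collect()
}

fn main() {
    // `a::b` now lexes its separator as Colon (joint) + Colon; gluing restores `::`.
    assert_eq!(build(&[(Tok::Colon, true), (Tok::Colon, false)]), vec![Tok::ModSep]);
}
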
diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs
index eaa736c6a35..525b4215aff 100644
--- a/src/libsyntax/parse/lexer/unicode_chars.rs
+++ b/src/libsyntax/parse/lexer/unicode_chars.rs
@@ -3,7 +3,7 @@
 
 use super::StringReader;
 use errors::{Applicability, DiagnosticBuilder};
-use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION, symbol::kw};
+use syntax_pos::{BytePos, Pos, Span, symbol::kw};
 use crate::parse::token;
 
 #[rustfmt::skip] // for line breaks
@@ -343,7 +343,7 @@ crate fn check_for_substitution<'a>(
         None => return None,
     };
 
-    let span = Span::new(pos, pos + Pos::from_usize(ch.len_utf8()), NO_EXPANSION);
+    let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8()));
 
     let (ascii_name, token) = match ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) {
         Some((_ascii_char, ascii_name, token)) => (ascii_name, token),
@@ -362,10 +362,9 @@ crate fn check_for_substitution<'a>(
             ascii_char, ascii_name
         );
         err.span_suggestion(
-            Span::new(
+            Span::with_root_ctxt(
                 pos,
                 pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
-                NO_EXPANSION,
             ),
             &msg,
             format!("\"{}\"", s),
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 80aa7a35266..b1f3612a839 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -16,6 +16,7 @@ use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic,
 use rustc_data_structures::sync::{Lrc, Lock, Once};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
 use syntax_pos::edition::Edition;
+use syntax_pos::hygiene::ExpnId;
 
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use std::borrow::Cow;
@@ -62,7 +63,11 @@ pub struct ParseSess {
     pub let_chains_spans: Lock<Vec<Span>>,
     // Places where `async || ..` exprs were used and should be feature gated.
     pub async_closure_spans: Lock<Vec<Span>>,
+    // Places where `yield e?` exprs were used and should be feature gated.
+    pub yield_spans: Lock<Vec<Span>>,
     pub injected_crate_name: Once<Symbol>,
+    // Places where or-patterns e.g. `Some(Foo | Bar)` were used and should be feature gated.
+    pub or_pattern_spans: Lock<Vec<Span>>,
 }
 
 impl ParseSess {
@@ -86,12 +91,14 @@ impl ParseSess {
             included_mod_stack: Lock::new(vec![]),
             source_map,
             buffered_lints: Lock::new(vec![]),
-            edition: Edition::from_session(),
+            edition: ExpnId::root().expn_data().edition,
             ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
             param_attr_spans: Lock::new(Vec::new()),
             let_chains_spans: Lock::new(Vec::new()),
             async_closure_spans: Lock::new(Vec::new()),
+            yield_spans: Lock::new(Vec::new()),
             injected_crate_name: Once::new(),
+            or_pattern_spans: Lock::new(Vec::new()),
         }
     }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 1c1428c5713..89725d8b339 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -13,7 +13,6 @@ mod generics;
 use crate::ast::{self, AttrStyle, Attribute, Arg, BindingMode, StrStyle, SelfKind};
 use crate::ast::{FnDecl, Ident, IsAsync, MacDelimiter, Mutability, TyKind};
 use crate::ast::{Visibility, VisibilityKind, Unsafety, CrateSugar};
-use crate::ext::hygiene::SyntaxContext;
 use crate::source_map::{self, respan};
 use crate::parse::{SeqSep, literal, token};
 use crate::parse::lexer::UnmatchedBrace;
@@ -1101,7 +1100,7 @@ impl<'a> Parser<'a> {
 
     crate fn process_potential_macro_variable(&mut self) {
         self.token = match self.token.kind {
-            token::Dollar if self.token.span.ctxt() != SyntaxContext::empty() &&
+            token::Dollar if self.token.span.from_expansion() &&
                              self.look_ahead(1, |t| t.is_ident()) => {
                 self.bump();
                 let name = match self.token.kind {
@@ -1236,7 +1235,7 @@ impl<'a> Parser<'a> {
 
         let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
 
-        if c_variadic && args.is_empty() {
+        if c_variadic && args.len() <= 1 {
             self.span_err(sp,
                           "C-variadic function must be declared with at least one named argument");
         }
diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs
index 4432c1329cb..ccc6bd15067 100644
--- a/src/libsyntax/parse/parser/expr.rs
+++ b/src/libsyntax/parse/parser/expr.rs
@@ -8,13 +8,13 @@ use crate::ast::{self, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode};
 use crate::ast::{Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm};
 use crate::ast::{Ty, TyKind, FunctionRetTy, Arg, FnDecl};
 use crate::ast::{BinOpKind, BinOp, UnOp};
-use crate::ast::{Mac_, AnonConst, Field};
+use crate::ast::{Mac, AnonConst, Field};
 
 use crate::parse::classify;
 use crate::parse::token::{self, Token};
 use crate::parse::diagnostics::{Error};
 use crate::print::pprust;
-use crate::source_map::{self, respan, Span};
+use crate::source_map::{self, Span};
 use crate::symbol::{kw, sym};
 use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
 
@@ -224,6 +224,10 @@ impl<'a> Parser<'a> {
                 self.err_dotdotdot_syntax(self.token.span);
             }
 
+            if self.token == token::LArrow {
+                self.err_larrow_operator(self.token.span);
+            }
+
             self.bump();
             if op.is_comparison() {
                 self.check_no_chained_comparison(&lhs, &op);
@@ -993,6 +997,9 @@ impl<'a> Parser<'a> {
                     } else {
                         ex = ExprKind::Yield(None);
                     }
+
+                    let span = lo.to(hi);
+                    self.sess.yield_spans.borrow_mut().push(span);
                 } else if self.eat_keyword(kw::Let) {
                     return self.parse_let_expr(attrs);
                 } else if is_span_rust_2018 && self.eat_keyword(kw::Await) {
@@ -1007,12 +1014,13 @@ impl<'a> Parser<'a> {
                         // MACRO INVOCATION expression
                         let (delim, tts) = self.expect_delimited_token_tree()?;
                         hi = self.prev_span;
-                        ex = ExprKind::Mac(respan(lo.to(hi), Mac_ {
+                        ex = ExprKind::Mac(Mac {
                             path,
                             tts,
                             delim,
+                            span: lo.to(hi),
                             prior_type_ascription: self.last_type_ascription,
-                        }));
+                        });
                     } else if self.check(&token::OpenDelim(token::Brace)) {
                         if let Some(expr) = self.maybe_parse_struct_expr(lo, &path, &attrs) {
                             return expr;
@@ -1199,7 +1207,7 @@ impl<'a> Parser<'a> {
         if self.eat_keyword(kw::Else) || !cond.returns() {
             let sp = self.sess.source_map().next_point(lo);
             let mut err = self.diagnostic()
-                .struct_span_err(sp, "missing condition for `if` statemement");
+                .struct_span_err(sp, "missing condition for `if` expression");
             err.span_label(sp, "expected if condition here");
             return Err(err)
         }
@@ -1444,6 +1452,7 @@ impl<'a> Parser<'a> {
             guard,
             body: expr,
             span: lo.to(hi),
+            id: ast::DUMMY_NODE_ID,
         })
     }
 
@@ -1599,6 +1608,7 @@ impl<'a> Parser<'a> {
                         expr: self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()),
                         is_shorthand: false,
                         attrs: ThinVec::new(),
+                        id: ast::DUMMY_NODE_ID,
                     });
                 }
             }
@@ -1684,6 +1694,7 @@ impl<'a> Parser<'a> {
             expr,
             is_shorthand,
             attrs: attrs.into(),
+            id: ast::DUMMY_NODE_ID,
         })
     }
 
@@ -1702,6 +1713,19 @@ impl<'a> Parser<'a> {
             .emit();
     }
 
+    fn err_larrow_operator(&self, span: Span) {
+        self.struct_span_err(
+            span,
+            "unexpected token: `<-`"
+        ).span_suggestion(
+            span,
+            "if you meant to write a comparison against a negative value, add a \
+             space in between `<` and `-`",
+            "< -".to_string(),
+            Applicability::MaybeIncorrect
+        ).emit();
+    }
+
     fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
         ExprKind::AssignOp(binop, lhs, rhs)
     }
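
An illustration (not from the patch) of what the new `err_larrow_operator` diagnostic above targets: a `<` immediately followed by `-` reaches the parser as a single `LArrow` token after gluing, and since `<-` is not a valid operator the parser now suggests reading it as a comparison against a negative value:

fn main() {
    let x = 3;
    // if x<-1 { }   // now: error "unexpected token: `<-`", suggestion `x < -1`
    if x < -1 {}     // what the suggestion produces
}
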
diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs
index e85ef9cc974..72819c99660 100644
--- a/src/libsyntax/parse/parser/item.rs
+++ b/src/libsyntax/parse/parser/item.rs
@@ -10,7 +10,7 @@ use crate::ast::{Visibility, VisibilityKind, Mutability, FnDecl, FnHeader};
 use crate::ast::{ForeignItem, ForeignItemKind};
 use crate::ast::{Ty, TyKind, GenericBounds, TraitRef};
 use crate::ast::{EnumDef, VariantData, StructField, AnonConst};
-use crate::ast::{Mac, Mac_, MacDelimiter};
+use crate::ast::{Mac, MacDelimiter};
 use crate::ext::base::DummyResult;
 use crate::parse::token;
 use crate::parse::parser::maybe_append;
@@ -530,12 +530,13 @@ impl<'a> Parser<'a> {
             }
 
             let hi = self.prev_span;
-            let mac = respan(mac_lo.to(hi), Mac_ {
+            let mac = Mac {
                 path,
                 tts,
                 delim,
+                span: mac_lo.to(hi),
                 prior_type_ascription: self.last_type_ascription,
-            });
+            };
             let item =
                 self.mk_item(lo.to(hi), Ident::invalid(), ItemKind::Mac(mac), visibility, attrs);
             return Ok(Some(item));
@@ -604,12 +605,13 @@ impl<'a> Parser<'a> {
                 self.expect(&token::Semi)?;
             }
 
-            Ok(Some(respan(lo.to(self.prev_span), Mac_ {
+            Ok(Some(Mac {
                 path,
                 tts,
                 delim,
+                span: lo.to(self.prev_span),
                 prior_type_ascription: self.last_type_ascription,
-            })))
+            }))
         } else {
             Ok(None)
         }
@@ -1564,14 +1566,15 @@ impl<'a> Parser<'a> {
                 None
             };
 
-            let vr = ast::Variant_ {
+            let vr = ast::Variant {
                 ident,
                 id: ast::DUMMY_NODE_ID,
                 attrs: variant_attrs,
                 data: struct_def,
                 disr_expr,
+                span: vlo.to(self.prev_span),
             };
-            variants.push(respan(vlo.to(self.prev_span), vr));
+            variants.push(vr);
 
             if !self.eat(&token::Comma) {
                 if self.token.is_ident() && !self.token.is_reserved_ident() {
diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs
index 58a7ffba948..3f6f87b1c44 100644
--- a/src/libsyntax/parse/parser/module.rs
+++ b/src/libsyntax/parse/parser/module.rs
@@ -60,7 +60,7 @@ impl<'a> Parser<'a> {
                 // Record that we fetched the mod from an external file
                 if warn {
                     let attr = attr::mk_attr_outer(
-                        attr::mk_word_item(Ident::with_empty_ctxt(sym::warn_directory_ownership)));
+                        attr::mk_word_item(Ident::with_dummy_span(sym::warn_directory_ownership)));
                     attr::mark_known(&attr);
                     attrs.push(attr);
                 }
diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs
index 5cc428a4df1..fd458aec743 100644
--- a/src/libsyntax/parse/parser/pat.rs
+++ b/src/libsyntax/parse/parser/pat.rs
@@ -2,8 +2,8 @@ use super::{Parser, PResult, PathStyle};
 
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use crate::ptr::P;
-use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac_};
-use crate::ast::{BindingMode, Ident, Mutability, Expr, ExprKind};
+use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac};
+use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind};
 use crate::parse::token::{self};
 use crate::print::pprust;
 use crate::source_map::{respan, Span, Spanned};
@@ -14,7 +14,10 @@ use errors::{Applicability, DiagnosticBuilder};
 
 impl<'a> Parser<'a> {
     /// Parses a pattern.
-    pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
+    pub fn parse_pat(
+        &mut self,
+        expected: Option<&'static str>
+    ) -> PResult<'a, P<Pat>> {
         self.parse_pat_with_range_pat(true, expected)
     }
 
@@ -97,6 +100,34 @@ impl<'a> Parser<'a> {
         Ok(())
     }
 
+    /// Parses a pattern that may be an or-pattern (e.g. `Some(Foo | Bar)`).
+    fn parse_pat_with_or(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
+        // Parse the first pattern.
+        let first_pat = self.parse_pat(expected)?;
+
+        // If the next token is not a `|`, this is not an or-pattern and
+        // we should exit here.
+        if !self.check(&token::BinOp(token::Or)) {
+            return Ok(first_pat)
+        }
+
+        let lo = first_pat.span;
+
+        let mut pats = vec![first_pat];
+
+        while self.eat(&token::BinOp(token::Or)) {
+            pats.push(self.parse_pat_with_range_pat(
+                true, expected
+            )?);
+        }
+
+        let or_pattern_span = lo.to(self.prev_span);
+
+        self.sess.or_pattern_spans.borrow_mut().push(or_pattern_span);
+
+        Ok(self.mk_pat(or_pattern_span, PatKind::Or(pats)))
+    }
+
     /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
     /// allowed).
     fn parse_pat_with_range_pat(
@@ -108,93 +139,52 @@ impl<'a> Parser<'a> {
         maybe_whole!(self, NtPat, |x| x);
 
         let lo = self.token.span;
-        let pat;
-        match self.token.kind {
-            token::BinOp(token::And) | token::AndAnd => {
-                // Parse &pat / &mut pat
-                self.expect_and()?;
-                let mutbl = self.parse_mutability();
-                if let token::Lifetime(name) = self.token.kind {
-                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name));
-                    err.span_label(self.token.span, "unexpected lifetime");
-                    return Err(err);
-                }
-                let subpat = self.parse_pat_with_range_pat(false, expected)?;
-                pat = PatKind::Ref(subpat, mutbl);
-            }
-            token::OpenDelim(token::Paren) => {
-                // Parse a tuple or parenthesis pattern.
-                let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?;
-
-                // Here, `(pat,)` is a tuple pattern.
-                // For backward compatibility, `(..)` is a tuple pattern as well.
-                pat = if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
-                    PatKind::Paren(fields.into_iter().nth(0).unwrap())
-                } else {
-                    PatKind::Tuple(fields)
-                };
-            }
+        let pat = match self.token.kind {
+            token::BinOp(token::And) | token::AndAnd => self.parse_pat_deref(expected)?,
+            token::OpenDelim(token::Paren) => self.parse_pat_tuple_or_parens()?,
             token::OpenDelim(token::Bracket) => {
                 // Parse `[pat, pat,...]` as a slice pattern.
-                let (slice, _) = self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?;
-                pat = PatKind::Slice(slice);
+                PatKind::Slice(self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?.0)
             }
             token::DotDot => {
                 self.bump();
-                pat = if self.is_pat_range_end_start() {
+                if self.is_pat_range_end_start() {
                     // Parse `..42` for recovery.
                     self.parse_pat_range_to(RangeEnd::Excluded, "..")?
                 } else {
                     // A rest pattern `..`.
                     PatKind::Rest
-                };
+                }
             }
             token::DotDotEq => {
                 // Parse `..=42` for recovery.
                 self.bump();
-                pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")?;
+                self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")?
             }
             token::DotDotDot => {
                 // Parse `...42` for recovery.
                 self.bump();
-                pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotDot), "...")?;
+                self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotDot), "...")?
             }
             // At this point, token != &, &&, (, [
             _ => if self.eat_keyword(kw::Underscore) {
                 // Parse _
-                pat = PatKind::Wild;
+                PatKind::Wild
             } else if self.eat_keyword(kw::Mut) {
-                // Parse mut ident @ pat / mut ref ident @ pat
-                let mutref_span = self.prev_span.to(self.token.span);
-                let binding_mode = if self.eat_keyword(kw::Ref) {
-                    self.diagnostic()
-                        .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
-                        .span_suggestion(
-                            mutref_span,
-                            "try switching the order",
-                            "ref mut".into(),
-                            Applicability::MachineApplicable
-                        ).emit();
-                    BindingMode::ByRef(Mutability::Mutable)
-                } else {
-                    BindingMode::ByValue(Mutability::Mutable)
-                };
-                pat = self.parse_pat_ident(binding_mode)?;
+                self.recover_pat_ident_mut_first()?
             } else if self.eat_keyword(kw::Ref) {
                 // Parse ref ident @ pat / ref mut ident @ pat
                 let mutbl = self.parse_mutability();
-                pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
+                self.parse_pat_ident(BindingMode::ByRef(mutbl))?
             } else if self.eat_keyword(kw::Box) {
-                // Parse box pat
-                let subpat = self.parse_pat_with_range_pat(false, None)?;
-                pat = PatKind::Box(subpat);
+                // Parse `box pat`
+                PatKind::Box(self.parse_pat_with_range_pat(false, None)?)
             } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
                       self.parse_as_ident() {
-                // Parse ident @ pat
+                // Parse `ident @ pat`
                 // This can give false positives and parse nullary enums,
-                // they are dealt with later in resolve
-                let binding_mode = BindingMode::ByValue(Mutability::Immutable);
-                pat = self.parse_pat_ident(binding_mode)?;
+                // they are dealt with later in resolve.
+                self.parse_pat_ident(BindingMode::ByValue(Mutability::Immutable))?
             } else if self.token.is_path_start() {
                 // Parse pattern starting with a path
                 let (qself, path) = if self.eat_lt() {
@@ -206,136 +196,189 @@ impl<'a> Parser<'a> {
                     (None, self.parse_path(PathStyle::Expr)?)
                 };
                 match self.token.kind {
-                    token::Not if qself.is_none() => {
-                        // Parse macro invocation
-                        self.bump();
-                        let (delim, tts) = self.expect_delimited_token_tree()?;
-                        let mac = respan(lo.to(self.prev_span), Mac_ {
-                            path,
-                            tts,
-                            delim,
-                            prior_type_ascription: self.last_type_ascription,
-                        });
-                        pat = PatKind::Mac(mac);
-                    }
+                    token::Not if qself.is_none() => self.parse_pat_mac_invoc(lo, path)?,
                     token::DotDotDot | token::DotDotEq | token::DotDot => {
-                        let (end_kind, form) = match self.token.kind {
-                            token::DotDot => (RangeEnd::Excluded, ".."),
-                            token::DotDotDot => (RangeEnd::Included(RangeSyntax::DotDotDot), "..."),
-                            token::DotDotEq => (RangeEnd::Included(RangeSyntax::DotDotEq), "..="),
-                            _ => panic!("can only parse `..`/`...`/`..=` for ranges \
-                                         (checked above)"),
-                        };
-                        let op_span = self.token.span;
-                        // Parse range
-                        let span = lo.to(self.prev_span);
-                        let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
-                        self.bump();
-                        let end = self.parse_pat_range_end_opt(&begin, form)?;
-                        pat = PatKind::Range(begin, end, respan(op_span, end_kind));
+                        self.parse_pat_range_starting_with_path(lo, qself, path)?
                     }
-                    token::OpenDelim(token::Brace) => {
-                        if qself.is_some() {
-                            let msg = "unexpected `{` after qualified path";
-                            let mut err = self.fatal(msg);
-                            err.span_label(self.token.span, msg);
-                            return Err(err);
-                        }
-                        // Parse struct pattern
-                        self.bump();
-                        let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
-                            e.emit();
-                            self.recover_stmt();
-                            (vec![], true)
-                        });
-                        self.bump();
-                        pat = PatKind::Struct(path, fields, etc);
-                    }
-                    token::OpenDelim(token::Paren) => {
-                        if qself.is_some() {
-                            let msg = "unexpected `(` after qualified path";
-                            let mut err = self.fatal(msg);
-                            err.span_label(self.token.span, msg);
-                            return Err(err);
-                        }
-                        // Parse tuple struct or enum pattern
-                        let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?;
-                        pat = PatKind::TupleStruct(path, fields)
-                    }
-                    _ => pat = PatKind::Path(qself, path),
+                    token::OpenDelim(token::Brace) => self.parse_pat_struct(qself, path)?,
+                    token::OpenDelim(token::Paren) => self.parse_pat_tuple_struct(qself, path)?,
+                    _ => PatKind::Path(qself, path),
                 }
             } else {
                 // Try to parse everything else as literal with optional minus
                 match self.parse_literal_maybe_minus() {
-                    Ok(begin) => {
-                        let op_span = self.token.span;
-                        if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
-                                self.check(&token::DotDotDot) {
-                            let (end_kind, form) = if self.eat(&token::DotDotDot) {
-                                (RangeEnd::Included(RangeSyntax::DotDotDot), "...")
-                            } else if self.eat(&token::DotDotEq) {
-                                (RangeEnd::Included(RangeSyntax::DotDotEq), "..=")
-                            } else if self.eat(&token::DotDot) {
-                                (RangeEnd::Excluded, "..")
-                            } else {
-                                panic!("impossible case: we already matched \
-                                        on a range-operator token")
-                            };
-                            let end = self.parse_pat_range_end_opt(&begin, form)?;
-                            pat = PatKind::Range(begin, end, respan(op_span, end_kind))
-                        } else {
-                            pat = PatKind::Lit(begin);
-                        }
-                    }
-                    Err(mut err) => {
-                        self.cancel(&mut err);
-                        let expected = expected.unwrap_or("pattern");
-                        let msg = format!(
-                            "expected {}, found {}",
-                            expected,
-                            self.this_token_descr(),
-                        );
-                        let mut err = self.fatal(&msg);
-                        err.span_label(self.token.span, format!("expected {}", expected));
-                        let sp = self.sess.source_map().start_point(self.token.span);
-                        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-                            self.sess.expr_parentheses_needed(&mut err, *sp, None);
-                        }
-                        return Err(err);
+                    Ok(begin)
+                        if self.check(&token::DotDot)
+                            || self.check(&token::DotDotEq)
+                            || self.check(&token::DotDotDot) =>
+                    {
+                        self.parse_pat_range_starting_with_lit(begin)?
                     }
+                    Ok(begin) => PatKind::Lit(begin),
+                    Err(err) => return self.fatal_unexpected_non_pat(err, expected),
                 }
             }
-        }
+        };
 
         let pat = self.mk_pat(lo.to(self.prev_span), pat);
         let pat = self.maybe_recover_from_bad_qpath(pat, true)?;
 
         if !allow_range_pat {
-            match pat.node {
-                PatKind::Range(
-                    _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
-                ) => {},
-                PatKind::Range(..) => {
-                    let mut err = self.struct_span_err(
-                        pat.span,
-                        "the range pattern here has ambiguous interpretation",
-                    );
-                    err.span_suggestion(
-                        pat.span,
-                        "add parentheses to clarify the precedence",
-                        format!("({})", pprust::pat_to_string(&pat)),
-                        // "ambiguous interpretation" implies that we have to be guessing
-                        Applicability::MaybeIncorrect
-                    );
-                    return Err(err);
-                }
-                _ => {}
-            }
+            self.ban_pat_range_if_ambiguous(&pat)?
         }
 
         Ok(pat)
     }
 
+    /// Ban a range pattern if it has an ambiguous interpretation.
+    fn ban_pat_range_if_ambiguous(&self, pat: &Pat) -> PResult<'a, ()> {
+        match pat.node {
+            PatKind::Range(
+                .., Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
+            ) => return Ok(()),
+            PatKind::Range(..) => {}
+            _ => return Ok(()),
+        }
+
+        let mut err = self.struct_span_err(
+            pat.span,
+            "the range pattern here has ambiguous interpretation",
+        );
+        err.span_suggestion(
+            pat.span,
+            "add parentheses to clarify the precedence",
+            format!("({})", pprust::pat_to_string(&pat)),
+            // "ambiguous interpretation" implies that we have to be guessing
+            Applicability::MaybeIncorrect
+        );
+        Err(err)
+    }
+
+    /// Parse `&pat` / `&mut pat`.
+    fn parse_pat_deref(&mut self, expected: Option<&'static str>) -> PResult<'a, PatKind> {
+        self.expect_and()?;
+        let mutbl = self.parse_mutability();
+
+        if let token::Lifetime(name) = self.token.kind {
+            let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name));
+            err.span_label(self.token.span, "unexpected lifetime");
+            return Err(err);
+        }
+
+        let subpat = self.parse_pat_with_range_pat(false, expected)?;
+        Ok(PatKind::Ref(subpat, mutbl))
+    }
+
+    /// Parse a tuple or parenthesis pattern.
+    fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> {
+        let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| {
+            p.parse_pat_with_or(None)
+        })?;
+
+        // Here, `(pat,)` is a tuple pattern.
+        // For backward compatibility, `(..)` is a tuple pattern as well.
+        Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
+            PatKind::Paren(fields.into_iter().nth(0).unwrap())
+        } else {
+            PatKind::Tuple(fields)
+        })
+    }
+
+    /// Recover on `mut ref? ident @ pat` and suggest
+    /// that the order of `mut` and `ref` is incorrect.
+    fn recover_pat_ident_mut_first(&mut self) -> PResult<'a, PatKind> {
+        let mutref_span = self.prev_span.to(self.token.span);
+        let binding_mode = if self.eat_keyword(kw::Ref) {
+            self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
+                .span_suggestion(
+                    mutref_span,
+                    "try switching the order",
+                    "ref mut".into(),
+                    Applicability::MachineApplicable
+                )
+                .emit();
+            BindingMode::ByRef(Mutability::Mutable)
+        } else {
+            BindingMode::ByValue(Mutability::Mutable)
+        };
+        self.parse_pat_ident(binding_mode)
+    }
+
+    /// Parse macro invocation
+    fn parse_pat_mac_invoc(&mut self, lo: Span, path: Path) -> PResult<'a, PatKind> {
+        self.bump();
+        let (delim, tts) = self.expect_delimited_token_tree()?;
+        let mac = Mac {
+            path,
+            tts,
+            delim,
+            span: lo.to(self.prev_span),
+            prior_type_ascription: self.last_type_ascription,
+        };
+        Ok(PatKind::Mac(mac))
+    }
+
+    /// Parse a range pattern `$path $form $end?` where `$form = ".." | "..." | "..=" ;`.
+    /// The `$path` has already been parsed and the next token is the `$form`.
+    fn parse_pat_range_starting_with_path(
+        &mut self,
+        lo: Span,
+        qself: Option<QSelf>,
+        path: Path
+    ) -> PResult<'a, PatKind> {
+        let (end_kind, form) = match self.token.kind {
+            token::DotDot => (RangeEnd::Excluded, ".."),
+            token::DotDotDot => (RangeEnd::Included(RangeSyntax::DotDotDot), "..."),
+            token::DotDotEq => (RangeEnd::Included(RangeSyntax::DotDotEq), "..="),
+            _ => panic!("can only parse `..`/`...`/`..=` for ranges (checked above)"),
+        };
+        let op_span = self.token.span;
+        // Parse range
+        let span = lo.to(self.prev_span);
+        let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
+        self.bump();
+        let end = self.parse_pat_range_end_opt(&begin, form)?;
+        Ok(PatKind::Range(begin, end, respan(op_span, end_kind)))
+    }
+
+    /// Parse a range pattern `$literal $form $end?` where `$form = ".." | "..." | "..=" ;`.
+    /// The `$literal` has already been parsed and the next token is the `$form`.
+    fn parse_pat_range_starting_with_lit(&mut self, begin: P<Expr>) -> PResult<'a, PatKind> {
+        let op_span = self.token.span;
+        let (end_kind, form) = if self.eat(&token::DotDotDot) {
+            (RangeEnd::Included(RangeSyntax::DotDotDot), "...")
+        } else if self.eat(&token::DotDotEq) {
+            (RangeEnd::Included(RangeSyntax::DotDotEq), "..=")
+        } else if self.eat(&token::DotDot) {
+            (RangeEnd::Excluded, "..")
+        } else {
+            panic!("impossible case: we already matched on a range-operator token")
+        };
+        let end = self.parse_pat_range_end_opt(&begin, form)?;
+        Ok(PatKind::Range(begin, end, respan(op_span, end_kind)))
+    }
+
+    fn fatal_unexpected_non_pat(
+        &mut self,
+        mut err: DiagnosticBuilder<'a>,
+        expected: Option<&'static str>,
+    ) -> PResult<'a, P<Pat>> {
+        self.cancel(&mut err);
+
+        let expected = expected.unwrap_or("pattern");
+        let msg = format!("expected {}, found {}", expected, self.this_token_descr());
+
+        let mut err = self.fatal(&msg);
+        err.span_label(self.token.span, format!("expected {}", expected));
+
+        let sp = self.sess.source_map().start_point(self.token.span);
+        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            self.sess.expr_parentheses_needed(&mut err, *sp, None);
+        }
+
+        Err(err)
+    }
+
     // Helper function to decide whether to parse as ident binding
     // or to try to do something more complex like range patterns.
     fn parse_as_ident(&mut self) -> bool {
@@ -421,11 +464,9 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `ident` or `ident @ pat`.
-    /// used by the copy foo and ref foo patterns to give a good
+    /// Used by the copy foo and ref foo patterns to give a good
     /// error message when parsing mistakes like `ref foo(a, b)`.
-    fn parse_pat_ident(&mut self,
-                       binding_mode: ast::BindingMode)
-                       -> PResult<'a, PatKind> {
+    fn parse_pat_ident(&mut self, binding_mode: BindingMode) -> PResult<'a, PatKind> {
         let ident = self.parse_ident()?;
         let sub = if self.eat(&token::At) {
             Some(self.parse_pat(Some("binding pattern"))?)
@@ -433,23 +474,54 @@ impl<'a> Parser<'a> {
             None
         };
 
-        // just to be friendly, if they write something like
-        //   ref Some(i)
-        // we end up here with ( as the current token.  This shortly
-        // leads to a parse error.  Note that if there is no explicit
+        // Just to be friendly, if they write something like `ref Some(i)`,
+        // we end up here with `(` as the current token.
+        // This shortly leads to a parse error. Note that if there is no explicit
         // binding mode then we do not end up here, because the lookahead
-        // will direct us over to parse_enum_variant()
+        // will direct us over to `parse_enum_variant()`.
         if self.token == token::OpenDelim(token::Paren) {
             return Err(self.span_fatal(
                 self.prev_span,
-                "expected identifier, found enum pattern"))
+                "expected identifier, found enum pattern",
+            ))
         }
 
         Ok(PatKind::Ident(binding_mode, ident, sub))
     }
 
+    /// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`).
+    fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+        if qself.is_some() {
+            let msg = "unexpected `{` after qualified path";
+            let mut err = self.fatal(msg);
+            err.span_label(self.token.span, msg);
+            return Err(err);
+        }
+
+        self.bump();
+        let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
+            e.emit();
+            self.recover_stmt();
+            (vec![], true)
+        });
+        self.bump();
+        Ok(PatKind::Struct(path, fields, etc))
+    }
+
+    /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
+    fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+        if qself.is_some() {
+            let msg = "unexpected `(` after qualified path";
+            let mut err = self.fatal(msg);
+            err.span_label(self.token.span, msg);
+            return Err(err);
+        }
+        let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat_with_or(None))?;
+        Ok(PatKind::TupleStruct(path, fields))
+    }
+
     /// Parses the fields of a struct-like pattern.
-    fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<Spanned<FieldPat>>, bool)> {
+    fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<FieldPat>, bool)> {
         let mut fields = Vec::new();
         let mut etc = false;
         let mut ate_comma = true;
@@ -482,17 +554,7 @@ impl<'a> Parser<'a> {
                 etc = true;
                 let mut etc_sp = self.token.span;
 
-                if self.token == token::DotDotDot { // Issue #46718
-                    // Accept `...` as if it were `..` to avoid further errors
-                    self.struct_span_err(self.token.span, "expected field pattern, found `...`")
-                        .span_suggestion(
-                            self.token.span,
-                            "to omit remaining fields, use one fewer `.`",
-                            "..".to_owned(),
-                            Applicability::MachineApplicable
-                        )
-                        .emit();
-                }
+                self.recover_one_fewer_dotdot();
                 self.bump();  // `..` || `...`
 
                 if self.token == token::CloseDelim(token::Brace) {
@@ -574,18 +636,31 @@ impl<'a> Parser<'a> {
         return Ok((fields, etc));
     }
 
-    fn parse_pat_field(
-        &mut self,
-        lo: Span,
-        attrs: Vec<Attribute>
-    ) -> PResult<'a, Spanned<FieldPat>> {
+    /// Recover on `...` as if it were `..` to avoid further errors.
+    /// See issue #46718.
+    fn recover_one_fewer_dotdot(&self) {
+        if self.token != token::DotDotDot {
+            return;
+        }
+
+        self.struct_span_err(self.token.span, "expected field pattern, found `...`")
+            .span_suggestion(
+                self.token.span,
+                "to omit remaining fields, use one fewer `.`",
+                "..".to_owned(),
+                Applicability::MachineApplicable
+            )
+            .emit();
+    }
+
+    fn parse_pat_field(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, FieldPat> {
         // Check if a colon exists one ahead. This means we're parsing a fieldname.
         let hi;
         let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
             // Parsing a pattern of the form "fieldname: pat"
             let fieldname = self.parse_field_name()?;
             self.bump();
-            let pat = self.parse_pat(None)?;
+            let pat = self.parse_pat_with_or(None)?;
             hi = pat.span;
             (pat, fieldname, false)
         } else {
@@ -613,14 +688,13 @@ impl<'a> Parser<'a> {
             (subpat, fieldname, true)
         };
 
-        Ok(Spanned {
+        Ok(FieldPat {
+            ident: fieldname,
+            pat: subpat,
+            is_shorthand,
+            attrs: attrs.into(),
+            id: ast::DUMMY_NODE_ID,
             span: lo.to(hi),
-            node: FieldPat {
-                ident: fieldname,
-                pat: subpat,
-                is_shorthand,
-                attrs: attrs.into(),
-           }
         })
     }
 
diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs
index f182edcbff4..c911caba4cd 100644
--- a/src/libsyntax/parse/parser/stmt.rs
+++ b/src/libsyntax/parse/parser/stmt.rs
@@ -5,7 +5,7 @@ use super::path::PathStyle;
 use crate::ptr::P;
 use crate::{maybe_whole, ThinVec};
 use crate::ast::{self, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
-use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac_, MacDelimiter};
+use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
 use crate::ext::base::DummyResult;
 use crate::parse::{classify, DirectoryOwnership};
 use crate::parse::diagnostics::Error;
@@ -99,12 +99,13 @@ impl<'a> Parser<'a> {
                 MacStmtStyle::NoBraces
             };
 
-            let mac = respan(lo.to(hi), Mac_ {
+            let mac = Mac {
                 path,
                 tts,
                 delim,
+                span: lo.to(hi),
                 prior_type_ascription: self.last_type_ascription,
-            });
+            };
             let node = if delim == MacDelimiter::Brace ||
                           self.token == token::Semi || self.token == token::Eof {
                 StmtKind::Mac(P((mac, style, attrs.into())))
@@ -167,7 +168,22 @@ impl<'a> Parser<'a> {
                     if self.token == token::Semi {
                         unused_attrs(&attrs, self);
                         self.bump();
-                        return Ok(None);
+                        let mut last_semi = lo;
+                        while self.token == token::Semi {
+                            last_semi = self.token.span;
+                            self.bump();
+                        }
+                        // We are encoding a string of semicolons as an
+                        // empty tuple that spans the excess semicolons
+                        // to preserve this info until the lint stage.
+                        return Ok(Some(Stmt {
+                            id: ast::DUMMY_NODE_ID,
+                            span: lo.to(last_semi),
+                            node: StmtKind::Semi(self.mk_expr(lo.to(last_semi),
+                                ExprKind::Tup(Vec::new()),
+                                ThinVec::new()
+                            )),
+                        }));
                     }
 
                     if self.token == token::CloseDelim(token::Brace) {
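
The stmt.rs hunk above changes the excess-semicolon path: instead of returning `Ok(None)`, the parser eats the whole run of `;` tokens, remembers the span of the last one, and emits a `StmtKind::Semi` wrapping an empty tuple expression spanning `lo` to that last semicolon, so a later lint can still see the redundant semicolons. A rough sketch of the span bookkeeping, with stand-in token types rather than the real `token::Semi`/`Span`:

    #[derive(Clone, Copy, PartialEq)]
    enum Tok { Semi, Other }

    // Consume a run of `;` tokens starting at `pos` and report the range they cover,
    // mirroring the idea of spanning the synthetic empty-tuple statement lo..last_semi.
    fn eat_extra_semis(toks: &[Tok], mut pos: usize) -> (usize, usize, usize) {
        let lo = pos;
        let mut last_semi = pos;
        while pos < toks.len() && toks[pos] == Tok::Semi {
            last_semi = pos;
            pos += 1;
        }
        // (range start, range end, next position to parse from)
        (lo, last_semi, pos)
    }

    fn main() {
        let toks = [Tok::Semi, Tok::Semi, Tok::Semi, Tok::Other];
        assert_eq!(eat_extra_semis(&toks, 0), (0, 2, 3));
    }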
diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs
index 1eb3d441e69..337702b8d30 100644
--- a/src/libsyntax/parse/parser/ty.rs
+++ b/src/libsyntax/parse/parser/ty.rs
@@ -4,9 +4,9 @@ use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath};
 use crate::ptr::P;
 use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
 use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
-use crate::ast::{Mutability, AnonConst, FnDecl, Mac_};
+use crate::ast::{Mutability, AnonConst, FnDecl, Mac};
 use crate::parse::token::{self, Token};
-use crate::source_map::{respan, Span};
+use crate::source_map::Span;
 use crate::symbol::{kw};
 
 use rustc_target::spec::abi::Abi;
@@ -175,13 +175,14 @@ impl<'a> Parser<'a> {
             if self.eat(&token::Not) {
                 // Macro invocation in type position
                 let (delim, tts) = self.expect_delimited_token_tree()?;
-                let node = Mac_ {
+                let mac = Mac {
                     path,
                     tts,
                     delim,
+                    span: lo.to(self.prev_span),
                     prior_type_ascription: self.last_type_ascription,
                 };
-                TyKind::Mac(respan(lo.to(self.prev_span), node))
+                TyKind::Mac(mac)
             } else {
                 // Just a type path or bound list (trait object type) starting with a trait.
                 //   `Type`
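
In the ty.rs hunk, the macro node now carries `span: lo.to(self.prev_span)` directly instead of being wrapped by `respan`. For reference, `Span::to` joins two spans into a single span covering both; here is a toy sketch of that combinator with a stand-in `Span` that ignores syntax contexts:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    impl Span {
        // Join two spans into one that covers both.
        fn to(self, end: Span) -> Span {
            Span { lo: self.lo.min(end.lo), hi: self.hi.max(end.hi) }
        }
    }

    fn main() {
        let lo = Span { lo: 10, hi: 13 };    // e.g. the macro path
        let prev = Span { lo: 14, hi: 20 };  // e.g. the delimited token tree
        assert_eq!(lo.to(prev), Span { lo: 10, hi: 20 });
    }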
diff --git a/src/libsyntax/parse/tests.rs b/src/libsyntax/parse/tests.rs
index e619fd17fb5..6a789ef99d6 100644
--- a/src/libsyntax/parse/tests.rs
+++ b/src/libsyntax/parse/tests.rs
@@ -12,7 +12,7 @@ use crate::symbol::{kw, sym};
 use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
 use crate::tokenstream::{DelimSpan, TokenTree, TokenStream};
 use crate::with_default_globals;
-use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
+use syntax_pos::{Span, BytePos, Pos};
 
 use std::path::PathBuf;
 
@@ -27,7 +27,7 @@ fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
 
 // produce a syntax_pos::span
 fn sp(a: u32, b: u32) -> Span {
-    Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
+    Span::with_root_ctxt(BytePos(a), BytePos(b))
 }
 
 /// Parse a string, return an expr
@@ -172,8 +172,8 @@ fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
     impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
         fn visit_pat(&mut self, p: &'a ast::Pat) {
             match p.node {
-                PatKind::Ident(_ , ref spannedident, _) => {
-                    self.spans.push(spannedident.span.clone());
+                PatKind::Ident(_ , ref ident, _) => {
+                    self.spans.push(ident.span.clone());
                 }
                 _ => {
                     crate::visit::walk_pat(self, p);
@@ -273,7 +273,7 @@ fn ttdelim_span() {
             "foo!( fn main() { body } )".to_string(), &sess).unwrap();
 
         let tts: Vec<_> = match expr.node {
-            ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
+            ast::ExprKind::Mac(ref mac) => mac.stream().trees().collect(),
             _ => panic!("not a macro"),
         };
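
The test helper above switches from `Span::new(a, b, NO_EXPANSION)` to `Span::with_root_ctxt(a, b)`, a constructor that fixes the syntax context to the root (no-expansion) one so callers no longer pass the constant explicitly. A small sketch of the idea with a stand-in `Span` type (the real constructor lives on `syntax_pos::Span`):

    struct Span { lo: u32, hi: u32, ctxt: u32 }

    const ROOT_CTXT: u32 = 0; // stand-in for the root SyntaxContext

    impl Span {
        fn new(lo: u32, hi: u32, ctxt: u32) -> Span {
            Span { lo, hi, ctxt }
        }
        // Convenience constructor: a span in the root context.
        fn with_root_ctxt(lo: u32, hi: u32) -> Span {
            Span::new(lo, hi, ROOT_CTXT)
        }
    }

    fn main() {
        let sp = Span::with_root_ctxt(0, 1);
        println!("{}..{} ctxt={}", sp.lo, sp.hi, sp.ctxt);
    }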
 
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index be800b4de66..1865f925165 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -551,7 +551,7 @@ impl Token {
         }
     }
 
-    crate fn glue(self, joint: Token) -> Option<Token> {
+    crate fn glue(&self, joint: &Token) -> Option<Token> {
         let kind = match self.kind {
             Eq => match joint.kind {
                 Eq => EqEq,