Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                        58
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs   48
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs               2
-rw-r--r--  compiler/rustc_parse/src/lib.rs                              49
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                      10
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs              16
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs              118
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                     138
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                     105
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                       28
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs               21
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                      140
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                      84
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                      25
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                        36
15 files changed, 613 insertions, 265 deletions
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 1c2f9a9645f..1e65cc27154 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,12 +1,14 @@
-use rustc_ast::ast::AttrStyle;
+use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream};
 use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
 use rustc_lexer::unescape::{self, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
+use rustc_session::lint::builtin::RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX;
+use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{sym, Symbol};
-use rustc_span::{BytePos, Pos, Span};
+use rustc_span::{edition::Edition, BytePos, Pos, Span};
 
 use tracing::debug;
 
@@ -166,12 +168,18 @@ impl<'a> StringReader<'a> {
                 self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
             }
             rustc_lexer::TokenKind::Whitespace => return None,
-            rustc_lexer::TokenKind::Ident | rustc_lexer::TokenKind::RawIdent => {
+            rustc_lexer::TokenKind::Ident
+            | rustc_lexer::TokenKind::RawIdent
+            | rustc_lexer::TokenKind::UnknownPrefix => {
                 let is_raw_ident = token == rustc_lexer::TokenKind::RawIdent;
+                let is_unknown_prefix = token == rustc_lexer::TokenKind::UnknownPrefix;
                 let mut ident_start = start;
                 if is_raw_ident {
                     ident_start = ident_start + BytePos(2);
                 }
+                if is_unknown_prefix {
+                    self.report_unknown_prefix(start);
+                }
                 let sym = nfc_normalize(self.str_from(ident_start));
                 let span = self.mk_sp(start, self.pos);
                 self.sess.symbol_gallery.insert(sym, span);
@@ -491,6 +499,50 @@ impl<'a> StringReader<'a> {
         FatalError.raise()
     }
 
+    // RFC 3101 introduced the idea of (reserved) prefixes. As of Rust 2021,
+    // using an (unknown) prefix is an error. In earlier editions, however, they
+    // only result in an (allowed by default) lint, and are treated as regular
+    // identifier tokens.
+    fn report_unknown_prefix(&self, start: BytePos) {
+        let prefix_span = self.mk_sp(start, self.pos);
+        let prefix_str = self.str_from_to(start, self.pos);
+        let msg = format!("prefix `{}` is unknown", prefix_str);
+
+        let expn_data = prefix_span.ctxt().outer_expn_data();
+
+        if expn_data.edition >= Edition::Edition2021 {
+            // In Rust 2021, this is a hard error.
+            let mut err = self.sess.span_diagnostic.struct_span_err(prefix_span, &msg);
+            err.span_label(prefix_span, "unknown prefix");
+            if prefix_str == "rb" {
+                err.span_suggestion_verbose(
+                    prefix_span,
+                    "use `br` for a raw byte string",
+                    "br".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            } else if expn_data.is_root() {
+                err.span_suggestion_verbose(
+                    prefix_span.shrink_to_hi(),
+                    "consider inserting whitespace here",
+                    " ".into(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            err.note("prefixed identifiers and literals are reserved since Rust 2021");
+            err.emit();
+        } else {
+            // Before Rust 2021, only emit a lint for migration.
+            self.sess.buffer_lint_with_diagnostic(
+                &RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
+                prefix_span,
+                ast::CRATE_NODE_ID,
+                &msg,
+                BuiltinLintDiagnostics::ReservedPrefix(prefix_span),
+            );
+        }
+    }
+
     /// Note: It was decided to not add a test case, because it would be too big.
     /// <https://github.com/rust-lang/rust/pull/50296#issuecomment-392135180>
     fn report_too_many_hashes(&self, start: BytePos, found: usize) -> ! {
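For context, a minimal sketch (not taken from this PR's test suite) of the edition split that report_unknown_prefix implements; the prefix name here is made up:

    macro_rules! demo {
        ($a:tt $b:tt) => {};
    }

    // Rust 2015/2018: `hey"hello"` lexes as two tokens (`hey`, `"hello"`), and only
    // the rust_2021_prefixes_incompatible_syntax migration lint is buffered.
    // Rust 2021: the lexer emits the hard error "prefix `hey` is unknown" and
    // suggests inserting whitespace: `hey "hello"`.
    demo!(hey"hello");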
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index a580f0c55d0..aa6b424ce2b 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -153,16 +153,37 @@ pub(crate) fn emit_unescape_error(
         EscapeError::NonAsciiCharInByte => {
             assert!(mode.is_bytes());
             let (c, span) = last_char();
-            handler
-                .struct_span_err(span, "non-ASCII character in byte constant")
-                .span_label(span, "byte constant must be ASCII")
-                .span_suggestion(
+            let mut err = handler.struct_span_err(span, "non-ASCII character in byte constant");
+            err.span_label(span, "byte constant must be ASCII");
+            if (c as u32) <= 0xFF {
+                err.span_suggestion(
                     span,
-                    "use a \\xHH escape for a non-ASCII byte",
+                    &format!(
+                        "if you meant to use the unicode code point for '{}', use a \\xHH escape",
+                        c
+                    ),
                     format!("\\x{:X}", c as u32),
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+                    Applicability::MaybeIncorrect,
+                );
+            } else if matches!(mode, Mode::Byte) {
+                err.span_label(span, "this multibyte character does not fit into a single byte");
+            } else if matches!(mode, Mode::ByteStr) {
+                let mut utf8 = String::new();
+                utf8.push(c);
+                err.span_suggestion(
+                    span,
+                    &format!(
+                        "if you meant to use the UTF-8 encoding of '{}', use \\xHH escapes",
+                        c
+                    ),
+                    utf8.as_bytes()
+                        .iter()
+                        .map(|b: &u8| format!("\\x{:X}", *b))
+                        .fold("".to_string(), |a, c| a + &c),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            err.emit();
         }
         EscapeError::NonAsciiCharInByteString => {
             assert!(mode.is_bytes());
@@ -253,6 +274,17 @@ pub(crate) fn emit_unescape_error(
             let msg = "invalid trailing slash in literal";
             handler.struct_span_err(span, msg).span_label(span, msg).emit();
         }
+        EscapeError::UnskippedWhitespaceWarning => {
+            let (c, char_span) = last_char();
+            let msg =
+                format!("non-ASCII whitespace symbol '{}' is not skipped", c.escape_unicode());
+            handler.struct_span_warn(span, &msg).span_label(char_span, &msg).emit();
+        }
+        EscapeError::MultipleSkippedLinesWarning => {
+            let msg = "multiple lines skipped by escaped newline";
+            let bottom_msg = "skipping everything up to and including this point";
+            handler.struct_span_warn(span, msg).span_label(span, bottom_msg).emit();
+        }
     }
 }
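Illustrative inputs (assumed, not from this PR's tests) for the three byte-literal paths added above; each line is rejected, with the new suggestions shown in the comments:

    let _single = b'é';   // U+00E9 fits in one byte: suggests the escape, b'\xE9'
    let _string = b"中";  // byte string: suggests the UTF-8 bytes, b"\xE4\xB8\xAD"
    let _wide   = b'中';  // byte literal: labeled as not fitting into a single byte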
 
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 40e2e34aa05..3eebc088f3f 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -1,5 +1,5 @@
 // Characters and their corresponding confusables were collected from
-// http://www.unicode.org/Public/security/10.0.0/confusables.txt
+// https://www.unicode.org/Public/security/10.0.0/confusables.txt
 
 use super::StringReader;
 use crate::token;
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 077b19fa959..07f972c2fa8 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -2,10 +2,7 @@
 
 #![feature(array_windows)]
 #![feature(crate_visibility_modifier)]
-#![feature(bindings_after_at)]
-#![feature(iter_order_by)]
-#![cfg_attr(bootstrap, feature(or_patterns))]
-#![feature(box_syntax)]
+#![cfg_attr(bootstrap, feature(bindings_after_at))]
 #![feature(box_patterns)]
 #![recursion_limit = "256"]
 
@@ -16,9 +13,10 @@ use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
 use rustc_ast::tokenstream::{Spacing, TokenStream};
 use rustc_ast::AstLike;
 use rustc_ast::Attribute;
+use rustc_ast::{AttrItem, MetaItem};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
-use rustc_errors::{Diagnostic, FatalError, Level, PResult};
+use rustc_errors::{Applicability, Diagnostic, FatalError, Level, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::{FileName, SourceFile, Span};
 
@@ -327,3 +325,44 @@ pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream {
     let filename = FileName::macro_expansion_source_code(&source);
     parse_stream_from_source_str(filename, source, sess, Some(nt.span()))
 }
+
+pub fn parse_cfg_attr(
+    attr: &Attribute,
+    parse_sess: &ParseSess,
+) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> {
+    match attr.get_normal_item().args {
+        ast::MacArgs::Delimited(dspan, delim, ref tts) if !tts.is_empty() => {
+            let msg = "wrong `cfg_attr` delimiters";
+            crate::validate_attr::check_meta_bad_delim(parse_sess, dspan, delim, msg);
+            match parse_in(parse_sess, tts.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
+                Ok(r) => return Some(r),
+                Err(mut e) => {
+                    e.help(&format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP))
+                        .note(CFG_ATTR_NOTE_REF)
+                        .emit();
+                }
+            }
+        }
+        _ => error_malformed_cfg_attr_missing(attr.span, parse_sess),
+    }
+    None
+}
+
+const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]";
+const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
+    <https://doc.rust-lang.org/reference/conditional-compilation.html\
+    #the-cfg_attr-attribute>";
+
+fn error_malformed_cfg_attr_missing(span: Span, parse_sess: &ParseSess) {
+    parse_sess
+        .span_diagnostic
+        .struct_span_err(span, "malformed `cfg_attr` attribute input")
+        .span_suggestion(
+            span,
+            "missing condition and attribute",
+            CFG_ATTR_GRAMMAR_HELP.to_string(),
+            Applicability::HasPlaceholders,
+        )
+        .note(CFG_ATTR_NOTE_REF)
+        .emit();
+}
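As a quick illustration of the grammar the new parse_cfg_attr entry point accepts (the attribute shown is made up, not from the PR):

    // A condition followed by one or more attributes, as in CFG_ATTR_GRAMMAR_HELP:
    #[cfg_attr(test, derive(Debug), allow(dead_code))]
    struct Config;

    // A bare `#[cfg_attr]` instead reaches error_malformed_cfg_attr_missing and is
    // reported as "malformed `cfg_attr` attribute input".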
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index ee6ff4dba39..e9f0038b2d6 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -32,7 +32,6 @@ impl<'a> Parser<'a> {
         let mut just_parsed_doc_comment = false;
         let start_pos = self.token_cursor.num_next_calls;
         loop {
-            debug!("parse_outer_attributes: self.token={:?}", self.token);
             let attr = if self.check(&token::Pound) {
                 let inner_error_reason = if just_parsed_doc_comment {
                     "an inner attribute is not permitted following an outer doc comment"
@@ -65,7 +64,14 @@ impl<'a> Parser<'a> {
                 }
                 self.bump();
                 just_parsed_doc_comment = true;
-                Some(attr::mk_doc_comment(comment_kind, attr_style, data, self.prev_token.span))
+                // Always make an outer attribute - this allows us to recover from a misplaced
+                // inner attribute.
+                Some(attr::mk_doc_comment(
+                    comment_kind,
+                    ast::AttrStyle::Outer,
+                    data,
+                    self.prev_token.span,
+                ))
             } else {
                 None
             };
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 35759a396e8..9f06bdcc135 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -301,7 +301,7 @@ impl<'a> Parser<'a> {
         // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
         // then extend the range of captured tokens to include it, since the parser
         // was not actually bumped past it. When the `LazyTokenStream` gets converted
-        // into a `AttrAnnotatedTokenStream`, we will create the proper token.
+        // into an `AttrAnnotatedTokenStream`, we will create the proper token.
         if self.token_cursor.break_last_token {
             assert_eq!(
                 trailing,
@@ -320,7 +320,7 @@ impl<'a> Parser<'a> {
         } else {
             // Grab any replace ranges that occur *inside* the current AST node.
             // We will perform the actual replacement when we convert the `LazyTokenStream`
-            // to a `AttrAnnotatedTokenStream`
+            // to an `AttrAnnotatedTokenStream`
             let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
@@ -342,16 +342,10 @@ impl<'a> Parser<'a> {
 
         // If we support tokens at all
         if let Some(target_tokens) = ret.tokens_mut() {
-            if let Some(target_tokens) = target_tokens {
-                assert!(
-                    !self.capture_cfg,
-                    "Encountered existing tokens with capture_cfg set: {:?}",
-                    target_tokens
-                );
-            } else {
+            if target_tokens.is_none() {
                 // Store our newly captured tokens into the AST node
                 *target_tokens = Some(tokens.clone());
-            };
+            }
         }
 
         let final_attrs = ret.attrs();
@@ -492,7 +486,7 @@ fn make_token_stream(
         if let AttrAnnotatedTokenTree::Token(last_token) = last_token {
             let unglued_first = last_token.kind.break_two_token_op().unwrap().0;
 
-            // A 'unglued' token is always two ASCII characters
+            // An 'unglued' token is always two ASCII characters
             let mut first_span = last_token.span.shrink_to_lo();
             first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1));
 
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 72fdc78c30c..273fbea3580 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -242,6 +242,63 @@ impl<'a> Parser<'a> {
         expected.sort_by_cached_key(|x| x.to_string());
         expected.dedup();
 
+        let sm = self.sess.source_map();
+        let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
+        let appl = Applicability::MachineApplicable;
+        if expected.contains(&TokenType::Token(token::Semi)) {
+            if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
+                // Likely inside a macro, can't provide meaningful suggestions.
+            } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
+                // The current token is in the same line as the prior token, not recoverable.
+            } else if [token::Comma, token::Colon].contains(&self.token.kind)
+                && self.prev_token.kind == token::CloseDelim(token::Paren)
+            {
+                // Likely typo: The current token is on a new line and is expected to be
+                // `.`, `;`, `?`, or an operator after a close delimiter token.
+                //
+                // let a = std::process::Command::new("echo")
+                //         .arg("1")
+                //         ,arg("2")
+                //         ^
+                // https://github.com/rust-lang/rust/issues/72253
+            } else if self.look_ahead(1, |t| {
+                t == &token::CloseDelim(token::Brace)
+                    || t.can_begin_expr() && t.kind != token::Colon
+            }) && [token::Comma, token::Colon].contains(&self.token.kind)
+            {
+                // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
+                // either `,` or `:`, and the next token could either start a new statement or is a
+                // block close. For example:
+                //
+                //   let x = 32:
+                //   let y = 42;
+                self.bump();
+                let sp = self.prev_token.span;
+                self.struct_span_err(sp, &msg)
+                    .span_suggestion_short(sp, "change this to `;`", ";".to_string(), appl)
+                    .emit();
+                return Ok(false);
+            } else if self.look_ahead(0, |t| {
+                t == &token::CloseDelim(token::Brace)
+                    || (
+                        t.can_begin_expr() && t != &token::Semi && t != &token::Pound
+                        // Avoid triggering with too many trailing `#` in raw string.
+                    )
+            }) {
+                // Missing semicolon typo. This is triggered if the next token could either start a
+                // new statement or is a block close. For example:
+                //
+                //   let x = 32
+                //   let y = 42;
+                let sp = self.prev_token.span.shrink_to_hi();
+                self.struct_span_err(sp, &msg)
+                    .span_label(self.token.span, "unexpected token")
+                    .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl)
+                    .emit();
+                return Ok(false);
+            }
+        }
+
         let expect = tokens_to_string(&expected[..]);
         let actual = super::token_descr(&self.token);
         let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
@@ -303,7 +360,6 @@ impl<'a> Parser<'a> {
             return Err(err);
         }
 
-        let sm = self.sess.source_map();
         if self.prev_token.span == DUMMY_SP {
             // Account for macro context where the previous span might not be
             // available to avoid incorrect output (#54841).
@@ -366,7 +422,7 @@ impl<'a> Parser<'a> {
             let mut snapshot = self.clone();
             let path =
                 Path { segments: vec![], span: self.prev_token.span.shrink_to_lo(), tokens: None };
-            let struct_expr = snapshot.parse_struct_expr(path, AttrVec::new(), false);
+            let struct_expr = snapshot.parse_struct_expr(None, path, AttrVec::new(), false);
             let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
             return Some(match (struct_expr, block_tail) {
                 (Ok(expr), Err(mut err)) => {
@@ -1144,62 +1200,6 @@ impl<'a> Parser<'a> {
         if self.eat(&token::Semi) {
             return Ok(());
         }
-        let sm = self.sess.source_map();
-        let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
-        let appl = Applicability::MachineApplicable;
-        if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
-            // Likely inside a macro, can't provide meaningful suggestions.
-            return self.expect(&token::Semi).map(drop);
-        } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
-            // The current token is in the same line as the prior token, not recoverable.
-        } else if [token::Comma, token::Colon].contains(&self.token.kind)
-            && self.prev_token.kind == token::CloseDelim(token::Paren)
-        {
-            // Likely typo: The current token is on a new line and is expected to be
-            // `.`, `;`, `?`, or an operator after a close delimiter token.
-            //
-            // let a = std::process::Command::new("echo")
-            //         .arg("1")
-            //         ,arg("2")
-            //         ^
-            // https://github.com/rust-lang/rust/issues/72253
-            self.expect(&token::Semi)?;
-            return Ok(());
-        } else if self.look_ahead(1, |t| {
-            t == &token::CloseDelim(token::Brace) || t.can_begin_expr() && t.kind != token::Colon
-        }) && [token::Comma, token::Colon].contains(&self.token.kind)
-        {
-            // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
-            // either `,` or `:`, and the next token could either start a new statement or is a
-            // block close. For example:
-            //
-            //   let x = 32:
-            //   let y = 42;
-            self.bump();
-            let sp = self.prev_token.span;
-            self.struct_span_err(sp, &msg)
-                .span_suggestion_short(sp, "change this to `;`", ";".to_string(), appl)
-                .emit();
-            return Ok(());
-        } else if self.look_ahead(0, |t| {
-            t == &token::CloseDelim(token::Brace)
-                || (
-                    t.can_begin_expr() && t != &token::Semi && t != &token::Pound
-                    // Avoid triggering with too many trailing `#` in raw string.
-                )
-        }) {
-            // Missing semicolon typo. This is triggered if the next token could either start a
-            // new statement or is a block close. For example:
-            //
-            //   let x = 32
-            //   let y = 42;
-            let sp = self.prev_token.span.shrink_to_hi();
-            self.struct_span_err(sp, &msg)
-                .span_label(self.token.span, "unexpected token")
-                .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl)
-                .emit();
-            return Ok(());
-        }
         self.expect(&token::Semi).map(drop) // Error unconditionally
     }
 
@@ -1770,7 +1770,7 @@ impl<'a> Parser<'a> {
         let mut err = self.struct_span_err(span, &msg);
         let sp = self.sess.source_map().start_point(self.token.span);
         if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            self.sess.expr_parentheses_needed(&mut err, *sp, None);
+            self.sess.expr_parentheses_needed(&mut err, *sp);
         }
         err.span_label(span, "expected expression");
         err
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 56c97b59476..326c8f81ffb 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,4 +1,4 @@
-use super::pat::{RecoverComma, PARAM_EXPECTED};
+use super::pat::{RecoverColon, RecoverComma, PARAM_EXPECTED};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{AttrWrapper, BlockMode, ForceCollect, Parser, PathStyle, Restrictions, TokenType};
 use super::{SemiColonMode, SeqSep, TokenExpectType, TrailingToken};
@@ -15,6 +15,8 @@ use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty
 use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, DiagnosticBuilder, PResult};
+use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
+use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_span::edition::LATEST_STABLE_EDITION;
 use rustc_span::source_map::{self, Span, Spanned};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -94,17 +96,7 @@ impl<'a> Parser<'a> {
 
     /// Parses an expression, forcing tokens to be collected
     pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
-        // If we have outer attributes, then the call to `collect_tokens_trailing_token`
-        // will be made for us.
-        if matches!(self.token.kind, TokenKind::Pound | TokenKind::DocComment(..)) {
-            self.parse_expr()
-        } else {
-            // If we don't have outer attributes, then we need to ensure
-            // that collection happens by using `collect_tokens_no_attrs`.
-            // Expression don't support custom inner attributes, so `parse_expr`
-            // will never try to collect tokens if we don't have outer attributes.
-            self.collect_tokens_no_attrs(|this| this.parse_expr())
-        }
+        self.collect_tokens_no_attrs(|this| this.parse_expr())
     }
 
     pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
@@ -368,7 +360,7 @@ impl<'a> Parser<'a> {
             &format!("expected expression, found `{}`", pprust::token_to_string(&self.token),),
         );
         err.span_label(self.token.span, "expected expression");
-        self.sess.expr_parentheses_needed(&mut err, lhs.span, Some(pprust::expr_to_string(&lhs)));
+        self.sess.expr_parentheses_needed(&mut err, lhs.span);
         err.emit();
     }
 
@@ -441,7 +433,8 @@ impl<'a> Parser<'a> {
         let span = self.mk_expr_sp(&lhs, lhs.span, rhs_span);
         let limits =
             if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed };
-        Ok(self.mk_expr(span, self.mk_range(Some(lhs), rhs, limits), AttrVec::new()))
+        let range = self.mk_range(Some(lhs), rhs, limits);
+        Ok(self.mk_expr(span, range, AttrVec::new()))
     }
 
     fn is_at_start_of_range_notation_rhs(&self) -> bool {
@@ -489,7 +482,8 @@ impl<'a> Parser<'a> {
             } else {
                 (lo, None)
             };
-            Ok(this.mk_expr(span, this.mk_range(None, opt_end, limits), attrs.into()))
+            let range = this.mk_range(None, opt_end, limits);
+            Ok(this.mk_expr(span, range, attrs.into()))
         })
     }
 
@@ -704,20 +698,18 @@ impl<'a> Parser<'a> {
                         let expr =
                             mk_expr(self, lhs, self.mk_ty(path.span, TyKind::Path(None, path)));
 
-                        let expr_str = self
-                            .span_to_snippet(expr.span)
-                            .unwrap_or_else(|_| pprust::expr_to_string(&expr));
-
                         self.struct_span_err(self.token.span, &msg)
                             .span_label(
                                 self.look_ahead(1, |t| t.span).to(span_after_type),
                                 "interpreted as generic arguments",
                             )
                             .span_label(self.token.span, format!("not interpreted as {}", op_noun))
-                            .span_suggestion(
-                                expr.span,
+                            .multipart_suggestion(
                                 &format!("try {} the cast value", op_verb),
-                                format!("({})", expr_str),
+                                vec![
+                                    (expr.span.shrink_to_lo(), "(".to_string()),
+                                    (expr.span.shrink_to_hi(), ")".to_string()),
+                                ],
                                 Applicability::MachineApplicable,
                             )
                             .emit();
@@ -1100,7 +1092,7 @@ impl<'a> Parser<'a> {
         // added to the return value after the fact.
         //
         // Therefore, prevent sub-parser from parsing
-        // attributes by giving them a empty "already-parsed" list.
+        // attributes by giving them an empty "already-parsed" list.
         let attrs = AttrVec::new();
 
         // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
@@ -1118,9 +1110,6 @@ impl<'a> Parser<'a> {
             self.parse_closure_expr(attrs)
         } else if self.check(&token::OpenDelim(token::Bracket)) {
             self.parse_array_or_repeat_expr(attrs)
-        } else if self.eat_lt() {
-            let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
-            Ok(self.mk_expr(lo.to(path.span), ExprKind::Path(Some(qself), path), attrs))
         } else if self.check_path() {
             self.parse_path_start_expr(attrs)
         } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
@@ -1272,12 +1261,20 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_path_start_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
-        let path = self.parse_path(PathStyle::Expr)?;
+        let (qself, path) = if self.eat_lt() {
+            let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+            (Some(qself), path)
+        } else {
+            (None, self.parse_path(PathStyle::Expr)?)
+        };
         let lo = path.span;
 
         // `!`, as an operator, is prefix, so we know this isn't that.
         let (hi, kind) = if self.eat(&token::Not) {
             // MACRO INVOCATION expression
+            if qself.is_some() {
+                self.struct_span_err(path.span, "macros cannot use qualified paths").emit();
+            }
             let mac = MacCall {
                 path,
                 args: self.parse_mac_args()?,
@@ -1285,13 +1282,16 @@ impl<'a> Parser<'a> {
             };
             (self.prev_token.span, ExprKind::MacCall(mac))
         } else if self.check(&token::OpenDelim(token::Brace)) {
-            if let Some(expr) = self.maybe_parse_struct_expr(&path, &attrs) {
+            if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) {
+                if qself.is_some() {
+                    self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
+                }
                 return expr;
             } else {
-                (path.span, ExprKind::Path(None, path))
+                (path.span, ExprKind::Path(qself, path))
             }
         } else {
-            (path.span, ExprKind::Path(None, path))
+            (path.span, ExprKind::Path(qself, path))
         };
 
         let expr = self.mk_expr(lo.to(hi), kind, attrs);
@@ -1377,14 +1377,59 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr, true)
     }
 
-    /// Parse `"('label ":")? break expr?`.
+    /// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten.
+    /// If the label is followed immediately by a `:` token, the label and `:` are
+    /// parsed as part of the expression (i.e. a labeled loop). The language team has
+    /// decided in #87026 to require parentheses as a visual aid to avoid confusion if
+    /// the break expression of an unlabeled break is a labeled loop (as in
+    /// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value
+    /// expression only gets a warning for compatibility reasons; and a labeled break
+    /// with a labeled loop does not even get a warning because there is no ambiguity.
     fn parse_break_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let label = self.eat_label();
-        let kind = if self.token != token::OpenDelim(token::Brace)
+        let mut label = self.eat_label();
+        let kind = if label.is_some() && self.token == token::Colon {
+            // The value expression can be a labeled loop, see issue #86948, e.g.:
+            // `loop { break 'label: loop { break 'label 42; }; }`
+            let lexpr = self.parse_labeled_expr(label.take().unwrap(), AttrVec::new(), true)?;
+            self.struct_span_err(
+                lexpr.span,
+                "parentheses are required around this expression to avoid confusion with a labeled break expression",
+            )
+            .multipart_suggestion(
+                "wrap the expression in parentheses",
+                vec![
+                    (lexpr.span.shrink_to_lo(), "(".to_string()),
+                    (lexpr.span.shrink_to_hi(), ")".to_string()),
+                ],
+                Applicability::MachineApplicable,
+            )
+            .emit();
+            Some(lexpr)
+        } else if self.token != token::OpenDelim(token::Brace)
             || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
         {
-            self.parse_expr_opt()?
+            let expr = self.parse_expr_opt()?;
+            if let Some(ref expr) = expr {
+                if label.is_some()
+                    && matches!(
+                        expr.kind,
+                        ExprKind::While(_, _, None)
+                            | ExprKind::ForLoop(_, _, _, None)
+                            | ExprKind::Loop(_, None)
+                            | ExprKind::Block(_, None)
+                    )
+                {
+                    self.sess.buffer_lint_with_diagnostic(
+                        BREAK_WITH_LABEL_AND_LOOP,
+                        lo.to(expr.span),
+                        ast::CRATE_NODE_ID,
+                        "this labeled break expression is easy to confuse with an unlabeled break with a labeled value expression",
+                        BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span),
+                    );
+                }
+            }
+            expr
         } else {
             None
         };
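A minimal sketch (assumed, not from the PR's tests) of the two interesting cases the new doc comment describes:

    fn unlabeled_break_with_labeled_loop() -> i32 {
        loop {
            // Hard error without the parentheses, since the label would read as
            // if it belonged to the `break` itself.
            break ('inner: loop { break 'inner 1; });
        }
    }

    fn labeled_break_with_unlabeled_loop() -> i32 {
        'outer: loop {
            // Compiles, but buffers the break_with_label_and_loop lint, because it
            // is easy to misread as `break ('outer: loop { ... })`.
            break 'outer loop { break 2; };
        }
    }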
@@ -1815,14 +1860,14 @@ impl<'a> Parser<'a> {
     /// The `let` token has already been eaten.
     fn parse_let_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes)?;
+        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
         self.expect(&token::Eq)?;
         let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
             this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
         })?;
         let span = lo.to(expr.span);
         self.sess.gated_spans.gate(sym::let_chains, span);
-        Ok(self.mk_expr(span, ExprKind::Let(pat, expr), attrs))
+        Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span), attrs))
     }
 
     /// Parses an `else { ... }` expression (`else` token already eaten).
@@ -1878,7 +1923,7 @@ impl<'a> Parser<'a> {
             _ => None,
         };
 
-        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes)?;
+        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
         if !self.eat_keyword(kw::In) {
             self.error_missing_in_for_loop();
         }
@@ -1947,7 +1992,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a `match ... { ... }` expression (`match` token already eaten).
-    fn parse_match_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_match_expr(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
         let match_span = self.prev_token.span;
         let lo = self.prev_token.span;
         let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
@@ -1962,6 +2007,7 @@ impl<'a> Parser<'a> {
             }
             return Err(e);
         }
+        attrs.extend(self.parse_inner_attributes()?);
 
         let mut arms: Vec<Arm> = Vec::new();
         while self.token != token::CloseDelim(token::Brace) {
@@ -2084,7 +2130,7 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_outer_attributes()?;
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let lo = this.token.span;
-            let pat = this.parse_pat_allow_top_alt(None, RecoverComma::Yes)?;
+            let pat = this.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
             let guard = if this.eat_keyword(kw::If) {
                 let if_span = this.prev_token.span;
                 let cond = this.parse_expr()?;
@@ -2118,7 +2164,7 @@ impl<'a> Parser<'a> {
                     let span = body.span;
                     return Ok((
                         ast::Arm {
-                            attrs,
+                            attrs: attrs.into(),
                             pat,
                             guard,
                             body,
@@ -2172,7 +2218,7 @@ impl<'a> Parser<'a> {
 
             Ok((
                 ast::Arm {
-                    attrs,
+                    attrs: attrs.into(),
                     pat,
                     guard,
                     body: expr,
@@ -2257,6 +2303,7 @@ impl<'a> Parser<'a> {
 
     fn maybe_parse_struct_expr(
         &mut self,
+        qself: Option<&ast::QSelf>,
         path: &ast::Path,
         attrs: &AttrVec,
     ) -> Option<PResult<'a, P<Expr>>> {
@@ -2265,7 +2312,7 @@ impl<'a> Parser<'a> {
             if let Err(err) = self.expect(&token::OpenDelim(token::Brace)) {
                 return Some(Err(err));
             }
-            let expr = self.parse_struct_expr(path.clone(), attrs.clone(), true);
+            let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true);
             if let (Ok(expr), false) = (&expr, struct_allowed) {
                 // This is a struct literal, but we can't accept them here.
                 self.error_struct_lit_not_allowed_here(path.span, expr.span);
@@ -2288,6 +2335,7 @@ impl<'a> Parser<'a> {
     /// Precondition: already parsed the '{'.
     pub(super) fn parse_struct_expr(
         &mut self,
+        qself: Option<ast::QSelf>,
         pth: ast::Path,
         attrs: AttrVec,
         recover: bool,
@@ -2385,7 +2433,7 @@ impl<'a> Parser<'a> {
         let expr = if recover_async {
             ExprKind::Err
         } else {
-            ExprKind::Struct(P(ast::StructExpr { path: pth, fields, rest: base }))
+            ExprKind::Struct(P(ast::StructExpr { qself, path: pth, fields, rest: base }))
         };
         Ok(self.mk_expr(span, expr, attrs))
     }
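A sketch of what threading qself through to ExprKind::Struct allows, behind the more_qualified_paths gate used above; the types are made up:

    #![feature(more_qualified_paths)]

    struct Point { x: i32, y: i32 }

    trait Output { type T; }
    impl Output for () { type T = Point; }

    fn origin() -> Point {
        // A qualified path in struct-literal position, previously a parse error:
        <() as Output>::T { x: 0, y: 0 }
    }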
@@ -2516,13 +2564,13 @@ impl<'a> Parser<'a> {
     }
 
     fn mk_range(
-        &self,
+        &mut self,
         start: Option<P<Expr>>,
         end: Option<P<Expr>>,
         limits: RangeLimits,
     ) -> ExprKind {
         if end.is_none() && limits == RangeLimits::Closed {
-            self.error_inclusive_range_with_no_end(self.prev_token.span);
+            self.inclusive_range_with_incorrect_end(self.prev_token.span);
             ExprKind::Err
         } else {
             ExprKind::Range(start, end, limits)
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index b2b578f1ed4..e5537d43eba 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -221,7 +221,7 @@ impl<'a> Parser<'a> {
         } else if self.check_fn_front_matter(def_final) {
             // FUNCTION ITEM
             let (ident, sig, generics, body) = self.parse_fn(attrs, req_name, lo)?;
-            (ident, ItemKind::Fn(box FnKind(def(), sig, generics, body)))
+            (ident, ItemKind::Fn(Box::new(FnKind(def(), sig, generics, body))))
         } else if self.eat_keyword(kw::Extern) {
             if self.eat_keyword(kw::Crate) {
                 // EXTERN CRATE
@@ -548,7 +548,7 @@ impl<'a> Parser<'a> {
                 };
                 let trait_ref = TraitRef { path, ref_id: ty_first.id };
 
-                ItemKind::Impl(box ImplKind {
+                ItemKind::Impl(Box::new(ImplKind {
                     unsafety,
                     polarity,
                     defaultness,
@@ -557,11 +557,11 @@ impl<'a> Parser<'a> {
                     of_trait: Some(trait_ref),
                     self_ty: ty_second,
                     items: impl_items,
-                })
+                }))
             }
             None => {
                 // impl Type
-                ItemKind::Impl(box ImplKind {
+                ItemKind::Impl(Box::new(ImplKind {
                     unsafety,
                     polarity,
                     defaultness,
@@ -570,7 +570,7 @@ impl<'a> Parser<'a> {
                     of_trait: None,
                     self_ty: ty_first,
                     items: impl_items,
-                })
+                }))
             }
         };
 
@@ -710,7 +710,7 @@ impl<'a> Parser<'a> {
             // It's a normal trait.
             tps.where_clause = self.parse_where_clause()?;
             let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?;
-            Ok((ident, ItemKind::Trait(box TraitKind(is_auto, unsafety, tps, bounds, items))))
+            Ok((ident, ItemKind::Trait(Box::new(TraitKind(is_auto, unsafety, tps, bounds, items)))))
         }
     }
 
@@ -769,7 +769,7 @@ impl<'a> Parser<'a> {
         let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
         self.expect_semi()?;
 
-        Ok((ident, ItemKind::TyAlias(box TyAliasKind(def, generics, bounds, default))))
+        Ok((ident, ItemKind::TyAlias(Box::new(TyAliasKind(def, generics, bounds, default)))))
     }
 
     /// Parses a `UseTree`.
@@ -1107,8 +1107,7 @@ impl<'a> Parser<'a> {
                 e
             })?;
 
-        let enum_definition =
-            EnumDef { variants: variants.into_iter().filter_map(|v| v).collect() };
+        let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
         Ok((id, ItemKind::Enum(enum_definition, generics)))
     }
 
@@ -1143,7 +1142,7 @@ impl<'a> Parser<'a> {
                     ident,
                     vis,
                     id: DUMMY_NODE_ID,
-                    attrs: variant_attrs,
+                    attrs: variant_attrs.into(),
                     data: struct_def,
                     disr_expr,
                     span: vlo.to(this.prev_token.span),
@@ -1236,7 +1235,7 @@ impl<'a> Parser<'a> {
         Ok((class_name, ItemKind::Union(vdata, generics)))
     }
 
-    fn parse_record_struct_body(
+    pub(super) fn parse_record_struct_body(
         &mut self,
         adt_ty: &str,
     ) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
@@ -1286,7 +1285,7 @@ impl<'a> Parser<'a> {
                         ident: None,
                         id: DUMMY_NODE_ID,
                         ty,
-                        attrs,
+                        attrs: attrs.into(),
                         is_placeholder: false,
                     },
                     TrailingToken::MaybeComma,
@@ -1460,7 +1459,7 @@ impl<'a> Parser<'a> {
             vis,
             id: DUMMY_NODE_ID,
             ty,
-            attrs,
+            attrs: attrs.into(),
             is_placeholder: false,
         })
     }
@@ -1470,19 +1469,28 @@ impl<'a> Parser<'a> {
     fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
         let (ident, is_raw) = self.ident_or_err()?;
         if !is_raw && ident.is_reserved() {
-            let err = if self.check_fn_front_matter(false) {
-                let _ = self.parse_fn(&mut Vec::new(), |_| true, lo);
-                let mut err = self.struct_span_err(
-                    lo.to(self.prev_token.span),
-                    &format!("functions are not allowed in {} definitions", adt_ty),
-                );
-                err.help("unlike in C++, Java, and C#, functions are declared in `impl` blocks");
-                err.help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information");
-                err
+            if ident.name == kw::Underscore {
+                self.sess.gated_spans.gate(sym::unnamed_fields, lo);
             } else {
-                self.expected_ident_found()
-            };
-            return Err(err);
+                let err = if self.check_fn_front_matter(false) {
+                    // We use `parse_fn` to get a span for the function
+                    if let Err(mut db) = self.parse_fn(&mut Vec::new(), |_| true, lo) {
+                        db.delay_as_bug();
+                    }
+                    let mut err = self.struct_span_err(
+                        lo.to(self.prev_token.span),
+                        &format!("functions are not allowed in {} definitions", adt_ty),
+                    );
+                    err.help(
+                        "unlike in C++, Java, and C#, functions are declared in `impl` blocks",
+                    );
+                    err.help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information");
+                    err
+                } else {
+                    self.expected_ident_found()
+                };
+                return Err(err);
+            }
         }
         self.bump();
         Ok(ident)
@@ -1706,13 +1714,11 @@ impl<'a> Parser<'a> {
                     // the AST for typechecking.
                     err.span_label(ident.span, "while parsing this `fn`");
                     err.emit();
-                    (Vec::new(), None)
                 } else {
                     return Err(err);
                 }
-            } else {
-                unreachable!()
             }
+            (Vec::new(), None)
         };
         attrs.extend(inner_attrs);
         Ok(body)
@@ -1762,8 +1768,14 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> {
         let sp_start = self.token.span;
         let constness = self.parse_constness();
+
+        let async_start_sp = self.token.span;
         let asyncness = self.parse_asyncness();
+
+        let unsafe_start_sp = self.token.span;
         let unsafety = self.parse_unsafety();
+
+        let ext_start_sp = self.token.span;
         let ext = self.parse_extern();
 
         if let Async::Yes { span, .. } = asyncness {
@@ -1778,11 +1790,44 @@ impl<'a> Parser<'a> {
                 Ok(true) => {}
                 Ok(false) => unreachable!(),
                 Err(mut err) => {
+                    // Qualifier keywords ordering check
+
+                    // This will allow the machine fix to directly place the keyword in the correct place
+                    let current_qual_sp = if self.check_keyword(kw::Const) {
+                        Some(async_start_sp)
+                    } else if self.check_keyword(kw::Async) {
+                        Some(unsafe_start_sp)
+                    } else if self.check_keyword(kw::Unsafe) {
+                        Some(ext_start_sp)
+                    } else {
+                        None
+                    };
+
+                    if let Some(current_qual_sp) = current_qual_sp {
+                        let current_qual_sp = current_qual_sp.to(self.prev_token.span);
+                        if let Ok(current_qual) = self.span_to_snippet(current_qual_sp) {
+                            let invalid_qual_sp = self.token.uninterpolated_span();
+                            let invalid_qual = self.span_to_snippet(invalid_qual_sp).unwrap();
+
+                            err.span_suggestion(
+                                current_qual_sp.to(invalid_qual_sp),
+                                &format!("`{}` must come before `{}`", invalid_qual, current_qual),
+                                format!("{} {}", invalid_qual, current_qual),
+                                Applicability::MachineApplicable,
+                            ).note("keyword order for functions declaration is `default`, `pub`, `const`, `async`, `unsafe`, `extern`");
+                        }
+                    }
                     // Recover incorrect visibility order such as `async pub`.
-                    if self.check_keyword(kw::Pub) {
+                    else if self.check_keyword(kw::Pub) {
                         let sp = sp_start.to(self.prev_token.span);
                         if let Ok(snippet) = self.span_to_snippet(sp) {
-                            let vis = self.parse_visibility(FollowedByType::No)?;
+                            let vis = match self.parse_visibility(FollowedByType::No) {
+                                Ok(v) => v,
+                                Err(mut d) => {
+                                    d.cancel();
+                                    return Err(err);
+                                }
+                            };
                             let vs = pprust::vis_to_string(&vis);
                             let vs = vs.trim_end();
                             err.span_suggestion(
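An illustrative input for the new qualifier-ordering recovery (assumed, not taken from the PR's tests):

    // The parser now points at the misplaced keyword and offers a machine-applicable
    // swap, "`async` must come before `unsafe`", plus a note spelling out the full
    // qualifier order.
    unsafe async fn misordered() {}

    // the suggested rewrite, which is accepted:
    async unsafe fn reordered() {}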
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 35cfaae13a4..4c3c140d171 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -14,7 +14,7 @@ use crate::lexer::UnmatchedBrace;
 pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
 use diagnostics::Error;
-pub use pat::RecoverComma;
+pub use pat::{RecoverColon, RecoverComma};
 pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
@@ -63,6 +63,7 @@ enum BlockMode {
 
 /// Whether or not we should force collection of tokens for an AST node,
 /// regardless of whether or not it has attributes
+#[derive(Clone, Copy, PartialEq)]
 pub enum ForceCollect {
     Yes,
     No,
@@ -151,7 +152,7 @@ pub struct Parser<'a> {
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
 /// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
-/// on a `AttrAnnotatedTokenStream`
+/// on an `AttrAnnotatedTokenStream`
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
 /// remove inner attributes from the token stream entirely, and
@@ -164,7 +165,7 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
 
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
-/// we will never need a `AttrAnnotatedTokenStream`
+/// we will never need an `AttrAnnotatedTokenStream`
 #[derive(Copy, Clone)]
 pub enum Capturing {
     /// We aren't performing any capturing - this is the default mode.
@@ -1065,24 +1066,11 @@ impl<'a> Parser<'a> {
             } else if !delimited_only {
                 if self.eat(&token::Eq) {
                     let eq_span = self.prev_token.span;
-                    let mut is_interpolated_expr = false;
-                    if let token::Interpolated(nt) = &self.token.kind {
-                        if let token::NtExpr(..) = **nt {
-                            is_interpolated_expr = true;
-                        }
-                    }
 
                     // Collect tokens because they are used during lowering to HIR.
                     let expr = self.parse_expr_force_collect()?;
                     let span = expr.span;
 
-                    match &expr.kind {
-                        // Not gated to support things like `doc = $expr` that work on stable.
-                        _ if is_interpolated_expr => {}
-                        ExprKind::Lit(lit) if lit.kind.is_unsuffixed() => {}
-                        _ => self.sess.gated_spans.gate(sym::extended_key_value_attributes, span),
-                    }
-
                     let token_kind = token::Interpolated(Lrc::new(token::NtExpr(expr)));
                     MacArgs::Eq(eq_span, Token::new(token_kind, span))
                 } else {
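For example, a non-literal key-value attribute value such as a macro call is now parsed here unconditionally, matching the stabilization that removed the extended_key_value_attributes gate (attribute chosen here for illustration, not from the PR):

    #[doc = concat!("See the ", "crate-level docs.")]
    pub struct Documented;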
@@ -1374,10 +1362,10 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
     }
 }
 
-/// A helper struct used when building a `AttrAnnotatedTokenStream` from
+/// A helper struct used when building an `AttrAnnotatedTokenStream` from
 /// a `LazyTokenStream`. Both delimiter and non-delimited tokens
 /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
-/// is then 'parsed' to build up a `AttrAnnotatedTokenStream` with nested
+/// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested
 /// `AttrAnnotatedTokenTree::Delimited` tokens
 #[derive(Debug, Clone)]
 pub enum FlatToken {
@@ -1387,10 +1375,10 @@ pub enum FlatToken {
     /// Holds the `AttributesData` for an AST node. The
     /// `AttributesData` is inserted directly into the
     /// constructed `AttrAnnotatedTokenStream` as
-    /// a `AttrAnnotatedTokenTree::Attributes`
+    /// an `AttrAnnotatedTokenTree::Attributes`
     AttrTarget(AttributesData),
     /// A special 'empty' token that is ignored during the conversion
-    /// to a `AttrAnnotatedTokenStream`. This is used to simplify the
+    /// to an `AttrAnnotatedTokenStream`. This is used to simplify the
     /// handling of replace ranges.
     Empty,
 }
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 0c43e304f1e..313d9db58fc 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -1,10 +1,11 @@
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Nonterminal, NonterminalKind, Token};
+use rustc_ast::AstLike;
 use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
 use rustc_span::symbol::{kw, Ident};
 
-use crate::parser::pat::RecoverComma;
+use crate::parser::pat::{RecoverColon, RecoverComma};
 use crate::parser::{FollowedByType, ForceCollect, Parser, PathStyle};
 
 impl<'a> Parser<'a> {
@@ -102,7 +103,7 @@ impl<'a> Parser<'a> {
         // which requires having captured tokens available. Since we cannot determine
         // in advance whether or not a proc-macro will be (transitively) invoked,
         // we always capture tokens for any `Nonterminal` which needs them.
-        Ok(match kind {
+        let mut nt = match kind {
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
                 Some(item) => token::NtItem(item),
                 None => {
@@ -124,7 +125,7 @@ impl<'a> Parser<'a> {
                 token::NtPat(self.collect_tokens_no_attrs(|this| match kind {
                     NonterminalKind::PatParam { .. } => this.parse_pat_no_top_alt(None),
                     NonterminalKind::PatWithOr { .. } => {
-                        this.parse_pat_allow_top_alt(None, RecoverComma::No)
+                        this.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
                     }
                     _ => unreachable!(),
                 })?)
@@ -169,7 +170,19 @@ impl<'a> Parser<'a> {
                     return Err(self.struct_span_err(self.token.span, msg));
                 }
             }
-        })
+        };
+
+        // If tokens are supported at all, they should be collected.
+        if matches!(nt.tokens_mut(), Some(None)) {
+            panic!(
+                "Missing tokens for nt {:?} at {:?}: {:?}",
+                nt,
+                nt.span(),
+                pprust::nonterminal_to_string(&nt)
+            );
+        }
+
+        Ok(nt)
     }
 }
 
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 0abefbd6a12..b03b5459981 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -24,6 +24,13 @@ pub enum RecoverComma {
     No,
 }
 
+/// Whether or not to recover a `:` when parsing patterns that were meant to be paths.
+#[derive(PartialEq, Copy, Clone)]
+pub enum RecoverColon {
+    Yes,
+    No,
+}
+
 /// The result of `eat_or_separator`. We want to distinguish which case we are in to avoid
 /// emitting duplicate diagnostics.
 #[derive(Debug, Clone, Copy)]
@@ -58,8 +65,9 @@ impl<'a> Parser<'a> {
         &mut self,
         expected: Expected,
         rc: RecoverComma,
+        ra: RecoverColon,
     ) -> PResult<'a, P<Pat>> {
-        self.parse_pat_allow_top_alt_inner(expected, rc).map(|(pat, _)| pat)
+        self.parse_pat_allow_top_alt_inner(expected, rc, ra).map(|(pat, _)| pat)
     }
 
     /// Returns the pattern and a bool indicating whether we recovered from a trailing vert (true =
@@ -68,6 +76,7 @@ impl<'a> Parser<'a> {
         &mut self,
         expected: Expected,
         rc: RecoverComma,
+        ra: RecoverColon,
     ) -> PResult<'a, (P<Pat>, bool)> {
         // Keep track of whether we recovered from a trailing vert so that we can avoid duplicated
         // suggestions (which bothers rustfix).
@@ -89,6 +98,56 @@ impl<'a> Parser<'a> {
             // If we parsed a leading `|` which should be gated,
             // then we should really gate the leading `|`.
             // This complicated procedure is done purely for diagnostics UX.
+            let mut first_pat = first_pat;
+
+            if let (RecoverColon::Yes, token::Colon) = (ra, &self.token.kind) {
+                if matches!(
+                    first_pat.kind,
+                    PatKind::Ident(BindingMode::ByValue(Mutability::Not), _, None)
+                        | PatKind::Path(..)
+                ) && self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
+                {
+                    // The pattern looks like it might be a path with a `::` -> `:` typo:
+                    // `match foo { bar:baz => {} }`
+                    let span = self.token.span;
+                    // We only emit the "unexpected `:`" error here if we can successfully parse
+                    // the whole pattern in that case.
+                    let snapshot = self.clone();
+
+                    // Create error for "unexpected `:`".
+                    match self.expected_one_of_not_found(&[], &[]) {
+                        Err(mut err) => {
+                            self.bump(); // Skip the `:`.
+                            match self.parse_pat_no_top_alt(expected) {
+                                Err(mut inner_err) => {
+                                    // Carry on as if we had not done anything; callers will emit a
+                                    // reasonable error.
+                                    inner_err.cancel();
+                                    err.cancel();
+                                    *self = snapshot;
+                                }
+                                Ok(pat) => {
+                                    // We've parsed the rest of the pattern.
+                                    err.span_suggestion(
+                                        span,
+                                        "maybe write a path separator here",
+                                        "::".to_string(),
+                                        Applicability::MachineApplicable,
+                                    );
+                                    err.emit();
+                                    first_pat =
+                                        self.mk_pat(first_pat.span.to(pat.span), PatKind::Wild);
+                                }
+                            }
+                        }
+                        _ => {
+                            // Carry on as if we had not done anything. This should be unreachable.
+                            *self = snapshot;
+                        }
+                    };
+                }
+            }
+
             if let Some(leading_vert_span) = leading_vert_span {
                 // If there was a leading vert, treat this as an or-pattern. This improves
                 // diagnostics.
@@ -140,7 +199,8 @@ impl<'a> Parser<'a> {
         // We use `parse_pat_allow_top_alt` regardless of whether we actually want top-level
         // or-patterns so that we can detect when a user tries to use it. This allows us to print a
         // better error message.
-        let (pat, trailing_vert) = self.parse_pat_allow_top_alt_inner(expected, rc)?;
+        let (pat, trailing_vert) =
+            self.parse_pat_allow_top_alt_inner(expected, rc, RecoverColon::No)?;
         let colon = self.eat(&token::Colon);
 
         if let PatKind::Or(pats) = &pat.kind {
@@ -350,7 +410,7 @@ impl<'a> Parser<'a> {
         } else if self.check(&token::OpenDelim(token::Bracket)) {
             // Parse `[pat, pat,...]` as a slice pattern.
             let (pats, _) = self.parse_delim_comma_seq(token::Bracket, |p| {
-                p.parse_pat_allow_top_alt(None, RecoverComma::No)
+                p.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
             })?;
             PatKind::Slice(pats)
         } else if self.check(&token::DotDot) && !self.is_pat_range_end_start(1) {
@@ -563,8 +623,9 @@ impl<'a> Parser<'a> {
 
     /// Parse a tuple or parenthesis pattern.
     fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> {
-        let (fields, trailing_comma) =
-            self.parse_paren_comma_seq(|p| p.parse_pat_allow_top_alt(None, RecoverComma::No))?;
+        let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| {
+            p.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
+        })?;
 
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
@@ -702,7 +763,7 @@ impl<'a> Parser<'a> {
 
         let sp = self.sess.source_map().start_point(self.token.span);
         if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            self.sess.expr_parentheses_needed(&mut err, *sp, None);
+            self.sess.expr_parentheses_needed(&mut err, *sp);
         }
 
         Err(err)
@@ -715,7 +776,6 @@ impl<'a> Parser<'a> {
         } else if self.eat(&token::DotDotEq) {
             RangeEnd::Included(RangeSyntax::DotDotEq)
         } else if self.eat(&token::DotDot) {
-            self.sess.gated_spans.gate(sym::exclusive_range_pattern, self.prev_token.span);
             RangeEnd::Excluded
         } else {
             return None;
@@ -735,18 +795,50 @@ impl<'a> Parser<'a> {
             Some(self.parse_pat_range_end()?)
         } else {
             // Parsing e.g. `X..`.
-            self.sess.gated_spans.gate(sym::half_open_range_patterns, begin.span.to(re.span));
             if let RangeEnd::Included(_) = re.node {
                 // FIXME(Centril): Consider semantic errors instead in `ast_validation`.
-                // Possibly also do this for `X..=` in *expression* contexts.
-                self.error_inclusive_range_with_no_end(re.span);
+                self.inclusive_range_with_incorrect_end(re.span);
             }
             None
         };
         Ok(PatKind::Range(Some(begin), end, re))
     }
 
-    pub(super) fn error_inclusive_range_with_no_end(&self, span: Span) {
+    pub(super) fn inclusive_range_with_incorrect_end(&mut self, span: Span) {
+        let tok = &self.token;
+
+        // If the user typed "..==" instead of "..=", we want to give them
+        // a specific error message telling them to use "..=".
+        // Otherwise, we assume that they meant to type a half-open exclusive
+        // range and suggest using `..` instead.
+        if matches!(tok.kind, token::Eq) && tok.span.lo() == span.hi() {
+            let span_with_eq = span.to(tok.span);
+
+            // Ensure the user doesn't receive unhelpful unexpected token errors
+            self.bump();
+            if self.is_pat_range_end_start(0) {
+                let _ = self.parse_pat_range_end();
+            }
+
+            self.error_inclusive_range_with_extra_equals(span_with_eq);
+        } else {
+            self.error_inclusive_range_with_no_end(span);
+        }
+    }
+
+    fn error_inclusive_range_with_extra_equals(&self, span: Span) {
+        self.struct_span_err(span, "unexpected `=` after inclusive range")
+            .span_suggestion_short(
+                span,
+                "use `..=` instead",
+                "..=".to_string(),
+                Applicability::MaybeIncorrect,
+            )
+            .note("inclusive ranges end with a single equals sign (`..=`)")
+            .emit();
+    }
+
+    fn error_inclusive_range_with_no_end(&self, span: Span) {
         struct_span_err!(self.sess.span_diagnostic, span, E0586, "inclusive range with no end")
             .span_suggestion_short(
                 span,
@@ -859,7 +951,8 @@ impl<'a> Parser<'a> {
     /// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`).
     fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
         if qself.is_some() {
-            return self.error_qpath_before_pat(&path, "{");
+            // Feature gate the use of qualified paths in patterns
+            self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
         }
         self.bump();
         let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
@@ -869,27 +962,18 @@ impl<'a> Parser<'a> {
             (vec![], true)
         });
         self.bump();
-        Ok(PatKind::Struct(path, fields, etc))
+        Ok(PatKind::Struct(qself, path, fields, etc))
     }
 
     /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
     fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+        let (fields, _) = self.parse_paren_comma_seq(|p| {
+            p.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)
+        })?;
         if qself.is_some() {
-            return self.error_qpath_before_pat(&path, "(");
+            self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
         }
-        let (fields, _) =
-            self.parse_paren_comma_seq(|p| p.parse_pat_allow_top_alt(None, RecoverComma::No))?;
-        Ok(PatKind::TupleStruct(path, fields))
-    }
-
-    /// Error when there's a qualified path, e.g. `<Foo as Bar>::Baz`
-    /// as the path of e.g., a tuple or record struct pattern.
-    fn error_qpath_before_pat(&mut self, path: &Path, token: &str) -> PResult<'a, PatKind> {
-        let msg = &format!("unexpected `{}` after qualified path", token);
-        let mut err = self.struct_span_err(self.token.span, msg);
-        err.span_label(self.token.span, msg);
-        err.span_label(path.span, "the qualified path");
-        Err(err)
+        Ok(PatKind::TupleStruct(qself, path, fields))
     }
 
     /// Parses the fields of a struct-like pattern.
@@ -1044,7 +1128,7 @@ impl<'a> Parser<'a> {
             // Parsing a pattern of the form `fieldname: pat`.
             let fieldname = self.parse_field_name()?;
             self.bump();
-            let pat = self.parse_pat_allow_top_alt(None, RecoverComma::No)?;
+            let pat = self.parse_pat_allow_top_alt(None, RecoverComma::No, RecoverColon::No)?;
             hi = pat.span;
             (pat, fieldname, false)
         } else {
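
For reference, the user-visible shapes the pat.rs changes above target (identifiers are illustrative, not taken from this patch):

    match opt {
        Option:None => {}    // now reported as "unexpected `:`" with a
        _ => {}              // machine-applicable suggestion to write `Option::None`
    }

    match n {
        0..==9 => {}         // now "unexpected `=` after inclusive range",
        _ => {}              // suggesting `..=` instead of the old E0586
    }

With `#![feature(more_qualified_paths)]`, qualified paths such as `<Foo as Trait>::Assoc { field }` or `<Foo as Trait>::Assoc(x)` are accepted in struct and tuple-struct patterns, the gated spans replacing the previous hard error; without the feature they produce the usual feature-gate diagnostic.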
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 9cc600d9ede..953c6915068 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -352,49 +352,59 @@ impl<'a> Parser<'a> {
         debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
         match self.parse_angle_args() {
             Ok(args) => Ok(args),
-            Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
-                // Cancel error from being unable to find `>`. We know the error
-                // must have been this due to a non-zero unmatched angle bracket
-                // count.
-                e.cancel();
-
+            Err(mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
                 // Swap `self` with our backup of the parser state before attempting to parse
                 // generic arguments.
                 let snapshot = mem::replace(self, snapshot.unwrap());
 
-                debug!(
-                    "parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
-                     snapshot.count={:?}",
-                    snapshot.unmatched_angle_bracket_count,
-                );
-
                 // Eat the unmatched angle brackets.
-                for _ in 0..snapshot.unmatched_angle_bracket_count {
-                    self.eat_lt();
-                }
-
-                // Make a span over ${unmatched angle bracket count} characters.
-                let span = lo.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count));
-                self.struct_span_err(
-                    span,
-                    &format!(
-                        "unmatched angle bracket{}",
-                        pluralize!(snapshot.unmatched_angle_bracket_count)
-                    ),
-                )
-                .span_suggestion(
-                    span,
-                    &format!(
-                        "remove extra angle bracket{}",
-                        pluralize!(snapshot.unmatched_angle_bracket_count)
-                    ),
-                    String::new(),
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+                let all_angle_brackets = (0..snapshot.unmatched_angle_bracket_count)
+                    .fold(true, |a, _| a && self.eat_lt());
+
+                if !all_angle_brackets {
+                    // If there are other tokens in between the extraneous `<`s, we cannot simply
+                    // suggest removing them. This check also prevents us from accidentally ending
+                    // up in the middle of a multibyte character (issue #84104).
+                    let _ = mem::replace(self, snapshot);
+                    Err(e)
+                } else {
+                    // Cancel error from being unable to find `>`. We know the error
+                    // must have been this due to a non-zero unmatched angle bracket
+                    // count.
+                    e.cancel();
+
+                    debug!(
+                        "parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
+                         snapshot.count={:?}",
+                        snapshot.unmatched_angle_bracket_count,
+                    );
+
+                    // Make a span over ${unmatched angle bracket count} characters.
+                    // This is safe because `all_angle_brackets` ensures that there are only `<`s,
+                    // i.e. no multibyte characters, in this range.
+                    let span =
+                        lo.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count));
+                    self.struct_span_err(
+                        span,
+                        &format!(
+                            "unmatched angle bracket{}",
+                            pluralize!(snapshot.unmatched_angle_bracket_count)
+                        ),
+                    )
+                    .span_suggestion(
+                        span,
+                        &format!(
+                            "remove extra angle bracket{}",
+                            pluralize!(snapshot.unmatched_angle_bracket_count)
+                        ),
+                        String::new(),
+                        Applicability::MachineApplicable,
+                    )
+                    .emit();
 
-                // Try again without unmatched angle bracket characters.
-                self.parse_angle_args()
+                    // Try again without unmatched angle bracket characters.
+                    self.parse_angle_args()
+                }
             }
             Err(e) => Err(e),
         }
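
The path.rs change addresses issue #84104: the leading-angle-bracket recovery assumed every character it counted was a `<`, and could place the suggestion span in the middle of a multibyte character. A sketch of the two cases, with illustrative identifiers:

    bar::<<<<T as Foo>::Output>();
    //   ^^ extra `<`s still recover: "unmatched angle brackets" with a
    //      machine-applicable suggestion to remove them

    // If something other than `<` sits where the extra brackets were counted
    // (for example a multibyte character), the parser now restores the snapshot
    // and returns the original error instead of emitting a suggestion span that
    // could split a character.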
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index b40eed8c5d1..9ef3f61ec34 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -73,7 +73,11 @@ impl<'a> Parser<'a> {
             // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
             // that starts like a path (1 token), but is in fact not a path.
             // Also, we avoid stealing syntax from `parse_item_`.
-            self.parse_stmt_path_start(lo, attrs, force_collect)?
+            if force_collect == ForceCollect::Yes {
+                self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))
+            } else {
+                self.parse_stmt_path_start(lo, attrs)
+            }?
         } else if let Some(item) =
             self.parse_item_common(attrs.clone(), false, true, |_| true, force_collect)?
         {
@@ -85,7 +89,13 @@ impl<'a> Parser<'a> {
             self.mk_stmt(lo, StmtKind::Empty)
         } else if self.token != token::CloseDelim(token::Brace) {
             // Remainder are line-expr stmts.
-            let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))?;
+            let e = if force_collect == ForceCollect::Yes {
+                self.collect_tokens_no_attrs(|this| {
+                    this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
+                })
+            } else {
+                self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
+            }?;
             self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
         } else {
             self.error_outer_attrs(&attrs.take_for_recovery());
@@ -93,13 +103,8 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    fn parse_stmt_path_start(
-        &mut self,
-        lo: Span,
-        attrs: AttrWrapper,
-        force_collect: ForceCollect,
-    ) -> PResult<'a, Stmt> {
-        let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
+    fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
+        let stmt = self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let path = this.parse_path(PathStyle::Expr)?;
 
             if this.eat(&token::Not) {
@@ -112,7 +117,7 @@ impl<'a> Parser<'a> {
             }
 
             let expr = if this.eat(&token::OpenDelim(token::Brace)) {
-                this.parse_struct_expr(path, AttrVec::new(), true)?
+                this.parse_struct_expr(None, path, AttrVec::new(), true)?
             } else {
                 let hi = this.prev_token.span;
                 this.mk_expr(lo.to(hi), ExprKind::Path(None, path), AttrVec::new())
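
The stmt.rs change only matters for `ForceCollect::Yes`, i.e. callers that need the statement's tokens even when no attribute requires them; the `stmt` nonterminal is one such caller. A sketch of the kind of input, with an illustrative macro name:

    macro_rules! pass_through {
        ($s:stmt) => { $s };
    }

    fn main() {
        // A path-start statement: with this change it is wrapped in
        // `collect_tokens_no_attrs` when tokens are force-collected, instead of
        // threading `force_collect` into `parse_stmt_path_start` itself.
        pass_through!(Vec::<u8>::new());
    }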
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index d537741c749..1fbf01b1b97 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -226,6 +226,19 @@ impl<'a> Parser<'a> {
             }
         } else if self.eat_keyword(kw::Impl) {
             self.parse_impl_ty(&mut impl_dyn_multi)?
+        } else if self.token.is_keyword(kw::Union)
+            && self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
+        {
+            self.bump();
+            let (fields, recovered) = self.parse_record_struct_body("union")?;
+            let span = lo.to(self.prev_token.span);
+            self.sess.gated_spans.gate(sym::unnamed_fields, span);
+            TyKind::AnonymousUnion(fields, recovered)
+        } else if self.eat_keyword(kw::Struct) {
+            let (fields, recovered) = self.parse_record_struct_body("struct")?;
+            let span = lo.to(self.prev_token.span);
+            self.sess.gated_spans.gate(sym::unnamed_fields, span);
+            TyKind::AnonymousStruct(fields, recovered)
         } else if self.is_explicit_dyn_type() {
             self.parse_dyn_ty(&mut impl_dyn_multi)?
         } else if self.eat_lt() {
@@ -321,7 +334,6 @@ impl<'a> Parser<'a> {
         mut bounds: GenericBounds,
         plus: bool,
     ) -> PResult<'a, TyKind> {
-        assert_ne!(self.token, token::Question);
         if plus {
             self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
             bounds.append(&mut self.parse_generic_bounds(Some(self.prev_token.span))?);
@@ -381,7 +393,7 @@ impl<'a> Parser<'a> {
         let and_span = self.prev_token.span;
         let mut opt_lifetime =
             if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
-        let mutbl = self.parse_mutability();
+        let mut mutbl = self.parse_mutability();
         if self.token.is_lifetime() && mutbl == Mutability::Mut && opt_lifetime.is_none() {
             // A lifetime is invalid here: it would be part of a bare trait bound, which requires
             // it to be followed by a plus, but we disallow plus in the pointee type.
@@ -405,6 +417,26 @@ impl<'a> Parser<'a> {
 
                 opt_lifetime = Some(self.expect_lifetime());
             }
+        } else if self.token.is_keyword(kw::Dyn)
+            && mutbl == Mutability::Not
+            && self.look_ahead(1, |t| t.is_keyword(kw::Mut))
+        {
+            // We have `&dyn mut ...`, which is invalid and should be `&mut dyn ...`.
+            let span = and_span.to(self.look_ahead(1, |t| t.span));
+            let mut err = self.struct_span_err(span, "`mut` must precede `dyn`");
+            err.span_suggestion(
+                span,
+                "place `mut` before `dyn`",
+                "&mut dyn".to_string(),
+                Applicability::MachineApplicable,
+            );
+            err.emit();
+
+            // Recovery
+            mutbl = Mutability::Mut;
+            let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing);
+            self.bump();
+            self.bump_with((dyn_tok, dyn_tok_sp));
         }
         let ty = self.parse_ty_no_plus()?;
         Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }))
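
Finally, the ty.rs hunks do two things: they parse anonymous `struct { ... }` and `union { ... }` types in type position behind the `unnamed_fields` gate, and they recover from `&dyn mut Trait`. A sketch of the latter, using `std::fmt::Debug` as an illustrative trait:

    use std::fmt::Debug;

    // fn bad(d: &dyn mut Debug) {}
    //           ^^^^^^^^ error: `mut` must precede `dyn`
    //                    help: place `mut` before `dyn`: `&mut dyn`
    // The parser then recovers as if `&mut dyn Debug` had been written, so the
    // single mistake does not cascade into follow-up type errors.
    fn good(d: &mut dyn Debug) {
        println!("{:?}", d);
    }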