Diffstat (limited to 'src')
-rw-r--r--  src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/hir-def/src/resolver.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/lib.rs | 81
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/diagnostics.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/lib.rs | 14
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/semantics.rs | 36
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/lib.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/lib.rs | 32
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/hover.rs | 24
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/hover/tests.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/references.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs | 25
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html | 2
-rw-r--r--  src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/parser/src/lexed_str.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/parser/src/tests.rs | 17
-rw-r--r--  src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs | 32
-rw-r--r--  src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rast | 4
-rw-r--r--  src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast | 15
-rw-r--r--  src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast | 11
-rw-r--r--  src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast | 15
-rw-r--r--  src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast | 11
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs | 51
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs | 20
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/rust.ungram | 2
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs | 10
-rw-r--r--  src/tools/rust-analyzer/editors/code/.vscodeignore | 1
-rw-r--r--  src/tools/rust-analyzer/editors/code/package.json | 7
-rw-r--r--  src/tools/rust-analyzer/editors/code/walkthrough-setup-tips.md | 10
-rw-r--r--  src/tools/rust-analyzer/rust-bors.toml | 1
-rw-r--r--  src/tools/rust-analyzer/xtask/src/release/changelog.rs | 5
39 files changed, 352 insertions, 168 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index b87e94f9c51..6117664c64c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -1598,6 +1598,10 @@ impl ExprCollector<'_> {
                 for (id, _) in current_is_used.into_iter() {
                     binding_list.check_is_used(self, id);
                 }
+                if let &[pat] = &*pats {
+                    // Leading pipe without a real OR pattern. Leaving a one-item OR pattern may confuse later stages.
+                    return pat;
+                }
                 Pat::Or(pats.into())
             }
             ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat(), binding_list),
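
A standalone sketch of the single-alternative collapse added above, using a hand-rolled `Pat` enum instead of hir-def's real types (all names here are illustrative only, not part of the patch):

```rust
// Not part of the patch: a minimal model of the lowering step above.
#[derive(Debug, PartialEq)]
enum Pat {
    Literal(i32),
    Or(Vec<Pat>),
}

fn lower_or_pat(pats: Vec<Pat>) -> Pat {
    if let [_] = &*pats {
        // A leading pipe with a single alternative (e.g. `| 42`): return the inner
        // pattern instead of wrapping it, so later stages never see a one-item Or.
        return pats.into_iter().next().unwrap();
    }
    Pat::Or(pats)
}

fn main() {
    assert_eq!(lower_or_pat(vec![Pat::Literal(42)]), Pat::Literal(42));
    assert_eq!(
        lower_or_pat(vec![Pat::Literal(42), Pat::Literal(43)]),
        Pat::Or(vec![Pat::Literal(42), Pat::Literal(43)])
    );
}
```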
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index 450a15bd66e..d5b94f0ae44 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -122,7 +122,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
 
         let mut expn_text = String::new();
         if let Some(err) = exp.err {
-            format_to!(expn_text, "/* error: {} */", err.render_to_string(&db).0);
+            format_to!(expn_text, "/* error: {} */", err.render_to_string(&db).message);
         }
         let (parse, token_map) = exp.value;
         if expect_errors {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index 266851e3c0d..d491042eabf 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -1043,12 +1043,12 @@ impl HasResolver for ModuleId {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
         let mut def_map = self.def_map(db);
         let mut module_id = self.local_id;
-        let mut modules: SmallVec<[_; 1]> = smallvec![];
 
         if !self.is_block_module() {
             return Resolver { scopes: vec![], module_scope: ModuleItemMap { def_map, module_id } };
         }
 
+        let mut modules: SmallVec<[_; 1]> = smallvec![];
         while let Some(parent) = def_map.parent() {
             let block_def_map = mem::replace(&mut def_map, parent.def_map(db));
             modules.push(block_def_map);
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index 5d5f72490d0..7d2f556406d 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -165,40 +165,73 @@ pub enum ExpandErrorKind {
 }
 
 impl ExpandError {
-    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
+    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> RenderedExpandError {
         self.inner.0.render_to_string(db)
     }
 }
 
+pub struct RenderedExpandError {
+    pub message: String,
+    pub error: bool,
+    pub kind: &'static str,
+}
+
+impl RenderedExpandError {
+    const GENERAL_KIND: &str = "macro-error";
+}
+
 impl ExpandErrorKind {
-    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) {
+    pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> RenderedExpandError {
         match self {
-            ExpandErrorKind::ProcMacroAttrExpansionDisabled => {
-                ("procedural attribute macro expansion is disabled".to_owned(), false)
-            }
-            ExpandErrorKind::MacroDisabled => {
-                ("proc-macro is explicitly disabled".to_owned(), false)
-            }
+            ExpandErrorKind::ProcMacroAttrExpansionDisabled => RenderedExpandError {
+                message: "procedural attribute macro expansion is disabled".to_owned(),
+                error: false,
+                kind: "proc-macros-disabled",
+            },
+            ExpandErrorKind::MacroDisabled => RenderedExpandError {
+                message: "proc-macro is explicitly disabled".to_owned(),
+                error: false,
+                kind: "proc-macro-disabled",
+            },
             &ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
                 match db.proc_macros().get_error_for_crate(def_crate) {
-                    Some((e, hard_err)) => (e.to_owned(), hard_err),
-                    None => (
-                        format!(
-                            "internal error: proc-macro map is missing error entry for crate {def_crate:?}"
-                        ),
-                        true,
-                    ),
+                    Some((e, hard_err)) => RenderedExpandError {
+                        message: e.to_owned(),
+                        error: hard_err,
+                        kind: RenderedExpandError::GENERAL_KIND,
+                    },
+                    None => RenderedExpandError {
+                        message: format!("internal error: proc-macro map is missing error entry for crate {def_crate:?}"),
+                        error: true,
+                        kind: RenderedExpandError::GENERAL_KIND,
+                    },
                 }
             }
-            ExpandErrorKind::MacroDefinition => {
-                ("macro definition has parse errors".to_owned(), true)
-            }
-            ExpandErrorKind::Mbe(e) => (e.to_string(), true),
-            ExpandErrorKind::RecursionOverflow => {
-                ("overflow expanding the original macro".to_owned(), true)
-            }
-            ExpandErrorKind::Other(e) => ((**e).to_owned(), true),
-            ExpandErrorKind::ProcMacroPanic(e) => (format!("proc-macro panicked: {e}"), true),
+            ExpandErrorKind::MacroDefinition => RenderedExpandError {
+                message: "macro definition has parse errors".to_owned(),
+                error: true,
+                kind: RenderedExpandError::GENERAL_KIND,
+            },
+            ExpandErrorKind::Mbe(e) => RenderedExpandError {
+                message: e.to_string(),
+                error: true,
+                kind: RenderedExpandError::GENERAL_KIND,
+            },
+            ExpandErrorKind::RecursionOverflow => RenderedExpandError {
+                message: "overflow expanding the original macro".to_owned(),
+                error: true,
+                kind: RenderedExpandError::GENERAL_KIND,
+            },
+            ExpandErrorKind::Other(e) => RenderedExpandError {
+                message: (**e).to_owned(),
+                error: true,
+                kind: RenderedExpandError::GENERAL_KIND,
+            },
+            ExpandErrorKind::ProcMacroPanic(e) => RenderedExpandError {
+                message: format!("proc-macro panicked: {e}"),
+                error: true,
+                kind: RenderedExpandError::GENERAL_KIND,
+            },
         }
     }
 }
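
To illustrate how the new struct travels, here is a minimal sketch that re-declares `RenderedExpandError` locally and branches on its fields the way the diagnostic emission later in this diff does (the `severity` helper and `main` driver are illustrative, not the crate's API):

```rust
// Not part of the patch: local re-declaration of the new struct for illustration.
pub struct RenderedExpandError {
    pub message: String,
    pub error: bool,
    pub kind: &'static str,
}

// Mirrors the DiagnosticCode::Ra(kind, severity) construction below: a hard error
// becomes an error severity, everything else a weak warning.
fn severity(rendered: &RenderedExpandError) -> &'static str {
    if rendered.error {
        "error"
    } else {
        "weak-warning"
    }
}

fn main() {
    let rendered = RenderedExpandError {
        message: "proc-macro is explicitly disabled".to_owned(),
        error: false,
        kind: "proc-macro-disabled",
    };
    println!("[{}] {}: {}", rendered.kind, severity(&rendered), rendered.message);
}
```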
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 2fedffe047d..8297acde857 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -165,6 +165,7 @@ pub struct MacroError {
     pub precise_location: Option<TextRange>,
     pub message: String,
     pub error: bool,
+    pub kind: &'static str,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 4a18795e7d6..f9b51b65709 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -58,7 +58,8 @@ use hir_def::{
     TypeOrConstParamId, TypeParamId, UnionId,
 };
 use hir_expand::{
-    attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, ValueResult,
+    attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, RenderedExpandError,
+    ValueResult,
 };
 use hir_ty::{
     all_super_traits, autoderef, check_orphan_rules,
@@ -838,7 +839,7 @@ fn macro_call_diagnostics(
         let file_id = loc.kind.file_id();
         let node =
             InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
-        let (message, error) = err.render_to_string(db.upcast());
+        let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast());
         let precise_location = if err.span().anchor.file_id == file_id {
             Some(
                 err.span().range
@@ -850,7 +851,7 @@ fn macro_call_diagnostics(
         } else {
             None
         };
-        acc.push(MacroError { node, precise_location, message, error }.into());
+        acc.push(MacroError { node, precise_location, message, error, kind }.into());
     }
 
     if !parse_errors.is_empty() {
@@ -916,13 +917,14 @@ fn emit_def_diagnostic_(
 
         DefDiagnosticKind::MacroError { ast, path, err } => {
             let item = ast.to_ptr(db.upcast());
-            let (message, error) = err.render_to_string(db.upcast());
+            let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast());
             acc.push(
                 MacroError {
                     node: InFile::new(ast.file_id, item.syntax_node_ptr()),
                     precise_location: None,
                     message: format!("{}: {message}", path.display(db.upcast(), edition)),
                     error,
+                    kind,
                 }
                 .into(),
             )
@@ -1811,7 +1813,8 @@ impl DefWithBody {
                     InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
                 }
                 BodyDiagnostic::MacroError { node, err } => {
-                    let (message, error) = err.render_to_string(db.upcast());
+                    let RenderedExpandError { message, error, kind } =
+                        err.render_to_string(db.upcast());
 
                     let precise_location = if err.span().anchor.file_id == node.file_id {
                         Some(
@@ -1829,6 +1832,7 @@ impl DefWithBody {
                         precise_location,
                         message,
                         error,
+                        kind,
                     }
                     .into()
                 }
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 860754d5e70..6e298d95862 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -936,16 +936,7 @@ impl<'db> SemanticsImpl<'db> {
             }
         }
 
-        let (file_id, tokens) = stack.first()?;
-        // make sure we pick the token in the expanded include if we encountered an include,
-        // otherwise we'll get the wrong semantics
-        let sa =
-            tokens.first()?.0.parent().and_then(|parent| {
-                self.analyze_impl(InFile::new(*file_id, &parent), None, false)
-            })?;
-
         let mut m_cache = self.macro_call_cache.borrow_mut();
-        let def_map = sa.resolver.def_map();
 
         // Filters out all tokens that contain the given range (usually the macro call), any such
         // token is redundant as the corresponding macro call has already been processed
@@ -954,6 +945,10 @@ impl<'db> SemanticsImpl<'db> {
         };
 
         while let Some((expansion, ref mut tokens)) = stack.pop() {
+            // Reverse the tokens so that the first tokens are preferred (we pop from the
+            // back).
+            // Alternatively we could pop from the front, but that would shift the contents on every pop.
+            tokens.reverse();
             while let Some((token, ctx)) = tokens.pop() {
                 let was_not_remapped = (|| {
                     // First expand into attribute invocations
@@ -1024,8 +1019,16 @@ impl<'db> SemanticsImpl<'db> {
                                         ) {
                                         call.as_macro_file()
                                     } else {
-                                        // FIXME: This is wrong, the SourceAnalyzer might be invalid here
-                                        sa.expand(self.db, mcall.as_ref())?
+                                        token
+                                            .parent()
+                                            .and_then(|parent| {
+                                                self.analyze_impl(
+                                                    InFile::new(expansion, &parent),
+                                                    None,
+                                                    false,
+                                                )
+                                            })?
+                                            .expand(self.db, mcall.as_ref())?
                                     };
                                     m_cache.insert(mcall, it);
                                     it
@@ -1095,9 +1098,16 @@ impl<'db> SemanticsImpl<'db> {
                                 attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
                             // Not an attribute, nor a derive, so it's either an inert attribute or a derive helper
                             // Try to resolve to a derive helper and downmap
+                            let resolver = &token
+                                .parent()
+                                .and_then(|parent| {
+                                    self.analyze_impl(InFile::new(expansion, &parent), None, false)
+                                })?
+                                .resolver;
                             let id = self.db.ast_id_map(expansion).ast_id(&adt);
-                            let helpers =
-                                def_map.derive_helpers_in_scope(InFile::new(expansion, id))?;
+                            let helpers = resolver
+                                .def_map()
+                                .derive_helpers_in_scope(InFile::new(expansion, id))?;
 
                             if !helpers.is_empty() {
                                 let text_range = attr.syntax().text_range();
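
The `tokens.reverse()` added above exists purely so the loop can keep popping from the back while still visiting tokens in their original order; a tiny standalone demonstration of that equivalence:

```rust
// Not part of the patch: reversing once and popping from the back visits elements
// front-to-back without the O(n) shifting that repeatedly removing index 0 would cost.
fn main() {
    let mut tokens = vec!["a", "b", "c"];
    tokens.reverse();

    let mut visited = Vec::new();
    while let Some(tok) = tokens.pop() {
        visited.push(tok);
    }

    assert_eq!(visited, ["a", "b", "c"]);
}
```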
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
index 0e9c463e024..94274f6d17c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -41,7 +41,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
         macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>();
 
     acc.add(
-        AssistId("remove_dbg", AssistKind::Refactor),
+        AssistId("remove_dbg", AssistKind::QuickFix),
         "Remove dbg!()",
         replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?,
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index db789cfa334..648bf358b4b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -34,6 +34,9 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let pipe_token = ctx.find_token_syntax_at_offset(T![|])?;
     let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update();
+    if or_pat.leading_pipe().is_some_and(|it| it == pipe_token) {
+        return None;
+    }
     let match_arm = ast::MatchArm::cast(or_pat.syntax().parent()?)?;
     let match_arm_body = match_arm.expr()?;
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index 22620816d50..8aaf5d6fff2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -301,6 +301,7 @@ mod handlers {
             inline_call::inline_into_callers,
             inline_const_as_literal::inline_const_as_literal,
             inline_local_variable::inline_local_variable,
+            inline_macro::inline_macro,
             inline_type_alias::inline_type_alias,
             inline_type_alias::inline_type_alias_uses,
             into_to_qualified_from::into_to_qualified_from,
@@ -326,6 +327,7 @@ mod handlers {
             raw_string::add_hash,
             raw_string::make_usual_string,
             raw_string::remove_hash,
+            remove_dbg::remove_dbg,
             remove_mut::remove_mut,
             remove_unused_imports::remove_unused_imports,
             remove_unused_param::remove_unused_param,
@@ -381,9 +383,6 @@ mod handlers {
             generate_getter_or_setter::generate_setter,
             generate_delegate_methods::generate_delegate_methods,
             generate_deref::generate_deref,
-            //
-            remove_dbg::remove_dbg,
-            inline_macro::inline_macro,
             // Are you sure you want to add new assist here, and not to the
             // sorted list above?
         ]
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index aed093f0ebf..b764f852f04 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -293,3 +293,35 @@ impl SnippetCap {
         }
     }
 }
+
+pub struct Ranker<'a> {
+    pub kind: parser::SyntaxKind,
+    pub text: &'a str,
+    pub ident_kind: bool,
+}
+
+impl<'a> Ranker<'a> {
+    pub const MAX_RANK: usize = 0b1110;
+
+    pub fn from_token(token: &'a syntax::SyntaxToken) -> Self {
+        let kind = token.kind();
+        Ranker { kind, text: token.text(), ident_kind: kind.is_any_identifier() }
+    }
+
+    /// A utility function that ranks a token against a given kind and text, returning a number that
+    /// represents how close the token is to the given kind and text.
+    pub fn rank_token(&self, tok: &syntax::SyntaxToken) -> usize {
+        let tok_kind = tok.kind();
+
+        let exact_same_kind = tok_kind == self.kind;
+        let both_idents = exact_same_kind || (tok_kind.is_any_identifier() && self.ident_kind);
+        let same_text = tok.text() == self.text;
+        // anything that mapped into a token tree has likely no semantic information
+        let no_tt_parent =
+            tok.parent().map_or(false, |it| it.kind() != parser::SyntaxKind::TOKEN_TREE);
+        (both_idents as usize)
+            | ((exact_same_kind as usize) << 1)
+            | ((same_text as usize) << 2)
+            | ((no_tt_parent as usize) << 3)
+    }
+}
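
A standalone sketch of the ranking bitmask, using plain booleans in place of the `SyntaxToken` queries so the bit layout and the `MAX_RANK` cut-off are easy to see (the `rank` helper below is illustrative, not the crate's API):

```rust
// Not part of the patch: same bit layout as Ranker::rank_token, with plain bools
// standing in for the kind/text/parent checks on a real SyntaxToken.
fn rank(both_idents: bool, exact_same_kind: bool, same_text: bool, no_tt_parent: bool) -> usize {
    (both_idents as usize)
        | ((exact_same_kind as usize) << 1)
        | ((same_text as usize) << 2)
        | ((no_tt_parent as usize) << 3)
}

const MAX_RANK: usize = 0b1110;

fn main() {
    // Exact kind + same text + not inside a token tree: the best possible match.
    let best = rank(true, true, true, true);
    assert_eq!(best, 0b1111);
    assert!(best >= MAX_RANK); // the highlighting loop stops early at this point

    // Same text but wrong kind and buried in a token tree: a much weaker candidate.
    assert_eq!(rank(false, false, true, false), 0b0100);
}
```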
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index 6a976697c80..e177b72e4d4 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -3,14 +3,19 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
 // Diagnostic: macro-error
 //
 // This diagnostic is shown for macro expansion errors.
+
+// Diagnostic: proc-macros-disabled
+//
+// This diagnostic is shown for proc macros when proc-macro expansion has been disabled.
+
+// Diagnostic: proc-macro-disabled
+//
+// This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`.
 pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
     // Use more accurate position if available.
     let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
     Diagnostic::new(
-        DiagnosticCode::Ra(
-            "macro-error",
-            if d.error { Severity::Error } else { Severity::WeakWarning },
-        ),
+        DiagnosticCode::Ra(d.kind, if d.error { Severity::Error } else { Severity::WeakWarning }),
         d.message.clone(),
         display_range,
     )
diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
index 1b82c00d1dc..e5b4ed17b2a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
@@ -510,6 +510,7 @@ fn caller$0() {
             expect![[]],
         );
     }
+
     #[test]
     fn test_call_hierarchy_in_macros_incoming_different_files() {
         check_hierarchy(
@@ -591,9 +592,9 @@ macro_rules! call {
 "#,
             expect!["callee Function FileId(0) 22..37 30..36"],
             expect![[r#"
-                callee Function FileId(0) 38..52 44..50 : FileId(0):44..50
                 caller Function FileId(0) 38..52 : FileId(0):44..50
-                caller Function FileId(1) 130..136 130..136 : FileId(0):44..50"#]],
+                caller Function FileId(1) 130..136 130..136 : FileId(0):44..50
+                callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]],
             expect![[]],
         );
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index 124db2985bf..6cac4f1ee48 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -11,7 +11,7 @@ use ide_db::{
     defs::{Definition, IdentClass, NameRefClass, OperatorClass},
     famous_defs::FamousDefs,
     helpers::pick_best_token,
-    FileRange, FxIndexSet, RootDatabase,
+    FileRange, FxIndexSet, Ranker, RootDatabase,
 };
 use itertools::{multizip, Itertools};
 use span::Edition;
@@ -182,27 +182,13 @@ fn hover_offset(
     // equivalency is more important
     let mut descended = sema.descend_into_macros(original_token.clone());
 
-    let kind = original_token.kind();
-    let text = original_token.text();
-    let ident_kind = kind.is_any_identifier();
-
-    descended.sort_by_cached_key(|tok| {
-        let tok_kind = tok.kind();
-
-        let exact_same_kind = tok_kind == kind;
-        let both_idents = exact_same_kind || (tok_kind.is_any_identifier() && ident_kind);
-        let same_text = tok.text() == text;
-        // anything that mapped into a token tree has likely no semantic information
-        let no_tt_parent = tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE);
-        !((both_idents as usize)
-            | ((exact_same_kind as usize) << 1)
-            | ((same_text as usize) << 2)
-            | ((no_tt_parent as usize) << 3))
-    });
+    let ranker = Ranker::from_token(&original_token);
+
+    descended.sort_by_cached_key(|tok| !ranker.rank_token(tok));
 
     let mut res = vec![];
     for token in descended {
-        let is_same_kind = token.kind() == kind;
+        let is_same_kind = token.kind() == ranker.kind;
         let lint_hover = (|| {
             // FIXME: Definition should include known lints and the like instead of having this special case here
             let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index ef835f5bef8..3e402630419 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -289,7 +289,7 @@ m!(ab$0c);
             *abc*
 
             ```rust
-            test::module
+            test
             ```
 
             ```rust
@@ -298,11 +298,11 @@ m!(ab$0c);
 
             ---
 
-            Inner
+            Outer
             ---
 
             ```rust
-            test
+            test::module
             ```
 
             ```rust
@@ -311,7 +311,7 @@ m!(ab$0c);
 
             ---
 
-            Outer
+            Inner
         "#]],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index e7cb8a253f4..339315db571 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -1701,14 +1701,14 @@ fn f() {
 }
             "#,
             expect![[r#"
-                func Function FileId(0) 137..146 140..144
+                func Function FileId(0) 137..146 140..144 module
 
-                FileId(0) 161..165
+                FileId(0) 181..185
 
 
-                func Function FileId(0) 137..146 140..144 module
+                func Function FileId(0) 137..146 140..144
 
-                FileId(0) 181..185
+                FileId(0) 161..165
             "#]],
         )
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 961b2a4c938..0747d1b404b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -16,7 +16,7 @@ mod tests;
 use std::ops::ControlFlow;
 
 use hir::{InRealFile, Name, Semantics};
-use ide_db::{FxHashMap, RootDatabase, SymbolKind};
+use ide_db::{FxHashMap, Ranker, RootDatabase, SymbolKind};
 use span::EditionedFileId;
 use syntax::{
     ast::{self, IsString},
@@ -397,13 +397,12 @@ fn traverse(
                 Some(AttrOrDerive::Derive(_)) => inside_attribute,
                 None => false,
             };
+
         let descended_element = if in_macro {
             // Attempt to descend tokens into macro-calls.
             let res = match element {
                 NodeOrToken::Token(token) if token.kind() != COMMENT => {
-                    let kind = token.kind();
-                    let text = token.text();
-                    let ident_kind = kind.is_any_identifier();
+                    let ranker = Ranker::from_token(&token);
 
                     let mut t = None;
                     let mut r = 0;
@@ -412,21 +411,9 @@ fn traverse(
                         |tok, _ctx| {
                             // FIXME: Consider checking ctx transparency for being opaque?
                             let tok = tok.value;
-                            let tok_kind = tok.kind();
-
-                            let exact_same_kind = tok_kind == kind;
-                            let both_idents =
-                                exact_same_kind || (tok_kind.is_any_identifier() && ident_kind);
-                            let same_text = tok.text() == text;
-                            // anything that mapped into a token tree has likely no semantic information
-                            let no_tt_parent =
-                                tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE);
-                            let my_rank = (both_idents as usize)
-                                | ((exact_same_kind as usize) << 1)
-                                | ((same_text as usize) << 2)
-                                | ((no_tt_parent as usize) << 3);
-
-                            if my_rank > 0b1110 {
+                            let my_rank = ranker.rank_token(&tok);
+
+                            if my_rank >= Ranker::MAX_RANK {
                                 // a rank of 0b1110 means that we have found a maximally interesting
                                 // token so stop early.
                                 t = Some(tok);
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
index d07ba74db24..361dcd1bc37 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
@@ -50,4 +50,4 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 <span class="brace">}</span>
 
 <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="module attribute crate_root library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">issue_18089</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">fn</span> <span class="function declaration">template</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre>
\ No newline at end of file
+<span class="keyword">fn</span> <span class="macro declaration">template</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
index 3db1a10ca5c..ed01fca2acd 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
@@ -21,7 +21,8 @@ const RANGE_PAT_END_FIRST: TokenSet =
     expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[T![-], T![const]]));
 
 pub(crate) fn pattern(p: &mut Parser<'_>) {
-    pattern_r(p, PAT_RECOVERY_SET);
+    let m = p.start();
+    pattern_r(p, m, false, PAT_RECOVERY_SET);
 }
 
 /// Parses a pattern list separated by pipes `|`.
@@ -36,8 +37,9 @@ pub(crate) fn pattern_single(p: &mut Parser<'_>) {
 /// Parses a pattern list separated by pipes `|`
 /// using the given `recovery_set`.
 pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
-    p.eat(T![|]);
-    pattern_r(p, recovery_set);
+    let m = p.start();
+    let has_leading_pipe = p.eat(T![|]);
+    pattern_r(p, m, has_leading_pipe, recovery_set);
 }
 
 // test or_pattern
@@ -51,11 +53,10 @@ pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
 // }
 /// Parses a pattern list separated by pipes `|`, with no leading `|`, using the
 /// given `recovery_set`.
-fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
-    let m = p.start();
+fn pattern_r(p: &mut Parser<'_>, m: Marker, has_leading_pipe: bool, recovery_set: TokenSet) {
     pattern_single_r(p, recovery_set);
 
-    if !p.at(T![|]) {
+    if !p.at(T![|]) && !has_leading_pipe {
         m.abandon(p);
         return;
     }
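
A toy model of the new wrapping rule: an `OR_PAT` node is now emitted when there was a leading pipe or at least one separating pipe, and abandoned otherwise (the string-building "parser" below is invented for illustration, not the real marker-based API):

```rust
// Not part of the patch: models the `!p.at(T![|]) && !has_leading_pipe` check above.
fn parse_pattern_list(has_leading_pipe: bool, alternatives: Vec<&str>) -> String {
    if !has_leading_pipe && alternatives.len() == 1 {
        // No pipes at all: no OR_PAT node, just the single pattern.
        return alternatives[0].to_owned();
    }
    format!("OR_PAT({})", alternatives.join(" | "))
}

fn main() {
    assert_eq!(parse_pattern_list(false, vec!["42"]), "42");
    // `| 42` now produces an OR_PAT even though there is only one alternative...
    assert_eq!(parse_pattern_list(true, vec!["42"]), "OR_PAT(42)");
    // ...and `| 42 | 43` keeps working as before.
    assert_eq!(parse_pattern_list(true, vec!["42", "43"]), "OR_PAT(42 | 43)");
}
```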
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
index 5322463a713..3c0eb1b42a6 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -39,7 +39,9 @@ impl<'a> LexedStr<'a> {
             conv.offset = shebang_len;
         };
 
-        for token in rustc_lexer::tokenize(&text[conv.offset..]) {
+        // Re-create the tokenizer from scratch for every token, because `GuardedStrPrefix` is a
+        // single token in the lexer but we want to split it into two in editions before 2024.
+        while let Some(token) = rustc_lexer::tokenize(&text[conv.offset..]).next() {
             let token_text = &text[conv.offset..][..token.len as usize];
 
             conv.extend_token(&token.kind, token_text);
@@ -158,7 +160,7 @@ impl<'a> Converter<'a> {
         }
     }
 
-    fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, token_text: &str) {
+    fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, mut token_text: &str) {
         // A note on an intended tradeoff:
         // We drop some useful information here (see patterns with double dots `..`)
         // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
@@ -189,10 +191,15 @@ impl<'a> Converter<'a> {
                 rustc_lexer::TokenKind::RawIdent => IDENT,
 
                 rustc_lexer::TokenKind::GuardedStrPrefix if self.edition.at_least_2024() => {
+                    // FIXME: rustc does something better for recovery.
                     err = "Invalid string literal (reserved syntax)";
                     ERROR
                 }
-                rustc_lexer::TokenKind::GuardedStrPrefix => POUND,
+                rustc_lexer::TokenKind::GuardedStrPrefix => {
+                    // The token is `#"` or `##`, split it into two.
+                    token_text = &token_text[1..];
+                    POUND
+                }
 
                 rustc_lexer::TokenKind::Literal { kind, .. } => {
                     self.extend_literal(token_text.len(), kind);
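
The reason the conversion loop above now re-creates the tokenizer each iteration is that splitting a `GuardedStrPrefix` consumes only the `#`, so lexing has to restart at the leftover `"`. A toy sketch of that restart (the simplified `next_token_len` rule is invented for illustration and is not rustc_lexer):

```rust
// Not part of the patch: a toy lexer showing why tokenization restarts after every token.
// Splitting a `#"` guarded-string prefix emits only the `#`, so the next round must pick
// up again at the leftover `"`.
fn next_token_len(text: &str) -> usize {
    match text.as_bytes() {
        // Pretend the guarded-string prefix was split on an old edition: emit just `#`.
        [b'#', b'"', ..] => 1,
        [b'"', rest @ ..] => match rest.iter().position(|&b| b == b'"') {
            Some(close) => close + 2, // opening quote + contents + closing quote
            None => rest.len() + 1,   // unterminated: consume the rest
        },
        [] => 0,
        _ => 1,
    }
}

fn main() {
    let text = r##"#"foo""##;
    let mut offset = 0;
    while offset < text.len() {
        // Restart "lexing" from the current offset, exactly like the new while-let loop.
        let len = next_token_len(&text[offset..]);
        println!("{:?}", &text[offset..offset + len]);
        offset += len;
    }
    // Prints "#" then "\"foo\"", matching the new guarded_str_prefix_edition_2021 lexer test.
}
```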
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs
index e7bccb6685c..4b19ddc752a 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs
@@ -15,11 +15,20 @@ use crate::{Edition, LexedStr, TopEntryPoint};
 #[path = "../test_data/generated/runner.rs"]
 mod runner;
 
+fn infer_edition(file_path: &Path) -> Edition {
+    let file_content = std::fs::read_to_string(file_path).unwrap();
+    if let Some(edition) = file_content.strip_prefix("//@ edition: ") {
+        edition[..4].parse().expect("invalid edition directive")
+    } else {
+        Edition::CURRENT
+    }
+}
+
 #[test]
 fn lex_ok() {
     for case in TestCase::list("lexer/ok") {
         let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
-        let actual = lex(&case.text);
+        let actual = lex(&case.text, infer_edition(&case.rs));
         expect_file![case.rast].assert_eq(&actual)
     }
 }
@@ -28,13 +37,13 @@ fn lex_ok() {
 fn lex_err() {
     for case in TestCase::list("lexer/err") {
         let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
-        let actual = lex(&case.text);
+        let actual = lex(&case.text, infer_edition(&case.rs));
         expect_file![case.rast].assert_eq(&actual)
     }
 }
 
-fn lex(text: &str) -> String {
-    let lexed = LexedStr::new(Edition::CURRENT, text);
+fn lex(text: &str, edition: Edition) -> String {
+    let lexed = LexedStr::new(edition, text);
 
     let mut res = String::new();
     for i in 0..lexed.len() {
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs
index c56bf0b6448..7076e03ba4b 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs
@@ -195,6 +195,38 @@ fn macro_pattern() {
             error 0: expected pattern
         "#]],
     );
+
+    check(
+        TopEntryPoint::Pattern,
+        "| 42 | 43",
+        expect![[r#"
+            OR_PAT
+              PIPE "|"
+              WHITESPACE " "
+              LITERAL_PAT
+                LITERAL
+                  INT_NUMBER "42"
+              WHITESPACE " "
+              PIPE "|"
+              WHITESPACE " "
+              LITERAL_PAT
+                LITERAL
+                  INT_NUMBER "43"
+        "#]],
+    );
+
+    check(
+        TopEntryPoint::Pattern,
+        "| 42",
+        expect![[r#"
+            OR_PAT
+              PIPE "|"
+              WHITESPACE " "
+              LITERAL_PAT
+                LITERAL
+                  INT_NUMBER "42"
+        "#]],
+    );
 }
 
 #[test]
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rast
new file mode 100644
index 00000000000..1bdd6720441
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rast
@@ -0,0 +1,4 @@
+COMMENT "//@ edition: 2021"
+WHITESPACE "\n\n"
+POUND "#"
+STRING "\"foo\""
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rs
new file mode 100644
index 00000000000..f00f949f0db
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rs
@@ -0,0 +1,3 @@
+//@ edition: 2021
+
+#"foo"
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast
index 8189cf0a8e5..92210281629 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast
@@ -102,9 +102,9 @@ SOURCE_FILE
                 COMMA ","
               WHITESPACE "\n        "
               MATCH_ARM
-                PIPE "|"
-                WHITESPACE " "
                 OR_PAT
+                  PIPE "|"
+                  WHITESPACE " "
                   IDENT_PAT
                     NAME
                       IDENT "X"
@@ -132,11 +132,12 @@ SOURCE_FILE
                 COMMA ","
               WHITESPACE "\n        "
               MATCH_ARM
-                PIPE "|"
-                WHITESPACE " "
-                IDENT_PAT
-                  NAME
-                    IDENT "X"
+                OR_PAT
+                  PIPE "|"
+                  WHITESPACE " "
+                  IDENT_PAT
+                    NAME
+                      IDENT "X"
                 WHITESPACE " "
                 FAT_ARROW "=>"
                 WHITESPACE " "
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast
index dff72ba886f..06c30bba59f 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast
@@ -43,11 +43,12 @@ SOURCE_FILE
           WHITESPACE " "
           SLICE_PAT
             L_BRACK "["
-            PIPE "|"
-            WHITESPACE " "
-            IDENT_PAT
-              NAME
-                IDENT "a"
+            OR_PAT
+              PIPE "|"
+              WHITESPACE " "
+              IDENT_PAT
+                NAME
+                  IDENT "a"
             COMMA ","
             WHITESPACE " "
             REST_PAT
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast
index 1a01e0f6938..c7cd11f774b 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast
@@ -91,9 +91,9 @@ SOURCE_FILE
           WHITESPACE " "
           TUPLE_PAT
             L_PAREN "("
-            PIPE "|"
-            WHITESPACE " "
             OR_PAT
+              PIPE "|"
+              WHITESPACE " "
               IDENT_PAT
                 NAME
                   IDENT "a"
@@ -105,11 +105,12 @@ SOURCE_FILE
                   IDENT "a"
             COMMA ","
             WHITESPACE " "
-            PIPE "|"
-            WHITESPACE " "
-            IDENT_PAT
-              NAME
-                IDENT "b"
+            OR_PAT
+              PIPE "|"
+              WHITESPACE " "
+              IDENT_PAT
+                NAME
+                  IDENT "b"
             R_PAREN ")"
           WHITESPACE " "
           EQ "="
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast
index 55baf2fdcb4..96353f46976 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast
@@ -110,11 +110,12 @@ SOURCE_FILE
                 NAME_REF
                   IDENT "S"
             L_PAREN "("
-            PIPE "|"
-            WHITESPACE " "
-            IDENT_PAT
-              NAME
-                IDENT "a"
+            OR_PAT
+              PIPE "|"
+              WHITESPACE " "
+              IDENT_PAT
+                NAME
+                  IDENT "a"
             R_PAREN ")"
           WHITESPACE " "
           EQ "="
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
index 21f95a945d9..03759b036b4 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -5,7 +5,7 @@ use std::{
 };
 
 use ide::Cancelled;
-use lsp_server::ExtractError;
+use lsp_server::{ExtractError, Response, ResponseError};
 use serde::{de::DeserializeOwned, Serialize};
 use stdx::thread::ThreadIntent;
 
@@ -117,15 +117,20 @@ impl RequestDispatcher<'_> {
             }
             return self;
         }
-        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::Worker, f)
+        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(
+            ThreadIntent::Worker,
+            f,
+            Self::content_modified_error,
+        )
     }
 
     /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
-    /// ready this will return a default constructed [`R::Result`].
-    pub(crate) fn on_or<const ALLOW_RETRYING: bool, R>(
+    /// ready this will return a `default`-constructed [`R::Result`].
+    pub(crate) fn on_with<R>(
         &mut self,
         f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
         default: impl FnOnce() -> R::Result,
+        on_cancelled: fn() -> ResponseError,
     ) -> &mut Self
     where
         R: lsp_types::request::Request<
@@ -141,7 +146,7 @@ impl RequestDispatcher<'_> {
             }
             return self;
         }
-        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::Worker, f)
+        self.on_with_thread_intent::<true, false, R>(ThreadIntent::Worker, f, on_cancelled)
     }
 
     /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
@@ -160,7 +165,11 @@ impl RequestDispatcher<'_> {
             }
             return self;
         }
-        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::Worker, f)
+        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(
+            ThreadIntent::Worker,
+            f,
+            Self::content_modified_error,
+        )
     }
 
     /// Dispatches a latency-sensitive request onto the thread pool. When the VFS is marked not
@@ -183,7 +192,11 @@ impl RequestDispatcher<'_> {
             }
             return self;
         }
-        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::LatencySensitive, f)
+        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(
+            ThreadIntent::LatencySensitive,
+            f,
+            Self::content_modified_error,
+        )
     }
 
     /// Formatting requests should never block on waiting for a task thread to open up, editors will wait
@@ -198,7 +211,11 @@ impl RequestDispatcher<'_> {
         R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
         R::Result: Serialize,
     {
-        self.on_with_thread_intent::<false, false, R>(ThreadIntent::LatencySensitive, f)
+        self.on_with_thread_intent::<false, false, R>(
+            ThreadIntent::LatencySensitive,
+            f,
+            Self::content_modified_error,
+        )
     }
 
     pub(crate) fn finish(&mut self) {
@@ -217,6 +234,7 @@ impl RequestDispatcher<'_> {
         &mut self,
         intent: ThreadIntent,
         f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
+        on_cancelled: fn() -> ResponseError,
     ) -> &mut Self
     where
         R: lsp_types::request::Request + 'static,
@@ -245,11 +263,10 @@ impl RequestDispatcher<'_> {
             match thread_result_to_response::<R>(req.id.clone(), result) {
                 Ok(response) => Task::Response(response),
                 Err(_cancelled) if ALLOW_RETRYING => Task::Retry(req),
-                Err(_cancelled) => Task::Response(lsp_server::Response::new_err(
-                    req.id,
-                    lsp_server::ErrorCode::ContentModified as i32,
-                    "content modified".to_owned(),
-                )),
+                Err(_cancelled) => {
+                    let error = on_cancelled();
+                    Task::Response(Response { id: req.id, result: None, error: Some(error) })
+                }
             }
         });
 
@@ -280,6 +297,14 @@ impl RequestDispatcher<'_> {
             }
         }
     }
+
+    fn content_modified_error() -> ResponseError {
+        ResponseError {
+            code: lsp_server::ErrorCode::ContentModified as i32,
+            message: "content modified".to_owned(),
+            data: None,
+        }
+    }
 }
 
 fn thread_result_to_response<R>(
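
A standalone sketch of the new cancellation hook, using a local stand-in for `lsp_server::ResponseError` (the field names match the construction shown in this hunk; `data` is simplified to a `String`, and the numeric codes are the LSP-spec values for ContentModified and ServerCancelled):

```rust
// Not part of the patch: shows how an `on_cancelled: fn() -> ResponseError` hook
// lets individual requests choose their own cancellation response.
#[derive(Debug)]
struct ResponseError {
    code: i32,
    message: String,
    data: Option<String>,
}

// LSP-spec error codes.
const CONTENT_MODIFIED: i32 = -32801;
const SERVER_CANCELLED: i32 = -32802;

fn content_modified_error() -> ResponseError {
    ResponseError { code: CONTENT_MODIFIED, message: "content modified".to_owned(), data: None }
}

fn server_cancelled_error() -> ResponseError {
    ResponseError {
        code: SERVER_CANCELLED,
        message: "server cancelled the request".to_owned(),
        data: Some(r#"{"retriggerRequest":true}"#.to_owned()),
    }
}

fn handle_cancelled(on_cancelled: fn() -> ResponseError) -> ResponseError {
    // In the dispatcher this is only reached when the computation was cancelled
    // and the request is not retried.
    on_cancelled()
}

fn main() {
    // Most handlers keep the old "content modified" behaviour...
    println!("{:?}", handle_cancelled(content_modified_error));
    // ...while the pull-diagnostics handler asks the client to retrigger instead.
    println!("{:?}", handle_cancelled(server_cancelled_error));
}
```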
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index 35c61a336e7..fa584ab4d21 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -479,12 +479,8 @@ pub(crate) fn handle_document_diagnostics(
     snap: GlobalStateSnapshot,
     params: lsp_types::DocumentDiagnosticParams,
 ) -> anyhow::Result<lsp_types::DocumentDiagnosticReportResult> {
-    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
-    let source_root = snap.analysis.source_root_id(file_id)?;
-    let line_index = snap.file_line_index(file_id)?;
-    let config = snap.config.diagnostics(Some(source_root));
-    if !config.enabled {
-        return Ok(lsp_types::DocumentDiagnosticReportResult::Report(
+    const EMPTY: lsp_types::DocumentDiagnosticReportResult =
+        lsp_types::DocumentDiagnosticReportResult::Report(
             lsp_types::DocumentDiagnosticReport::Full(
                 lsp_types::RelatedFullDocumentDiagnosticReport {
                     related_documents: None,
@@ -494,8 +490,18 @@ pub(crate) fn handle_document_diagnostics(
                     },
                 },
             ),
-        ));
+        );
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let source_root = snap.analysis.source_root_id(file_id)?;
+    if !snap.analysis.is_local_source_root(source_root)? {
+        return Ok(EMPTY);
+    }
+    let config = snap.config.diagnostics(Some(source_root));
+    if !config.enabled {
+        return Ok(EMPTY);
     }
+    let line_index = snap.file_line_index(file_id)?;
     let supports_related = snap.config.text_document_diagnostic_related_document_support();
 
     let mut related_documents = FxHashMap::default();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index c531cd8d114..9a51df80fe1 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -1092,7 +1092,7 @@ impl GlobalState {
             .on_latency_sensitive::<NO_RETRY, lsp_request::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)
             // FIXME: Some of these NO_RETRY could be retries if the file they are interested didn't change.
             // All other request handlers
-            .on_or::<NO_RETRY, lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
+            .on_with::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
                 lsp_types::DocumentDiagnosticReport::Full(
                     lsp_types::RelatedFullDocumentDiagnosticReport {
                         related_documents: None,
@@ -1102,7 +1102,13 @@ impl GlobalState {
                         },
                     },
                 ),
-            ))
+            ), || lsp_server::ResponseError {
+                code: lsp_server::ErrorCode::ServerCancelled as i32,
+                message: "server cancelled the request".to_owned(),
+                data: serde_json::to_value(lsp_types::DiagnosticServerCancellationData {
+                    retrigger_request: true
+                }).ok(),
+            })
             .on::<RETRY, lsp_request::DocumentSymbolRequest>(handlers::handle_document_symbol)
             .on::<RETRY, lsp_request::FoldingRangeRequest>(handlers::handle_folding_range)
             .on::<NO_RETRY, lsp_request::SignatureHelpRequest>(handlers::handle_signature_help)
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index 3609da0bb26..02c59646a99 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -736,7 +736,7 @@ PathPat =
   Path
 
 OrPat =
-  (Pat ('|' Pat)* '|'?)
+  '|'? (Pat ('|' Pat)*)
 
 BoxPat =
   'box' Pat
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index b9eb1abb112..23d2b355a94 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -1283,6 +1283,8 @@ pub struct OrPat {
 impl OrPat {
     #[inline]
     pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+    #[inline]
+    pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index 11f5e662e3d..6ec73e76f78 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -1140,3 +1140,13 @@ impl From<ast::AssocItem> for ast::AnyHasAttrs {
         Self::new(node)
     }
 }
+
+impl ast::OrPat {
+    pub fn leading_pipe(&self) -> Option<SyntaxToken> {
+        self.syntax
+            .children_with_tokens()
+            .find(|it| !it.kind().is_trivia())
+            .and_then(NodeOrToken::into_token)
+            .filter(|it| it.kind() == T![|])
+    }
+}
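
A toy model of the new `leading_pipe()` helper: the `OrPat` has a leading pipe exactly when its first non-trivia child is a `|` token (the `Tok` enum below is invented for illustration; the real code walks rowan's `children_with_tokens`):

```rust
// Not part of the patch: a toy child list instead of rowan nodes/tokens.
#[derive(Debug)]
enum Tok {
    Pipe,
    Whitespace,
    Pat(&'static str),
}

fn leading_pipe(children: &[Tok]) -> Option<&Tok> {
    children
        .iter()
        .find(|tok| !matches!(tok, Tok::Whitespace)) // skip trivia
        .filter(|tok| matches!(tok, Tok::Pipe))
}

fn main() {
    // `| 42 | 43` — the leading pipe is the first non-trivia child.
    let with_leading = [Tok::Pipe, Tok::Whitespace, Tok::Pat("42"), Tok::Pipe, Tok::Pat("43")];
    // `42 | 43` — the first non-trivia child is a pattern, so there is no leading pipe.
    let without = [Tok::Pat("42"), Tok::Whitespace, Tok::Pipe, Tok::Pat("43")];

    assert!(leading_pipe(&with_leading).is_some());
    assert!(leading_pipe(&without).is_none());
}
```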
diff --git a/src/tools/rust-analyzer/editors/code/.vscodeignore b/src/tools/rust-analyzer/editors/code/.vscodeignore
index 09dc27056b3..1712a1477e6 100644
--- a/src/tools/rust-analyzer/editors/code/.vscodeignore
+++ b/src/tools/rust-analyzer/editors/code/.vscodeignore
@@ -12,3 +12,4 @@
 !ra_syntax_tree.tmGrammar.json
 !server
 !README.md
+!walkthrough-setup-tips.md
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index e55eceff781..6eebdf9f016 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -3224,10 +3224,9 @@
                     {
                         "id": "setup",
                         "title": "Useful Setup Tips",
-                        "description": "There are a couple of things you might want to configure upfront to your tastes. We'll name a few here but be sure to check out the docs linked below!\n\n**Marking library sources as readonly**\n\nAdding the following to your settings.json will mark all Rust library sources as readonly:\n```json\n\"files.readonlyInclude\": {\n    \"**/.cargo/registry/src/**/*.rs\": true,\n    \"**/lib/rustlib/src/rust/library/**/*.rs\": true,\n},\n```\n\n**Check on Save**\n\nBy default, rust-analyzer will run `cargo check` on your codebase when you save a file, rendering diagnostics emitted by `cargo check` within your code. This can potentially collide with other `cargo` commands running concurrently, blocking them from running for a certain amount of time. In these cases it is recommended to disable the `rust-analyzer.checkOnSave` configuration and running the `rust-analyzer: Run flycheck` command on-demand instead.",
+                        "description": "There are a couple of things you might want to configure upfront to your tastes. We'll name a few here but be sure to check out the docs linked below!\n\n**Marking library sources as readonly**\n\nAdding the snippet on the right to your settings.json will mark all Rust library sources as readonly.\n\n**Check on Save**\n\nBy default, rust-analyzer will run ``cargo check`` on your codebase when you save a file, rendering diagnostics emitted by ``cargo check`` within your code. This can potentially collide with other ``cargo`` commands running concurrently, blocking them from running for a certain amount of time. In these cases it is recommended to disable the ``rust-analyzer.checkOnSave`` configuration and running the ``rust-analyzer: Run flycheck`` command on-demand instead.",
                         "media": {
-                            "image": "./icon.png",
-                            "altText": "rust-analyzer logo"
+                            "markdown": "./walkthrough-setup-tips.md"
                         }
                     },
                     {
@@ -3245,7 +3244,7 @@
                     {
                         "id": "faq",
                         "title": "FAQ",
-                        "description": "What are these code hints that are being inserted into my code?\n\nThese hints are called inlay hints which rust-analyzer support and are enabled by default in VSCode. If you wish to disable them you can do so via the `editor.inlayHints.enabled` setting.",
+                        "description": "What are these code hints that are being inserted into my code?\n\nThese hints are called inlay hints which rust-analyzer support and are enabled by default in VSCode. If you wish to disable them you can do so via the ``editor.inlayHints.enabled`` setting.",
                         "media": {
                             "image": "icon.png",
                             "altText": "rust-analyzer logo"
diff --git a/src/tools/rust-analyzer/editors/code/walkthrough-setup-tips.md b/src/tools/rust-analyzer/editors/code/walkthrough-setup-tips.md
new file mode 100644
index 00000000000..fda4ac80023
--- /dev/null
+++ b/src/tools/rust-analyzer/editors/code/walkthrough-setup-tips.md
@@ -0,0 +1,10 @@
+# Settings Example
+
+Add the following to settings.json to mark Rust library sources as read-only:
+
+```json
+"files.readonlyInclude": {
+  "**/.cargo/registry/src/**/*.rs": true,
+  "**/lib/rustlib/src/rust/library/**/*.rs": true,
+},
+```
diff --git a/src/tools/rust-analyzer/rust-bors.toml b/src/tools/rust-analyzer/rust-bors.toml
deleted file mode 100644
index c31ba66c50f..00000000000
--- a/src/tools/rust-analyzer/rust-bors.toml
+++ /dev/null
@@ -1 +0,0 @@
-timeout = 3600
diff --git a/src/tools/rust-analyzer/xtask/src/release/changelog.rs b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
index 086a4d463ea..343a9efbbc8 100644
--- a/src/tools/rust-analyzer/xtask/src/release/changelog.rs
+++ b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
@@ -128,9 +128,10 @@ fn unescape(s: &str) -> String {
 }
 
 fn parse_pr_number(s: &str) -> Option<u32> {
-    const BORS_PREFIX: &str = "Merge #";
+    const GITHUB_PREFIX: &str = "Merge pull request #";
     const HOMU_PREFIX: &str = "Auto merge of #";
-    if let Some(s) = s.strip_prefix(BORS_PREFIX) {
+    if let Some(s) = s.strip_prefix(GITHUB_PREFIX) {
+        let s = if let Some(space) = s.find(' ') { &s[..space] } else { s };
         s.parse().ok()
     } else if let Some(s) = s.strip_prefix(HOMU_PREFIX) {
         if let Some(space) = s.find(' ') {