about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--crates/hir-expand/src/db.rs6
-rw-r--r--crates/hir-expand/src/eager.rs19
-rw-r--r--crates/rust-analyzer/src/reload.rs16
3 files changed, 23 insertions(+), 18 deletions(-)
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index c8de6954ee7..8de13a39c08 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -102,6 +102,7 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::transparent]
     fn parse_or_expand_with_err(&self, file_id: HirFileId) -> ExpandResult<Parse<SyntaxNode>>;
     /// Implementation for the macro case.
+    // This query is LRU cached
     fn parse_macro_expansion(
         &self,
         macro_file: MacroFile,
@@ -130,11 +131,12 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
 
     /// Expand macro call to a token tree.
+    // This query is LRU cached
     fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
-    /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
-    /// heroically debugged this once!
+    /// non-determinism breaks salsa in a very, very, very bad way.
+    /// @edwin0cheng heroically debugged this once!
     fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
     /// Firewall query that returns the errors from the `parse_macro_expansion` query.
     fn parse_macro_expansion_error(
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index b108e920443..d81bb647194 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -54,8 +54,8 @@ pub fn expand_eager_macro(
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
 
     // Note:
-    // When `lazy_expand` is called, its *parent* file must be already exists.
-    // Here we store an eager macro id for the argument expanded subtree here
+    // When `lazy_expand` is called, its *parent* file must already exist.
+    // Here we store an eager macro id for the argument expanded subtree
     // for that purpose.
     let arg_id = db.intern_macro_call(MacroCallLoc {
         def,
@@ -79,7 +79,11 @@ pub fn expand_eager_macro(
     let Some(value ) = value else {
         return Ok(ExpandResult { value: None, err })
     };
-    let subtree = to_subtree(&value);
+    let subtree = {
+        let mut subtree = mbe::syntax_node_to_token_tree(&value).0;
+        subtree.delimiter = crate::tt::Delimiter::unspecified();
+        subtree
+    };
 
     let res = eager.expand(db, arg_id, &subtree);
     if err.is_none() {
@@ -100,12 +104,6 @@ pub fn expand_eager_macro(
     Ok(ExpandResult { value: Some(db.intern_macro_call(loc)), err })
 }
 
-fn to_subtree(node: &SyntaxNode) -> crate::tt::Subtree {
-    let mut subtree = mbe::syntax_node_to_token_tree(node).0;
-    subtree.delimiter = crate::tt::Delimiter::unspecified();
-    subtree
-}
-
 fn lazy_expand(
     db: &dyn ExpandDatabase,
     def: &MacroDefId,
@@ -121,7 +119,8 @@ fn lazy_expand(
         MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
     );
 
-    db.parse_or_expand_with_err(id.as_file()).map(|parse| InFile::new(id.as_file(), parse))
+    let file_id = id.as_file();
+    db.parse_or_expand_with_err(file_id).map(|parse| InFile::new(file_id, parse))
 }
 
 fn eager_macro_recur(
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 1c5de2f596a..ef319d9b77f 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -215,13 +215,17 @@ impl GlobalState {
                 let mut i = 0;
                 while i < workspaces.len() {
                     if let Ok(w) = &workspaces[i] {
-                        if let Some(dupe) = workspaces[i + 1..]
+                        let dupes: Vec<_> = workspaces
                             .iter()
-                            .filter_map(|it| it.as_ref().ok())
-                            .position(|ws| ws.eq_ignore_build_data(w))
-                        {
-                            _ = workspaces.remove(dupe);
-                        }
+                            .enumerate()
+                            .skip(i + 1)
+                            .filter_map(|(i, it)| {
+                                it.as_ref().ok().filter(|ws| ws.eq_ignore_build_data(w)).map(|_| i)
+                            })
+                            .collect();
+                        dupes.into_iter().rev().for_each(|d| {
+                            _ = workspaces.remove(d);
+                        });
                     }
                     i += 1;
                 }