about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Lukas Wirth <lukastw97@gmail.com>  2021-12-20 13:19:48 +0100
committer Lukas Wirth <lukastw97@gmail.com>  2021-12-20 13:19:48 +0100
commit    37a87708aee788de10352603baff82f6d2192fad (patch)
tree      3804f8fcdee46b29ce478ea706d59b0819451759
parent    2ca3834c9f15027fa68a0d0f70f7abf75a26d750 (diff)
download  rust-37a87708aee788de10352603baff82f6d2192fad.tar.gz
download  rust-37a87708aee788de10352603baff82f6d2192fad.zip
internal: Don't kick off inference in Semantics::descend_into_macros_impl
-rw-r--r--  crates/hir/src/semantics.rs       35
-rw-r--r--  crates/hir/src/source_analyzer.rs 26
2 files changed, 48 insertions(+), 13 deletions(-)
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index d27869450e5..02da397b0ce 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -528,7 +528,7 @@ impl<'db> SemanticsImpl<'db> {
         if first == last {
             self.descend_into_macros_impl(
                 first,
-                |InFile { value, .. }| {
+                &mut |InFile { value, .. }| {
                     if let Some(node) = value.ancestors().find_map(N::cast) {
                         res.push(node)
                     }
@@ -540,7 +540,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
             self.descend_into_macros_impl(
                 first,
-                |token| {
+                &mut |token| {
                     scratch.push(token);
                 },
                 false,
@@ -549,7 +549,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 last,
-                |InFile { value: last, file_id: last_fid }| {
+                &mut |InFile { value: last, file_id: last_fid }| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {
                             if let Some(p) = first.parent() {
@@ -574,20 +574,20 @@ impl<'db> SemanticsImpl<'db> {
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false);
         res
     }
 
     fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
         let mut res = token.clone();
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true);
         res
     }
 
     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
-        mut f: impl FnMut(InFile<SyntaxToken>),
+        f: &mut dyn FnMut(InFile<SyntaxToken>),
         single: bool,
     ) {
         let _p = profile::span("descend_into_macros");
@@ -595,7 +595,7 @@ impl<'db> SemanticsImpl<'db> {
             Some(it) => it,
             None => return,
         };
-        let sa = self.analyze(&parent);
+        let sa = self.analyze_no_infer(&parent);
         let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
@@ -927,14 +927,23 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        self.analyze_impl(node, None)
+        self.analyze_impl(node, None, true)
     }
 
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
-        self.analyze_impl(node, Some(offset))
+        self.analyze_impl(node, Some(offset), true)
     }
 
-    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+    fn analyze_no_infer(&self, node: &SyntaxNode) -> SourceAnalyzer {
+        self.analyze_impl(node, None, false)
+    }
+
+    fn analyze_impl(
+        &self,
+        node: &SyntaxNode,
+        offset: Option<TextSize>,
+        infer_body: bool,
+    ) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
         let node = node.as_ref();
@@ -946,7 +955,11 @@ impl<'db> SemanticsImpl<'db> {
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                return if infer_body {
+                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                } else {
+                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+                }
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 4f987db651f..2d779393f09 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -50,7 +50,7 @@ impl SourceAnalyzer {
     pub(crate) fn new_for_body(
         db: &dyn HirDatabase,
         def: DefWithBodyId,
-        node: InFile<&SyntaxNode>,
+        node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
     ) -> SourceAnalyzer {
         let (body, source_map) = db.body_with_source_map(def);
@@ -65,7 +65,29 @@ impl SourceAnalyzer {
             body: Some(body),
             body_source_map: Some(source_map),
             infer: Some(db.infer(def)),
-            file_id: node.file_id,
+            file_id,
+        }
+    }
+
+    pub(crate) fn new_for_body_no_infer(
+        db: &dyn HirDatabase,
+        def: DefWithBodyId,
+        node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+        offset: Option<TextSize>,
+    ) -> SourceAnalyzer {
+        let (body, source_map) = db.body_with_source_map(def);
+        let scopes = db.expr_scopes(def);
+        let scope = match offset {
+            None => scope_for(&scopes, &source_map, node),
+            Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)),
+        };
+        let resolver = resolver_for_scope(db.upcast(), def, scope);
+        SourceAnalyzer {
+            resolver,
+            body: Some(body),
+            body_source_map: Some(source_map),
+            infer: None,
+            file_id,
         }
     }