 src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs | 25
 src/tools/rust-analyzer/crates/hir/src/semantics.rs               | 52
 src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs | 28
 src/tools/rust-analyzer/crates/ide/src/goto_definition.rs         | 38
 src/tools/rust-analyzer/crates/ide/src/references.rs              | 21
 src/tools/rust-analyzer/crates/syntax/src/ptr.rs                  |  2
 6 files changed, 125 insertions(+), 41 deletions(-)
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
index d04225b8722..4894c7a9311 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
@@ -5,19 +5,20 @@ use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Symbol};
 use mbe::{expect_fragment, DelimiterKind};
-use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
+use span::{Edition, EditionedFileId, Span};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
     unescape::{unescape_byte, unescape_char, unescape_unicode, Mode},
 };
-use syntax_bridge::parse_to_token_tree;
+use syntax_bridge::syntax_node_to_token_tree;
 
 use crate::{
     builtin::quote::{dollar_crate, quote},
     db::ExpandDatabase,
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
     name,
+    span_map::SpanMap,
     tt::{self, DelimSpan},
     ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
 };
@@ -739,18 +740,14 @@ fn include_expand(
             return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
         }
     };
-    match parse_to_token_tree(
-        file_id.edition(),
-        SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
-        SyntaxContextId::ROOT,
-        &db.file_text(file_id.file_id()),
-    ) {
-        Some(it) => ExpandResult::ok(it),
-        None => ExpandResult::new(
-            tt::Subtree::empty(DelimSpan { open: span, close: span }),
-            ExpandError::other(span, "failed to parse included file"),
-        ),
-    }
+    let span_map = db.real_span_map(file_id);
+    // FIXME: Parse errors
+    ExpandResult::ok(syntax_node_to_token_tree(
+        &db.parse(file_id).syntax_node(),
+        SpanMap::RealSpanMap(span_map),
+        span,
+        syntax_bridge::DocCommentDesugarMode::ProcMacro,
+    ))
 }
 
 pub fn include_input_to_file_id(
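
The fn_macro.rs hunk above changes include_expand so that the included file is no longer re-lexed with a single root anchor (parse_to_token_tree); instead the parsed syntax tree is converted to a token tree against the file's real span map, so every token in the expansion keeps a span pointing at its exact range in the included file. The following is a minimal, self-contained sketch (toy types, not rust-analyzer's actual Span/Token machinery) of why per-token real spans matter: results computed on the expansion can be mapped back to concrete offsets in the included file.

// Toy stand-in for `syntax_node_to_token_tree` plus a real span map: every
// token produced for the expansion records which file and byte range it came
// from, so a definition found in the expansion maps back to the included file.

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    file: u32,    // id of the file the token originally came from
    start: usize, // byte offset of the token in that file
    end: usize,
}

#[derive(Debug)]
struct Token {
    text: String,
    span: Span,
}

fn file_to_tokens(file: u32, text: &str) -> Vec<Token> {
    let mut tokens = Vec::new();
    let mut start = None;
    for (i, c) in text.char_indices() {
        if c.is_alphanumeric() || c == '_' {
            start.get_or_insert(i);
        } else if let Some(s) = start.take() {
            tokens.push(Token { text: text[s..i].to_string(), span: Span { file, start: s, end: i } });
        }
    }
    if let Some(s) = start {
        tokens.push(Token { text: text[s..].to_string(), span: Span { file, start: s, end: text.len() } });
    }
    tokens
}

fn main() {
    // Pretend this is the body of `b.rs`, pulled in via `include!("b.rs")`.
    let included = "fn func_in_include() {}";
    let expansion = file_to_tokens(1, included);
    // A definition located in the expansion maps straight back into `b.rs`.
    let def = expansion.iter().find(|t| t.text == "func_in_include").unwrap();
    assert_eq!(&included[def.span.start..def.span.end], "func_in_include");
    println!("definition at {:?} in file {}", (def.span.start, def.span.end), def.span.file);
}
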
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index b27f1fbb5db..3eac33ce990 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -892,29 +892,8 @@ impl<'db> SemanticsImpl<'db> {
         f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
-        let (sa, span, file_id) = token
-            .parent()
-            .and_then(|parent| {
-                self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false)
-            })
-            .and_then(|sa| {
-                let file_id = sa.file_id.file_id()?;
-                Some((
-                    sa,
-                    self.db.real_span_map(file_id).span_for_range(token.text_range()),
-                    HirFileId::from(file_id),
-                ))
-            })?;
 
-        let mut m_cache = self.macro_call_cache.borrow_mut();
-        let def_map = sa.resolver.def_map();
-
-        // A stack of tokens to process, along with the file they came from
-        // These are tracked to know which macro calls we still have to look into
-        // the tokens themselves aren't that interesting as the span that is being used to map
-        // things down never changes.
-        let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
-            vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])];
+        let span = self.db.real_span_map(file_id).span_for_range(token.text_range());
 
         // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
         let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
@@ -926,7 +905,6 @@ impl<'db> SemanticsImpl<'db> {
                         .map(SmallVec::<[_; 2]>::from_iter),
                 )
             })?;
-
             // we have found a mapping for the token if the vec is non-empty
             let res = mapped_tokens.is_empty().not().then_some(());
             // requeue the tokens we got from mapping our current token down
@@ -934,6 +912,33 @@ impl<'db> SemanticsImpl<'db> {
             res
         };
 
+        // A stack of tokens to process, along with the file they came from
+        // These are tracked to know which macro calls we still have to look into
+        // the tokens themselves aren't that interesting as the span that is being used to map
+        // things down never changes.
+        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
+        let include = self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, file_id);
+        match include {
+            Some(include) => {
+                // include! inputs are always from real files, so they only need to be handled once upfront
+                process_expansion_for_token(&mut stack, include)?;
+            }
+            None => {
+                stack.push((file_id.into(), smallvec![(token, SyntaxContextId::ROOT)]));
+            }
+        }
+
+        let (file_id, tokens) = stack.first()?;
+        // make sure we pick the token in the expanded include if we encountered an include,
+        // otherwise we'll get the wrong semantics
+        let sa =
+            tokens.first()?.0.parent().and_then(|parent| {
+                self.analyze_impl(InFile::new(*file_id, &parent), None, false)
+            })?;
+
+        let mut m_cache = self.macro_call_cache.borrow_mut();
+        let def_map = sa.resolver.def_map();
+
         // Filters out all tokens that contain the given range (usually the macro call), any such
         // token is redundant as the corresponding macro call has already been processed
         let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
@@ -1011,6 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
                                         ) {
                                         call.as_macro_file()
                                     } else {
+                                        // FIXME: This is wrong, the SourceAnalyzer might be invalid here
                                         sa.expand(self.db, mcall.as_ref())?
                                     };
                                     m_cache.insert(mcall, it);
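
The semantics.rs hunks above rework descend_into_macros_impl around a worklist of (file, tokens) pairs and add one special case: if the token's file is the target of an include!, the stack is seeded with the include! expansion rather than the on-disk file, so the token is analyzed in the module where it is actually compiled. Below is a hedged, self-contained illustration of that worklist shape (plain std types, invented names; not the real SemanticsImpl API).

// Toy sketch of the descent worklist: pop a (file, tokens) pair, map each token
// into the next expansion if there is one, and requeue it; seeding with the
// include! expansion is the only difference for include! targets.

use std::collections::HashMap;

type FileId = &'static str;

fn descend(
    start_file: FileId,
    token: &'static str,
    // which expansion (if any) a (file, token) pair maps into
    expansions: &HashMap<(FileId, &'static str), FileId>,
    // include! targets are seeded with their expansion up front
    include_expansion_of: &HashMap<FileId, FileId>,
) -> Vec<FileId> {
    let mut stack: Vec<(FileId, Vec<&'static str>)> = Vec::new();
    match include_expansion_of.get(start_file) {
        Some(&exp) => stack.push((exp, vec![token])),
        None => stack.push((start_file, vec![token])),
    }

    let mut reached = Vec::new();
    while let Some((file, tokens)) = stack.pop() {
        for tok in tokens {
            match expansions.get(&(file, tok)) {
                // the token maps into a further expansion: keep descending
                Some(&next) => stack.push((next, vec![tok])),
                // no further macro call: the token "bottoms out" here
                None => reached.push(file),
            }
        }
    }
    reached
}

fn main() {
    // `b.rs` is pulled in by `include!("b.rs")`; a token in `b.rs` must be
    // resolved inside that expansion, not in the file as it sits on disk.
    let mut include_expansion_of = HashMap::new();
    include_expansion_of.insert("b.rs", "include!(b.rs) expansion");

    let mut expansions = HashMap::new();
    expansions.insert(("include!(b.rs) expansion", "foo"), "some_macro! expansion");

    let reached = descend("b.rs", "foo", &expansions, &include_expansion_of);
    assert_eq!(reached, vec!["some_macro! expansion"]);
}
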
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index fd6d52d6c9d..389778b44ed 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -104,7 +104,7 @@ use hir_expand::{
 };
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{FileId, MacroFileId};
+use span::{EditionedFileId, FileId, MacroFileId};
 use stdx::impl_from;
 use syntax::{
     ast::{self, HasName},
@@ -118,9 +118,27 @@ pub(super) struct SourceToDefCache {
     pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
     expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
     pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
+    pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>,
 }
 
 impl SourceToDefCache {
+    pub(super) fn get_or_insert_include_for(
+        &mut self,
+        db: &dyn HirDatabase,
+        file: EditionedFileId,
+    ) -> Option<MacroFileId> {
+        if let Some(&m) = self.included_file_cache.get(&file) {
+            return m;
+        }
+        self.included_file_cache.insert(file, None);
+        for &crate_id in db.relevant_crates(file.into()).iter() {
+            db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
+                self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
+            });
+        }
+        self.included_file_cache.get(&file).copied().flatten()
+    }
+
     pub(super) fn get_or_insert_expansion(
         &mut self,
         sema: &SemanticsImpl<'_>,
@@ -163,9 +181,13 @@ impl SourceToDefCtx<'_, '_> {
                             .include_macro_invoc(crate_id)
                             .iter()
                             .filter(|&&(_, file_id)| file_id == file)
-                            .flat_map(|(call, _)| {
+                            .flat_map(|&(macro_call_id, file_id)| {
+                                self.cache
+                                    .included_file_cache
+                                    .insert(file_id, Some(MacroFileId { macro_call_id }));
                                 modules(
-                                    call.lookup(self.db.upcast())
+                                    macro_call_id
+                                        .lookup(self.db.upcast())
                                         .kind
                                         .file_id()
                                         .original_file(self.db.upcast())
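
get_or_insert_include_for above uses a small negative-caching pattern: the queried file is first recorded as None so a fruitless crate scan is never repeated, every include! invocation found during the scan is cached, and the entry for the queried file is read back at the end. A self-contained sketch of that pattern with std types only (the scan closure stands in for db.include_macro_invoc; names and ids are illustrative):

use std::collections::HashMap;

struct IncludeCache {
    // file id -> the include! call that produced it, or None for a cached miss
    map: HashMap<u32, Option<u32>>,
}

impl IncludeCache {
    fn get_or_insert_include_for(
        &mut self,
        file: u32,
        scan: impl Fn() -> Vec<(u32 /* macro call */, u32 /* included file */)>,
    ) -> Option<u32> {
        if let Some(&cached) = self.map.get(&file) {
            return cached; // hit: either a real mapping or a cached miss
        }
        // Negative entry first: if the scan finds nothing for `file`, the next
        // query returns immediately instead of scanning the crates again.
        self.map.insert(file, None);
        for (macro_call, included_file) in scan() {
            self.map.insert(included_file, Some(macro_call));
        }
        self.map.get(&file).copied().flatten()
    }
}

fn main() {
    let mut cache = IncludeCache { map: HashMap::new() };
    // First query scans and learns that file 7 comes from macro call 42.
    assert_eq!(cache.get_or_insert_include_for(7, || vec![(42, 7)]), Some(42));
    // Second query is answered from the cache; the scan is not run again.
    assert_eq!(cache.get_or_insert_include_for(7, || unreachable!()), Some(42));
    // A file nobody includes is cached as a miss after the first scan.
    assert_eq!(cache.get_or_insert_include_for(9, || vec![]), None);
    assert_eq!(cache.get_or_insert_include_for(9, || unreachable!()), None);
}
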
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index c61b2ba84f2..4cbcb6ed050 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -505,6 +505,44 @@ fn foo() {
     }
 
     #[test]
+    fn goto_def_in_included_file_inside_mod() {
+        check(
+            r#"
+//- minicore:include
+//- /main.rs
+mod a {
+    include!("b.rs");
+}
+//- /b.rs
+fn func_in_include() {
+ //^^^^^^^^^^^^^^^
+}
+fn foo() {
+    func_in_include$0();
+}
+"#,
+        );
+
+        check(
+            r#"
+//- minicore:include
+//- /main.rs
+mod a {
+    include!("a.rs");
+}
+//- /a.rs
+fn func_in_include() {
+ //^^^^^^^^^^^^^^^
+}
+
+fn foo() {
+    func_in_include$0();
+}
+"#,
+        );
+    }
+
+    #[test]
     fn goto_def_if_items_same_name() {
         check(
             r#"
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index 64d717f88dd..e7cb8a253f4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -2750,4 +2750,25 @@ impl Foo {
             "#]],
         );
     }
+
+    #[test]
+    fn goto_ref_on_included_file() {
+        check(
+            r#"
+//- minicore:include
+//- /lib.rs
+include!("foo.rs");
+fn howdy() {
+    let _ = FOO;
+}
+//- /foo.rs
+const FOO$0: i32 = 0;
+"#,
+            expect![[r#"
+                FOO Const FileId(1) 0..19 6..9
+
+                FileId(0) 45..48
+            "#]],
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
index ed4894f9b9c..11b79e4e0ed 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
@@ -27,7 +27,7 @@ pub struct AstPtr<N: AstNode> {
     _ty: PhantomData<fn() -> N>,
 }
 
-impl<N: AstNode + std::fmt::Debug> std::fmt::Debug for AstPtr<N> {
+impl<N: AstNode> std::fmt::Debug for AstPtr<N> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_tuple("AstPtr").field(&self.raw).finish()
     }
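
The ptr.rs hunk drops the `N: std::fmt::Debug` bound from AstPtr's Debug impl: the impl only formats `self.raw`, never a value of type N, so the bound was never exercised and only made AstPtr<N> unprintable for node types without a Debug impl. A self-contained sketch of the same pattern (toy Ptr type, not the real AstPtr):

use std::fmt;
use std::marker::PhantomData;

struct Ptr<N> {
    raw: u32,                    // stand-in for the underlying SyntaxNodePtr
    _ty: PhantomData<fn() -> N>, // N appears only as a type-level tag
}

impl<N> fmt::Debug for Ptr<N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only `raw` is printed, so no `N: Debug` bound is required.
        f.debug_tuple("Ptr").field(&self.raw).finish()
    }
}

struct NotDebug; // deliberately does not implement Debug

fn main() {
    let p = Ptr::<NotDebug> { raw: 7, _ty: PhantomData };
    // Compiles and prints `Ptr(7)` even though NotDebug has no Debug impl.
    println!("{p:?}");
}
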