about summary refs log tree commit diff
diff options
context:
space:
mode:
author: bors <bors@rust-lang.org> 2023-02-05 13:46:22 +0000
committer: bors <bors@rust-lang.org> 2023-02-05 13:46:22 +0000
commit: 0b32b65ca6aee21f352d178eeb0b85e092a93177 (patch)
tree: 33a4f95a78a8dc7037ee205eb406272fad87622d
parent: 3bc33c7e9f041c5aef69a5c33b3d29d19a341ece (diff)
parent: a4d0b5c522405fd2351c56f48c68544b3130a513 (diff)
download: rust-0b32b65ca6aee21f352d178eeb0b85e092a93177.tar.gz
download: rust-0b32b65ca6aee21f352d178eeb0b85e092a93177.zip
Auto merge of #14043 - lowr:fix/completion-within-str-that-changed, r=Veykril
fix: consider relative offset to fake ident token in expansion for completion

Fixes #13836

When we check if the offset of fake ident token is out of bounds in the "actual expansion" (i.e. expansion without the fake ident token), we should take relative offset to it into account to match [how we'd get token after expansion](https://github.com/rust-lang/rust-analyzer/blob/f1b257f4eb4fef74b42fd7135d1cf3884e8b51c9/crates/ide-completion/src/context/analysis.rs#L53-L54).
-rw-r--r--  crates/base-db/src/fixture.rs                  | 62
-rw-r--r--  crates/ide-completion/src/context/analysis.rs  |  9
-rw-r--r--  crates/ide-completion/src/tests/proc_macros.rs | 30
3 files changed, 96 insertions, 5 deletions
diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs
index 60d1e488d8a..8a7e9dfadfe 100644
--- a/crates/base-db/src/fixture.rs
+++ b/crates/base-db/src/fixture.rs
@@ -6,7 +6,7 @@ use rustc_hash::FxHashMap;
 use test_utils::{
     extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER,
 };
-use tt::token_id::Subtree;
+use tt::token_id::{Leaf, Subtree, TokenTree};
 use vfs::{file_set::FileSet, VfsPath};
 
 use crate::{
@@ -310,7 +310,7 @@ impl ChangeFixture {
     }
 }
 
-fn default_test_proc_macros() -> [(String, ProcMacro); 4] {
+fn default_test_proc_macros() -> [(String, ProcMacro); 5] {
     [
         (
             r#"
@@ -368,6 +368,20 @@ pub fn mirror(input: TokenStream) -> TokenStream {
                 expander: Arc::new(MirrorProcMacroExpander),
             },
         ),
+        (
+            r#"
+#[proc_macro]
+pub fn shorten(input: TokenStream) -> TokenStream {
+    loop {}
+}
+"#
+            .into(),
+            ProcMacro {
+                name: "shorten".into(),
+                kind: crate::ProcMacroKind::FuncLike,
+                expander: Arc::new(ShortenProcMacroExpander),
+            },
+        ),
     ]
 }
 
@@ -508,3 +522,47 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
         Ok(traverse(input))
     }
 }
+
+// Replaces every literal with an empty string literal and every identifier with its first letter,
+// but retains all tokens' span. Useful for testing we don't assume token hasn't been modified by
+// macros even if it retains its span.
+#[derive(Debug)]
+struct ShortenProcMacroExpander;
+impl ProcMacroExpander for ShortenProcMacroExpander {
+    fn expand(
+        &self,
+        input: &Subtree,
+        _: Option<&Subtree>,
+        _: &Env,
+    ) -> Result<Subtree, ProcMacroExpansionError> {
+        return Ok(traverse(input));
+
+        fn traverse(input: &Subtree) -> Subtree {
+            let token_trees = input
+                .token_trees
+                .iter()
+                .map(|it| match it {
+                    TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(modify_leaf(leaf)),
+                    TokenTree::Subtree(subtree) => tt::TokenTree::Subtree(traverse(subtree)),
+                })
+                .collect();
+            Subtree { delimiter: input.delimiter, token_trees }
+        }
+
+        fn modify_leaf(leaf: &Leaf) -> Leaf {
+            let mut leaf = leaf.clone();
+            match &mut leaf {
+                Leaf::Literal(it) => {
+                    // XXX Currently replaces any literals with an empty string, but supporting
+                    // "shortening" other literals would be nice.
+                    it.text = "\"\"".into();
+                }
+                Leaf::Punct(_) => {}
+                Leaf::Ident(it) => {
+                    it.text = it.text.chars().take(1).collect();
+                }
+            }
+            leaf
+        }
+    }
+}
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index e34824e22ea..f606d79ad20 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -48,7 +48,9 @@ pub(super) fn expand_and_analyze(
     // make the offset point to the start of the original token, as that is what the
     // intermediate offsets calculated in expansion always points to
     let offset = offset - relative_offset;
-    let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
+    let expansion =
+        expand(sema, original_file, speculative_file, offset, fake_ident_token, relative_offset);
+
     // add the relative offset back, so that left_biased finds the proper token
     let offset = expansion.offset + relative_offset;
     let token = expansion.original_file.token_at_offset(offset).left_biased()?;
@@ -67,6 +69,7 @@ fn expand(
     mut speculative_file: SyntaxNode,
     mut offset: TextSize,
     mut fake_ident_token: SyntaxToken,
+    relative_offset: TextSize,
 ) -> ExpansionResult {
     let _p = profile::span("CompletionContext::expand");
     let mut derive_ctx = None;
@@ -97,7 +100,7 @@ fn expand(
                 // successful expansions
                 (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                     let new_offset = fake_mapped_token.text_range().start();
-                    if new_offset > actual_expansion.text_range().end() {
+                    if new_offset + relative_offset > actual_expansion.text_range().end() {
                         // offset outside of bounds from the original expansion,
                         // stop here to prevent problems from happening
                         break 'expansion;
@@ -176,7 +179,7 @@ fn expand(
                 // successful expansions
                 (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                     let new_offset = fake_mapped_token.text_range().start();
-                    if new_offset > actual_expansion.text_range().end() {
+                    if new_offset + relative_offset > actual_expansion.text_range().end() {
                         // offset outside of bounds from the original expansion,
                         // stop here to prevent problems from happening
                         break 'expansion;
diff --git a/crates/ide-completion/src/tests/proc_macros.rs b/crates/ide-completion/src/tests/proc_macros.rs
index 9eae6f84954..fec149e56a9 100644
--- a/crates/ide-completion/src/tests/proc_macros.rs
+++ b/crates/ide-completion/src/tests/proc_macros.rs
@@ -131,3 +131,33 @@ fn main() {}
         "#]],
     )
 }
+
+#[test]
+fn issue_13836_str() {
+    check(
+        r#"
+//- proc_macros: shorten
+fn main() {
+    let s = proc_macros::shorten!("text.$0");
+}
+"#,
+        expect![[r#""#]],
+    )
+}
+
+#[test]
+fn issue_13836_ident() {
+    check(
+        r#"
+//- proc_macros: shorten
+struct S;
+impl S {
+    fn foo(&self) {}
+}
+fn main() {
+    let s = proc_macros::shorten!(S.fo$0);
+}
+"#,
+        expect![[r#""#]],
+    )
+}