diff options
| author | bors[bot] <26634292+bors[bot]@users.noreply.github.com> | 2022-01-05 20:45:27 +0000 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2022-01-05 20:45:27 +0000 |
| commit | 735b542146eb729bbef90bdb50841ccc99ab46c5 (patch) | |
| tree | f416f54f471854bb4eb9d8742d6663a8af6a4e10 | |
| parent | 149981fbfca9ef2eb3f7636ae99ca97a6b96cb02 (diff) | |
| parent | bd913270794798eacd162596fc63473d710f2b65 (diff) | |
| download | rust-735b542146eb729bbef90bdb50841ccc99ab46c5.tar.gz rust-735b542146eb729bbef90bdb50841ccc99ab46c5.zip | |
Merge #11201
11201: fix: Fix completions not considering ancestor items for attribute search r=Veykril a=Veykril Turns out we never filled the `CompletionContext` with the attribute expansion of attributed impls and traits when typing inside their assoc items, because we only checked whether the assoc item itself had an attribute to expand. bors r+ Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
| -rw-r--r-- | crates/ide_completion/src/context.rs | 67 |
1 file changed, 40 insertions, 27 deletions
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs index 6e43aa608ac..2374d689cbb 100644 --- a/crates/ide_completion/src/context.rs +++ b/crates/ide_completion/src/context.rs @@ -1,5 +1,7 @@ //! See `CompletionContext` structure. +use std::iter; + use base_db::SourceDatabaseExt; use hir::{Local, Name, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo}; use ide_db::{ @@ -431,12 +433,17 @@ impl<'a> CompletionContext<'a> { mut fake_ident_token: SyntaxToken, ) { let _p = profile::span("CompletionContext::expand_and_fill"); - loop { - // Expand attributes - if let (Some(actual_item), Some(item_with_fake_ident)) = ( - find_node_at_offset::<ast::Item>(&original_file, offset), - find_node_at_offset::<ast::Item>(&speculative_file, offset), - ) { + 'expansion: loop { + let parent_item = + |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); + let ancestor_items = iter::successors( + Option::zip( + find_node_at_offset::<ast::Item>(&original_file, offset), + find_node_at_offset::<ast::Item>(&speculative_file, offset), + ), + |(a, b)| parent_item(a).zip(parent_item(b)), + ); + for (actual_item, item_with_fake_ident) in ancestor_items { match ( self.sema.expand_attr_macro(&actual_item), self.sema.speculative_expand_attr_macro( @@ -445,19 +452,22 @@ impl<'a> CompletionContext<'a> { fake_ident_token.clone(), ), ) { - (Some(actual_expansion), Some(speculative_expansion)) => { - let new_offset = speculative_expansion.1.text_range().start(); + // maybe parent items have attributes + (None, None) => (), + // successful expansions + (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { + let new_offset = fake_mapped_token.text_range().start(); if new_offset > actual_expansion.text_range().end() { - break; + break 'expansion; } original_file = actual_expansion; - speculative_file = speculative_expansion.0; - fake_ident_token = speculative_expansion.1; + speculative_file = fake_expansion; + 
fake_ident_token = fake_mapped_token; offset = new_offset; - continue; + continue 'expansion; } - (None, None) => (), - _ => break, + // exactly one expansion failed, inconsistent state so stop expanding completely + _ => break 'expansion, } } @@ -477,28 +487,31 @@ impl<'a> CompletionContext<'a> { None => break, }; - if let (Some(actual_expansion), Some(speculative_expansion)) = ( + match ( self.sema.expand(&actual_macro_call), self.sema.speculative_expand( &actual_macro_call, &speculative_args, - fake_ident_token, + fake_ident_token.clone(), ), ) { - let new_offset = speculative_expansion.1.text_range().start(); - if new_offset > actual_expansion.text_range().end() { - break; + // successful expansions + (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { + let new_offset = fake_mapped_token.text_range().start(); + if new_offset > actual_expansion.text_range().end() { + break; + } + original_file = actual_expansion; + speculative_file = fake_expansion; + fake_ident_token = fake_mapped_token; + offset = new_offset; + continue; } - original_file = actual_expansion; - speculative_file = speculative_expansion.0; - fake_ident_token = speculative_expansion.1; - offset = new_offset; - } else { - break; + _ => break, } - } else { - break; } + + break; } self.fill(&original_file, speculative_file, offset); |
