diff options
| author | Florian Diebold <flodiebold@gmail.com> | 2022-02-09 11:58:52 +0100 |
|---|---|---|
| committer | Florian Diebold <flodiebold@gmail.com> | 2022-02-09 11:58:52 +0100 |
| commit | ecf3cff4a6c7c06d1fe30e636d69227ab6310ebb (patch) | |
| tree | 49e23a4551b4247bb67423ee3a869606ec85e0c3 | |
| parent | 30287e6051f82085d62d38f26f42a53fa48b434c (diff) | |
| download | rust-ecf3cff4a6c7c06d1fe30e636d69227ab6310ebb.tar.gz rust-ecf3cff4a6c7c06d1fe30e636d69227ab6310ebb.zip | |
Replace expressions with errors in them
| -rw-r--r-- | crates/hir_def/src/macro_expansion_tests.rs | 2 | ||||
| -rw-r--r-- | crates/hir_expand/src/db.rs | 20 | ||||
| -rw-r--r-- | crates/hir_expand/src/fixup.rs | 78 | ||||
| -rw-r--r-- | crates/hir_expand/src/hygiene.rs | 3 | ||||
| -rw-r--r-- | crates/hir_expand/src/lib.rs | 2 | ||||
| -rw-r--r-- | crates/mbe/src/syntax_bridge.rs | 11 | ||||
| -rw-r--r-- | crates/mbe/src/token_map.rs | 1 |
7 files changed, 84 insertions, 33 deletions
diff --git a/crates/hir_def/src/macro_expansion_tests.rs b/crates/hir_def/src/macro_expansion_tests.rs index 28a5d960859..1a7d9aa8411 100644 --- a/crates/hir_def/src/macro_expansion_tests.rs +++ b/crates/hir_def/src/macro_expansion_tests.rs @@ -186,7 +186,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let range: Range<usize> = range.into(); if show_token_ids { - if let Some((tree, map)) = arg.as_deref() { + if let Some((tree, map, _)) = arg.as_deref() { let tt_range = call.token_tree().unwrap().syntax().text_range(); let mut ranges = Vec::new(); extract_id_ranges(&mut ranges, &map, &tree); diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 7a21e3e8701..eea02898d2c 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs @@ -108,7 +108,10 @@ pub trait AstDatabase: SourceDatabase { /// Lowers syntactic macro call to a token tree representation. #[salsa::transparent] - fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; + fn macro_arg( + &self, + id: MacroCallId, + ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>>; /// Extracts syntax node, corresponding to a macro call. That's a firewall /// query, only typing in the macro call itself changes the returned /// subtree. 
@@ -291,29 +294,27 @@ fn parse_macro_expansion( } } -fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { +fn macro_arg( + db: &dyn AstDatabase, + id: MacroCallId, +) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>> { let arg = db.macro_arg_text(id)?; let loc = db.lookup_intern_macro_call(id); let node = SyntaxNode::new_root(arg); - eprintln!("input text:\n{node}"); - eprintln!("input syntax:\n{node:#?}"); let censor = censor_for_macro_input(&loc, &node); // TODO only fixup for attribute macro input let mut fixups = fixup::fixup_syntax(&node); fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new()))); - eprintln!("fixups: {fixups:?}"); let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, fixups.replace, fixups.append); - eprintln!("fixed-up input: {}", tt); - if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included tt.delimiter = None; } - Some(Arc::new((tt, tmap))) + Some(Arc::new((tt, tmap, fixups.map))) } fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> { @@ -433,7 +434,6 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Ar let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0); // Set a hard limit for the expanded tt let count = tt.count(); - // XXX: Make ExpandResult a real error and use .map_err instead? 
if TOKEN_LIMIT.check(count).is_err() { return ExpandResult::str_err(format!( "macro invocation exceeds token limit: produced {} tokens, limit is {}", count, TOKEN_LIMIT.inner(), )); } - fixup::reverse_fixups(&mut tt, &macro_arg.1); + fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2); ExpandResult { value: Some(Arc::new(tt)), err } } diff --git a/crates/hir_expand/src/fixup.rs b/crates/hir_expand/src/fixup.rs index a4acf9e987e..c98d20e4561 100644 --- a/crates/hir_expand/src/fixup.rs +++ b/crates/hir_expand/src/fixup.rs @@ -10,21 +10,39 @@ use tt::Subtree; pub struct SyntaxFixups { pub append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>, pub replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>, + pub map: SyntaxFixupMap, } +#[derive(Debug, PartialEq, Eq)] +pub struct SyntaxFixupMap { + original: Vec<(Subtree, TokenMap)>, +} + +const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0); + pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { let mut append = FxHashMap::default(); let mut replace = FxHashMap::default(); let mut preorder = node.preorder(); - let empty_id = SyntheticTokenId(0); + let mut original = Vec::new(); while let Some(event) = preorder.next() { let node = match event { syntax::WalkEvent::Enter(node) => node, syntax::WalkEvent::Leave(_) => continue, }; - if node.kind() == SyntaxKind::ERROR { - // TODO this might not be helpful - replace.insert(node, Vec::new()); + if can_handle_error(&node) && has_error_to_handle(&node) { + // the node contains an error node, we have to completely replace it by something valid + let original_tree = mbe::syntax_node_to_token_tree(&node); + // TODO handle token ids / token map + let idx = original.len() as u32; + original.push(original_tree); + let replacement = SyntheticToken { + kind: SyntaxKind::IDENT, + text: "__ra_fixup".into(), + range: node.text_range(), + id: SyntheticTokenId(idx), + }; + replace.insert(node.clone(), vec![replacement]); 
preorder.skip_subtree(); continue; } @@ -39,7 +57,7 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { kind: SyntaxKind::IDENT, text: "__ra_fixup".into(), range: end_range, - id: empty_id, + id: EMPTY_ID, }, ]); } @@ -51,7 +69,7 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { kind: SyntaxKind::SEMICOLON, text: ";".into(), range: end_range, - id: empty_id, + id: EMPTY_ID, }, ]); } @@ -60,18 +78,37 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { } } } - SyntaxFixups { append, replace } + SyntaxFixups { append, replace, map: SyntaxFixupMap { original } } +} + +fn has_error(node: &SyntaxNode) -> bool { + node.children().any(|c| c.kind() == SyntaxKind::ERROR) +} + +fn can_handle_error(node: &SyntaxNode) -> bool { + ast::Expr::can_cast(node.kind()) +} + +fn has_error_to_handle(node: &SyntaxNode) -> bool { + has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c)) } -pub fn reverse_fixups(tt: &mut Subtree, token_map: &TokenMap) { - eprintln!("token_map: {:?}", token_map); +pub fn reverse_fixups(tt: &mut Subtree, token_map: &TokenMap, fixup_map: &SyntaxFixupMap) { tt.token_trees.retain(|tt| match tt { - tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()).is_none(), + tt::TokenTree::Leaf(leaf) => { + token_map.synthetic_token_id(leaf.id()).is_none() + || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID) + } _ => true, }); tt.token_trees.iter_mut().for_each(|tt| match tt { - tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map), - _ => {} + tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, fixup_map), + tt::TokenTree::Leaf(leaf) => { + if let Some(id) = token_map.synthetic_token_id(leaf.id()) { + let (original, _original_tmap) = &fixup_map.original[id.0 as usize]; + *tt = tt::TokenTree::Subtree(original.clone()); + } + } }); } @@ -84,6 +121,7 @@ mod tests { #[track_caller] fn check(ra_fixture: &str, mut expect: Expect) { let parsed = 
syntax::SourceFile::parse(ra_fixture); + eprintln!("parse: {:#?}", parsed.syntax_node()); let fixups = super::fixup_syntax(&parsed.syntax_node()); let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored( &parsed.syntax_node(), @@ -106,7 +144,7 @@ mod tests { parse.syntax_node() ); - reverse_fixups(&mut tt, &tmap); + reverse_fixups(&mut tt, &tmap, &fixups.map); // the fixed-up + reversed version should be equivalent to the original input // (but token IDs don't matter) @@ -172,4 +210,18 @@ fn foo () {a . b ; bar () ;} "#]], ) } + + #[test] + fn extraneous_comma() { + check( + r#" +fn foo() { + bar(,); +} +"#, + expect![[r#" +fn foo () {__ra_fixup ;} +"#]], + ) + } } diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs index d2b719bf570..0ac5ee83064 100644 --- a/crates/hir_expand/src/hygiene.rs +++ b/crates/hir_expand/src/hygiene.rs @@ -15,6 +15,7 @@ use syntax::{ use crate::{ db::{self, AstDatabase}, + fixup, name::{AsName, Name}, HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, }; @@ -127,7 +128,7 @@ struct HygieneInfo { attr_input_or_mac_def_start: Option<InFile<TextSize>>, macro_def: Arc<TokenExpander>, - macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, + macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>, macro_arg_shift: mbe::Shift, exp_map: Arc<mbe::TokenMap>, } diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index b23d5472426..6ec507d00f3 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs @@ -427,7 +427,7 @@ pub struct ExpansionInfo { attr_input_or_mac_def: Option<InFile<ast::TokenTree>>, macro_def: Arc<TokenExpander>, - macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, + macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>, /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg /// and as such we need to shift tokens if they are part of an attributes input instead of 
their item. macro_arg_shift: mbe::Shift, diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 7feaaaa62d8..da7fdb74ee8 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -30,8 +30,8 @@ pub fn syntax_node_to_token_tree_censored( let mut c = Convertor::new(node, global_offset, replace, append); let subtree = convert_tokens(&mut c); c.id_alloc.map.shrink_to_fit(); - always!(c.replace.is_empty()); - always!(c.append.is_empty()); + always!(c.replace.is_empty(), "replace: {:?}", c.replace); + always!(c.append.is_empty(), "append: {:?}", c.append); (subtree, c.id_alloc.map) } @@ -539,7 +539,6 @@ impl Convertor { WalkEvent::Enter(ele) => ele, WalkEvent::Leave(SyntaxElement::Node(node)) => { if let Some(mut v) = append.remove(&node) { - eprintln!("after {:?}, appending {:?}", node, v); if !v.is_empty() { v.reverse(); return (None, v); @@ -554,7 +553,6 @@ impl Convertor { SyntaxElement::Node(node) => { if let Some(mut v) = replace.remove(&node) { preorder.skip_subtree(); - eprintln!("replacing {:?} by {:?}", node, v); if !v.is_empty() { v.reverse(); return (None, v); @@ -640,8 +638,8 @@ impl TokenConvertor for Convertor { self.current = new_current; self.current_synthetic = new_synth; } - // TODO fix range? - return Some((SynToken::Synthetic(synth_token), self.range)); + let range = synth_token.range; + return Some((SynToken::Synthetic(synth_token), range)); } let curr = self.current.clone()?; @@ -675,7 +673,6 @@ impl TokenConvertor for Convertor { } if let Some(synth_token) = self.current_synthetic.last() { - // TODO fix range? 
return Some(SynToken::Synthetic(synth_token.clone())); } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index ee1090945cb..c923e7a69a1 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -74,6 +74,7 @@ impl TokenMap { pub(crate) fn shrink_to_fit(&mut self) { self.entries.shrink_to_fit(); + self.synthetic_entries.shrink_to_fit(); } pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { |
