diff options
| author | bors <bors@rust-lang.org> | 2023-04-22 05:23:18 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2023-04-22 05:23:18 +0000 |
| commit | fcbbec6c6eba365260451d2b8c3d72f6b84b7245 (patch) | |
| tree | 9fdf34f4a54b9416dcb5aab43a8e6b3e4ad0d77a | |
| parent | af3b6a0893cc3a05b5ddc1e9d31b2c454b480426 (diff) | |
| parent | a497e9a05ef9b3d850014851a8e0875d773698a1 (diff) | |
| download | rust-fcbbec6c6eba365260451d2b8c3d72f6b84b7245.tar.gz rust-fcbbec6c6eba365260451d2b8c3d72f6b84b7245.zip | |
Auto merge of #14625 - jhgg:fix/token-conversion-for-doc-comments, r=Veykril
mbe: fix token conversion for doc comments. Fixes #14611. When creating token trees for the converted doc comment, we should use the correct span in all places, rather than allowing some to remain unspecified; otherwise, things behave incorrectly.
| -rw-r--r-- | crates/ide/src/goto_definition.rs | 26 | ||||
| -rw-r--r-- | crates/mbe/src/syntax_bridge.rs | 74 |
2 files changed, 64 insertions, 36 deletions
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 004c33a8a76..1518b495153 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -851,6 +851,32 @@ fn foo() {} } #[test] + fn goto_through_included_file_struct_with_doc_comment() { + check( + r#" +//- /main.rs +#[rustc_builtin_macro] +macro_rules! include {} + +include!("foo.rs"); + +fn f() { + let x = Foo$0; +} + +mod confuse_index { + pub struct Foo; +} + +//- /foo.rs +/// This is a doc comment +pub struct Foo; + //^^^ + "#, + ); + } + + #[test] fn goto_for_type_param() { check( r#" diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index fb531340108..8cbf0f8fc0b 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -190,20 +190,13 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree { let kind = token.kind(conv); if kind == COMMENT { - if let Some(tokens) = conv.convert_doc_comment(&token) { - // FIXME: There has to be a better way to do this - // Add the comments token id to the converted doc string + // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can + // figure out which token id to use for the doc comment, if it is converted successfully. 
+ let next_id = conv.id_alloc().peek_next_id(); + if let Some(tokens) = conv.convert_doc_comment(&token, next_id) { let id = conv.id_alloc().alloc(range, synth_id); - result.extend(tokens.into_iter().map(|mut tt| { - if let tt::TokenTree::Subtree(sub) = &mut tt { - if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) = - sub.token_trees.get_mut(2) - { - lit.span = id - } - } - tt - })); + debug_assert_eq!(id, next_id); + result.extend(tokens); } continue; } @@ -382,49 +375,46 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { text.into() } -fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> { +fn convert_doc_comment( + token: &syntax::SyntaxToken, + span: tt::TokenId, +) -> Option<Vec<tt::TokenTree>> { cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; // Make `doc="\" Comments\"" - let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]; + let meta_tkns = + vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)]; // Make `#![]` let mut token_trees = Vec::with_capacity(3); - token_trees.push(mk_punct('#')); + token_trees.push(mk_punct('#', span)); if let ast::CommentPlacement::Inner = doc { - token_trees.push(mk_punct('!')); + token_trees.push(mk_punct('!', span)); } token_trees.push(tt::TokenTree::from(tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId::UNSPECIFIED, - close: tt::TokenId::UNSPECIFIED, - kind: tt::DelimiterKind::Bracket, - }, + delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, token_trees: meta_tkns, })); return Some(token_trees); // Helper functions - fn mk_ident(s: &str) -> tt::TokenTree { - tt::TokenTree::from(tt::Leaf::from(tt::Ident { - text: s.into(), - span: tt::TokenId::unspecified(), - })) + fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree { + tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })) } - fn 
mk_punct(c: char) -> tt::TokenTree { + fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree { tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone, - span: tt::TokenId::unspecified(), + span, })) } - fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree { - let lit = tt::Literal { text: doc_comment_text(comment), span: tt::TokenId::unspecified() }; + fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree { + let lit = tt::Literal { text: doc_comment_text(comment), span }; tt::TokenTree::from(tt::Leaf::from(lit)) } @@ -480,6 +470,10 @@ impl TokenIdAlloc { } } } + + fn peek_next_id(&self) -> tt::TokenId { + tt::TokenId(self.next_id) + } } /// A raw token (straight from lexer) converter @@ -502,7 +496,11 @@ trait SrcToken<Ctx>: std::fmt::Debug { trait TokenConverter: Sized { type Token: SrcToken<Self>; - fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>; + fn convert_doc_comment( + &self, + token: &Self::Token, + span: tt::TokenId, + ) -> Option<Vec<tt::TokenTree>>; fn bump(&mut self) -> Option<(Self::Token, TextRange)>; @@ -532,9 +530,9 @@ impl<'a> SrcToken<RawConverter<'a>> for usize { impl<'a> TokenConverter for RawConverter<'a> { type Token = usize; - fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> { + fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> { let text = self.lexed.text(token); - convert_doc_comment(&doc_comment(text)) + convert_doc_comment(&doc_comment(text), span) } fn bump(&mut self) -> Option<(Self::Token, TextRange)> { @@ -681,8 +679,12 @@ impl SrcToken<Converter> for SynToken { impl TokenConverter for Converter { type Token = SynToken; - fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> { - convert_doc_comment(token.token()?) 
+ fn convert_doc_comment( + &self, + token: &Self::Token, + span: tt::TokenId, + ) -> Option<Vec<tt::TokenTree>> { + convert_doc_comment(token.token()?, span) } fn bump(&mut self) -> Option<(Self::Token, TextRange)> { |
