223 files changed, 5681 insertions, 2886 deletions
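Orientation before the diff body: the central refactor strips proc-macro knowledge out of `base-db`, renaming its `Change` type to `FileChange` and dropping the `proc_macros` field (see the `crates/base-db/src/change.rs` hunk below). A minimal sketch of the surviving API, using only what that hunk shows; the `apply_edit` helper and the deletion comment are illustrative, not part of the patch:

    use base_db::{FileChange, FileId, SourceDatabaseExt};
    use triomphe::Arc;

    fn apply_edit(db: &mut dyn SourceDatabaseExt, file: FileId, new_text: Arc<str>) {
        let mut change = FileChange::new();
        // `files_changed` is a public field; `None` in place of text marks a deleted file.
        change.files_changed.push((file, Some(new_text)));
        // Note what is no longer here: no `set_proc_macros`, no `ProcMacros` map.
        change.apply(db);
    }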
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1f2a7796d11..be830415f9c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -38,7 +38,6 @@ jobs: - 'crates/proc-macro-api/**' - 'crates/proc-macro-srv/**' - 'crates/proc-macro-srv-cli/**' - - 'crates/proc-macro-test/**' rust: needs: changes diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index 741e559953f..e6a9917a0bf 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -67,7 +67,7 @@ jobs: other_metrics: strategy: matrix: - names: [self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] + names: [self, rustc_tests, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] runs-on: ubuntu-latest needs: [setup_cargo, build_metrics] @@ -118,6 +118,11 @@ jobs: with: name: self-${{ github.sha }} + - name: Download rustc_tests metrics + uses: actions/download-artifact@v3 + with: + name: rustc_tests-${{ github.sha }} + - name: Download ripgrep-13.0.0 metrics uses: actions/download-artifact@v3 with: @@ -146,7 +151,7 @@ jobs: chmod 700 ~/.ssh git clone --depth 1 git@github.com:rust-analyzer/metrics.git - jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json + jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json rustc_tests.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json cd metrics git add . git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 diff --git a/Cargo.lock b/Cargo.lock index f94b855ca7d..c7d110eafb6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -74,11 +74,11 @@ dependencies = [ "profile", "rust-analyzer-salsa", "rustc-hash", + "semver", + "span", "stdx", "syntax", - "test-utils", "triomphe", - "tt", "vfs", ] @@ -516,8 +516,10 @@ dependencies = [ "rustc-dependencies", "rustc-hash", "smallvec", + "span", "stdx", "syntax", + "test-fixture", "test-utils", "tracing", "triomphe", @@ -542,6 +544,7 @@ dependencies = [ "profile", "rustc-hash", "smallvec", + "span", "stdx", "syntax", "tracing", @@ -581,6 +584,7 @@ dependencies = [ "smallvec", "stdx", "syntax", + "test-fixture", "test-utils", "tracing", "tracing-subscriber", @@ -624,6 +628,7 @@ dependencies = [ "smallvec", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", "toolchain", @@ -647,6 +652,7 @@ dependencies = [ "sourcegen", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", ] @@ -666,6 +672,7 @@ dependencies = [ "smallvec", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", ] @@ -694,8 +701,10 @@ dependencies = [ "rayon", "rustc-hash", "sourcegen", + "span", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", "tracing", @@ -720,6 +729,7 @@ dependencies = [ "sourcegen", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", ] @@ -737,6 +747,7 @@ dependencies = [ "parser", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", "triomphe", @@ -903,11 +914,13 @@ version = "0.0.0" dependencies = [ "anyhow", "crossbeam-channel", + "hir-expand", "ide", "ide-db", "itertools", "proc-macro-api", "project-model", + "span", "tracing", "tt", "vfs", @@ -932,33 +945,33 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "lsp-server" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928" +version = "0.7.6" dependencies = [ "crossbeam-channel", + "ctrlc", "log", + "lsp-types", "serde", "serde_json", ] [[package]] name = "lsp-server" -version = "0.7.5" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095" dependencies = [ "crossbeam-channel", - "ctrlc", "log", - "lsp-types", "serde", "serde_json", ] [[package]] name = "lsp-types" -version = "0.94.0" +version = "0.95.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" +checksum = "158c1911354ef73e8fe42da6b10c0484cb65c7f1007f28022e847706c1ab6984" dependencies = [ "bitflags 1.3.2", "serde", @@ -975,6 +988,7 @@ dependencies = [ "parser", "rustc-hash", "smallvec", + "span", "stdx", "syntax", "test-utils", @@ -1251,6 +1265,7 @@ dependencies = [ "serde", "serde_json", "snap", + "span", "stdx", "text-size", "tracing", @@ -1262,6 +1277,7 @@ dependencies = [ name = "proc-macro-srv" version = "0.0.0" dependencies = [ + "base-db", "expect-test", "libloading", "mbe", @@ -1270,6 +1286,7 @@ dependencies = [ "paths", "proc-macro-api", "proc-macro-test", + "span", "stdx", "tt", ] @@ -1287,15 +1304,10 @@ name = "proc-macro-test" version = "0.0.0" dependencies = [ "cargo_metadata", - "proc-macro-test-impl", "toolchain", ] [[package]] -name = "proc-macro-test-impl" -version = "0.0.0" - -[[package]] name = "proc-macro2" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1514,7 +1526,7 @@ dependencies = [ "ide-ssr", "itertools", "load-cargo", - "lsp-server 0.7.4", + "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", "mbe", "mimalloc", @@ -1535,6 +1547,7 @@ dependencies = [ "sourcegen", "stdx", "syntax", + "test-fixture", "test-utils", "tikv-jemallocator", "toolchain", @@ -1545,6 +1558,7 @@ dependencies = [ "triomphe", "vfs", "vfs-notify", + "walkdir", "winapi", "xflags", "xshell", @@ -1726,6 +1740,17 @@ dependencies = [ ] [[package]] +name = "span" +version = "0.0.0" +dependencies = [ + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rust-analyzer-salsa", + "stdx", + "syntax", + "vfs", +] + +[[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1796,6 +1821,20 @@ dependencies = [ ] [[package]] +name = "test-fixture" +version = "0.0.0" +dependencies = [ + "base-db", + "cfg", + "hir-expand", + "rustc-hash", + "span", + "stdx", + "test-utils", + "tt", +] + +[[package]] name = "test-utils" version = "0.0.0" dependencies = [ @@ -1997,6 +2036,7 @@ name = "tt" version = "0.0.0" dependencies = [ "smol_str", + "span", "stdx", "text-size", ] diff --git a/Cargo.toml b/Cargo.toml index 1213979c390..e82a14d16e1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] members = ["xtask/", "lib/*", "crates/*"] -exclude = ["crates/proc-macro-test/imp"] +exclude = ["crates/proc-macro-srv/proc-macro-test/"] resolver = "2" [workspace.package] @@ -70,10 +70,9 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" } proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" } profile = { path = "./crates/profile", version = "0.0.0" } project-model = { path = "./crates/project-model", version = "0.0.0" } -sourcegen = { path = "./crates/sourcegen", version = "0.0.0" } +span = { 
path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } -test-utils = { path = "./crates/test-utils", version = "0.0.0" } text-edit = { path = "./crates/text-edit", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } @@ -82,19 +81,25 @@ vfs = { path = "./crates/vfs", version = "0.0.0" } rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" } # local crates that aren't published to crates.io. These should not have versions. -proc-macro-test = { path = "./crates/proc-macro-test" } +sourcegen = { path = "./crates/sourcegen" } +test-fixture = { path = "./crates/test-fixture" } +test-utils = { path = "./crates/test-utils" } # In-tree crates that are published separately and follow semver. See lib/README.md line-index = { version = "0.1.1" } la-arena = { version = "0.3.1" } -lsp-server = { version = "0.7.4" } +lsp-server = { version = "0.7.6" } # non-local crates anyhow = "1.0.75" +arrayvec = "0.7.4" bitflags = "2.4.1" cargo_metadata = "0.18.1" +command-group = "2.0.1" +crossbeam-channel = "0.5.8" dissimilar = "1.0.7" either = "1.9.0" +expect-test = "1.4.0" hashbrown = { version = "0.14", features = [ "inline-more", ], default-features = false } @@ -105,6 +110,7 @@ nohash-hasher = "0.2.0" rayon = "1.8.0" rust-analyzer-salsa = "0.17.0-pre.4" rustc-hash = "1.1.0" +semver = "1.0.14" serde = { version = "1.0.192", features = ["derive"] } serde_json = "1.0.108" smallvec = { version = "1.10.0", features = [ @@ -124,5 +130,6 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features = triomphe = { version = "0.1.10", default-features = false, features = ["std"] } xshell = "0.2.5" + # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.5.3", features = ["raw-api"] } diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml index 393ffe155ba..13b26655581 100644 --- a/crates/base-db/Cargo.toml +++ b/crates/base-db/Cargo.toml @@ -16,12 +16,12 @@ la-arena.workspace = true rust-analyzer-salsa.workspace = true rustc-hash.workspace = true triomphe.workspace = true +semver.workspace = true # local deps cfg.workspace = true profile.workspace = true stdx.workspace = true syntax.workspace = true -test-utils.workspace = true -tt.workspace = true vfs.workspace = true +span.workspace = true diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs index 6a3b36b2312..4332e572e20 100644 --- a/crates/base-db/src/change.rs +++ b/crates/base-db/src/change.rs @@ -7,18 +7,17 @@ use salsa::Durability; use triomphe::Arc; use vfs::FileId; -use crate::{CrateGraph, ProcMacros, SourceDatabaseExt, SourceRoot, SourceRootId}; +use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId}; /// Encapsulate a bunch of raw `.set` calls on the database. 
#[derive(Default)] -pub struct Change { +pub struct FileChange { pub roots: Option<Vec<SourceRoot>>, pub files_changed: Vec<(FileId, Option<Arc<str>>)>, pub crate_graph: Option<CrateGraph>, - pub proc_macros: Option<ProcMacros>, } -impl fmt::Debug for Change { +impl fmt::Debug for FileChange { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { let mut d = fmt.debug_struct("Change"); if let Some(roots) = &self.roots { @@ -34,9 +33,9 @@ impl fmt::Debug for Change { } } -impl Change { +impl FileChange { pub fn new() -> Self { - Change::default() + FileChange::default() } pub fn set_roots(&mut self, roots: Vec<SourceRoot>) { @@ -51,10 +50,6 @@ impl Change { self.crate_graph = Some(graph); } - pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) { - self.proc_macros = Some(proc_macros); - } - pub fn apply(self, db: &mut dyn SourceDatabaseExt) { let _p = profile::span("RootDatabase::apply_change"); if let Some(roots) = self.roots { @@ -79,9 +74,6 @@ impl Change { if let Some(crate_graph) = self.crate_graph { db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH); } - if let Some(proc_macros) = self.proc_macros { - db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); - } } } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index c2472363aac..e45a81238ac 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -6,22 +6,19 @@ //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO is done and lowered to input. -use std::{fmt, mem, ops, panic::RefUnwindSafe, str::FromStr, sync}; +use std::{fmt, mem, ops, str::FromStr}; use cfg::CfgOptions; use la_arena::{Arena, Idx}; use rustc_hash::{FxHashMap, FxHashSet}; +use semver::Version; use syntax::SmolStr; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; -use crate::span::SpanData; - // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, // then the crate for the proc-macro hasn't been build yet as the build data is missing. pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>; -pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>; - /// Files are grouped into source roots. A source root is a directory on the /// file systems which is watched for changes. Typically it corresponds to a /// Rust crate. Source roots *might* be nested: in this case, a file belongs to @@ -242,49 +239,8 @@ impl CrateDisplayName { CrateDisplayName { crate_name, canonical_name } } } - -// FIXME: These should not be defined in here? Why does base db know about proc-macros -// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either. - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(pub u32); - -#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] -pub enum ProcMacroKind { - CustomDerive, - FuncLike, - Attr, -} - -pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { - fn expand( - &self, - subtree: &tt::Subtree<SpanData>, - attrs: Option<&tt::Subtree<SpanData>>, - env: &Env, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, - ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>; -} - -#[derive(Debug)] -pub enum ProcMacroExpansionError { - Panic(String), - /// Things like "proc macro server was killed by OOM". 
- System(String), -} - -pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>; pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>; -#[derive(Debug, Clone)] -pub struct ProcMacro { - pub name: SmolStr, - pub kind: ProcMacroKind, - pub expander: sync::Arc<dyn ProcMacroExpander>, -} - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum ReleaseChannel { Stable, @@ -303,7 +259,7 @@ impl ReleaseChannel { pub fn from_str(str: &str) -> Option<Self> { Some(match str { - "" => ReleaseChannel::Stable, + "" | "stable" => ReleaseChannel::Stable, "nightly" => ReleaseChannel::Nightly, _ if str.starts_with("beta") => ReleaseChannel::Beta, _ => return None, @@ -334,7 +290,7 @@ pub struct CrateData { // things. This info does need to be somewhat present though as to prevent deduplication from // happening across different workspaces with different layouts. pub target_layout: TargetLayoutLoadResult, - pub channel: Option<ReleaseChannel>, + pub toolchain: Option<Version>, } impl CrateData { @@ -391,6 +347,10 @@ impl CrateData { slf_deps.eq(other_deps) } + + pub fn channel(&self) -> Option<ReleaseChannel> { + self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -398,10 +358,12 @@ pub enum Edition { Edition2015, Edition2018, Edition2021, + Edition2024, } impl Edition { pub const CURRENT: Edition = Edition::Edition2021; + pub const DEFAULT: Edition = Edition::Edition2015; } #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -472,7 +434,7 @@ impl CrateGraph { is_proc_macro: bool, origin: CrateOrigin, target_layout: Result<Arc<str>, Arc<str>>, - channel: Option<ReleaseChannel>, + toolchain: Option<Version>, ) -> CrateId { let data = CrateData { root_file_id, @@ -486,7 +448,7 @@ impl CrateGraph { origin, target_layout, is_proc_macro, - channel, + toolchain, }; self.arena.alloc(data) } @@ -784,6 +746,7 @@ impl FromStr for Edition { "2015" => Edition::Edition2015, "2018" => Edition::Edition2018, "2021" => Edition::Edition2021, + "2024" => Edition::Edition2024, _ => return Err(ParseEditionError { invalid_input: s.to_string() }), }; Ok(res) @@ -796,6 +759,7 @@ impl fmt::Display for Edition { Edition::Edition2015 => "2015", Edition::Edition2018 => "2018", Edition::Edition2021 => "2021", + Edition::Edition2024 => "2024", }) } } diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 57e7934367b..a0a55df5f99 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -4,27 +4,27 @@ mod input; mod change; -pub mod fixture; -pub mod span; use std::panic; use rustc_hash::FxHashSet; -use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; +use syntax::{ast, Parse, SourceFile}; use triomphe::Arc; pub use crate::{ - change::Change, + change::FileChange, input::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, - DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, - ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, - ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult, + DependencyKind, Edition, Env, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot, + SourceRootId, TargetLayoutLoadResult, }, }; pub use salsa::{self, Cancelled}; +pub use span::{FilePosition, FileRange}; pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath}; +pub use semver::{BuildMetadata, Prerelease, Version, VersionReq}; + 
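A note on the `CrateData` change visible above: crates now store the full toolchain `semver::Version` (which `base-db` re-exports, per the `pub use semver::...` line just above), and the release channel is derived on demand from the version's pre-release component via the new `CrateData::channel()`. A self-contained sketch of that mapping; the `channel_of` helper and the version strings are illustrative:

    use base_db::{ReleaseChannel, Version};

    fn channel_of(toolchain: &str) -> Option<ReleaseChannel> {
        let v = Version::parse(toolchain).ok()?;
        // `v.pre` is empty for stable, "nightly" for nightly, "beta"/"beta.N" for beta,
        // exactly the cases `ReleaseChannel::from_str` matches in the input.rs hunk.
        ReleaseChannel::from_str(&v.pre)
    }

    fn main() {
        assert_eq!(channel_of("1.76.0"), Some(ReleaseChannel::Stable));
        assert_eq!(channel_of("1.76.0-nightly"), Some(ReleaseChannel::Nightly));
        assert_eq!(channel_of("1.76.0-beta.3"), Some(ReleaseChannel::Beta));
    }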
#[macro_export] macro_rules! impl_intern_key { ($name:ident) => { @@ -43,18 +43,6 @@ pub trait Upcast<T: ?Sized> { fn upcast(&self) -> &T; } -#[derive(Clone, Copy, Debug)] -pub struct FilePosition { - pub file_id: FileId, - pub offset: TextSize, -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct FileRange { - pub file_id: FileId, - pub range: TextRange, -} - pub const DEFAULT_PARSE_LRU_CAP: usize = 128; pub trait FileLoader { @@ -74,10 +62,6 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// The crate graph. #[salsa::input] fn crate_graph(&self) -> Arc<CrateGraph>; - - /// The proc macros. - #[salsa::input] - fn proc_macros(&self) -> Arc<ProcMacros>; } fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index 4324584df39..a90dec6b7d9 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -rustc-hash = "1.1.0" +rustc-hash.workspace = true # locals deps tt.workspace = true diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index c7ac1af934a..62fb429a63f 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,6 +1,6 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; -use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; +use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use syntax::{ast, AstNode}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; @@ -8,7 +8,7 @@ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; fn assert_parse_result(input: &str, expected: CfgExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } @@ -16,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) { fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); expect.assert_eq(&actual); @@ -25,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) { fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); @@ -36,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, 
DUMMY); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>(); diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 4322d2d966a..6f91ea31bb2 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -13,12 +13,12 @@ doctest = false [dependencies] cargo_metadata.workspace = true -crossbeam-channel = "0.5.8" +crossbeam-channel.workspace = true tracing.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true serde_json.workspace = true serde.workspace = true -command-group = "2.0.1" +command-group.workspace = true # local deps paths.workspace = true diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 2d174517605..5e8cf0d7e4a 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -arrayvec = "0.7.2" +arrayvec.workspace = true bitflags.workspace = true cov-mark = "2.0.0-pre.1" dashmap.workspace = true @@ -23,7 +23,7 @@ indexmap.workspace = true itertools.workspace = true la-arena.workspace = true once_cell = "1.17.0" -rustc-hash = "1.1.0" +rustc-hash.workspace = true tracing.workspace = true smallvec.workspace = true hashbrown.workspace = true @@ -42,13 +42,15 @@ mbe.workspace = true cfg.workspace = true tt.workspace = true limit.workspace = true +span.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test.workspace = true # local deps test-utils.workspace = true +test-fixture.workspace = true [features] in-rust-tree = ["rustc-dependencies/in-rust-tree"] diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 942b28fc145..26f76afb1f0 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -637,9 +637,12 @@ impl<'attr> AttrQuery<'attr> { } } -fn any_has_attrs( - db: &dyn DefDatabase, - id: impl Lookup<Data = impl HasSource<Value = impl ast::HasAttrs>>, +fn any_has_attrs<'db>( + db: &(dyn DefDatabase + 'db), + id: impl Lookup< + Database<'db> = dyn DefDatabase + 'db, + Data = impl HasSource<Value = impl ast::HasAttrs>, + >, ) -> InFile<ast::AnyHasAttrs> { id.lookup(db).source(db).map(ast::AnyHasAttrs::new) } @@ -650,17 +653,17 @@ fn attrs_from_item_tree<N: ItemTreeNode>(db: &dyn DefDatabase, id: ItemTreeId<N> tree.raw_attrs(mod_item.into()).clone() } -fn attrs_from_item_tree_loc<N: ItemTreeNode>( - db: &dyn DefDatabase, - lookup: impl Lookup<Data = ItemLoc<N>>, +fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>( + db: &(dyn DefDatabase + 'db), + lookup: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = ItemLoc<N>>, ) -> RawAttrs { let id = lookup.lookup(db).id; attrs_from_item_tree(db, id) } -fn attrs_from_item_tree_assoc<N: ItemTreeNode>( - db: &dyn DefDatabase, - lookup: impl Lookup<Data = AssocItemLoc<N>>, +fn attrs_from_item_tree_assoc<'db, N: ItemTreeNode>( + db: &(dyn DefDatabase + 'db), + lookup: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<N>>, ) -> RawAttrs { let id = lookup.lookup(db).id; attrs_from_item_tree(db, id) diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-def/src/attr/builtin.rs index 15dceeb8af2..48a596f7f53 100644 --- a/crates/hir-def/src/attr/builtin.rs +++ b/crates/hir-def/src/attr/builtin.rs @@ -2,7 +2,7 @@ //! //! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`. //! -//! It was last synchronized with upstream commit e29821ff85a2a3000d226f99f62f89464028d5d6. +//! 
It was last synchronized with upstream commit c3def263a44e07e09ae6d57abfc8650227fb4972. //! //! The macros were adjusted to only expand to the attribute name, since that is all we need to do //! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to @@ -240,7 +240,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ template!(List: "address, kcfi, memory, thread"), DuplicatesOk, experimental!(no_sanitize) ), - gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, experimental!(coverage)), + gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)), ungated!( doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk @@ -364,7 +364,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ allow_internal_unsafe, Normal, template!(Word), WarnFollowing, "allow_internal_unsafe side-steps the unsafe_code lint", ), - ungated!(rustc_safe_intrinsic, Normal, template!(Word), DuplicatesOk), rustc_attr!(rustc_allowed_through_unstable_modules, Normal, template!(Word), WarnFollowing, "rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \ through unstable paths"), @@ -453,6 +452,12 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ ErrorFollowing, INTERNAL_UNSTABLE ), + rustc_attr!( + rustc_confusables, Normal, + template!(List: r#""name1", "name2", ..."#), + ErrorFollowing, + INTERNAL_UNSTABLE, + ), // Enumerates "identity-like" conversion methods to suggest on type mismatch. rustc_attr!( rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE @@ -488,6 +493,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_attr!( rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE ), + // Ensure the argument to this function is &&str during const-check. + rustc_attr!( + rustc_const_panic_str, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE + ), // ========================================================================== // Internal attributes, Layout related: @@ -521,6 +530,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ "#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference." ), rustc_attr!( + rustc_never_returns_null_ptr, Normal, template!(Word), ErrorFollowing, + "#[rustc_never_returns_null_ptr] is used to mark functions returning non-null pointers." + ), + rustc_attr!( rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true, "#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`." ), @@ -533,7 +546,11 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ "#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl." 
), rustc_attr!( - rustc_deny_explicit_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: false, + rustc_deny_explicit_impl, + AttributeType::Normal, + template!(List: "implement_via_object = (true|false)"), + ErrorFollowing, + @only_local: true, "#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls" ), rustc_attr!( @@ -614,6 +631,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_doc_primitive, Normal, template!(NameValueStr: "primitive name"), ErrorFollowing, r#"`rustc_doc_primitive` is a rustc internal attribute"#, ), + rustc_attr!( + rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing, + "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe" + ), // ========================================================================== // Internal attributes, Testing: @@ -625,13 +646,16 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing), + rustc_attr!(TEST, rustc_variance_of_opaques, Normal, template!(Word), WarnFollowing), + rustc_attr!(TEST, rustc_hidden_type_of_opaques, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing), + rustc_attr!(TEST, rustc_abi, Normal, template!(List: "field1, field2, ..."), WarnFollowing), rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing), rustc_attr!( TEST, rustc_error, Normal, template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly ), - rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing), + rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing), rustc_attr!( TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index 0f98a4ec93c..1a63e96bfa9 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -1,19 +1,23 @@ //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. 
+use triomphe::Arc; + use base_db::FileId; -use hir_expand::span::{RealSpanMap, SpanMapRef}; +use hir_expand::span_map::{RealSpanMap, SpanMap}; use mbe::syntax_node_to_token_tree; -use syntax::{ast, AstNode}; +use syntax::{ast, AstNode, TextRange}; use crate::attr::{DocAtom, DocExpr}; fn assert_parse_result(input: &str, expected: DocExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); let tt = syntax_node_to_token_tree( tt.syntax(), - SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))), + map.as_ref(), + map.span_for_range(TextRange::empty(0.into())), ); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index c6a90932015..a45ec844aba 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -8,7 +8,7 @@ use either::Either; use hir_expand::{ ast_id_map::AstIdMap, name::{name, AsName, Name}, - AstId, ExpandError, InFile, + ExpandError, InFile, }; use intern::Interned; use profile::Count; @@ -66,7 +66,7 @@ pub(super) fn lower( krate, def_map: expander.module.def_map(db), source_map: BodySourceMap::default(), - ast_id_map: db.ast_id_map(expander.current_file_id), + ast_id_map: db.ast_id_map(expander.current_file_id()), body: Body { exprs: Default::default(), pats: Default::default(), @@ -408,7 +408,7 @@ impl ExprCollector<'_> { ast::Expr::ParenExpr(e) => { let inner = self.collect_expr_opt(e.expr()); // make the paren expr point to the inner expression as well - let src = self.expander.to_source(syntax_ptr); + let src = self.expander.in_file(syntax_ptr); self.source_map.expr_map.insert(src, inner); inner } @@ -441,7 +441,7 @@ impl ExprCollector<'_> { Some(e) => self.collect_expr(e), None => self.missing_expr(), }; - let src = self.expander.to_source(AstPtr::new(&field)); + let src = self.expander.in_file(AstPtr::new(&field)); self.source_map.field_map_back.insert(expr, src); Some(RecordLitField { name, expr }) }) @@ -644,7 +644,7 @@ impl ExprCollector<'_> { Some(id) => { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro - let src = self.expander.to_source(syntax_ptr); + let src = self.expander.in_file(syntax_ptr); self.source_map.expr_map.insert(src, id); id } @@ -957,22 +957,31 @@ impl ExprCollector<'_> { T: ast::AstNode, { // File containing the macro call. Expansion errors will be attached here. 
- let outer_file = self.expander.current_file_id; + let outer_file = self.expander.current_file_id(); - let macro_call_ptr = self.expander.to_source(AstPtr::new(&mcall)); + let macro_call_ptr = self.expander.in_file(syntax_ptr); let module = self.expander.module.local_id; - let res = self.expander.enter_expand(self.db, mcall, |path| { - self.def_map - .resolve_path( - self.db, - module, - &path, - crate::item_scope::BuiltinShadowMode::Other, - Some(MacroSubNs::Bang), - ) - .0 - .take_macros() - }); + + let res = match self.def_map.modules[module] + .scope + .macro_invocations + .get(&InFile::new(outer_file, self.ast_id_map.ast_id_for_ptr(syntax_ptr))) + { + // fast path, macro call is in a block module + Some(&call) => Ok(self.expander.enter_expand_id(self.db, call)), + None => self.expander.enter_expand(self.db, mcall, |path| { + self.def_map + .resolve_path( + self.db, + module, + &path, + crate::item_scope::BuiltinShadowMode::Other, + Some(MacroSubNs::Bang), + ) + .0 + .take_macros() + }), + }; let res = match res { Ok(res) => res, @@ -986,7 +995,6 @@ impl ExprCollector<'_> { return collector(self, None); } }; - if record_diagnostics { match &res.err { Some(ExpandError::UnresolvedProcMacro(krate)) => { @@ -1013,10 +1021,10 @@ impl ExprCollector<'_> { Some((mark, expansion)) => { // Keep collecting even with expansion errors so we can provide completions and // other services in incomplete macro expressions. - self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id); + self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id()); let prev_ast_id_map = mem::replace( &mut self.ast_id_map, - self.db.ast_id_map(self.expander.current_file_id), + self.db.ast_id_map(self.expander.current_file_id()), ); if record_diagnostics { @@ -1066,7 +1074,7 @@ impl ExprCollector<'_> { Some(tail) => { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro - let src = self.expander.to_source(syntax_ptr); + let src = self.expander.in_file(syntax_ptr); self.source_map.expr_map.insert(src, tail); Some(tail) } @@ -1140,7 +1148,7 @@ impl ExprCollector<'_> { let block_id = if block_has_items { let file_local_id = self.ast_id_map.ast_id(&block); - let ast_id = AstId::new(self.expander.current_file_id, file_local_id); + let ast_id = self.expander.in_file(file_local_id); Some(self.db.intern_block(BlockLoc { ast_id, module: self.expander.module })) } else { None @@ -1333,7 +1341,7 @@ impl ExprCollector<'_> { let ast_pat = f.pat()?; let pat = self.collect_pat(ast_pat, binding_list); let name = f.field_name()?.as_name(); - let src = self.expander.to_source(AstPtr::new(&f)); + let src = self.expander.in_file(AstPtr::new(&f)); self.source_map.pat_field_map_back.insert(pat, src); Some(RecordFieldPat { name, pat }) }) @@ -1391,7 +1399,7 @@ impl ExprCollector<'_> { ast::Pat::MacroPat(mac) => match mac.macro_call() { Some(call) => { let macro_ptr = AstPtr::new(&call); - let src = self.expander.to_source(AstPtr::new(&Either::Left(pat))); + let src = self.expander.in_file(AstPtr::new(&Either::Left(pat))); let pat = self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| { this.collect_pat_opt(expanded_pat, binding_list) @@ -1472,10 +1480,7 @@ impl ExprCollector<'_> { } self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode { - node: InFile::new( - self.expander.current_file_id, - SyntaxNodePtr::new(owner.syntax()), - ), + node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())), cfg, opts: 
self.expander.cfg_options().clone(), }); @@ -1514,10 +1519,7 @@ impl ExprCollector<'_> { } else { Err(BodyDiagnostic::UnreachableLabel { name, - node: InFile::new( - self.expander.current_file_id, - AstPtr::new(&lifetime), - ), + node: self.expander.in_file(AstPtr::new(&lifetime)), }) }; } @@ -1526,7 +1528,7 @@ impl ExprCollector<'_> { Err(BodyDiagnostic::UndeclaredLabel { name, - node: InFile::new(self.expander.current_file_id, AstPtr::new(&lifetime)), + node: self.expander.in_file(AstPtr::new(&lifetime)), }) } @@ -1990,7 +1992,7 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> impl ExprCollector<'_> { fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { - let src = self.expander.to_source(ptr); + let src = self.expander.in_file(ptr); let id = self.body.exprs.alloc(expr); self.source_map.expr_map_back.insert(id, src.clone()); self.source_map.expr_map.insert(src, id); @@ -2018,7 +2020,7 @@ impl ExprCollector<'_> { } fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { - let src = self.expander.to_source(ptr); + let src = self.expander.in_file(ptr); let id = self.body.pats.alloc(pat); self.source_map.pat_map_back.insert(id, src.clone()); self.source_map.pat_map.insert(src, id); @@ -2033,7 +2035,7 @@ impl ExprCollector<'_> { } fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId { - let src = self.expander.to_source(ptr); + let src = self.expander.in_file(ptr); let id = self.body.labels.alloc(label); self.source_map.label_map_back.insert(id, src.clone()); self.source_map.label_map.insert(src, id); diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index baca293e290..ab623250d40 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -267,9 +267,10 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope #[cfg(test)] mod tests { - use base_db::{fixture::WithFixture, FileId, SourceDatabase}; + use base_db::{FileId, SourceDatabase}; use hir_expand::{name::AsName, InFile}; use syntax::{algo::find_node_at_offset, ast, AstNode}; + use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId}; diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs index 2b432dfbb92..a76ddffb411 100644 --- a/crates/hir-def/src/body/tests.rs +++ b/crates/hir-def/src/body/tests.rs @@ -1,7 +1,8 @@ mod block; -use base_db::{fixture::WithFixture, SourceDatabase}; +use base_db::SourceDatabase; use expect_test::{expect, Expect}; +use test_fixture::WithFixture; use crate::{test_db::TestDB, ModuleDefId}; diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 635d13f24ad..9c183c9332b 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -16,7 +16,7 @@ use crate::{ db::DefDatabase, expander::{Expander, Mark}, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId}, - macro_call_as_call_id, macro_id_to_def_id, + macro_call_as_call_id, nameres::{ attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, @@ -720,7 +720,7 @@ impl<'a> AssocItemCollector<'a> { ) .0 .take_macros() - .map(|it| macro_id_to_def_id(self.db, it)) + .map(|it| self.db.macro_def(it)) }; match macro_call_as_call_id( self.db.upcast(), diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 31c1a713031..d5831022f28 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -1,7 +1,7 @@ //! 
Defines database & queries for name resolution. use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use either::Either; -use hir_expand::{db::ExpandDatabase, HirFileId}; +use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId}; use intern::Interned; use la_arena::ArenaMap; use syntax::{ast, AstPtr}; @@ -24,9 +24,10 @@ use crate::{ AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, - LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, - StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, - TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, + LocalFieldId, Macro2Id, Macro2Loc, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, + ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, + TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, + VariantId, }; #[salsa::query_group(InternDatabaseStorage)] @@ -110,6 +111,8 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba #[salsa::invoke(DefMap::block_def_map_query)] fn block_def_map(&self, block: BlockId) -> Arc<DefMap>; + fn macro_def(&self, m: MacroId) -> MacroDefId; + // region:data #[salsa::invoke(StructData::struct_data_query)] @@ -239,12 +242,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba #[salsa::invoke(LangItems::crate_lang_items_query)] fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>; - #[salsa::transparent] - fn crate_limits(&self, crate_id: CrateId) -> CrateLimits; - - #[salsa::transparent] - fn recursion_limit(&self, crate_id: CrateId) -> u32; - fn crate_supports_no_std(&self, crate_id: CrateId) -> bool; } @@ -253,24 +250,6 @@ fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> { db.crate_def_map_query(krate) } -pub struct CrateLimits { - /// The maximum depth for potentially infinitely-recursive compile-time operations like macro expansion or auto-dereference. - pub recursion_limit: u32, -} - -fn crate_limits(db: &dyn DefDatabase, crate_id: CrateId) -> CrateLimits { - let def_map = db.crate_def_map(crate_id); - - CrateLimits { - // 128 is the default in rustc. 
- recursion_limit: def_map.recursion_limit().unwrap_or(128), - } -} - -fn recursion_limit(db: &dyn DefDatabase, crate_id: CrateId) -> u32 { - db.crate_limits(crate_id).recursion_limit -} - fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { let file = db.crate_graph()[crate_id].root_file_id; let item_tree = db.file_item_tree(file.into()); @@ -305,3 +284,78 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { false } + +fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId { + use hir_expand::InFile; + + use crate::{Lookup, MacroDefKind, MacroExpander}; + + let kind = |expander, file_id, m| { + let in_file = InFile::new(file_id, m); + match expander { + MacroExpander::Declarative => MacroDefKind::Declarative(in_file), + MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file), + MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(it, in_file), + MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(it, in_file), + MacroExpander::BuiltInEager(it) => MacroDefKind::BuiltInEager(it, in_file), + } + }; + + match id { + MacroId::Macro2Id(it) => { + let loc: Macro2Loc = it.lookup(db); + + let item_tree = loc.id.item_tree(db); + let makro = &item_tree[loc.id.value]; + MacroDefId { + krate: loc.container.krate, + kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()), + local_inner: false, + allow_internal_unsafe: loc.allow_internal_unsafe, + span: db + .span_map(loc.id.file_id()) + .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()), + edition: loc.edition, + } + } + + MacroId::MacroRulesId(it) => { + let loc: MacroRulesLoc = it.lookup(db); + + let item_tree = loc.id.item_tree(db); + let makro = &item_tree[loc.id.value]; + MacroDefId { + krate: loc.container.krate, + kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()), + local_inner: loc.flags.contains(MacroRulesLocFlags::LOCAL_INNER), + allow_internal_unsafe: loc + .flags + .contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE), + span: db + .span_map(loc.id.file_id()) + .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()), + edition: loc.edition, + } + } + MacroId::ProcMacroId(it) => { + let loc = it.lookup(db); + + let item_tree = loc.id.item_tree(db); + let makro = &item_tree[loc.id.value]; + MacroDefId { + krate: loc.container.krate, + kind: MacroDefKind::ProcMacro( + loc.expander, + loc.kind, + InFile::new(loc.id.file_id(), makro.ast_id), + ), + local_inner: false, + allow_internal_unsafe: false, + span: db + .span_map(loc.id.file_id()) + .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()), + edition: loc.edition, + } + } + } +} diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index 398f116d831..b83feeedc34 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -4,15 +4,15 @@ use base_db::CrateId; use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ - attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId, + attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId, }; use limit::Limit; -use syntax::{ast, Parse, SyntaxNode}; +use syntax::{ast, Parse}; use crate::{ - attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall, - MacroId, ModuleId, UnresolvedMacro, + attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId, + UnresolvedMacro, 
}; #[derive(Debug)] @@ -20,7 +20,7 @@ pub struct Expander { cfg_options: CfgOptions, span_map: SpanMap, krate: CrateId, - pub(crate) current_file_id: HirFileId, + current_file_id: HirFileId, pub(crate) module: ModuleId, /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached. recursion_depth: u32, @@ -29,12 +29,13 @@ pub struct Expander { impl Expander { pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander { - let recursion_limit = db.recursion_limit(module.krate); - #[cfg(not(test))] - let recursion_limit = Limit::new(recursion_limit as usize); - // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug - #[cfg(test)] - let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize)); + let recursion_limit = module.def_map(db).recursion_limit() as usize; + let recursion_limit = Limit::new(if cfg!(test) { + // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug + std::cmp::min(32, recursion_limit) + } else { + recursion_limit + }); Expander { current_file_id, module, @@ -56,9 +57,9 @@ impl Expander { let mut unresolved_macro_err = None; let result = self.within_limit(db, |this| { - let macro_call = InFile::new(this.current_file_id, ¯o_call); + let macro_call = this.in_file(¯o_call); match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| { - resolver(path).map(|it| macro_id_to_def_id(db, it)) + resolver(path).map(|it| db.macro_def(it)) }) { Ok(call_id) => call_id, Err(resolve_err) => { @@ -83,17 +84,6 @@ impl Expander { self.within_limit(db, |_this| ExpandResult::ok(Some(call_id))) } - fn enter_expand_inner( - db: &dyn DefDatabase, - call_id: MacroCallId, - error: Option<ExpandError>, - ) -> ExpandResult<Option<InFile<Parse<SyntaxNode>>>> { - let macro_file = call_id.as_macro_file(); - let ExpandResult { value, err } = db.parse_macro_expansion(macro_file); - - ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) } - } - pub fn exit(&mut self, mut mark: Mark) { self.span_map = mark.span_map; self.current_file_id = mark.file_id; @@ -113,7 +103,7 @@ impl Expander { LowerCtx::new(db, self.span_map.clone(), self.current_file_id) } - pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> { + pub(crate) fn in_file<T>(&self, value: T) -> InFile<T> { InFile { file_id: self.current_file_id, value } } @@ -164,26 +154,34 @@ impl Expander { return ExpandResult { value: None, err }; }; - let res = Self::enter_expand_inner(db, call_id, err); - match res.err { - // If proc-macro is disabled or unresolved, we want to expand to a missing expression - // instead of an empty tree which might end up in an empty block. - Some(ExpandError::UnresolvedProcMacro(_)) => res.map(|_| None), - _ => res.map(|value| { - value.and_then(|InFile { file_id, value }| { - let parse = value.cast::<T>()?; + let macro_file = call_id.as_macro_file(); + let res = db.parse_macro_expansion(macro_file); + + let err = err.or(res.err); + ExpandResult { + value: match err { + // If proc-macro is disabled or unresolved, we want to expand to a missing expression + // instead of an empty tree which might end up in an empty block. 
+ Some(ExpandError::UnresolvedProcMacro(_)) => None, + _ => (|| { + let parse = res.value.0.cast::<T>()?; self.recursion_depth += 1; - let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id)); - let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); + let old_span_map = std::mem::replace( + &mut self.span_map, + SpanMap::ExpansionSpanMap(res.value.1), + ); + let old_file_id = + std::mem::replace(&mut self.current_file_id, macro_file.into()); let mark = Mark { file_id: old_file_id, span_map: old_span_map, bomb: DropBomb::new("expansion mark dropped"), }; Some((mark, parse)) - }) - }), + })(), + }, + err, } } } diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 13af0b0218e..4737b48703d 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -585,9 +585,9 @@ fn find_local_import_locations( #[cfg(test)] mod tests { - use base_db::fixture::WithFixture; use hir_expand::db::ExpandDatabase; use syntax::ast::AstNode; + use test_fixture::WithFixture; use crate::test_db::TestDB; diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 0d95d916ff9..f5324f052e5 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -222,11 +222,10 @@ impl GenericParams { let module = loc.container.module(db); let func_data = db.function_data(id); - // Don't create an `Expander` nor call `loc.source(db)` if not needed since this - // causes a reparse after the `ItemTree` has been created. - let mut expander = Lazy::new(|| { - (module.def_map(db), Expander::new(db, loc.source(db).file_id, module)) - }); + // Don't create an `Expander` if not needed since this + // could cause a reparse after the `ItemTree` has been created due to the spanmap. + let mut expander = + Lazy::new(|| (module.def_map(db), Expander::new(db, loc.id.file_id(), module))); for param in func_data.params.iter() { generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); } diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index fbd754c30f5..0d3014bce27 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -473,8 +473,9 @@ pub fn search_dependencies( #[cfg(test)] mod tests { - use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; + use base_db::{SourceDatabase, Upcast}; use expect_test::{expect, Expect}; + use test_fixture::WithFixture; use crate::{db::DefDatabase, test_db::TestDB, ItemContainerId, Lookup}; diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index ce83cb435e2..4902f24e2e3 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -102,8 +102,10 @@ pub struct ItemScope { // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will // be all resolved to the last one defined if shadowing happens. legacy_macros: FxHashMap<Name, SmallVec<[MacroId; 1]>>, - /// The derive macro invocations in this scope. + /// The attribute macro invocations in this scope. attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>, + /// The macro invocations in this scope. + pub macro_invocations: FxHashMap<AstId<ast::MacroCall>, MacroCallId>, /// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes /// paired with the derive macro invocations for the specific attribute. 
derive_macros: FxHashMap<AstId<ast::Adt>, SmallVec<[DeriveMacroInvocation; 1]>>, @@ -345,6 +347,10 @@ impl ItemScope { self.attr_macros.insert(item, call); } + pub(crate) fn add_macro_invoc(&mut self, call: AstId<ast::MacroCall>, call_id: MacroCallId) { + self.macro_invocations.insert(call, call_id); + } + pub(crate) fn attr_macro_invocs( &self, ) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ { @@ -692,6 +698,7 @@ impl ItemScope { use_imports_values, use_imports_types, use_imports_macros, + macro_invocations, } = self; types.shrink_to_fit(); values.shrink_to_fit(); @@ -709,6 +716,7 @@ impl ItemScope { derive_macros.shrink_to_fit(); extern_crate_decls.shrink_to_fit(); use_decls.shrink_to_fit(); + macro_invocations.shrink_to_fit(); } } diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 16144394e3b..20e4e44339e 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -29,6 +29,9 @@ //! //! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its //! surface syntax. +//! +//! Note that we cannot store [`span::Span`]s inside of this, as typing in an item invalidates its +//! encompassing span! mod lower; mod pretty; @@ -42,7 +45,7 @@ use std::{ }; use ast::{AstNode, HasName, StructKind}; -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, @@ -55,6 +58,7 @@ use la_arena::{Arena, Idx, IdxRange, RawIdx}; use profile::Count; use rustc_hash::FxHashMap; use smallvec::SmallVec; +use span::Span; use stdx::never; use syntax::{ast, match_ast, SyntaxKind}; use triomphe::Arc; @@ -106,11 +110,6 @@ impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> { let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}")); let syntax = db.parse_or_expand(file_id); - if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) - { - // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic - return Default::default(); - } let ctx = lower::Ctx::new(db, file_id); let mut top_attrs = None; @@ -129,6 +128,9 @@ impl ItemTree { ctx.lower_macro_stmts(stmts) }, _ => { + if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) { + return Default::default(); + } panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}"); }, } @@ -282,7 +284,7 @@ struct ItemTreeData { mods: Arena<Mod>, macro_calls: Arena<MacroCall>, macro_rules: Arena<MacroRules>, - macro_defs: Arena<MacroDef>, + macro_defs: Arena<Macro2>, vis: ItemVisibilities, } @@ -515,7 +517,7 @@ mod_items! { Mod in mods -> ast::Module, MacroCall in macro_calls -> ast::MacroCall, MacroRules in macro_rules -> ast::MacroRules, - MacroDef in macro_defs -> ast::MacroDef, + Macro2 in macro_defs -> ast::MacroDef, } macro_rules! impl_index { @@ -748,7 +750,8 @@ pub struct MacroCall { pub path: Interned<ModPath>, pub ast_id: FileAstId<ast::MacroCall>, pub expand_to: ExpandTo, - pub call_site: SyntaxContextId, + // FIXME: We need to move this out. It invalidates the item tree when typing inside the macro call. + pub call_site: Span, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -760,7 +763,7 @@ pub struct MacroRules { /// "Macros 2.0" macro definition. 
#[derive(Debug, Clone, Eq, PartialEq)] -pub struct MacroDef { +pub struct Macro2 { pub name: Name, pub visibility: RawVisibilityId, pub ast_id: FileAstId<ast::MacroDef>, @@ -919,7 +922,7 @@ impl ModItem { | ModItem::Impl(_) | ModItem::Mod(_) | ModItem::MacroRules(_) - | ModItem::MacroDef(_) => None, + | ModItem::Macro2(_) => None, ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)), ModItem::Const(konst) => Some(AssocItem::Const(*konst)), ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)), @@ -945,7 +948,7 @@ impl ModItem { ModItem::Mod(it) => tree[it.index()].ast_id().upcast(), ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(), ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(), - ModItem::MacroDef(it) => tree[it.index()].ast_id().upcast(), + ModItem::Macro2(it) => tree[it.index()].ast_id().upcast(), } } } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 83a2790ce8f..8e2fafe81b5 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -2,7 +2,7 @@ use std::collections::hash_map::Entry; -use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId}; +use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef, HirFileId}; use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use crate::{ @@ -549,7 +549,7 @@ impl<'a> Ctx<'a> { path, ast_id, expand_to, - call_site: span_map.span_for_range(m.syntax().text_range()).ctx, + call_site: span_map.span_for_range(m.syntax().text_range()), }; Some(id(self.data().macro_calls.alloc(res))) } @@ -562,13 +562,13 @@ impl<'a> Ctx<'a> { Some(id(self.data().macro_rules.alloc(res))) } - fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<MacroDef>> { + fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<Macro2>> { let name = m.name().map(|it| it.as_name())?; let ast_id = self.source_ast_id_map.ast_id(m); let visibility = self.lower_visibility(m); - let res = MacroDef { name, ast_id, visibility }; + let res = Macro2 { name, ast_id, visibility }; Some(id(self.data().macro_defs.alloc(res))) } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 244111d202c..6d92fce0727 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -464,8 +464,8 @@ impl Printer<'_> { let MacroRules { name, ast_id: _ } = &self.tree[it]; wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast())); } - ModItem::MacroDef(it) => { - let MacroDef { name, visibility, ast_id: _ } = &self.tree[it]; + ModItem::Macro2(it) => { + let Macro2 { name, visibility, ast_id: _ } = &self.tree[it]; self.print_visibility(*visibility); wln!(self, "macro {} {{ ... 
}}", name.display(self.db.upcast())); } diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs index 96c65b941c1..f97ae0d8e43 100644 --- a/crates/hir-def/src/item_tree/tests.rs +++ b/crates/hir-def/src/item_tree/tests.rs @@ -1,5 +1,5 @@ -use base_db::fixture::WithFixture; use expect_test::{expect, Expect}; +use test_fixture::WithFixture; use crate::{db::DefDatabase, test_db::TestDB}; diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 7cf13a202e0..22ba3aab4e9 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -63,7 +63,7 @@ use std::{ panic::{RefUnwindSafe, UnwindSafe}, }; -use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind}; +use base_db::{impl_intern_key, salsa, CrateId, Edition}; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, attrs::{Attr, AttrId, AttrInput}, @@ -72,24 +72,27 @@ use hir_expand::{ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, eager::expand_eager_macro_input, + impl_intern_lookup, name::Name, - proc_macro::ProcMacroExpander, + proc_macro::{CustomProcMacroExpander, ProcMacroKind}, AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, }; use item_tree::ExternBlock; use la_arena::Idx; use nameres::DefMap; +use span::Span; use stdx::impl_from; use syntax::{ast, AstNode}; -pub use hir_expand::tt; +pub use hir_expand::{tt, Intern, Lookup}; use crate::{ builtin_type::BuiltinType, data::adt::VariantData, + db::DefDatabase, item_tree::{ - Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, + Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules, Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, }, }; @@ -101,7 +104,7 @@ pub struct CrateRootModuleId { } impl CrateRootModuleId { - pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> { + pub fn def_map(&self, db: &dyn DefDatabase) -> Arc<DefMap> { db.crate_def_map(self.krate) } @@ -163,7 +166,7 @@ pub struct ModuleId { } impl ModuleId { - pub fn def_map(self, db: &dyn db::DefDatabase) -> Arc<DefMap> { + pub fn def_map(self, db: &dyn DefDatabase) -> Arc<DefMap> { match self.block { Some(block) => db.block_def_map(block), None => db.crate_def_map(self.krate), @@ -174,7 +177,7 @@ impl ModuleId { self.krate } - pub fn name(self, db: &dyn db::DefDatabase) -> Option<Name> { + pub fn name(self, db: &dyn DefDatabase) -> Option<Name> { let def_map = self.def_map(db); let parent = def_map[self.local_id].parent?; def_map[parent].children.iter().find_map(|(name, module_id)| { @@ -186,7 +189,7 @@ impl ModuleId { }) } - pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> { + pub fn containing_module(self, db: &dyn DefDatabase) -> Option<ModuleId> { self.def_map(db).containing_module(self.local_id) } @@ -263,20 +266,7 @@ impl<N: ItemTreeNode> Hash for AssocItemLoc<N> { macro_rules! 
impl_intern { ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { impl_intern_key!($id); - - impl Intern for $loc { - type ID = $id; - fn intern(self, db: &dyn db::DefDatabase) -> $id { - db.$intern(self) - } - } - - impl Lookup for $id { - type Data = $loc; - fn lookup(&self, db: &dyn db::DefDatabase) -> $loc { - db.$lookup(*self) - } - } + impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup); }; } @@ -376,9 +366,10 @@ pub struct Macro2Id(salsa::InternId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Macro2Loc { pub container: ModuleId, - pub id: ItemTreeId<MacroDef>, + pub id: ItemTreeId<Macro2>, pub expander: MacroExpander, pub allow_internal_unsafe: bool, + pub edition: Edition, } impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2); @@ -389,19 +380,28 @@ pub struct MacroRulesLoc { pub container: ModuleId, pub id: ItemTreeId<MacroRules>, pub expander: MacroExpander, - pub allow_internal_unsafe: bool, - pub local_inner: bool, + pub flags: MacroRulesLocFlags, + pub edition: Edition, } impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules); +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub struct MacroRulesLocFlags: u8 { + const ALLOW_INTERNAL_UNSAFE = 1 << 0; + const LOCAL_INNER = 1 << 1; + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] pub struct ProcMacroId(salsa::InternId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ProcMacroLoc { pub container: CrateRootModuleId, pub id: ItemTreeId<Function>, - pub expander: ProcMacroExpander, + pub expander: CustomProcMacroExpander, pub kind: ProcMacroKind, + pub edition: Edition, } impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro); @@ -510,7 +510,7 @@ pub enum MacroId { impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId); impl MacroId { - pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool { + pub fn is_attribute(self, db: &dyn DefDatabase) -> bool { matches!(self, MacroId::ProcMacroId(it) if it.lookup(db).kind == ProcMacroKind::Attr) } } @@ -569,6 +569,8 @@ pub struct ConstBlockLoc { pub root: hir::ExprId, } +/// Something that holds types, required for the current const arg lowering implementation as they +/// need to be able to query where they are defined. #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub enum TypeOwnerId { FunctionId(FunctionId), @@ -581,9 +583,6 @@ pub enum TypeOwnerId { TypeAliasId(TypeAliasId), ImplId(ImplId), EnumVariantId(EnumVariantId), - // FIXME(const-generic-body): ModuleId should not be a type owner. This needs to be fixed to make `TypeOwnerId` actually - // useful for assigning ids to in type consts. 
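An aside on the `MacroRulesLoc` hunk above: the two `bool` fields (`allow_internal_unsafe`, `local_inner`) are folded into a packed `MacroRulesLocFlags` bitset, keeping the interned location `Copy` and a byte smaller. A minimal, self-contained sketch of the pattern, using the same `bitflags` crate the diff pulls in (the `main` body mirrors the `collect_macro_rules` hunk later in this diff; treat it as illustration, not the real collector):

    use bitflags::bitflags;

    bitflags! {
        // Same declaration as in the hunk above.
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
        pub struct MacroRulesLocFlags: u8 {
            const ALLOW_INTERNAL_UNSAFE = 1 << 0;
            const LOCAL_INNER = 1 << 1;
        }
    }

    fn main() {
        let (local_inner, allow_internal_unsafe) = (true, false);
        // Pack the two booleans into one byte, as the collector now does.
        let mut flags = MacroRulesLocFlags::empty();
        flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner);
        flags.set(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE, allow_internal_unsafe);
        assert!(flags.contains(MacroRulesLocFlags::LOCAL_INNER));
        assert!(!flags.contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE));
    }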
- ModuleId(ModuleId), } impl TypeOwnerId { @@ -597,9 +596,7 @@ impl TypeOwnerId { TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it), TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it), TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it), - TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => { - return None - } + TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None, }) } } @@ -614,8 +611,7 @@ impl_from!( TraitAliasId, TypeAliasId, ImplId, - EnumVariantId, - ModuleId + EnumVariantId for TypeOwnerId ); @@ -713,17 +709,20 @@ pub struct InTypeConstLoc { pub id: AstId<ast::ConstArg>, /// The thing this const arg appears in pub owner: TypeOwnerId, - pub thing: Box<dyn OpaqueInternableThing>, + // FIXME(const-generic-body): The expected type should not be + pub expected_ty: Box<dyn OpaqueInternableThing>, } impl PartialEq for InTypeConstLoc { fn eq(&self, other: &Self) -> bool { - self.id == other.id && self.owner == other.owner && &*self.thing == &*other.thing + self.id == other.id + && self.owner == other.owner + && &*self.expected_ty == &*other.expected_ty } } impl InTypeConstId { - pub fn source(&self, db: &dyn db::DefDatabase) -> ast::ConstArg { + pub fn source(&self, db: &dyn DefDatabase) -> ast::ConstArg { let src = self.lookup(db).id; let file_id = src.file_id; let root = &db.parse_or_expand(file_id); @@ -743,7 +742,7 @@ pub enum GeneralConstId { impl_from!(ConstId, ConstBlockId, InTypeConstId for GeneralConstId); impl GeneralConstId { - pub fn generic_def(self, db: &dyn db::DefDatabase) -> Option<GenericDefId> { + pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> { match self { GeneralConstId::ConstId(it) => Some(it.into()), GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(), @@ -751,7 +750,7 @@ impl GeneralConstId { } } - pub fn name(self, db: &dyn db::DefDatabase) -> String { + pub fn name(self, db: &dyn DefDatabase) -> String { match self { GeneralConstId::ConstId(const_id) => db .const_data(const_id) @@ -934,7 +933,7 @@ pub enum VariantId { impl_from!(EnumVariantId, StructId, UnionId for VariantId); impl VariantId { - pub fn variant_data(self, db: &dyn db::DefDatabase) -> Arc<VariantData> { + pub fn variant_data(self, db: &dyn DefDatabase) -> Arc<VariantData> { match self { VariantId::StructId(it) => db.struct_data(it).variant_data.clone(), VariantId::UnionId(it) => db.union_data(it).variant_data.clone(), @@ -944,7 +943,7 @@ impl VariantId { } } - pub fn file_id(self, db: &dyn db::DefDatabase) -> HirFileId { + pub fn file_id(self, db: &dyn DefDatabase) -> HirFileId { match self { VariantId::EnumVariantId(it) => it.parent.lookup(db).id.file_id(), VariantId::StructId(it) => it.lookup(db).id.file_id(), @@ -961,22 +960,12 @@ impl VariantId { } } -trait Intern { - type ID; - fn intern(self, db: &dyn db::DefDatabase) -> Self::ID; -} - -pub trait Lookup { - type Data; - fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data; -} - pub trait HasModule { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId; + fn module(&self, db: &dyn DefDatabase) -> ModuleId; } impl HasModule for ItemContainerId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match *self { ItemContainerId::ModuleId(it) => it, ItemContainerId::ImplId(it) => it.lookup(db).container, @@ -987,13 +976,13 @@ impl HasModule for ItemContainerId { } impl<N: ItemTreeNode> HasModule for AssocItemLoc<N> { - fn module(&self, db: &dyn 
db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.container.module(db) } } impl HasModule for AdtId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { AdtId::StructId(it) => it.lookup(db).container, AdtId::UnionId(it) => it.lookup(db).container, @@ -1003,13 +992,13 @@ impl HasModule for AdtId { } impl HasModule for ExternCrateId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).container } } impl HasModule for VariantId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { VariantId::EnumVariantId(it) => it.parent.lookup(db).container, VariantId::StructId(it) => it.lookup(db).container, @@ -1019,7 +1008,7 @@ impl HasModule for VariantId { } impl HasModule for MacroId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { MacroId::MacroRulesId(it) => it.lookup(db).container, MacroId::Macro2Id(it) => it.lookup(db).container, @@ -1029,7 +1018,7 @@ impl HasModule for MacroId { } impl HasModule for TypeOwnerId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { TypeOwnerId::FunctionId(it) => it.lookup(db).module(db), TypeOwnerId::StaticId(it) => it.lookup(db).module(db), @@ -1041,13 +1030,12 @@ impl HasModule for TypeOwnerId { TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db), TypeOwnerId::ImplId(it) => it.lookup(db).container, TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container, - TypeOwnerId::ModuleId(it) => *it, } } } impl HasModule for DefWithBodyId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), DefWithBodyId::StaticId(it) => it.lookup(db).module(db), @@ -1059,7 +1047,7 @@ impl HasModule for DefWithBodyId { } impl HasModule for GenericDefId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { GenericDefId::FunctionId(it) => it.lookup(db).module(db), GenericDefId::AdtId(it) => it.module(db), @@ -1074,13 +1062,13 @@ impl HasModule for GenericDefId { } impl HasModule for TypeAliasId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).module(db) } } impl HasModule for TraitId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).container } } @@ -1089,7 +1077,7 @@ impl ModuleDefId { /// Returns the module containing `self` (or `self`, if `self` is itself a module). /// /// Returns `None` if `self` refers to a primitive type. 
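The long run of `&dyn db::DefDatabase` → `&dyn DefDatabase` edits above and below is a mechanical import cleanup (the trait is now imported directly at the top of `lib.rs`), but every `HasModule` impl it touches has the same shape: resolve the interned id to its location, then take the container. A self-contained toy analogue of that delegation, with invented stand-in types (rust-analyzer's real ids are salsa-interned):

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct ModuleId(u32);
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct ItemId(u32);

    // Stand-in for the salsa database: maps interned ids back to their data.
    struct Db {
        item_containers: HashMap<ItemId, ModuleId>,
    }

    trait HasModule {
        fn module(&self, db: &Db) -> ModuleId;
    }

    impl HasModule for ItemId {
        fn module(&self, db: &Db) -> ModuleId {
            // "Look up" the interned location, then take its container,
            // just like `self.lookup(db).container` in the real impls.
            db.item_containers[self]
        }
    }

    fn main() {
        let db = Db { item_containers: HashMap::from([(ItemId(0), ModuleId(7))]) };
        assert_eq!(ItemId(0).module(&db), ModuleId(7));
    }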
- pub fn module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> { + pub fn module(&self, db: &dyn DefDatabase) -> Option<ModuleId> { Some(match self { ModuleDefId::ModuleId(id) => *id, ModuleDefId::FunctionId(id) => id.lookup(db).module(db), @@ -1107,7 +1095,7 @@ impl ModuleDefId { } impl AttrDefId { - pub fn krate(&self, db: &dyn db::DefDatabase) -> CrateId { + pub fn krate(&self, db: &dyn DefDatabase) -> CrateId { match self { AttrDefId::ModuleId(it) => it.krate, AttrDefId::FieldId(it) => it.parent.module(db).krate, @@ -1173,7 +1161,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> { return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); }; - let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx; + let call_site = span_map.span_for_range(self.value.syntax().text_range()); macro_call_as_call_id_with_eager( db, @@ -1203,7 +1191,7 @@ impl<T: AstIdNode> AstIdWithPath<T> { fn macro_call_as_call_id( db: &dyn ExpandDatabase, call: &AstIdWithPath<ast::MacroCall>, - call_site: SyntaxContextId, + call_site: Span, expand_to: ExpandTo, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy, @@ -1215,7 +1203,7 @@ fn macro_call_as_call_id( fn macro_call_as_call_id_with_eager( db: &dyn ExpandDatabase, call: &AstIdWithPath<ast::MacroCall>, - call_site: SyntaxContextId, + call_site: Span, expand_to: ExpandTo, krate: CrateId, resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>, @@ -1245,83 +1233,12 @@ fn macro_call_as_call_id_with_eager( Ok(res) } -pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId { - match id { - MacroId::Macro2Id(it) => { - let loc = it.lookup(db); - - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - let in_file = |m: FileAstId<ast::MacroDef>| InFile::new(loc.id.file_id(), m.upcast()); - MacroDefId { - krate: loc.container.krate, - kind: match loc.expander { - MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)), - MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)), - MacroExpander::BuiltInAttr(it) => { - MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInDerive(it) => { - MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInEager(it) => { - MacroDefKind::BuiltInEager(it, in_file(makro.ast_id)) - } - }, - local_inner: false, - allow_internal_unsafe: loc.allow_internal_unsafe, - } - } - MacroId::MacroRulesId(it) => { - let loc = it.lookup(db); - - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - let in_file = |m: FileAstId<ast::MacroRules>| InFile::new(loc.id.file_id(), m.upcast()); - MacroDefId { - krate: loc.container.krate, - kind: match loc.expander { - MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)), - MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)), - MacroExpander::BuiltInAttr(it) => { - MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInDerive(it) => { - MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInEager(it) => { - MacroDefKind::BuiltInEager(it, in_file(makro.ast_id)) - } - }, - local_inner: loc.local_inner, - allow_internal_unsafe: loc.allow_internal_unsafe, - } - } - MacroId::ProcMacroId(it) => { - let loc = it.lookup(db); - - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - MacroDefId { - krate: loc.container.krate, - kind: 
MacroDefKind::ProcMacro( - loc.expander, - loc.kind, - InFile::new(loc.id.file_id(), makro.ast_id), - ), - local_inner: false, - allow_internal_unsafe: false, - } - } - } -} - fn derive_macro_as_call_id( - db: &dyn db::DefDatabase, + db: &dyn DefDatabase, item_attr: &AstIdWithPath<ast::Adt>, derive_attr_index: AttrId, derive_pos: u32, - call_site: SyntaxContextId, + call_site: Span, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { @@ -1342,7 +1259,7 @@ fn derive_macro_as_call_id( } fn attr_macro_as_call_id( - db: &dyn db::DefDatabase, + db: &dyn DefDatabase, item_attr: &AstIdWithPath<ast::Item>, macro_attr: &Attr, krate: CrateId, @@ -1351,7 +1268,7 @@ fn attr_macro_as_call_id( let arg = match macro_attr.input.as_deref() { Some(AttrInput::TokenTree(tt)) => { let mut tt = tt.as_ref().clone(); - tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span); Some(tt) } @@ -1366,7 +1283,7 @@ fn attr_macro_as_call_id( attr_args: arg.map(Arc::new), invoc_attr_index: macro_attr.id, }, - macro_attr.ctxt, + macro_attr.span, ) } diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs index a3505b65fe7..395b69d284f 100644 --- a/crates/hir-def/src/lower.rs +++ b/crates/hir-def/src/lower.rs @@ -3,7 +3,7 @@ use std::cell::OnceCell; use hir_expand::{ ast_id_map::{AstIdMap, AstIdNode}, - span::{SpanMap, SpanMapRef}, + span_map::{SpanMap, SpanMapRef}, AstId, HirFileId, InFile, }; use syntax::ast; diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 514219ee715..d4798f4507d 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -163,31 +163,43 @@ fn main() { ""; } fn test_assert_expand() { check( r#" -#[rustc_builtin_macro] -macro_rules! assert { - ($cond:expr) => ({ /* compiler built-in */ }); - ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ }) -} - +//- minicore: assert fn main() { assert!(true, "{} {:?}", arg1(a, b, c), arg2); } "#, - expect![[r##" -#[rustc_builtin_macro] -macro_rules! assert { - ($cond:expr) => ({ /* compiler built-in */ }); - ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ }) + expect![[r#" +fn main() { + { + if !(true ) { + $crate::panic::panic_2021!("{} {:?}", arg1(a, b, c), arg2); + } + }; +} +"#]], + ); } +// FIXME: This is the wrong expansion, see FIXME on `builtin_fn_macro::use_panic_2021` +#[test] +fn test_assert_expand_2015() { + check( + r#" +//- minicore: assert +//- /main.rs edition:2015 +fn main() { + assert!(true, "{} {:?}", arg1(a, b, c), arg2); +} +"#, + expect![[r#" fn main() { { if !(true ) { - $crate::panic!("{} {:?}", arg1(a, b, c), arg2); + $crate::panic::panic_2021!("{} {:?}", arg1(a, b, c), arg2); } }; } -"##]], +"#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 9bf2a50d57c..f2046bfbce4 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1218,8 +1218,10 @@ m! { macro_rules! m { ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} ) } -#[doc = " Single Line Doc 1"] -#[doc = "\n MultiLines Doc\n "] fn bar() {} +#[doc = r" Single Line Doc 1"] +#[doc = r" + MultiLines Doc + "] fn bar() {} "##]], ); } @@ -1260,8 +1262,10 @@ m! { macro_rules! 
m { ($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} ) } -#[doc = " 錦瑟無端五十弦,一弦一柱思華年。"] -#[doc = "\n 莊生曉夢迷蝴蝶,望帝春心託杜鵑。\n "] fn bar() {} +#[doc = r" 錦瑟無端五十弦,一弦一柱思華年。"] +#[doc = r" + 莊生曉夢迷蝴蝶,望帝春心託杜鵑。 + "] fn bar() {} "##]], ); } @@ -1281,7 +1285,7 @@ m! { macro_rules! m { ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} ) } -#[doc = " \\ \" \'"] fn bar() {} +#[doc = r#" \ " '"#] fn bar() {} "##]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs index 7e7b4004421..e875950e4e5 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs @@ -18,7 +18,7 @@ macro_rules! m { ($($false:ident)*) => ($false); (double_dollar) => ($$); ($) => (m!($);); - ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*); + ($($t:tt)*) => ($( ${ignore($t)} ${index()} )-*); } m!($); "#, @@ -33,7 +33,7 @@ macro_rules! m { ($($false:ident)*) => ($false); (double_dollar) => ($$); ($) => (m!($);); - ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*); + ($($t:tt)*) => ($( ${ignore($t)} ${index()} )-*); } m!($); "#]], diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index 967b5ad36ba..6560d0ec466 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -77,13 +77,13 @@ fn test_metavar_exprs() { check( r#" macro_rules! m { - ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* ); + ( $( $t:tt )* ) => ( $( ${ignore($t)} -${index()} )-* ); } const _: i32 = m!(a b c); "#, expect![[r#" macro_rules! m { - ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* ); + ( $( $t:tt )* ) => ( $( ${ignore($t)} -${index()} )-* ); } const _: i32 = -0--1--2; "#]], @@ -96,7 +96,7 @@ fn count_basic() { r#" macro_rules! m { ($($t:ident),*) => { - ${count(t)} + ${count($t)} } } @@ -109,7 +109,7 @@ fn test() { expect![[r#" macro_rules! m { ($($t:ident),*) => { - ${count(t)} + ${count($t)} } } @@ -130,9 +130,9 @@ macro_rules! foo { ($( $( $($t:ident)* ),* );*) => { $( { - let depth_none = ${count(t)}; - let depth_zero = ${count(t, 0)}; - let depth_one = ${count(t, 1)}; + let depth_none = ${count($t)}; + let depth_zero = ${count($t, 0)}; + let depth_one = ${count($t, 1)}; } )* } @@ -150,9 +150,9 @@ macro_rules! foo { ($( $( $($t:ident)* ),* );*) => { $( { - let depth_none = ${count(t)}; - let depth_zero = ${count(t, 0)}; - let depth_one = ${count(t, 1)}; + let depth_none = ${count($t)}; + let depth_zero = ${count($t, 0)}; + let depth_one = ${count($t, 1)}; } )* } @@ -160,11 +160,11 @@ macro_rules! foo { fn bar() { { - let depth_none = 6; + let depth_none = 3; let depth_zero = 3; let depth_one = 6; } { - let depth_none = 3; + let depth_none = 1; let depth_zero = 1; let depth_one = 3; } @@ -178,12 +178,12 @@ fn count_depth_out_of_bounds() { check( r#" macro_rules! foo { - ($($t:ident)*) => { ${count(t, 1)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 1)};)* } + ($($t:ident)*) => { ${count($t, 1)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 1)};)* } } macro_rules! bar { - ($($t:ident)*) => { ${count(t, 1024)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 8192)};)* } + ($($t:ident)*) => { ${count($t, 1024)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 8192)};)* } } fn test() { @@ -195,19 +195,21 @@ fn test() { "#, expect![[r#" macro_rules! 
foo { - ($($t:ident)*) => { ${count(t, 1)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 1)};)* } + ($($t:ident)*) => { ${count($t, 1)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 1)};)* } } macro_rules! bar { - ($($t:ident)*) => { ${count(t, 1024)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 8192)};)* } + ($($t:ident)*) => { ${count($t, 1024)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 8192)};)* } } fn test() { - /* error: ${count} out of bounds */; - /* error: ${count} out of bounds */; - /* error: ${count} out of bounds */; - /* error: ${count} out of bounds */; + 2; + 2; + 1;; + 2; + 2; + 1;; } "#]], ); @@ -218,8 +220,8 @@ fn misplaced_count() { check( r#" macro_rules! foo { - ($($t:ident)*) => { $(${count(t)})* }; - ($l:literal) => { ${count(l)} } + ($($t:ident)*) => { $(${count($t)})* }; + ($l:literal) => { ${count($l)} } } fn test() { @@ -229,13 +231,13 @@ fn test() { "#, expect![[r#" macro_rules! foo { - ($($t:ident)*) => { $(${count(t)})* }; - ($l:literal) => { ${count(l)} } + ($($t:ident)*) => { $(${count($t)})* }; + ($l:literal) => { ${count($l)} } } fn test() { - /* error: ${count} misplaced */; - /* error: ${count} misplaced */; + 1 1 1; + 1; } "#]], ); @@ -246,13 +248,13 @@ fn malformed_count() { check( r#" macro_rules! too_many_args { - ($($t:ident)*) => { ${count(t, 1, leftover)} } + ($($t:ident)*) => { ${count($t, 1, leftover)} } } macro_rules! depth_suffixed { - ($($t:ident)*) => { ${count(t, 0usize)} } + ($($t:ident)*) => { ${count($t, 0usize)} } } macro_rules! depth_too_large { - ($($t:ident)*) => { ${count(t, 18446744073709551616)} } + ($($t:ident)*) => { ${count($t, 18446744073709551616)} } } fn test() { @@ -263,13 +265,13 @@ fn test() { "#, expect![[r#" macro_rules! too_many_args { - ($($t:ident)*) => { ${count(t, 1, leftover)} } + ($($t:ident)*) => { ${count($t, 1, leftover)} } } macro_rules! depth_suffixed { - ($($t:ident)*) => { ${count(t, 0usize)} } + ($($t:ident)*) => { ${count($t, 0usize)} } } macro_rules! depth_too_large { - ($($t:ident)*) => { ${count(t, 18446744073709551616)} } + ($($t:ident)*) => { ${count($t, 18446744073709551616)} } } fn test() { @@ -288,7 +290,7 @@ fn count_interaction_with_empty_binding() { r#" macro_rules! m { ($($t:ident),*) => { - ${count(t, 100)} + ${count($t, 100)} } } @@ -299,7 +301,7 @@ fn test() { expect![[r#" macro_rules! 
m { ($($t:ident),*) => { - ${count(t, 100)} + ${count($t, 100)} } } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index be2a503d82b..ee806361237 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,9 +16,15 @@ mod proc_macros; use std::{iter, ops::Range, sync}; -use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase}; +use base_db::SourceDatabase; use expect_test::Expect; -use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt}; +use hir_expand::{ + db::ExpandDatabase, + proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind}, + span_map::SpanMapRef, + InFile, MacroFileId, MacroFileIdExt, +}; +use span::Span; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, @@ -26,10 +32,10 @@ use syntax::{ SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT}, SyntaxNode, T, }; +use test_fixture::WithFixture; use crate::{ db::DefDatabase, - macro_id_to_def_id, nameres::{DefMap, MacroSubNs, ModuleSource}, resolver::HasResolver, src::HasSource, @@ -50,7 +56,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream .into(), ProcMacro { name: "identity_when_valid".into(), - kind: base_db::ProcMacroKind::Attr, + kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), }, )]; @@ -90,7 +96,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream .as_call_id_with_errors(&db, krate, |path| { resolver .resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang)) - .map(|(it, _)| macro_id_to_def_id(&db, it)) + .map(|(it, _)| db.macro_def(it)) }) .unwrap(); let macro_call_id = res.value.unwrap(); @@ -307,16 +313,16 @@ fn pretty_print_macro_expansion( // compile errors. #[derive(Debug)] struct IdentityWhenValidProcMacroExpander; -impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander { +impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { fn expand( &self, subtree: &Subtree, _: Option<&Subtree>, _: &base_db::Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result<Subtree, base_db::ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result<Subtree, ProcMacroExpansionError> { let (parse, _) = ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); if parse.errors().is_empty() { diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 9a9fa0e02b0..52a981fd19e 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -59,8 +59,11 @@ mod tests; use std::{cmp::Ord, ops::Deref}; -use base_db::{CrateId, Edition, FileId, ProcMacroKind}; -use hir_expand::{ast_id_map::FileAstId, name::Name, HirFileId, InFile, MacroCallId, MacroDefId}; +use base_db::{CrateId, Edition, FileId}; +use hir_expand::{ + ast_id_map::FileAstId, name::Name, proc_macro::ProcMacroKind, HirFileId, InFile, MacroCallId, + MacroDefId, +}; use itertools::Itertools; use la_arena::Arena; use profile::Count; @@ -97,7 +100,7 @@ pub struct DefMap { /// contains this block. block: Option<BlockInfo>, /// The modules and their data declared in this crate. - modules: Arena<ModuleData>, + pub modules: Arena<ModuleData>, krate: CrateId, /// The prelude module for this crate. 
This either comes from an import /// marked with the `prelude_import` attribute, or (in the normal case) from @@ -623,8 +626,9 @@ impl DefMap { self.diagnostics.as_slice() } - pub fn recursion_limit(&self) -> Option<u32> { - self.data.recursion_limit + pub fn recursion_limit(&self) -> u32 { + // 128 is the default in rustc + self.data.recursion_limit.unwrap_or(128) } } diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs index a7abf445918..6288b8366bf 100644 --- a/crates/hir-def/src/nameres/attr_resolution.rs +++ b/crates/hir-def/src/nameres/attr_resolution.rs @@ -8,7 +8,6 @@ use crate::{ attr_macro_as_call_id, db::DefDatabase, item_scope::BuiltinShadowMode, - macro_id_to_def_id, nameres::path_resolution::ResolveMode, path::{ModPath, PathKind}, AstIdWithPath, LocalModuleId, UnresolvedMacro, @@ -63,7 +62,7 @@ impl DefMap { &ast_id, attr, self.krate, - macro_id_to_def_id(db, def), + db.macro_def(def), ))) } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index b3a10a3869a..3763bfcbcfa 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -5,7 +5,7 @@ use std::{cmp::Ordering, iter, mem}; -use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId}; +use base_db::{CrateId, Dependency, Edition, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -15,7 +15,7 @@ use hir_expand::{ builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, name::{name, AsName, Name}, - proc_macro::ProcMacroExpander, + proc_macro::CustomProcMacroExpander, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; @@ -23,6 +23,7 @@ use itertools::{izip, Itertools}; use la_arena::Idx; use limit::Limit; use rustc_hash::{FxHashMap, FxHashSet}; +use span::{Span, SyntaxContextId}; use stdx::always; use syntax::{ast, SmolStr}; use triomphe::Arc; @@ -35,9 +36,9 @@ use crate::{ item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports}, item_tree::{ self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, - MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId, + Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, }, - macro_call_as_call_id, macro_call_as_call_id_with_eager, macro_id_to_def_id, + macro_call_as_call_id, macro_call_as_call_id_with_eager, nameres::{ diagnostics::DefDiagnostic, mod_resolution::ModDir, @@ -53,8 +54,9 @@ use crate::{ AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, - MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, - StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc, + MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, + ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, + UnresolvedMacro, UseId, UseLoc, }; static GLOB_RECURSION_LIMIT: Limit = Limit::new(100); @@ -86,16 +88,21 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI // FIXME: a hacky way to create a Name from string. 
let name = tt::Ident { text: it.name.clone(), - span: tt::SpanData { + span: Span { range: syntax::TextRange::empty(syntax::TextSize::new(0)), - anchor: base_db::span::SpanAnchor { + anchor: span::SpanAnchor { file_id: FileId::BOGUS, - ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + ast_id: span::ROOT_ERASED_FILE_AST_ID, }, ctx: SyntaxContextId::ROOT, }, }; - (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) + ( + name.as_name(), + CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId( + idx as u32, + )), + ) }) .collect()) } @@ -222,13 +229,13 @@ enum MacroDirectiveKind { FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo, - call_site: SyntaxContextId, + call_site: Span, }, Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize, - call_site: SyntaxContextId, + call_site: Span, }, Attr { ast_id: AstIdWithPath<ast::Item>, @@ -253,7 +260,7 @@ struct DefCollector<'a> { /// built by the build system, and is the list of proc. macros we can actually expand. It is /// empty when proc. macro support is disabled (in which case we still do name resolution for /// them). - proc_macros: Result<Vec<(Name, ProcMacroExpander)>, Box<str>>, + proc_macros: Result<Vec<(Name, CustomProcMacroExpander)>, Box<str>>, is_proc_macro: bool, from_glob_import: PerNsGlobImports, /// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute. @@ -545,6 +552,8 @@ impl DefCollector<'_> { Edition::Edition2015 => name![rust_2015], Edition::Edition2018 => name![rust_2018], Edition::Edition2021 => name![rust_2021], + // FIXME: update this when rust_2024 exists + Edition::Edition2024 => name![rust_2021], }; let path_kind = match self.def_map.data.edition { @@ -603,18 +612,21 @@ impl DefCollector<'_> { let (expander, kind) = match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { Ok(Some(&(_, expander))) => (expander, kind), - _ => (ProcMacroExpander::dummy(), kind), + _ => (CustomProcMacroExpander::dummy(), kind), }; - let proc_macro_id = - ProcMacroLoc { container: self.def_map.crate_root(), id, expander, kind } - .intern(self.db); + let proc_macro_id = ProcMacroLoc { + container: self.def_map.crate_root(), + id, + expander, + kind, + edition: self.def_map.data.edition, + } + .intern(self.db); self.define_proc_macro(def.name.clone(), proc_macro_id); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); if let ProcMacroKind::CustomDerive { helpers } = def.kind { - crate_data - .exported_derives - .insert(macro_id_to_def_id(self.db, proc_macro_id.into()), helpers); + crate_data.exported_derives.insert(self.db.macro_def(proc_macro_id.into()), helpers); } crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id); } @@ -1125,10 +1137,7 @@ impl DefCollector<'_> { BuiltinShadowMode::Module, Some(subns), ); - resolved_res - .resolved_def - .take_macros() - .map(|it| (it, macro_id_to_def_id(self.db, it))) + resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it))) }; let resolver_def_id = |path| resolver(path).map(|(_, it)| it); @@ -1143,6 +1152,9 @@ impl DefCollector<'_> { resolver_def_id, ); if let Ok(Some(call_id)) = call_id { + self.def_map.modules[directive.module_id] + .scope + .add_macro_invoc(ast_id.ast_id, call_id); push_resolved(directive, call_id); res = ReachedFixedPoint::No; @@ -1299,14 +1311,13 @@ impl DefCollector<'_> { // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute. 
let call_id = attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); - let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id); // If proc attribute macro expansion is disabled, skip expanding it here if !self.db.expand_proc_attr_macros() { self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( directive.module_id, - loc.kind, - loc.def.krate, + self.db.lookup_intern_macro_call(call_id).kind, + def.krate, )); return recollect_without(self); } @@ -1314,14 +1325,14 @@ impl DefCollector<'_> { // Skip #[test]/#[bench] expansion, which would merely result in more memory usage // due to duplicating functions into macro expansions if matches!( - loc.def.kind, + def.kind, MacroDefKind::BuiltInAttr(expander, _) if expander.is_test() || expander.is_bench() ) { return recollect_without(self); } - if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind { + if let MacroDefKind::ProcMacro(exp, ..) = def.kind { if exp.is_dummy() { // If there's no expander for the proc macro (e.g. // because proc macros are disabled, or building the @@ -1329,8 +1340,8 @@ impl DefCollector<'_> { // expansion like we would if it was disabled self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( directive.module_id, - loc.kind, - loc.def.krate, + self.db.lookup_intern_macro_call(call_id).kind, + def.krate, )); return recollect_without(self); @@ -1436,10 +1447,7 @@ impl DefCollector<'_> { BuiltinShadowMode::Module, Some(MacroSubNs::Bang), ); - resolved_res - .resolved_def - .take_macros() - .map(|it| macro_id_to_def_id(self.db, it)) + resolved_res.resolved_def.take_macros().map(|it| self.db.macro_def(it)) }, ); if let Err(UnresolvedMacro { path }) = macro_call_as_call_id { @@ -1645,7 +1653,7 @@ impl ModCollector<'_, '_> { ), ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container), ModItem::MacroRules(id) => self.collect_macro_rules(id, module), - ModItem::MacroDef(id) => self.collect_macro_def(id, module), + ModItem::Macro2(id) => self.collect_macro_def(id, module), ModItem::Impl(imp) => { let impl_id = ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) } @@ -2090,11 +2098,11 @@ impl ModCollector<'_, '_> { // FIXME: a hacky way to create a Name from string. 
name = tt::Ident { text: it.clone(), - span: tt::SpanData { + span: Span { range: syntax::TextRange::empty(syntax::TextSize::new(0)), - anchor: base_db::span::SpanAnchor { + anchor: span::SpanAnchor { file_id: FileId::BOGUS, - ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + ast_id: span::ROOT_ERASED_FILE_AST_ID, }, ctx: SyntaxContextId::ROOT, }, @@ -2136,12 +2144,16 @@ impl ModCollector<'_, '_> { }; let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists(); + let mut flags = MacroRulesLocFlags::empty(); + flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner); + flags.set(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE, allow_internal_unsafe); + let macro_id = MacroRulesLoc { container: module, id: ItemTreeId::new(self.tree_id, id), - local_inner, - allow_internal_unsafe, + flags, expander, + edition: self.def_collector.def_map.data.edition, } .intern(self.def_collector.db); self.def_collector.define_macro_rules( @@ -2152,7 +2164,7 @@ impl ModCollector<'_, '_> { ); } - fn collect_macro_def(&mut self, id: FileItemTreeId<MacroDef>, module: ModuleId) { + fn collect_macro_def(&mut self, id: FileItemTreeId<Macro2>, module: ModuleId) { let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[id]; let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast()); @@ -2207,6 +2219,7 @@ impl ModCollector<'_, '_> { id: ItemTreeId::new(self.tree_id, id), expander, allow_internal_unsafe, + edition: self.def_collector.def_map.data.edition, } .intern(self.def_collector.db); self.def_collector.define_macro_def( @@ -2220,7 +2233,7 @@ impl ModCollector<'_, '_> { Arc::get_mut(&mut self.def_collector.def_map.data) .unwrap() .exported_derives - .insert(macro_id_to_def_id(self.def_collector.db, macro_id.into()), helpers); + .insert(self.def_collector.db.macro_def(macro_id.into()), helpers); } } } @@ -2259,7 +2272,7 @@ impl ModCollector<'_, '_> { Some(MacroSubNs::Bang), ) }) - .map(|it| macro_id_to_def_id(self.def_collector.db, it)) + .map(|it| self.def_collector.db.macro_def(it)) }) }, |path| { @@ -2271,7 +2284,7 @@ impl ModCollector<'_, '_> { BuiltinShadowMode::Module, Some(MacroSubNs::Bang), ); - resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(db, it)) + resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, ) { // FIXME: if there were errors, this mightve been in the eager expansion from an @@ -2279,10 +2292,13 @@ impl ModCollector<'_, '_> { if res.err.is_none() { // Legacy macros need to be expanded immediately, so that any macros they produce // are in scope. 
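Note that this legacy-macro path and the earlier fixed-point loop both now call `add_macro_invoc`, filling in the `macro_invocations` map that the first `ItemScope` hunk of this diff introduced. The bookkeeping itself is a plain map from call-site AST id to resolved expansion id; a toy sketch with invented id types (the real keys and values are `AstId<ast::MacroCall>` and `MacroCallId`):

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct AstId(u32); // stable id of the `m!(...)` call node in its file

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct MacroCallId(u32); // interned id of the resolved expansion

    #[derive(Default)]
    struct ItemScope {
        // One entry per macro call that name resolution resolved, so later
        // consumers can go from the syntax node straight to the expansion.
        macro_invocations: HashMap<AstId, MacroCallId>,
    }

    impl ItemScope {
        fn add_macro_invoc(&mut self, call: AstId, call_id: MacroCallId) {
            self.macro_invocations.insert(call, call_id);
        }
    }

    fn main() {
        let mut scope = ItemScope::default();
        scope.add_macro_invoc(AstId(1), MacroCallId(42));
        assert_eq!(scope.macro_invocations[&AstId(1)], MacroCallId(42));
    }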
- if let Some(val) = res.value { + if let Some(call_id) = res.value { + self.def_collector.def_map.modules[self.module_id] + .scope + .add_macro_invoc(ast_id.ast_id, call_id); self.def_collector.collect_macro_expansion( self.module_id, - val, + call_id, self.macro_depth + 1, container, ); @@ -2296,7 +2312,7 @@ impl ModCollector<'_, '_> { self.def_collector.unresolved_macros.push(MacroDirective { module_id: self.module_id, depth: self.macro_depth + 1, - kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site }, + kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site }, container, }); } @@ -2363,8 +2379,10 @@ impl ModCollector<'_, '_> { #[cfg(test)] mod tests { + use base_db::SourceDatabase; + use test_fixture::WithFixture; + use crate::{db::DefDatabase, test_db::TestDB}; - use base_db::{fixture::WithFixture, SourceDatabase}; use super::*; diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs index 751b7beaac1..c126fdac1c6 100644 --- a/crates/hir-def/src/nameres/proc_macro.rs +++ b/crates/hir-def/src/nameres/proc_macro.rs @@ -19,11 +19,13 @@ pub enum ProcMacroKind { } impl ProcMacroKind { - pub(super) fn to_basedb_kind(&self) -> base_db::ProcMacroKind { + pub(super) fn to_basedb_kind(&self) -> hir_expand::proc_macro::ProcMacroKind { match self { - ProcMacroKind::CustomDerive { .. } => base_db::ProcMacroKind::CustomDerive, - ProcMacroKind::FnLike => base_db::ProcMacroKind::FuncLike, - ProcMacroKind::Attr => base_db::ProcMacroKind::Attr, + ProcMacroKind::CustomDerive { .. } => { + hir_expand::proc_macro::ProcMacroKind::CustomDerive + } + ProcMacroKind::FnLike => hir_expand::proc_macro::ProcMacroKind::FuncLike, + ProcMacroKind::Attr => hir_expand::proc_macro::ProcMacroKind::Attr, } } } diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs index b2ffbbe4c5d..17e82dc16c4 100644 --- a/crates/hir-def/src/nameres/tests.rs +++ b/crates/hir-def/src/nameres/tests.rs @@ -4,8 +4,9 @@ mod macros; mod mod_resolution; mod primitives; -use base_db::{fixture::WithFixture, SourceDatabase}; +use base_db::SourceDatabase; use expect_test::{expect, Expect}; +use test_fixture::WithFixture; use triomphe::Arc; use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB}; diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 78cb78e833e..6efced02718 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -1,11 +1,8 @@ use base_db::{SourceDatabase, SourceDatabaseExt}; +use test_fixture::WithFixture; use triomphe::Arc; -use crate::{ - db::DefDatabase, - nameres::tests::{TestDB, WithFixture}, - AdtId, ModuleDefId, -}; +use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId}; fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { let (mut db, pos) = TestDB::with_position(ra_fixture_initial); diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs index e64fa0b46f1..48fe43450a7 100644 --- a/crates/hir-def/src/nameres/tests/macros.rs +++ b/crates/hir-def/src/nameres/tests/macros.rs @@ -1,6 +1,12 @@ -use super::*; +use expect_test::expect; +use test_fixture::WithFixture; + use itertools::Itertools; +use crate::nameres::tests::check; + +use super::*; + #[test] fn macro_rules_are_globally_visible() { check( diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 
ba0a2c0224a..301391516d6 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -2,7 +2,10 @@ use std::{fmt, hash::BuildHasherDefault}; use base_db::CrateId; -use hir_expand::name::{name, Name}; +use hir_expand::{ + name::{name, Name}, + MacroDefId, +}; use indexmap::IndexMap; use intern::Interned; use rustc_hash::FxHashSet; @@ -406,6 +409,15 @@ impl Resolver { .take_macros_import() } + pub fn resolve_path_as_macro_def( + &self, + db: &dyn DefDatabase, + path: &ModPath, + expected_macro_kind: Option<MacroSubNs>, + ) -> Option<MacroDefId> { + self.resolve_path_as_macro(db, path, expected_macro_kind).map(|(it, _)| db.macro_def(it)) + } + /// Returns a set of names available in the current scope. /// /// Note that this is a somewhat fuzzy concept -- internally, the compiler @@ -589,6 +601,16 @@ impl Resolver { }) } + pub fn type_owner(&self) -> Option<TypeOwnerId> { + self.scopes().find_map(|scope| match scope { + Scope::BlockScope(_) => None, + &Scope::GenericParams { def, .. } => Some(def.into()), + &Scope::ImplDefScope(id) => Some(id.into()), + &Scope::AdtScope(adt) => Some(adt.into()), + Scope::ExprScope(it) => Some(it.owner.into()), + }) + } + pub fn impl_def(&self) -> Option<ImplId> { self.scopes().find_map(|scope| match scope { Scope::ImplDefScope(def) => Some(*def), @@ -1079,7 +1101,6 @@ impl HasResolver for TypeOwnerId { TypeOwnerId::TypeAliasId(it) => it.resolver(db), TypeOwnerId::ImplId(it) => it.resolver(db), TypeOwnerId::EnumVariantId(it) => it.resolver(db), - TypeOwnerId::ModuleId(it) => it.resolver(db), } } } diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index f5803653c73..49688c5ee9c 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -2,7 +2,7 @@ use std::iter; -use hir_expand::{span::SpanMapRef, InFile}; +use hir_expand::{span_map::SpanMapRef, InFile}; use la_arena::ArenaMap; use syntax::ast; use triomphe::Arc; diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index 361bbec4318..e8a8f3ee073 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -15,7 +15,7 @@ doctest = false cov-mark = "2.0.0-pre.1" tracing.workspace = true either.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true la-arena.workspace = true itertools.workspace = true hashbrown.workspace = true @@ -32,6 +32,7 @@ profile.workspace = true tt.workspace = true mbe.workspace = true limit.workspace = true +span.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index be0b72f9dfa..d0d229e1319 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -5,6 +5,8 @@ //! item as an ID. That way, id's don't change unless the set of items itself //! changes. +// FIXME: Consider moving this into the span crate + use std::{ any::type_name, fmt, @@ -17,9 +19,9 @@ use profile::Count; use rustc_hash::FxHasher; use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; -use crate::db; +use crate::db::ExpandDatabase; -pub use base_db::span::ErasedFileAstId; +pub use span::ErasedFileAstId; /// `AstId` points to an AST node in any file. 
/// @@ -27,13 +29,13 @@ pub use base_db::span::ErasedFileAstId; pub type AstId<N> = crate::InFile<FileAstId<N>>; impl<N: AstIdNode> AstId<N> { - pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { + pub fn to_node(&self, db: &dyn ExpandDatabase) -> N { self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) } - pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> { + pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile<N> { crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) } - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> { + pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> AstPtr<N> { db.ast_id_map(self.file_id).get(self.value) } } @@ -41,7 +43,7 @@ impl<N: AstIdNode> AstId<N> { pub type ErasedAstId = crate::InFile<ErasedFileAstId>; impl ErasedAstId { - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { + pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr { db.ast_id_map(self.file_id).get_erased(self.value) } } @@ -197,6 +199,19 @@ impl AstIdMap { FileAstId { raw, covariant: PhantomData } } + pub fn ast_id_for_ptr<N: AstIdNode>(&self, ptr: AstPtr<N>) -> FileAstId<N> { + let ptr = ptr.syntax_node_ptr(); + let hash = hash_ptr(&ptr); + match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) { + Some((&raw, &())) => FileAstId { raw, covariant: PhantomData }, + None => panic!( + "Can't find {:?} in AstIdMap:\n{:?}", + ptr, + self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(), + ), + } + } + pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> { AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index b8fc30c9118..bd0f81881ee 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,19 +1,20 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. 
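A note on the `ast_id_map.rs` hunk above: the new `ast_id_for_ptr` is the reverse of `get` — it hashes a `SyntaxNodePtr`, probes the same hash table the forward direction uses, and panics on a miss (a miss means a caller bug). Behaviourally it is a pointer-keyed lookup; a simplified stand-in without the arena and raw-entry machinery (both struct definitions here are invented):

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct SyntaxNodePtr(u64); // stand-in for the real (kind, text range) pointer

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct FileAstId(u32);

    struct AstIdMap {
        by_ptr: HashMap<SyntaxNodePtr, FileAstId>,
    }

    impl AstIdMap {
        fn ast_id_for_ptr(&self, ptr: SyntaxNodePtr) -> FileAstId {
            // Like the real method, treat a miss as a bug and panic loudly.
            *self
                .by_ptr
                .get(&ptr)
                .unwrap_or_else(|| panic!("can't find {ptr:?} in AstIdMap"))
        }
    }

    fn main() {
        let map = AstIdMap { by_ptr: HashMap::from([(SyntaxNodePtr(3), FileAstId(0))]) };
        assert_eq!(map.ast_id_for_ptr(SyntaxNodePtr(3)), FileAstId(0));
    }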
use std::{fmt, ops}; -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; use cfg::CfgExpr; use either::Either; use intern::Interned; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; +use span::Span; use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; use triomphe::Arc; use crate::{ db::ExpandDatabase, mod_path::ModPath, - span::SpanMapRef, + span_map::SpanMapRef, tt::{self, Subtree}, InFile, }; @@ -52,7 +53,7 @@ impl RawAttrs { id, input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), path: Interned::new(ModPath::from(crate::name!(doc))), - ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx, + span: span_map.span_for_range(comment.syntax().text_range()), }), }); let entries: Arc<[Attr]> = Arc::from_iter(entries); @@ -119,7 +120,7 @@ impl RawAttrs { let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { let tree = Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()), token_trees: attr.to_vec(), }; Attr::from_tt(db, &tree, index.with_cfg_attr(idx)) @@ -176,7 +177,7 @@ pub struct Attr { pub id: AttrId, pub path: Interned<ModPath>, pub input: Option<Interned<AttrInput>>, - pub ctxt: SyntaxContextId, + pub span: Span, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -205,6 +206,7 @@ impl Attr { id: AttrId, ) -> Option<Attr> { let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?); + let span = span_map.span_for_range(ast.syntax().text_range()); let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let value = match lit.kind() { ast::LiteralKind::String(string) => string.value()?.into(), @@ -212,12 +214,12 @@ impl Attr { }; Some(Interned::new(AttrInput::Literal(value))) } else if let Some(tt) = ast.token_tree() { - let tree = syntax_node_to_token_tree(tt.syntax(), span_map); + let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span); Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) } else { None }; - Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx }) + Some(Attr { id, path, input, span }) } fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> { @@ -265,7 +267,7 @@ impl Attr { pub fn parse_path_comma_token_tree<'a>( &'a self, db: &'a dyn ExpandDatabase, - ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> { + ) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> { let args = self.token_tree_value()?; if args.delimiter.kind != DelimiterKind::Parenthesis { @@ -281,7 +283,7 @@ impl Attr { // FIXME: This is necessarily a hack. 
It'd be nice if we could avoid allocation // here or maybe just parse a mod path from a token tree directly let subtree = tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()), token_trees: tts.to_vec(), }; let (parse, span_map) = @@ -293,7 +295,7 @@ impl Attr { return None; } let path = meta.path()?; - let call_site = span_map.span_at(path.syntax().text_range().start()).ctx; + let call_site = span_map.span_at(path.syntax().text_range().start()); Some(( ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?, call_site, diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index de58a495fef..55157abe671 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,12 +1,7 @@ //! Builtin attributes. +use span::{MacroCallId, Span}; -use base_db::{ - span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - FileId, -}; -use syntax::{TextRange, TextSize}; - -use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; +use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind}; macro_rules! register_builtin { ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => { @@ -106,7 +101,12 @@ fn derive_attr_expand( MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => { attr_args } - _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)), + _ => { + return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan { + open: loc.call_site, + close: loc.call_site, + })) + } }; pseudo_derive_attr_expansion(tt, derives, loc.call_site) } @@ -114,20 +114,13 @@ fn derive_attr_expand( pub fn pseudo_derive_attr_expansion( tt: &tt::Subtree, args: &tt::Subtree, - call_site: SyntaxContextId, + call_site: Span, ) -> ExpandResult<tt::Subtree> { let mk_leaf = |char| { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, - span: tt::SpanData { - range: TextRange::empty(TextSize::new(0)), - anchor: base_db::span::SpanAnchor { - file_id: FileId::BOGUS, - ast_id: ROOT_ERASED_FILE_AST_ID, - }, - ctx: call_site, - }, + span: call_site, })) }; diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index 410aa4d289e..8f240ef0732 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -1,20 +1,21 @@ //! Builtin derives. -use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin}; +use base_db::{CrateOrigin, LangCrateOrigin}; use itertools::izip; use rustc_hash::FxHashSet; +use span::{MacroCallId, Span}; use stdx::never; use tracing::debug; use crate::{ hygiene::span_with_def_site_ctxt, name::{AsName, Name}, - span::SpanMapRef, + span_map::SpanMapRef, tt, }; use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; -use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; +use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult}; macro_rules! register_builtin { ( $($trait:ident => $expand:ident),* ) => { @@ -35,7 +36,7 @@ macro_rules! 
register_builtin { $( BuiltinDeriveExpander::$trait => $expand, )* }; - let span = db.lookup_intern_macro_call(id).span(db); + let span = db.lookup_intern_macro_call(id).call_site; let span = span_with_def_site_ctxt(db, span, id); expander(db, id, span, tt, token_map) } @@ -73,16 +74,16 @@ enum VariantShape { Unit, } -fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> { +fn tuple_field_iterator(span: Span, n: usize) -> impl Iterator<Item = tt::Ident> { (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span)) } impl VariantShape { - fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree { + fn as_pattern(&self, path: tt::Subtree, span: Span) -> tt::Subtree { self.as_pattern_map(path, span, |it| quote!(span => #it)) } - fn field_names(&self, span: SpanData) -> Vec<tt::Ident> { + fn field_names(&self, span: Span) -> Vec<tt::Ident> { match self { VariantShape::Struct(s) => s.clone(), VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(), @@ -93,7 +94,7 @@ impl VariantShape { fn as_pattern_map( &self, path: tt::Subtree, - span: SpanData, + span: Span, field_map: impl Fn(&tt::Ident) -> tt::Subtree, ) -> tt::Subtree { match self { @@ -143,11 +144,11 @@ enum AdtShape { } impl AdtShape { - fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> { + fn as_pattern(&self, span: Span, name: &tt::Ident) -> Vec<tt::Subtree> { self.as_pattern_map(name, |it| quote!(span =>#it), span) } - fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> { + fn field_names(&self, span: Span) -> Vec<Vec<tt::Ident>> { match self { AdtShape::Struct(s) => { vec![s.field_names(span)] @@ -166,7 +167,7 @@ impl AdtShape { &self, name: &tt::Ident, field_map: impl Fn(&tt::Ident) -> tt::Subtree, - span: SpanData, + span: Span, ) -> Vec<tt::Subtree> { match self { AdtShape::Struct(s) => { @@ -199,7 +200,7 @@ struct BasicAdtInfo { fn parse_adt( tm: SpanMapRef<'_>, adt: &ast::Adt, - call_site: SpanData, + call_site: Span, ) -> Result<BasicAdtInfo, ExpandError> { let (name, generic_param_list, shape) = match adt { ast::Adt::Struct(it) => ( @@ -245,7 +246,7 @@ fn parse_adt( match this { Some(it) => { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree(it.syntax(), tm) + mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site) } None => { tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) @@ -253,15 +254,15 @@ fn parse_adt( } }; let bounds = match ¶m { - ast::TypeOrConstParam::Type(it) => { - it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) - } + ast::TypeOrConstParam::Type(it) => it + .type_bound_list() + .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)), ast::TypeOrConstParam::Const(_) => None, }; let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = param .ty() - .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm)) + .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm, call_site)) .unwrap_or_else(|| { tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) }); @@ -297,7 +298,7 @@ fn parse_adt( let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); param_type_set.contains(&name).then_some(p) }) - .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) + .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)) .collect(); let name_token = name_to_token(tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) @@ -349,7 +350,7 @@ fn name_to_token( /// 
therefore does not get bound by the derived trait. fn expand_simple_derive( // FIXME: use - invoc_span: SpanData, + invoc_span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, trait_path: tt::Subtree, @@ -397,7 +398,7 @@ fn expand_simple_derive( ExpandResult::ok(expanded) } -fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree { +fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: Span) -> tt::TokenTree { // FIXME: make hygiene works for builtin derive macro // such that $crate can be used here. let cg = db.crate_graph(); @@ -416,7 +417,7 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) fn copy_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -427,7 +428,7 @@ fn copy_expand( fn clone_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -470,13 +471,13 @@ fn clone_expand( } /// This function exists since `quote! {span => => }` doesn't work. -fn fat_arrow(span: SpanData) -> tt::Subtree { +fn fat_arrow(span: Span) -> tt::Subtree { let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span }; quote! {span => #eq> } } /// This function exists since `quote! {span => && }` doesn't work. -fn and_and(span: SpanData) -> tt::Subtree { +fn and_and(span: Span) -> tt::Subtree { let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span }; quote! {span => #and& } } @@ -484,7 +485,7 @@ fn and_and(span: SpanData) -> tt::Subtree { fn default_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -529,7 +530,7 @@ fn default_expand( fn debug_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -607,7 +608,7 @@ fn debug_expand( fn hash_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -660,7 +661,7 @@ fn hash_expand( fn eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -671,7 +672,7 @@ fn eq_expand( fn partial_eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -725,7 +726,7 @@ fn partial_eq_expand( fn self_and_other_patterns( adt: &BasicAdtInfo, name: &tt::Ident, - span: SpanData, + span: Span, ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) { let self_patterns = adt.shape.as_pattern_map( name, @@ -749,7 +750,7 @@ fn self_and_other_patterns( fn ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -760,7 +761,7 @@ fn ord_expand( left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, - span: SpanData, + span: Span, ) -> tt::Subtree { let fat_arrow1 = fat_arrow(span); let fat_arrow2 = fat_arrow(span); @@ -813,7 +814,7 @@ fn ord_expand( fn partial_ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult<tt::Subtree> { @@ -824,7 +825,7 @@ fn partial_ord_expand( left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, - span: 
SpanData, + span: Span, ) -> tt::Subtree { let fat_arrow1 = fat_arrow(span); let fat_arrow2 = fat_arrow(span); diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index c8f04bfee54..f99a8917623 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,13 +1,11 @@ //! Builtin macro -use base_db::{ - span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - AnchoredPath, Edition, FileId, -}; +use base_db::{AnchoredPath, Edition, FileId}; use cfg::CfgExpr; use either::Either; use itertools::Itertools; use mbe::{parse_exprs_with_sep, parse_to_token_tree}; +use span::{Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use syntax::{ ast::{self, AstToken}, SmolStr, @@ -15,10 +13,11 @@ use syntax::{ use crate::{ db::ExpandDatabase, - hygiene::span_with_def_site_ctxt, - name, quote, + hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt}, + name::{self, known}, + quote, tt::{self, DelimSpan}, - ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, + ExpandError, ExpandResult, HirFileIdExt, MacroCallId, }; macro_rules! register_builtin { @@ -44,7 +43,7 @@ macro_rules! register_builtin { $( BuiltinFnLikeExpander::$kind => $expand, )* }; - let span = db.lookup_intern_macro_call(id).span(db); + let span = db.lookup_intern_macro_call(id).call_site; let span = span_with_def_site_ctxt(db, span, id); expander(db, id, tt, span) } @@ -61,7 +60,7 @@ macro_rules! register_builtin { $( EagerExpander::$e_kind => $e_expand, )* }; - let span = db.lookup_intern_macro_call(id).span(db); + let span = db.lookup_intern_macro_call(id).call_site; let span = span_with_def_site_ctxt(db, span, id); expander(db, id, tt, span) } @@ -109,6 +108,7 @@ register_builtin! { (format_args, FormatArgs) => format_args_expand, (const_format_args, ConstFormatArgs) => format_args_expand, (format_args_nl, FormatArgsNl) => format_args_nl_expand, + (quote, Quote) => quote_expand, EAGER: (compile_error, CompileError) => compile_error_expand, @@ -122,7 +122,7 @@ register_builtin! { (option_env, OptionEnv) => option_env_expand } -fn mk_pound(span: SpanData) -> tt::Subtree { +fn mk_pound(span: Span) -> tt::Subtree { crate::quote::IntoTt::to_subtree( vec![crate::tt::Leaf::Punct(crate::tt::Punct { char: '#', @@ -138,7 +138,7 @@ fn module_path_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { // Just return a dummy result. ExpandResult::ok(quote! {span => @@ -150,13 +150,13 @@ fn line_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { // dummy implementation for type-checking purposes // Note that `line!` and `column!` will never be implemented properly, as they are by definition // not incremental ExpandResult::ok(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(span), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: "0u32".into(), span, @@ -168,7 +168,7 @@ fn log_syntax_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { ExpandResult::ok(quote! {span =>}) } @@ -177,7 +177,7 @@ fn trace_macros_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { ExpandResult::ok(quote! 
{span =>}) } @@ -186,7 +186,7 @@ fn stringify_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let pretty = ::tt::pretty(&tt.token_trees); @@ -198,32 +198,38 @@ fn stringify_expand( } fn assert_expand( - _db: &dyn ExpandDatabase, - _id: MacroCallId, + db: &dyn ExpandDatabase, + id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { - let args = parse_exprs_with_sep(tt, ','); + let call_site_span = span_with_call_site_ctxt(db, span, id); + let args = parse_exprs_with_sep(tt, ',', call_site_span); let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let expanded = match &*args { [cond, panic_args @ ..] => { let comma = tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(call_site_span), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, - span, + span: call_site_span, }))], }; let cond = cond.clone(); let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); - quote! {span =>{ + let mac = if use_panic_2021(db, span) { + quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) } + } else { + quote! {call_site_span => #dollar_crate::panic!(##panic_args) } + }; + quote! {call_site_span =>{ if !(#cond) { - #dollar_crate::panic!(##panic_args); + #mac; } }} } - [] => quote! {span =>{}}, + [] => quote! {call_site_span =>{}}, }; ExpandResult::ok(expanded) @@ -233,7 +239,7 @@ fn file_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { // FIXME: RA purposefully lacks knowledge of absolute file names // so just return "". @@ -250,7 +256,7 @@ fn format_args_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { format_args_expand_general(db, id, tt, "", span) } @@ -259,7 +265,7 @@ fn format_args_nl_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { format_args_expand_general(db, id, tt, "\\n", span) } @@ -270,7 +276,7 @@ fn format_args_expand_general( tt: &tt::Subtree, // FIXME: Make use of this so that mir interpretation works properly _end_string: &str, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let pound = mk_pound(span); let mut tt = tt.clone(); @@ -284,7 +290,7 @@ fn asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { // We expand all assembly snippets to `format_args!` invocations to get format syntax // highlighting for them. @@ -314,7 +320,7 @@ fn global_asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { // Expand to nothing (at item-level) ExpandResult::ok(quote! 
{span =>}) @@ -324,7 +330,7 @@ fn cfg_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let loc = db.lookup_intern_macro_call(id); let expr = CfgExpr::parse(tt); @@ -337,19 +343,25 @@ fn panic_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { - let loc: MacroCallLoc = db.lookup_intern_macro_call(id); let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; + let call_site_span = span_with_call_site_ctxt(db, span, id); + + let mac = + if use_panic_2021(db, call_site_span) { known::panic_2021 } else { known::panic_2015 }; + // Expand to a macro call `$crate::panic::panic_{edition}` - let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(span =>#dollar_crate::panic::panic_2021!) - } else { - quote!(span =>#dollar_crate::panic::panic_2015!) - }; + let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!); // Pass the original arguments - call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); + let mut subtree = tt.clone(); + subtree.delimiter = tt::Delimiter { + open: call_site_span, + close: call_site_span, + kind: tt::DelimiterKind::Parenthesis, + }; + call.token_trees.push(tt::TokenTree::Subtree(subtree)); ExpandResult::ok(call) } @@ -357,22 +369,52 @@ fn unreachable_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { - let loc: MacroCallLoc = db.lookup_intern_macro_call(id); - // Expand to a macro call `$crate::panic::unreachable_{edition}` let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; - let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(span =>#dollar_crate::panic::unreachable_2021!) + let call_site_span = span_with_call_site_ctxt(db, span, id); + + let mac = if use_panic_2021(db, call_site_span) { + known::unreachable_2021 } else { - quote!(span =>#dollar_crate::panic::unreachable_2015!) + known::unreachable_2015 }; + // Expand to a macro call `$crate::panic::unreachable_{edition}` + let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!); + // Pass the original arguments - call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); + let mut subtree = tt.clone(); + subtree.delimiter = tt::Delimiter { + open: call_site_span, + close: call_site_span, + kind: tt::DelimiterKind::Parenthesis, + }; + call.token_trees.push(tt::TokenTree::Subtree(subtree)); ExpandResult::ok(call) } +fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool { + // To determine the edition, we check the first span up the expansion + // stack that does not have #[allow_internal_unstable(edition_panic)]. + // (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.) + loop { + let Some(expn) = db.lookup_intern_syntax_context(span.ctx).outer_expn else { + break false; + }; + let expn = db.lookup_intern_macro_call(expn); + // FIXME: Record allow_internal_unstable in the macro def (this has not been done yet because it + // would consume quite a bit of extra memory for all call locs...) + // if let Some(features) = expn.def.allow_internal_unstable { + // if features.iter().any(|&f| f == sym::edition_panic) { + // span = expn.call_site; + // continue; + // } + // } + break expn.def.edition >= Edition::Edition2021; } }
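To make the edition switch above concrete, here is an illustrative sketch (the `"boom"` argument is invented; the `$crate::panic` paths are exactly the ones built by `panic_expand` and `unreachable_expand`):

```rust
// When `use_panic_2021` resolves the relevant edition to >= 2021:
//     panic!("boom")        =>  $crate::panic::panic_2021!("boom")
//     unreachable!("boom")  =>  $crate::panic::unreachable_2021!("boom")
// and on editions 2015/2018:
//     panic!("boom")        =>  $crate::panic::panic_2015!("boom")
//     unreachable!("boom")  =>  $crate::panic::unreachable_2015!("boom")
// In each case the original argument subtree is re-wrapped in a parenthesis
// delimiter whose open/close spans carry `call_site_span`.
```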
+ fn unquote_str(lit: &tt::Literal) -> Option<String> { let lit = ast::make::tokens::literal(&lit.to_string()); let token = ast::String::cast(lit)?; @@ -395,7 +437,7 @@ fn compile_error_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { @@ -412,7 +454,7 @@ fn concat_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let mut err = None; let mut text = String::new(); @@ -459,7 +501,7 @@ fn concat_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let mut bytes = Vec::new(); let mut err = None; @@ -543,7 +585,7 @@ fn concat_idents_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let mut err = None; let mut ident = String::new(); @@ -596,7 +638,7 @@ fn include_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let file_id = match include_input_to_file_id(db, arg_id, tt) { Ok(it) => it, @@ -629,11 +671,11 @@ fn include_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { // FIXME: actually read the file here if the user asked for macro expansion let res = tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(span), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), span, @@ -646,7 +688,7 @@ fn include_str_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let path = match parse_string(tt) { Ok(it) => it, @@ -681,7 +723,7 @@ fn env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let key = match parse_string(tt) { Ok(it) => it, @@ -713,7 +755,7 @@ fn option_env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult<tt::Subtree> { let key = match parse_string(tt) { Ok(it) => it, @@ -729,3 +771,15 @@ fn option_env_expand( ExpandResult::ok(expanded) } + +fn quote_expand( + _db: &dyn ExpandDatabase, + _arg_id: MacroCallId, + _tt: &tt::Subtree, + span: Span, +) -> ExpandResult<tt::Subtree> { + ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), + ExpandError::other("quote! is not implemented"), + ) +} diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs new file mode 100644 index 00000000000..67b7df198e9 --- /dev/null +++ b/crates/hir-expand/src/change.rs @@ -0,0 +1,42 @@ +//! Defines a unit of change that can be applied to the database to get the next +//! state. Changes are transactional. 
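A minimal usage sketch for the `Change` type this new module defines below; the wrapper function and its inputs (`db`, `file_id`, `new_text`) are hypothetical, while the methods are the ones declared in this file:

```rust
use span::FileId;
use triomphe::Arc;

// `db` may be any database implementing `ExpandDatabase + SourceDatabaseExt`.
fn commit_edit(
    db: &mut (impl ExpandDatabase + SourceDatabaseExt),
    file_id: FileId,
    new_text: Arc<str>,
) {
    let mut change = Change::new();
    // Stage the edit; passing `None` instead signals that the file was deleted.
    change.change_file(file_id, Some(new_text));
    // Queries observe nothing until the whole change set is applied at once.
    change.apply(db);
}
```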
+use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot}; +use span::FileId; +use triomphe::Arc; + +use crate::{db::ExpandDatabase, proc_macro::ProcMacros}; + +#[derive(Debug, Default)] +pub struct Change { + pub source_change: FileChange, + pub proc_macros: Option<ProcMacros>, +} + +impl Change { + pub fn new() -> Self { + Self::default() + } + + pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) { + self.source_change.apply(db); + if let Some(proc_macros) = self.proc_macros { + db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); + } + } + + pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) { + self.source_change.change_file(file_id, new_text) + } + + pub fn set_crate_graph(&mut self, graph: CrateGraph) { + self.source_change.set_crate_graph(graph) + } + + pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) { + self.proc_macros = Some(proc_macros); + } + + pub fn set_roots(&mut self, roots: Vec<SourceRoot>) { + self.source_change.set_roots(roots) + } +} diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 32baa6694b4..f7a26e436de 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,14 +1,16 @@ //! Defines database & queries for macro expansion. +use std::sync::OnceLock; + use base_db::{ salsa::{self, debug::DebugQueryTable}, - span::SyntaxContextId, - CrateId, Edition, FileId, SourceDatabase, + CrateId, Edition, FileId, SourceDatabase, VersionReq, }; use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; +use span::{Span, SyntaxContextId}; use syntax::{ ast::{self, HasAttrs}, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, @@ -20,12 +22,17 @@ use crate::{ attrs::{collect_attrs, RawAttrs}, builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, - fixup::{self, SyntaxFixupUndoInfo}, - hygiene::{apply_mark, SyntaxContextData, Transparency}, - span::{RealSpanMap, SpanMap, SpanMapRef}, - tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, - ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId, - MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander, + fixup::{self, reverse_fixups, SyntaxFixupUndoInfo}, + hygiene::{ + apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, + SyntaxContextData, Transparency, + }, + proc_macro::ProcMacros, + span_map::{RealSpanMap, SpanMap, SpanMapRef}, + tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, + CustomProcMacroExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, + HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, + MacroFileId, }; /// Total limit on the number of tokens produced by any macro invocation. 
@@ -39,10 +46,13 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576); #[derive(Debug, Clone, Eq, PartialEq)] /// Old-style `macro_rules` or the new macros 2.0 pub struct DeclarativeMacroExpander { - pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>, + pub mac: mbe::DeclarativeMacro<span::Span>, pub transparency: Transparency, } +// FIXME: Remove this once we drop support for 1.76 +static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new(); + impl DeclarativeMacroExpander { pub fn expand( &self, @@ -50,25 +60,61 @@ impl DeclarativeMacroExpander { tt: tt::Subtree, call_id: MacroCallId, ) -> ExpandResult<tt::Subtree> { + let loc = db.lookup_intern_macro_call(call_id); + let toolchain = &db.crate_graph()[loc.def.krate].toolchain; + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); match self.mac.err() { Some(e) => ExpandResult::new( - tt::Subtree::empty(tt::DelimSpan::DUMMY), + tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), ExpandError::other(format!("invalid macro definition: {e}")), ), None => self .mac - .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency)) + .expand( + &tt, + |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency), + new_meta_vars, + loc.call_site, + ) .map_err(Into::into), } } - pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> { + pub fn expand_unhygienic( + &self, + db: &dyn ExpandDatabase, + tt: tt::Subtree, + krate: CrateId, + call_site: Span, + ) -> ExpandResult<tt::Subtree> { + let toolchain = &db.crate_graph()[krate].toolchain; + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); match self.mac.err() { Some(e) => ExpandResult::new( - tt::Subtree::empty(tt::DelimSpan::DUMMY), + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::other(format!("invalid macro definition: {e}")), ), - None => self.mac.expand(&tt, |_| ()).map_err(Into::into), + None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), } } } @@ -86,11 +132,15 @@ pub enum TokenExpander { /// `derive(Copy)` and such. BuiltInDerive(BuiltinDeriveExpander), /// The thing we love the most here in rust-analyzer -- procedural macros. - ProcMacro(ProcMacroExpander), + ProcMacro(CustomProcMacroExpander), } #[salsa::query_group(ExpandDatabaseStorage)] pub trait ExpandDatabase: SourceDatabase { + /// The proc macros. 
+ #[salsa::input] + fn proc_macros(&self) -> Arc<ProcMacros>; + fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; /// Main public API -- parses a hir file, not caring whether it's a real @@ -164,7 +214,20 @@ pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { } pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> { - Arc::new(RealSpanMap::from_file(db, file_id)) + use syntax::ast::HasModuleItem; + let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; + let ast_id_map = db.ast_id_map(file_id.into()); + let tree = db.parse(file_id).tree(); + pairs.extend( + tree.items() + .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())), + ); + + Arc::new(RealSpanMap::from_file( + file_id, + pairs.into_boxed_slice(), + tree.syntax().text_range().end(), + )) } /// This expands the given macro call, but with different arguments. This is @@ -184,12 +247,13 @@ pub fn expand_speculative( // Build the subtree and token mapping for the speculative args let (mut tt, undo_info) = match loc.kind { - MacroCallKind::FnLike { .. } => { - (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE) - } + MacroCallKind::FnLike { .. } => ( + mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site), + SyntaxFixupUndoInfo::NONE, + ), MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { let censor = censor_for_macro_input(&loc, speculative_args); - let mut fixups = fixup::fixup_syntax(span_map, speculative_args); + let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site); fixups.append.retain(|it, _| match it { syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Token(_) => true, @@ -201,6 +265,7 @@ pub fn expand_speculative( span_map, fixups.append, fixups.remove, + loc.call_site, ), fixups.undo_info, ) @@ -222,8 +287,9 @@ pub fn expand_speculative( }?; match attr.token_tree() { Some(token_tree) => { - let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map); - tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + let mut tree = + syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site); + tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site); Some(tree) } @@ -237,17 +303,16 @@ pub fn expand_speculative( // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. let mut speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) 
=> { - tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; - let call_site = loc.span(db); + tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site); expander.expand( db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref(), - call_site, - call_site, - call_site, + span_with_def_site_ctxt(db, loc.def.span, actual_macro_call), + span_with_call_site_ctxt(db, loc.def.span, actual_macro_call), + span_with_mixed_site_ctxt(db, loc.def.span, actual_macro_call), ) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { @@ -258,9 +323,12 @@ pub fn expand_speculative( let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); expander.expand(db, actual_macro_call, &adt, span_map) } - MacroDefKind::Declarative(it) => { - db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt) - } + MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic( + db, + tt, + loc.def.krate, + loc.call_site, + ), MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), MacroDefKind::BuiltInEager(it, _) => { it.expand(db, actual_macro_call, &tt).map_err(Into::into) @@ -410,23 +478,35 @@ fn macro_arg( MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), }; let (mut tt, undo_info) = match loc.kind { - MacroCallKind::FnLike { .. } => { - (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE) - } + MacroCallKind::FnLike { .. } => ( + mbe::syntax_node_to_token_tree(&syntax, map.as_ref(), loc.call_site), + SyntaxFixupUndoInfo::NONE, + ), MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { let censor = censor_for_macro_input(&loc, &syntax); - let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax); + let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site); fixups.append.retain(|it, _| match it { syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Token(_) => true, }); fixups.remove.extend(censor); + { + let mut tt = mbe::syntax_node_to_token_tree_modified( + &syntax, + map.as_ref(), + fixups.append.clone(), + fixups.remove.clone(), + loc.call_site, + ); + reverse_fixups(&mut tt, &fixups.undo_info); + } ( mbe::syntax_node_to_token_tree_modified( &syntax, map, fixups.append, fixups.remove, + loc.call_site, ), fixups.undo_info, ) @@ -435,7 +515,7 @@ fn macro_arg( if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included - tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + tt.delimiter.kind = tt::DelimiterKind::Invisible; } if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { @@ -497,7 +577,8 @@ fn decl_macro_expander( def_crate: CrateId, id: AstId<ast::Macro>, ) -> Arc<DeclarativeMacroExpander> { - let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; + let crate_data = &db.crate_graph()[def_crate]; + let is_2021 = crate_data.edition >= Edition::Edition2021; let (root, map) = parse_with_map(db, id.file_id); let root = root.syntax_node(); @@ -521,13 +602,29 @@ fn decl_macro_expander( _ => None, } }; + let toolchain = crate_data.toolchain.as_ref(); + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); let (mac, transparency) = match id.to_ptr(db).to_node(&root) { 
ast::Macro::MacroRules(macro_rules) => ( match macro_rules.token_tree() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); - let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + map.as_ref(), + map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()), + ); + let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars); mac } None => mbe::DeclarativeMacro::from_err( @@ -540,8 +637,12 @@ fn decl_macro_expander( ast::Macro::MacroDef(macro_def) => ( match macro_def.body() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); - let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + map.as_ref(), + map.span_for_range(macro_def.macro_token().unwrap().text_range()), + ); + let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars); mac } None => mbe::DeclarativeMacro::from_err( @@ -592,7 +693,7 @@ fn macro_expand( let Some((macro_arg, undo_info)) = value else { return ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(loc.call_site), token_trees: Vec::new(), }), // FIXME: We should make sure to enforce an invariant that invalid macro @@ -651,7 +752,7 @@ fn macro_expand( // Skip checking token tree limit for include! macro call if !loc.def.is_include() { // Set a hard limit for the expanded tt - if let Err(value) = check_tt_count(&tt) { + if let Err(value) = check_tt_count(&tt, loc.call_site) { return value; } } @@ -664,7 +765,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { return ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(loc.call_site), token_trees: Vec::new(), }), // FIXME: We should make sure to enforce an invariant that invalid macro @@ -683,22 +784,19 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A _ => None, }; - let call_site = loc.span(db); let ExpandResult { value: mut tt, err } = expander.expand( db, loc.def.krate, loc.krate, ¯o_arg, attr_arg, - // FIXME - call_site, - call_site, - // FIXME - call_site, + span_with_def_site_ctxt(db, loc.def.span, id), + span_with_call_site_ctxt(db, loc.def.span, id), + span_with_mixed_site_ctxt(db, loc.def.span, id), ); // Set a hard limit for the expanded tt - if let Err(value) = check_tt_count(&tt) { + if let Err(value) = check_tt_count(&tt, loc.call_site) { return value; } @@ -721,12 +819,12 @@ fn token_tree_to_syntax_node( mbe::token_tree_to_syntax_node(tt, entry_point) } -fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> { +fn check_tt_count(tt: &tt::Subtree, call_site: Span) -> Result<(), ExpandResult<Arc<tt::Subtree>>> { let count = tt.count(); if TOKEN_LIMIT.check(count).is_err() { Err(ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(call_site), token_trees: vec![], }), err: Some(ExpandError::other(format!( diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 1e2722e8464..da85c2ec7ac 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -18,7 +18,8 @@ //! //! //! 
See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros> -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; +use span::Span; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; use triomphe::Arc; @@ -26,9 +27,9 @@ use crate::{ ast::{self, AstNode}, db::ExpandDatabase, mod_path::ModPath, - span::SpanMapRef, - EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId, - MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, + span_map::SpanMapRef, + EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern, + MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; pub fn expand_eager_macro_input( @@ -36,7 +37,7 @@ pub fn expand_eager_macro_input( krate: CrateId, macro_call: InFile<ast::MacroCall>, def: MacroDefId, - call_site: SyntaxContextId, + call_site: Span, resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, ) -> ExpandResult<Option<MacroCallId>> { let ast_map = db.ast_id_map(macro_call.file_id); @@ -48,13 +49,14 @@ pub fn expand_eager_macro_input( // When `lazy_expand` is called, its *parent* file must already exist. // Here we store an eager macro id for the argument expanded subtree // for that purpose. - let arg_id = db.intern_macro_call(MacroCallLoc { + let arg_id = MacroCallLoc { def, krate, eager: None, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, call_site, - }); + } + .intern(db); let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); @@ -81,19 +83,19 @@ pub fn expand_eager_macro_input( return ExpandResult { value: None, err }; }; - let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map); + let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site); - subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE; + subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible; let loc = MacroCallLoc { def, krate, - eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })), + eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })), kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, call_site, }; - ExpandResult { value: Some(db.intern_macro_call(loc)), err } + ExpandResult { value: Some(loc.intern(db)), err } } fn lazy_expand( @@ -101,7 +103,7 @@ fn lazy_expand( def: &MacroDefId, macro_call: InFile<ast::MacroCall>, krate: CrateId, - call_site: SyntaxContextId, + call_site: Span, ) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> { let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); @@ -121,7 +123,7 @@ fn eager_macro_recur( mut offset: TextSize, curr: InFile<SyntaxNode>, krate: CrateId, - call_site: SyntaxContextId, + call_site: Span, macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> { let original = curr.value.clone_for_update(); diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index 89f0685d5b6..d0a1bef11c3 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -1,11 +1,8 @@ //! Things to wrap other things in file ids. 
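As a small sketch of the renamed helper in the `files.rs` hunks that follow (`real_file_of` is hypothetical; `InFile`, `InRealFile`, and `original_ast_node_rooted` come from this file):

```rust
use span::FileId;
use syntax::ast;

// Map a node found inside a macro expansion back to the real file it
// originated from, when such an upmapping exists (attribute expansions only).
fn real_file_of(db: &dyn ExpandDatabase, node: InFile<ast::Fn>) -> Option<FileId> {
    let rooted: InRealFile<ast::Fn> = node.original_ast_node_rooted(db)?;
    Some(rooted.file_id)
}
```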
use std::iter; -use base_db::{ - span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}, - FileId, FileRange, -}; use either::Either; +use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}; use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize}; use crate::{db, ExpansionInfo, MacroFileIdExt}; @@ -345,7 +342,7 @@ impl InFile<TextRange> { } impl<N: AstNode> InFile<N> { - pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> { + pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input let file_id = match self.file_id.repr() { diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 11775c531d4..d241d94b8c4 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -1,13 +1,10 @@ //! To make attribute macros work reliably when typing, we need to take care to //! fix up syntax errors in the code we're passing to them. -use base_db::{ - span::{ErasedFileAstId, SpanAnchor, SpanData}, - FileId, -}; -use la_arena::RawIdx; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; +use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER}; +use stdx::never; use syntax::{ ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, @@ -16,7 +13,7 @@ use triomphe::Arc; use tt::Spacing; use crate::{ - span::SpanMapRef, + span_map::SpanMapRef, tt::{Ident, Leaf, Punct, Subtree}, }; @@ -41,27 +38,30 @@ impl SyntaxFixupUndoInfo { pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None }; } -// censoring -> just don't convert the node -// replacement -> censor + append -// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how -// to remove later - -pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups { +// We mark spans with `FIXUP_DUMMY_AST_ID` to indicate that they are fake. +const FIXUP_DUMMY_AST_ID: ErasedFileAstId = FIXUP_ERASED_FILE_AST_ID_MARKER; +const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0)); +// If the fake span has this range end, that means that the range start is an index into the +// `original` list in `SyntaxFixupUndoInfo`. 
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0); + +pub(crate) fn fixup_syntax( + span_map: SpanMapRef<'_>, + node: &SyntaxNode, + call_site: Span, +) -> SyntaxFixups { let mut append = FxHashMap::<SyntaxElement, _>::default(); let mut remove = FxHashSet::<SyntaxNode>::default(); let mut preorder = node.preorder(); let mut original = Vec::new(); - let dummy_range = TextRange::empty(TextSize::new(0)); - // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as - // the index into the replacement vec but only if the end points to !0 - let dummy_anchor = SpanAnchor { - file_id: FileId::from_raw(!0), - ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)), - }; - let fake_span = |range| SpanData { - range: dummy_range, - anchor: dummy_anchor, - ctx: span_map.span_for_range(range).ctx, + let dummy_range = FIXUP_DUMMY_RANGE; + let fake_span = |range| { + let span = span_map.span_for_range(range); + SpanData { + range: dummy_range, + anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, + ctx: span.ctx, + } }; while let Some(event) = preorder.next() { let syntax::WalkEvent::Enter(node) = event else { continue }; @@ -70,15 +70,16 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta if can_handle_error(&node) && has_error_to_handle(&node) { remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid - let original_tree = mbe::syntax_node_to_token_tree(&node, span_map); + let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site); let idx = original.len() as u32; original.push(original_tree); + let span = span_map.span_for_range(node_range); let replacement = Leaf::Ident(Ident { text: "__ra_fixup".into(), span: SpanData { - range: TextRange::new(TextSize::new(idx), TextSize::new(!0)), - anchor: dummy_anchor, - ctx: span_map.span_for_range(node_range).ctx, + range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END), + anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, + ctx: span.ctx, }, }); append.insert(node.clone().into(), vec![replacement]); @@ -299,6 +300,14 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool { pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) { let Some(undo_info) = undo_info.original.as_deref() else { return }; let undo_info = &**undo_info; + #[allow(deprecated)] + if never!( + tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID + || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID + ) { + tt.delimiter.close = SpanData::DUMMY; + tt.delimiter.open = SpanData::DUMMY; + } reverse_fixups_(tt, undo_info); } @@ -310,17 +319,28 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { .filter(|tt| match tt { tt::TokenTree::Leaf(leaf) => { let span = leaf.span(); - span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0) + let is_real_leaf = span.anchor.ast_id != FIXUP_DUMMY_AST_ID; + let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END; + is_real_leaf || is_replaced_node } tt::TokenTree::Subtree(_) => true, }) .flat_map(|tt| match tt { tt::TokenTree::Subtree(mut tt) => { + if tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID + || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID + { + // Even though fixup never creates subtrees with fixup spans, the old proc-macro server + // might copy them if the proc-macro asks for it, so we need to filter those out + // here as well. 
+ return SmallVec::new_const(); + } reverse_fixups_(&mut tt, undo_info); SmallVec::from_const([tt.into()]) } tt::TokenTree::Leaf(leaf) => { - if leaf.span().anchor.file_id == FileId::from_raw(!0) { + if leaf.span().anchor.ast_id == FIXUP_DUMMY_AST_ID { + // we have a fake node here, we need to replace it again with the original let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone(); if original.delimiter.kind == tt::DelimiterKind::Invisible { original.token_trees.into() @@ -328,6 +348,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { SmallVec::from_const([original.into()]) } } else { + // just a normal leaf SmallVec::from_const([leaf.into()]) } } @@ -339,11 +360,12 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { mod tests { use base_db::FileId; use expect_test::{expect, Expect}; + use syntax::TextRange; use triomphe::Arc; use crate::{ fixup::reverse_fixups, - span::{RealSpanMap, SpanMap}, + span_map::{RealSpanMap, SpanMap}, tt, }; @@ -376,12 +398,17 @@ mod tests { fn check(ra_fixture: &str, mut expect: Expect) { let parsed = syntax::SourceFile::parse(ra_fixture); let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); - let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node()); + let fixups = super::fixup_syntax( + span_map.as_ref(), + &parsed.syntax_node(), + span_map.span_for_range(TextRange::empty(0.into())), + ); let mut tt = mbe::syntax_node_to_token_tree_modified( &parsed.syntax_node(), span_map.as_ref(), fixups.append, fixups.remove, + span_map.span_for_range(TextRange::empty(0.into())), ); let actual = format!("{tt}\n"); @@ -401,8 +428,11 @@ mod tests { // the fixed-up + reversed version should be equivalent to the original input // modulo token IDs and `Punct`s' spacing. - let original_as_tt = - mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref()); + let original_as_tt = mbe::syntax_node_to_token_tree( + &parsed.syntax_node(), + span_map.as_ref(), + span_map.span_for_range(TextRange::empty(0.into())), + ); assert!( check_subtree_eq(&tt, &original_as_tt), "different token tree:\n{tt:?}\n\n{original_as_tt:?}" diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index 7b03709aced..57921543c4b 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -2,9 +2,12 @@ //! //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! this moment, this is horribly incomplete and handles only `$crate`. + +// FIXME: Consider moving this into the span crate. 
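The hygiene helpers reworked below all funnel into `span_with_ctxt_from_mark` with a different `Transparency`; a minimal sketch of how an expander uses them (only the wrapper function is invented):

```rust
use span::{MacroCallId, Span};

// Stamp the three standard hygiene contexts onto a span: `Opaque` gives
// def-site hygiene, `Transparent` gives call-site hygiene, and
// `SemiTransparent` gives the mixed-site behaviour used by `macro_rules!`.
fn hygiene_variants(db: &dyn ExpandDatabase, span: Span, id: MacroCallId) -> (Span, Span, Span) {
    (
        span_with_def_site_ctxt(db, span, id),
        span_with_call_site_ctxt(db, span, id),
        span_with_mixed_site_ctxt(db, span, id),
    )
}
```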
+ use std::iter; -use base_db::span::{MacroCallId, SpanData, SyntaxContextId}; +use span::{MacroCallId, Span, SyntaxContextId}; use crate::db::ExpandDatabase; @@ -78,37 +81,29 @@ pub enum Transparency { Opaque, } -pub fn span_with_def_site_ctxt( - db: &dyn ExpandDatabase, - span: SpanData, - expn_id: MacroCallId, -) -> SpanData { +pub fn span_with_def_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span { span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque) } -pub fn span_with_call_site_ctxt( - db: &dyn ExpandDatabase, - span: SpanData, - expn_id: MacroCallId, -) -> SpanData { +pub fn span_with_call_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span { span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent) } pub fn span_with_mixed_site_ctxt( db: &dyn ExpandDatabase, - span: SpanData, + span: Span, expn_id: MacroCallId, -) -> SpanData { +) -> Span { span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent) } fn span_with_ctxt_from_mark( db: &dyn ExpandDatabase, - span: SpanData, + span: Span, expn_id: MacroCallId, transparency: Transparency, -) -> SpanData { - SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span } +) -> Span { + Span { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span } } pub(super) fn apply_mark( @@ -121,7 +116,7 @@ pub(super) fn apply_mark( return apply_mark_internal(db, ctxt, Some(call_id), transparency); } - let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site; + let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx; let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { call_site_ctxt.normalize_to_macros_2_0(db) } else { @@ -154,15 +149,16 @@ fn apply_mark_internal( transparency: Transparency, ) -> SyntaxContextId { let syntax_context_data = db.lookup_intern_syntax_context(ctxt); - let mut opaque = syntax_context_data.opaque; - let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent; + let mut opaque = handle_self_ref(ctxt, syntax_context_data.opaque); + let mut opaque_and_semitransparent = + handle_self_ref(ctxt, syntax_context_data.opaque_and_semitransparent); if transparency >= Transparency::Opaque { let parent = opaque; + // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with + // salsa when interning, so we use a sentinel value that effectively means the current + // syntax context. let new_opaque = SyntaxContextId::SELF_REF; - // But we can't just grab the to be allocated ID either as that would not deduplicate - // things! - // So we need a new salsa store type here ... opaque = db.intern_syntax_context(SyntaxContextData { outer_expn: call_id, outer_transparency: transparency, @@ -174,6 +170,9 @@ fn apply_mark_internal( if transparency >= Transparency::SemiTransparent { let parent = opaque_and_semitransparent; + // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with + // salsa when interning, so we use a sentinel value that effectively means the current + // syntax context. 
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF; opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData { outer_expn: call_id, diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index f5e9cd33f2b..b5197d4c25d 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -6,20 +6,21 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -pub mod db; pub mod ast_id_map; -pub mod name; -pub mod hygiene; +pub mod attrs; pub mod builtin_attr_macro; pub mod builtin_derive_macro; pub mod builtin_fn_macro; -pub mod proc_macro; -pub mod quote; +pub mod db; pub mod eager; -pub mod mod_path; -pub mod attrs; -pub mod span; pub mod files; +pub mod change; +pub mod hygiene; +pub mod mod_path; +pub mod name; +pub mod proc_macro; +pub mod quote; +pub mod span_map; mod fixup; use attrs::collect_attrs; @@ -27,11 +28,9 @@ use triomphe::Arc; use std::{fmt, hash::Hash}; -use base_db::{ - span::{HirFileIdRepr, SpanData, SyntaxContextId}, - CrateId, FileId, FileRange, ProcMacroKind, -}; +use base_db::{CrateId, Edition, FileId}; use either::Either; +use span::{FileRange, HirFileIdRepr, Span, SyntaxContextId}; use syntax::{ ast::{self, AstNode}, SyntaxNode, SyntaxToken, TextRange, TextSize, @@ -42,35 +41,86 @@ use crate::{ builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, - db::TokenExpander, + db::{ExpandDatabase, TokenExpander}, fixup::SyntaxFixupUndoInfo, + hygiene::SyntaxContextData, mod_path::ModPath, - proc_macro::ProcMacroExpander, - span::{ExpansionSpanMap, SpanMap}, + proc_macro::{CustomProcMacroExpander, ProcMacroKind}, + span_map::{ExpansionSpanMap, SpanMap}, }; pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId}; pub use crate::files::{InFile, InMacroFile, InRealFile}; -pub use base_db::span::{HirFileId, MacroCallId, MacroFileId}; pub use mbe::ValueResult; +pub use span::{HirFileId, MacroCallId, MacroFileId}; -pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>; +pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::Span>; pub mod tt { - pub use base_db::span::SpanData; - pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor}; - - pub type Delimiter = ::tt::Delimiter<SpanData>; - pub type DelimSpan = ::tt::DelimSpan<SpanData>; - pub type Subtree = ::tt::Subtree<SpanData>; - pub type Leaf = ::tt::Leaf<SpanData>; - pub type Literal = ::tt::Literal<SpanData>; - pub type Punct = ::tt::Punct<SpanData>; - pub type Ident = ::tt::Ident<SpanData>; - pub type TokenTree = ::tt::TokenTree<SpanData>; + pub use span::Span; + pub use tt::{DelimiterKind, Spacing}; + + pub type Delimiter = ::tt::Delimiter<Span>; + pub type DelimSpan = ::tt::DelimSpan<Span>; + pub type Subtree = ::tt::Subtree<Span>; + pub type Leaf = ::tt::Leaf<Span>; + pub type Literal = ::tt::Literal<Span>; + pub type Punct = ::tt::Punct<Span>; + pub type Ident = ::tt::Ident<Span>; + pub type TokenTree = ::tt::TokenTree<Span>; +} + +#[macro_export] +macro_rules! 
impl_intern_lookup { + ($db:ident, $id:ident, $loc:ident, $intern:ident, $lookup:ident) => { + impl $crate::Intern for $loc { + type Database<'db> = dyn $db + 'db; + type ID = $id; + fn intern<'db>(self, db: &Self::Database<'db>) -> $id { + db.$intern(self) + } + } + + impl $crate::Lookup for $id { + type Database<'db> = dyn $db + 'db; + type Data = $loc; + fn lookup<'db>(&self, db: &Self::Database<'db>) -> $loc { + db.$lookup(*self) + } + } + }; +} + +// ideally these would be defined in base-db, but the orphan rule doesn't let us +pub trait Intern { + type Database<'db>: ?Sized; + type ID; + fn intern<'db>(self, db: &Self::Database<'db>) -> Self::ID; +} + +pub trait Lookup { + type Database<'db>: ?Sized; + type Data; + fn lookup<'db>(&self, db: &Self::Database<'db>) -> Self::Data; } +impl_intern_lookup!( + ExpandDatabase, + MacroCallId, + MacroCallLoc, + intern_macro_call, + lookup_intern_macro_call +); + +impl_intern_lookup!( + ExpandDatabase, + SyntaxContextId, + SyntaxContextData, + intern_syntax_context, + lookup_intern_syntax_context +); + pub type ExpandResult<T> = ValueResult<T, ExpandError>; #[derive(Debug, PartialEq, Eq, Clone, Hash)] @@ -117,18 +167,20 @@ pub struct MacroCallLoc { pub krate: CrateId, /// Some if this is a macro call for an eager macro. Note that this is `None` /// for the eager input macro file. - eager: Option<Box<EagerCallInfo>>, + // FIXME: This seems bad to save in an interned structure + eager: Option<Arc<EagerCallInfo>>, pub kind: MacroCallKind, - pub call_site: SyntaxContextId, + pub call_site: Span, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroDefId { pub krate: CrateId, + pub edition: Edition, pub kind: MacroDefKind, pub local_inner: bool, pub allow_internal_unsafe: bool, - // pub def_site: SyntaxContextId, + pub span: Span, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -138,7 +190,7 @@ pub enum MacroDefKind { BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>), BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>), BuiltInEager(EagerExpander, AstId<ast::Macro>), - ProcMacro(ProcMacroExpander, ProcMacroKind, AstId<ast::Fn>), + ProcMacro(CustomProcMacroExpander, ProcMacroKind, AstId<ast::Fn>), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -179,40 +231,39 @@ pub enum MacroCallKind { pub trait HirFileIdExt { /// Returns the original file of this macro call hierarchy. - fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; + fn original_file(self, db: &dyn ExpandDatabase) -> FileId; /// Returns the original file of this macro call hierarchy while going into the included file if /// one of the calls comes from an `include!``. - fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId; + fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> FileId; /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. 
- fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>>; + fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>; /// Return expansion information if it is a macro-expansion file - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>; + fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo>; - fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase) - -> Option<InFile<ast::Attr>>; + fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>; } impl HirFileIdExt for HirFileId { - fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { + fn original_file(self, db: &dyn ExpandDatabase) -> FileId { let mut file_id = self; loop { match file_id.repr() { HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { - file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id(); + file_id = macro_call_id.lookup(db).kind.file_id(); } } } } - fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId { + fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> FileId { loop { match self.repr() { - base_db::span::HirFileIdRepr::FileId(id) => break id, - base_db::span::HirFileIdRepr::MacroFile(file) => { + HirFileIdRepr::FileId(id) => break id, + HirFileIdRepr::MacroFile(file) => { let loc = db.lookup_intern_macro_call(file.macro_call_id); if loc.def.is_include() { if let Some(eager) = &loc.eager { @@ -231,7 +282,7 @@ impl HirFileIdExt for HirFileId { } } - fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>> { + fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); loop { match call.file_id.repr() { @@ -246,14 +297,11 @@ impl HirFileIdExt for HirFileId { } /// Return expansion information if it is a macro-expansion file - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> { + fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo> { Some(ExpansionInfo::new(db, self.macro_file()?)) } - fn as_builtin_derive_attr_node( - &self, - db: &dyn db::ExpandDatabase, - ) -> Option<InFile<ast::Attr>> { + fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let attr = match loc.def.kind { @@ -265,32 +313,32 @@ impl HirFileIdExt for HirFileId { } pub trait MacroFileIdExt { - fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; + fn expansion_level(self, db: &dyn ExpandDatabase) -> u32; /// If this is a macro call, returns the syntax node of the call. 
- fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode>; + fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>; - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo; + fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo; - fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool; - fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool; + fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool; /// Return whether this file is an include macro - fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool; - fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_eager(&self, db: &dyn ExpandDatabase) -> bool; /// Return whether this file is an attr macro - fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool; /// Return whether this file is the pseudo expansion of the derive attribute. /// See [`crate::builtin_attr_macro::derive_attr_expand`]. - fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool; } impl MacroFileIdExt for MacroFileId { - fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> { + fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> { db.lookup_intern_macro_call(self.macro_call_id).to_node(db) } - fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { + fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 { let mut level = 0; let mut macro_file = self; loop { @@ -305,39 +353,39 @@ impl MacroFileIdExt for MacroFileId { } /// Return expansion information if it is a macro-expansion file - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo { + fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo { ExpansionInfo::new(db, self) } - fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool { matches!( db.lookup_intern_macro_call(self.macro_call_id).def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) ) } - fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool { matches!( db.lookup_intern_macro_call(self.macro_call_id).def.kind, MacroDefKind::BuiltInDerive(..) ) } - fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool { db.lookup_intern_macro_call(self.macro_call_id).def.is_include() } - fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_eager(&self, db: &dyn ExpandDatabase) -> bool { let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) } - fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool { let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { .. 
}) } - fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool { let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); loc.def.is_attribute_derive() } @@ -346,15 +394,15 @@ impl MacroFileIdExt for MacroFileId { impl MacroDefId { pub fn as_lazy_macro( self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, krate: CrateId, kind: MacroCallKind, - call_site: SyntaxContextId, + call_site: Span, ) -> MacroCallId { - db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site }) + MacroCallLoc { def: self, krate, eager: None, kind, call_site }.intern(db) } - pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile<TextRange> { + pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> { match self.kind { MacroDefKind::Declarative(id) | MacroDefKind::BuiltIn(_, id) @@ -419,19 +467,7 @@ impl MacroDefId { } impl MacroCallLoc { - pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData { - let ast_id = self.kind.erased_ast_id(); - let file_id = self.kind.file_id(); - let range = db.ast_id_map(file_id).get_erased(ast_id).text_range(); - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range), - HirFileIdRepr::MacroFile(m) => { - db.parse_macro_expansion(m).value.1.span_at(range.start()) - } - } - } - - pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> { + pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> { match self.kind { MacroCallKind::FnLike { ast_id, .. } => { ast_id.with_value(ast_id.to_node(db).syntax().clone()) @@ -498,7 +534,7 @@ impl MacroCallKind { } } - fn erased_ast_id(&self) -> ErasedFileAstId { + pub fn erased_ast_id(&self) -> ErasedFileAstId { match *self { MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(), MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(), @@ -509,7 +545,7 @@ impl MacroCallKind { /// Returns the original file range that best describes the location of this macro call. /// /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives. - pub fn original_call_range_with_body(self, db: &dyn db::ExpandDatabase) -> FileRange { + pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id().repr() { @@ -534,7 +570,7 @@ impl MacroCallKind { /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives /// get only the specific derive that is being referred to. - pub fn original_call_range(self, db: &dyn db::ExpandDatabase) -> FileRange { + pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id().repr() { @@ -573,7 +609,7 @@ impl MacroCallKind { FileRange { range, file_id } } - fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile<Option<SyntaxNode>> { + fn arg(&self, db: &dyn ExpandDatabase) -> InFile<Option<SyntaxNode>> { match self { MacroCallKind::FnLike { ast_id, .. } => { ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone())) @@ -617,7 +653,7 @@ impl ExpansionInfo { /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. 
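Throughout this hunk, direct query calls like `db.intern_macro_call(loc)` and `db.lookup_intern_macro_call(id)` are replaced by `loc.intern(db)` and `id.lookup(db)` convenience methods. A toy sketch of that two-way interning pattern (hypothetical `Db` with a plain `HashMap` pair; the real helpers are salsa-generated and take `&dyn ExpandDatabase`):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroCallId(u32);

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct MacroCallLoc {
    def: u32,
    krate: u32,
}

#[derive(Default)]
struct Db {
    forward: HashMap<MacroCallLoc, MacroCallId>,
    backward: HashMap<MacroCallId, MacroCallLoc>,
}

impl MacroCallLoc {
    // `loc.intern(db)` hands out a stable id for equal locations.
    fn intern(self, db: &mut Db) -> MacroCallId {
        if let Some(&id) = db.forward.get(&self) {
            return id;
        }
        let id = MacroCallId(db.backward.len() as u32);
        db.forward.insert(self.clone(), id);
        db.backward.insert(id, self);
        id
    }
}

impl MacroCallId {
    // `id.lookup(db)` is the inverse mapping.
    fn lookup(self, db: &Db) -> MacroCallLoc {
        db.backward[&self].clone()
    }
}

fn main() {
    let mut db = Db::default();
    let id = MacroCallLoc { def: 1, krate: 0 }.intern(&mut db);
    assert_eq!(id.lookup(&db), MacroCallLoc { def: 1, krate: 0 });
}
```

The toy uses `&mut Db` for brevity; the real salsa interner works through a shared database reference.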
pub fn map_range_down<'a>( &'a self, - span: SpanData, + span: Span, ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> { let tokens = self .exp_map @@ -630,7 +666,7 @@ impl ExpansionInfo { /// Looks up the span at the given offset. pub fn span_for_offset( &self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, offset: TextSize, ) -> (FileRange, SyntaxContextId) { debug_assert!(self.expanded.value.text_range().contains(offset)); @@ -646,12 +682,12 @@ impl ExpansionInfo { /// Maps up the text range out of the expansion hierarchy back into the original file it's from. pub fn map_node_range_up( &self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, range: TextRange, ) -> Option<(FileRange, SyntaxContextId)> { debug_assert!(self.expanded.value.text_range().contains_range(range)); let mut spans = self.exp_map.spans_for_range(range); - let SpanData { range, anchor, ctx } = spans.next()?; + let Span { range, anchor, ctx } = spans.next()?; let mut start = range.start(); let mut end = range.end(); @@ -676,7 +712,7 @@ impl ExpansionInfo { /// Maps up the text range out of the expansion into its macro call. pub fn map_range_up_once( &self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, token: TextRange, ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> { debug_assert!(self.expanded.value.text_range().contains_range(token)); @@ -705,7 +741,7 @@ impl ExpansionInfo { } } - pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { + pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let arg_tt = loc.kind.arg(db); @@ -718,7 +754,7 @@ impl ExpansionInfo { let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { ( Arc::new(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(loc.call_site), token_trees: Vec::new(), }), SyntaxFixupUndoInfo::NONE, diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index 9534b5039f6..30b8c189f52 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -9,10 +9,11 @@ use crate::{ db::ExpandDatabase, hygiene::{marks_rev, SyntaxContextExt, Transparency}, name::{known, AsName, Name}, - span::SpanMapRef, + span_map::SpanMapRef, }; -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; use smallvec::SmallVec; +use span::SyntaxContextId; use syntax::{ast, AstNode}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index a321f94cd75..3d8d01e2556 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -318,6 +318,10 @@ pub mod known { new_lower_hex, new_upper_hex, from_usize, + panic_2015, + panic_2021, + unreachable_2015, + unreachable_2021, // Components of known path (type name) Iterator, IntoIterator, @@ -384,6 +388,7 @@ pub mod known { log_syntax, module_path, option_env, + quote, std_panic, stringify, trace_macros, diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index de577796831..25c78fade82 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -1,18 +1,64 @@ //!
Proc Macro Expander stub -use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; +use core::fmt; +use std::{panic::RefUnwindSafe, sync}; + +use base_db::{CrateId, Env}; +use rustc_hash::FxHashMap; +use span::Span; use stdx::never; +use syntax::SmolStr; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ProcMacroId(pub u32); + +#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] +pub enum ProcMacroKind { + CustomDerive, + FuncLike, + Attr, +} + +pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { + fn expand( + &self, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, + env: &Env, + def_site: Span, + call_site: Span, + mixed_site: Span, + ) -> Result<tt::Subtree, ProcMacroExpansionError>; +} + +#[derive(Debug)] +pub enum ProcMacroExpansionError { + Panic(String), + /// Things like "proc macro server was killed by OOM". + System(String), +} + +pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>; + +pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>; + +#[derive(Debug, Clone)] +pub struct ProcMacro { + pub name: SmolStr, + pub kind: ProcMacroKind, + pub expander: sync::Arc<dyn ProcMacroExpander>, +} + #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub struct ProcMacroExpander { +pub struct CustomProcMacroExpander { proc_macro_id: ProcMacroId, } const DUMMY_ID: u32 = !0; -impl ProcMacroExpander { +impl CustomProcMacroExpander { pub fn new(proc_macro_id: ProcMacroId) -> Self { assert_ne!(proc_macro_id.0, DUMMY_ID); Self { proc_macro_id } @@ -33,9 +79,9 @@ impl ProcMacroExpander { calling_crate: CrateId, tt: &tt::Subtree, attr_arg: Option<&tt::Subtree>, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, + def_site: Span, + call_site: Span, + mixed_site: Span, ) -> ExpandResult<tt::Subtree> { match self.proc_macro_id { ProcMacroId(DUMMY_ID) => ExpandResult::new( diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index acbde26c8dd..9bdd75f9d22 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -1,6 +1,8 @@ //! A simplified version of quote-crate like quasi quote macro -use base_db::span::SpanData; +use span::Span; + +use crate::name::Name; // A helper macro quote macro // FIXME: @@ -130,12 +132,12 @@ macro_rules! 
quote { } pub(crate) trait IntoTt { - fn to_subtree(self, span: SpanData) -> crate::tt::Subtree; + fn to_subtree(self, span: Span) -> crate::tt::Subtree; fn to_tokens(self) -> Vec<crate::tt::TokenTree>; } impl IntoTt for Vec<crate::tt::TokenTree> { - fn to_subtree(self, span: SpanData) -> crate::tt::Subtree { + fn to_subtree(self, span: Span) -> crate::tt::Subtree { crate::tt::Subtree { delimiter: crate::tt::Delimiter::invisible_spanned(span), token_trees: self, @@ -148,7 +150,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> { } impl IntoTt for crate::tt::Subtree { - fn to_subtree(self, _: SpanData) -> crate::tt::Subtree { + fn to_subtree(self, _: Span) -> crate::tt::Subtree { self } @@ -158,39 +160,39 @@ impl IntoTt for crate::tt::Subtree { } pub(crate) trait ToTokenTree { - fn to_token(self, span: SpanData) -> crate::tt::TokenTree; + fn to_token(self, span: Span) -> crate::tt::TokenTree; } impl ToTokenTree for crate::tt::TokenTree { - fn to_token(self, _: SpanData) -> crate::tt::TokenTree { + fn to_token(self, _: Span) -> crate::tt::TokenTree { self } } impl ToTokenTree for &crate::tt::TokenTree { - fn to_token(self, _: SpanData) -> crate::tt::TokenTree { + fn to_token(self, _: Span) -> crate::tt::TokenTree { self.clone() } } impl ToTokenTree for crate::tt::Subtree { - fn to_token(self, _: SpanData) -> crate::tt::TokenTree { + fn to_token(self, _: Span) -> crate::tt::TokenTree { self.into() } } macro_rules! impl_to_to_tokentrees { - ($($span:ident: $ty:ty => $this:ident $im:block);*) => { + ($($span:ident: $ty:ty => $this:ident $im:block;)*) => { $( impl ToTokenTree for $ty { - fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { + fn to_token($this, $span: Span) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.into(); leaf.into() } } impl ToTokenTree for &$ty { - fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { + fn to_token($this, $span: Span) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.clone().into(); leaf.into() } @@ -209,20 +211,19 @@ impl_to_to_tokentrees! { _span: crate::tt::Ident => self { self }; _span: crate::tt::Punct => self { self }; span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}; - span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}} + span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}; + span: Name => self { crate::tt::Ident{text: self.to_smol_str(), span}}; } #[cfg(test)] mod tests { use crate::tt; - use base_db::{ - span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - FileId, - }; + use base_db::FileId; use expect_test::expect; + use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use syntax::{TextRange, TextSize}; - const DUMMY: tt::SpanData = tt::SpanData { + const DUMMY: tt::Span = tt::Span { range: TextRange::empty(TextSize::new(0)), anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, ctx: SyntaxContextId::ROOT, diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs deleted file mode 100644 index fe476a40feb..00000000000 --- a/crates/hir-expand/src/span.rs +++ /dev/null @@ -1,124 +0,0 @@ -//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well -//! as associating spans with text ranges in a particular file. 
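The `impl_to_to_tokentrees!` change above makes each macro arm end in `;` and adds a `Name` impl, but the underlying pattern is unchanged: every quotable value knows how to turn itself into a token-tree leaf carrying a caller-provided span. A minimal, self-contained sketch of that pattern (toy `Span`/`Leaf` types, not rust-analyzer's real `tt` types):

```rust
#[derive(Clone, Copy, Debug)]
struct Span(u32);

#[derive(Debug)]
enum Leaf {
    Literal { text: String, span: Span },
}

trait ToTokenTree {
    fn to_token(self, span: Span) -> Leaf;
}

// Strings become escaped literal tokens carrying the provided span, mirroring
// what `impl_to_to_tokentrees!` expands to for `&str`/`String` above.
impl ToTokenTree for &str {
    fn to_token(self, span: Span) -> Leaf {
        Leaf::Literal { text: format!("\"{}\"", self.escape_default()), span }
    }
}

fn main() {
    let leaf = "hello\nworld".to_token(Span(0));
    // Prints: Literal { text: "\"hello\\nworld\"", span: Span(0) }
    println!("{leaf:?}");
}
```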
-use base_db::{ - span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - FileId, -}; -use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize}; -use triomphe::Arc; - -use crate::db::ExpandDatabase; - -pub type ExpansionSpanMap = mbe::SpanMap<SpanData>; - -/// Spanmap for a macro file or a real file -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum SpanMap { - /// Spanmap for a macro file - ExpansionSpanMap(Arc<ExpansionSpanMap>), - /// Spanmap for a real file - RealSpanMap(Arc<RealSpanMap>), -} - -#[derive(Copy, Clone)] -pub enum SpanMapRef<'a> { - /// Spanmap for a macro file - ExpansionSpanMap(&'a ExpansionSpanMap), - /// Spanmap for a real file - RealSpanMap(&'a RealSpanMap), -} - -impl mbe::SpanMapper<SpanData> for SpanMap { - fn span_for(&self, range: TextRange) -> SpanData { - self.span_for_range(range) - } -} -impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> { - fn span_for(&self, range: TextRange) -> SpanData { - self.span_for_range(range) - } -} -impl mbe::SpanMapper<SpanData> for RealSpanMap { - fn span_for(&self, range: TextRange) -> SpanData { - self.span_for_range(range) - } -} - -impl SpanMap { - pub fn span_for_range(&self, range: TextRange) -> SpanData { - match self { - Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), - Self::RealSpanMap(span_map) => span_map.span_for_range(range), - } - } - - pub fn as_ref(&self) -> SpanMapRef<'_> { - match self { - Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map), - Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), - } - } -} - -impl SpanMapRef<'_> { - pub fn span_for_range(self, range: TextRange) -> SpanData { - match self { - Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), - Self::RealSpanMap(span_map) => span_map.span_for_range(range), - } - } -} - -#[derive(PartialEq, Eq, Hash, Debug)] -pub struct RealSpanMap { - file_id: FileId, - /// Invariant: Sorted vec over TextSize - // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? - pairs: Box<[(TextSize, ErasedFileAstId)]>, - end: TextSize, -} - -impl RealSpanMap { - /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id). 
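The `RealSpanMap` being deleted here (it now lives in the `span` crate) works off a sorted list of `(offset, erased ast id)` anchors: a text range is made relative to the nearest top-level item starting at or before it. A sketch of that idea with toy types; it uses `partition_point`, which is equivalent to the `binary_search_by(|&(it, _)| it.cmp(&start).then(Ordering::Less)).unwrap_err()` trick in the code below:

```rust
#[derive(Debug, PartialEq)]
struct Span {
    rel_start: u32,
    rel_end: u32,
    anchor_ast_id: u32,
}

struct RealSpanMap {
    // Invariant: sorted by offset; the first entry is (0, ROOT).
    pairs: Vec<(u32, u32)>, // (item offset, erased ast id)
}

impl RealSpanMap {
    fn span_for_range(&self, start: u32, end: u32) -> Span {
        // First pair with offset > start; the anchor is the entry before it.
        let idx = self.pairs.partition_point(|&(off, _)| off <= start);
        let (offset, ast_id) = self.pairs[idx - 1];
        Span { rel_start: start - offset, rel_end: end - offset, anchor_ast_id: ast_id }
    }
}

fn main() {
    let map = RealSpanMap { pairs: vec![(0, 0), (10, 1), (50, 2)] };
    // Range 12..20 falls under the item anchored at offset 10.
    assert_eq!(
        map.span_for_range(12, 20),
        Span { rel_start: 2, rel_end: 10, anchor_ast_id: 1 }
    );
}
```

Anchoring ranges to the enclosing item is what makes the spans incrementality-friendly: edits below one item do not shift the relative ranges of the others.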
- pub fn absolute(file_id: FileId) -> Self { - RealSpanMap { - file_id, - pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]), - end: TextSize::new(!0), - } - } - - pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self { - let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]; - let ast_id_map = db.ast_id_map(file_id.into()); - let tree = db.parse(file_id).tree(); - pairs - .extend(tree.items().map(|item| { - (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase()) - })); - RealSpanMap { - file_id, - pairs: pairs.into_boxed_slice(), - end: tree.syntax().text_range().end(), - } - } - - pub fn span_for_range(&self, range: TextRange) -> SpanData { - assert!( - range.end() <= self.end, - "range {range:?} goes beyond the end of the file {:?}", - self.end - ); - let start = range.start(); - let idx = self - .pairs - .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less)) - .unwrap_err(); - let (offset, ast_id) = self.pairs[idx - 1]; - SpanData { - range: range - offset, - anchor: SpanAnchor { file_id: self.file_id, ast_id }, - ctx: SyntaxContextId::ROOT, - } - } -} diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs new file mode 100644 index 00000000000..4ec6e657f9e --- /dev/null +++ b/crates/hir-expand/src/span_map.rs @@ -0,0 +1,65 @@ +//! Span maps for real files and macro expansions. +use span::Span; +use syntax::TextRange; +use triomphe::Arc; + +pub use span::RealSpanMap; + +pub type ExpansionSpanMap = span::SpanMap<Span>; + +/// Spanmap for a macro file or a real file +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum SpanMap { + /// Spanmap for a macro file + ExpansionSpanMap(Arc<ExpansionSpanMap>), + /// Spanmap for a real file + RealSpanMap(Arc<RealSpanMap>), +} + +#[derive(Copy, Clone)] +pub enum SpanMapRef<'a> { + /// Spanmap for a macro file + ExpansionSpanMap(&'a ExpansionSpanMap), + /// Spanmap for a real file + RealSpanMap(&'a RealSpanMap), +} + +impl mbe::SpanMapper<Span> for SpanMap { + fn span_for(&self, range: TextRange) -> Span { + self.span_for_range(range) + } +} +impl mbe::SpanMapper<Span> for SpanMapRef<'_> { + fn span_for(&self, range: TextRange) -> Span { + self.span_for_range(range) + } +} + +impl SpanMap { + pub fn span_for_range(&self, range: TextRange) -> Span { + match self { + // FIXME: Is it correct for us to only take the span at the start? This feels somewhat + // wrong. The context will be right, but the range could be considered wrong. 
See + // https://github.com/rust-lang/rust/issues/23480, we probably want to fetch the span at + // the start and end, then merge them like rustc does in `Span::to`. + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } + + pub fn as_ref(&self) -> SpanMapRef<'_> { + match self { + Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map), + Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), + } + } +} + +impl SpanMapRef<'_> { + pub fn span_for_range(self, range: TextRange) -> Span { + match self { + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } +} diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index bbcb76a43ff..45e69c59d71 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -14,14 +14,14 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" itertools.workspace = true -arrayvec = "0.7.2" +arrayvec.workspace = true bitflags.workspace = true smallvec.workspace = true ena = "0.14.0" either.workspace = true oorandom = "11.1.3" tracing.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true scoped-tls = "1.0.0" chalk-solve = { version = "0.95.0", default-features = false } chalk-ir = "0.95.0" @@ -54,6 +54,7 @@ project-model = { path = "../project-model" } # local deps test-utils.workspace = true +test-fixture.workspace = true [features] in-rust-tree = ["rustc-dependencies/in-rust-tree"] diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index b395e7f4a81..ac82208708a 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -1,6 +1,7 @@ -use base_db::{fixture::WithFixture, FileId}; +use base_db::FileId; use chalk_ir::Substitution; use hir_def::db::DefDatabase; +use test_fixture::WithFixture; use test_utils::skip_slow_tests; use crate::{ diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 8262edec22c..e295dd8d4e1 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -113,7 +113,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer // FIXME(const-generic-body): We should not get the return type in this way.
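The FIXME carried into the new `span_map.rs` suggests fetching the span at both the range's start and end and merging them, the way rustc's `Span::to` does. A hypothetical illustration of that merge (toy span type; the `merge` helper is not part of this diff):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
struct Span {
    start: u32,
    end: u32,
    anchor: u32, // which item the range is relative to
    ctx: u32,    // hygiene context
}

// Mirrors the shape of rustc's `Span::to`: keep the start of the first span
// and the end of the second, bailing out across anchors or hygiene contexts.
fn merge(start_span: Span, end_span: Span) -> Option<Span> {
    if start_span.anchor != end_span.anchor || start_span.ctx != end_span.ctx {
        return None;
    }
    Some(Span { start: start_span.start, end: end_span.end, ..start_span })
}

fn main() {
    let a = Span { start: 4, end: 8, anchor: 1, ctx: 0 };
    let b = Span { start: 12, end: 20, anchor: 1, ctx: 0 };
    assert_eq!(merge(a, b), Some(Span { start: 4, end: 20, anchor: 1, ctx: 0 }));
}
```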
ctx.return_ty = c .lookup(db.upcast()) - .thing + .expected_ty .box_any() .downcast::<InTypeConstIdMetadata>() .unwrap() @@ -1200,6 +1200,12 @@ impl<'a> InferenceContext<'a> { path: &ModPath, ) -> (Ty, Option<VariantId>) { let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0); + let ty = match ty.kind(Interner) { + TyKind::Alias(AliasTy::Projection(proj_ty)) => { + self.db.normalize_projection(proj_ty.clone(), self.table.trait_env.clone()) + } + _ => ty, + }; match remaining { None => { let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id { diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index 7ff12e5b7f8..acdb540289d 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs @@ -262,7 +262,7 @@ impl InferenceContext<'_> { fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty { let mut expected = self.resolve_ty_shallow(expected); - if is_non_ref_pat(self.body, pat) { + if self.is_non_ref_pat(self.body, pat) { let mut pat_adjustments = Vec::new(); while let Some((inner, _lifetime, mutability)) = expected.as_reference() { pat_adjustments.push(expected.clone()); @@ -496,24 +496,28 @@ impl InferenceContext<'_> { self.infer_expr(expr, &Expectation::has_type(expected.clone())) } -} -fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool { - match &body[pat] { - Pat::Tuple { .. } - | Pat::TupleStruct { .. } - | Pat::Record { .. } - | Pat::Range { .. } - | Pat::Slice { .. } => true, - Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)), - // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented. - Pat::Path(..) => true, - Pat::ConstBlock(..) => true, - Pat::Lit(expr) => !matches!( - body[*expr], - Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..)) - ), - Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false, + fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool { + match &body[pat] { + Pat::Tuple { .. } + | Pat::TupleStruct { .. } + | Pat::Record { .. } + | Pat::Range { .. } + | Pat::Slice { .. } => true, + Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)), + Pat::Path(p) => { + let v = self.resolve_value_path_inner(p, pat.into()); + v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_))) + } + Pat::ConstBlock(..) => false, + Pat::Lit(expr) => !matches!( + body[*expr], + Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..)) + ), + Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => { + false + } + } } } diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index fcfe1a3b5cf..49fb78f67a6 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -40,33 +40,7 @@ impl InferenceContext<'_> { } fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> { - let (value, self_subst) = if let Some(type_ref) = path.type_anchor() { - let last = path.segments().last()?; - - // Don't use `self.make_ty()` here as we need `orig_ns`. 
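The `is_non_ref_pat` change above makes `Pat::Path` consult name resolution instead of unconditionally returning `true`, because a path pattern naming a `const` can itself be of reference type and must not trigger match-ergonomics deref adjustments. The scenario, as plain compilable Rust matching the new `type_mismatch_pat_const_reference` test added later in this diff:

```rust
const TEST_STR: &str = "abcd";

fn main() {
    let s: &str = "abcd";
    match s {
        // `TEST_STR` is a path pattern resolving to a `const` of type `&str`,
        // so it matches `s` directly, without a deref adjustment being
        // inserted the way it would be for a struct or tuple pattern.
        TEST_STR => println!("matched the const reference"),
        _ => println!("no match"),
    }
}
```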
- let ctx = - crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); - let (ty, orig_ns) = ctx.lower_ty_ext(type_ref); - let ty = self.table.insert_type_vars(ty); - let ty = self.table.normalize_associated_types_in(ty); - - let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1); - let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty); - let ty = self.table.insert_type_vars(ty); - let ty = self.table.normalize_associated_types_in(ty); - self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))? - } else { - // FIXME: report error, unresolved first path segment - let value_or_partial = - self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?; - - match value_or_partial { - ResolveValueResult::ValueNs(it, _) => (it, None), - ResolveValueResult::Partial(def, remaining_index, _) => self - .resolve_assoc_item(def, path, remaining_index, id) - .map(|(it, substs)| (it, Some(substs)))?, - } - }; + let (value, self_subst) = self.resolve_value_path_inner(path, id)?; let value_def = match value { ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) { @@ -144,6 +118,41 @@ impl InferenceContext<'_> { Some(ValuePathResolution::GenericDef(value_def, generic_def, substs)) } + pub(super) fn resolve_value_path_inner( + &mut self, + path: &Path, + id: ExprOrPatId, + ) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> { + let (value, self_subst) = if let Some(type_ref) = path.type_anchor() { + let last = path.segments().last()?; + + // Don't use `self.make_ty()` here as we need `orig_ns`. + let ctx = + crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); + let (ty, orig_ns) = ctx.lower_ty_ext(type_ref); + let ty = self.table.insert_type_vars(ty); + let ty = self.table.normalize_associated_types_in(ty); + + let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1); + let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty); + let ty = self.table.insert_type_vars(ty); + let ty = self.table.normalize_associated_types_in(ty); + self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))? 
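The code being factored out here (and reintroduced below as `resolve_value_path_inner`) handles type-anchored value paths: the `<T as Trait>` anchor is lowered to a type first, then the trailing segment is resolved as an associated item of that type. In plain Rust, the kind of path that branch resolves (toy trait and struct for illustration):

```rust
trait Make {
    fn make() -> Self;
}

struct S(u32);

impl Make for S {
    fn make() -> Self {
        S(42)
    }
}

fn main() {
    // A type-anchored value path: `<S as Make>` is lowered to a type first,
    // then `make` is looked up as an associated item of that type.
    let s = <S as Make>::make();
    println!("{}", s.0);
}
```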
+ } else { + // FIXME: report error, unresolved first path segment + let value_or_partial = + self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?; + + match value_or_partial { + ResolveValueResult::ValueNs(it, _) => (it, None), + ResolveValueResult::Partial(def, remaining_index, _) => self + .resolve_assoc_item(def, path, remaining_index, id) + .map(|(it, substs)| (it, Some(substs)))?, + } + }; + Some((value, self_subst)) + } + fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) { let predicates = self.db.generic_predicates(def); for predicate in predicates.iter() { diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index 5e3a86c80e3..9937113685c 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -1,9 +1,9 @@ use std::collections::HashMap; -use base_db::fixture::WithFixture; use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; +use test_fixture::WithFixture; use triomphe::Arc; use crate::{ diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index c86fe9adff8..97c4a741ff2 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -113,7 +113,9 @@ pub struct TyLoweringContext<'a> { pub db: &'a dyn HirDatabase, resolver: &'a Resolver, in_binders: DebruijnIndex, - owner: TypeOwnerId, + // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases + // where expected + owner: Option<TypeOwnerId>, /// Note: Conceptually, it's thinkable that we could be in a location where /// some type params should be represented as placeholders, and others /// should be converted to variables. I think in practice, this isn't @@ -127,6 +129,14 @@ pub struct TyLoweringContext<'a> { impl<'a> TyLoweringContext<'a> { pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self { + Self::new_maybe_unowned(db, resolver, Some(owner)) + } + + pub fn new_maybe_unowned( + db: &'a dyn HirDatabase, + resolver: &'a Resolver, + owner: Option<TypeOwnerId>, + ) -> Self { let impl_trait_mode = ImplTraitLoweringState::Disallowed; let type_param_mode = ParamLoweringMode::Placeholder; let in_binders = DebruijnIndex::INNERMOST; @@ -213,10 +223,11 @@ impl<'a> TyLoweringContext<'a> { } pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const { + let Some(owner) = self.owner else { return unknown_const(const_type) }; const_or_path_to_chalk( self.db, self.resolver, - self.owner, + owner, const_type, const_ref, self.type_param_mode, @@ -1768,10 +1779,11 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> { let resolver = t.resolver(db.upcast()); let ctx = TyLoweringContext::new(db, &resolver, t.into()) .with_type_param_mode(ParamLoweringMode::Variable); - if db.type_alias_data(t).is_extern { + let type_alias_data = db.type_alias_data(t); + if type_alias_data.is_extern { Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)) } else { - let type_ref = &db.type_alias_data(t).type_ref; + let type_ref = &type_alias_data.type_ref; let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error)); make_binders(db, &generics, inner) } @@ -2042,7 +2054,7 @@ pub(crate) fn const_or_path_to_chalk( .intern_in_type_const(InTypeConstLoc { id: it, owner, - thing: Box::new(InTypeConstIdMetadata(expected_ty.clone())), + expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())), }) .into(); intern_const_scalar( diff 
--git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs index ff30dc6dade..b0f929279a5 100644 --- a/crates/hir-ty/src/mir/eval/tests.rs +++ b/crates/hir-ty/src/mir/eval/tests.rs @@ -1,6 +1,7 @@ -use base_db::{fixture::WithFixture, FileId}; +use base_db::FileId; use hir_def::db::DefDatabase; use syntax::{TextRange, TextSize}; +use test_fixture::WithFixture; use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution}; diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index 1446e83fa88..c8cc61cc21b 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -12,7 +12,7 @@ mod diagnostics; use std::{collections::HashMap, env}; -use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt}; +use base_db::{FileRange, SourceDatabaseExt}; use expect_test::Expect; use hir_def::{ body::{Body, BodySourceMap, SyntheticSyntax}, @@ -30,6 +30,7 @@ use syntax::{ ast::{self, AstNode, HasName}, SyntaxNode, }; +use test_fixture::WithFixture; use tracing_subscriber::{layer::SubscriberExt, Registry}; use tracing_tree::HierarchicalLayer; use triomphe::Arc; diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs index bb15ca8c436..82d934009f3 100644 --- a/crates/hir-ty/src/tests/incremental.rs +++ b/crates/hir-ty/src/tests/incremental.rs @@ -1,4 +1,5 @@ -use base_db::{fixture::WithFixture, SourceDatabaseExt}; +use base_db::SourceDatabaseExt; +use test_fixture::WithFixture; use triomphe::Arc; use crate::{db::HirDatabase, test_db::TestDB}; @@ -9,11 +10,10 @@ use super::visit_module; fn typing_whitespace_inside_a_function_should_not_invalidate_types() { let (mut db, pos) = TestDB::with_position( " - //- /lib.rs - fn foo() -> i32 { - $01 + 1 - } - ", +//- /lib.rs +fn foo() -> i32 { + $01 + 1 +}", ); { let events = db.log_executed(|| { @@ -27,12 +27,11 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { } let new_text = " - fn foo() -> i32 { - 1 - + - 1 - } - "; +fn foo() -> i32 { + 1 + + + 1 +}"; db.set_file_text(pos.file_id, Arc::from(new_text)); @@ -47,3 +46,55 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { assert!(!format!("{events:?}").contains("infer"), "{events:#?}") } } + +#[test] +fn typing_inside_a_function_should_not_invalidate_types_in_another() { + let (mut db, pos) = TestDB::with_position( + " +//- /lib.rs +fn foo() -> f32 { + 1.0 + 2.0 +} +fn bar() -> i32 { + $01 + 1 +} +fn baz() -> i32 { + 1 + 1 +}", + ); + { + let events = db.log_executed(|| { + let module = db.module_for_file(pos.file_id); + let crate_def_map = module.def_map(&db); + visit_module(&db, &crate_def_map, module.local_id, &mut |def| { + db.infer(def); + }); + }); + assert!(format!("{events:?}").contains("infer")) + } + + let new_text = " +fn foo() -> f32 { + 1.0 + 2.0 +} +fn bar() -> i32 { + 53 +} +fn baz() -> i32 { + 1 + 1 +} +"; + + db.set_file_text(pos.file_id, Arc::from(new_text)); + + { + let events = db.log_executed(|| { + let module = db.module_for_file(pos.file_id); + let crate_def_map = module.def_map(&db); + visit_module(&db, &crate_def_map, module.local_id, &mut |def| { + db.infer(def); + }); + }); + assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}") + } +} diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 5d7bab09c26..548f782f4f2 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -1153,3 +1153,75 @@ fn main() { "#, ); } + +#[test] +fn 
generic_alias_with_qualified_path() { + check_types( + r#" +type Wrap<T> = T; + +struct S; + +trait Schematic { + type Props; +} + +impl Schematic for S { + type Props = X; +} + +enum X { + A { cool: u32, stuff: u32 }, + B, +} + +fn main() { + let wrapped = Wrap::<<S as Schematic>::Props>::A { + cool: 100, + stuff: 100, + }; + + if let Wrap::<<S as Schematic>::Props>::A { cool, ..} = &wrapped {} + //^^^^ &u32 +} +"#, + ); +} + +#[test] +fn type_mismatch_pat_const_reference() { + check_no_mismatches( + r#" +const TEST_STR: &'static str = "abcd"; + +fn main() { + let s = "abcd"; + match s { + TEST_STR => (), + _ => (), + } +} + + "#, + ); + check( + r#" +struct Foo<T>(T); + +impl<T> Foo<T> { + const TEST_I32_REF: &'static i32 = &3; + const TEST_I32: i32 = 3; +} + +fn main() { + match &6 { + Foo::<i32>::TEST_I32_REF => (), + Foo::<i32>::TEST_I32 => (), + //^^^^^^^^^^^^^^^^^^^^ expected &i32, got i32 + _ => (), + } +} + + "#, + ); +} diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 4c1dfbc294e..063365a16bc 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -12,9 +12,9 @@ rust-version.workspace = true doctest = false [dependencies] -rustc-hash = "1.1.0" +rustc-hash.workspace = true either.workspace = true -arrayvec = "0.7.2" +arrayvec.workspace = true itertools.workspace = true smallvec.workspace = true triomphe.workspace = true diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 18585335318..d60d20f5b7e 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -11,7 +11,7 @@ use hir_def::{ }; use hir_expand::{ name::Name, - span::{RealSpanMap, SpanMapRef}, + span_map::{RealSpanMap, SpanMapRef}, }; use hir_ty::db::HirDatabase; use syntax::{ast, AstNode}; diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index d98e3decd21..7204868464b 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -24,6 +24,6 @@ pub use hir_def::db::{ pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, - ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, + ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacrosQuery, RealSpanMapQuery, }; pub use hir_ty::db::*; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index e0230fa3761..85762603ed1 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -37,7 +37,7 @@ mod display; use std::{iter, mem::discriminant, ops::ControlFlow}; use arrayvec::ArrayVec; -use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind}; +use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId}; use either::Either; use hir_def::{ body::{BodyDiagnostic, SyntheticSyntax}, @@ -47,7 +47,6 @@ use hir_def::{ item_tree::ItemTreeNode, lang_item::LangItemTarget, layout::{self, ReprOptions, TargetDataLayout}, - macro_id_to_def_id, nameres::{self, diagnostics::DefDiagnostic}, path::ImportAlias, per_ns::PerNs, @@ -59,7 +58,7 @@ use hir_def::{ Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, }; -use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind}; +use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind}; use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, @@ -125,8 +124,10 @@ pub 
use { }, hir_expand::{ attrs::{Attr, AttrId}, + change::Change, hygiene::{marks_rev, SyntaxContextExt}, name::{known, Name}, + proc_macro::ProcMacros, tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, MacroFileIdExt, }, @@ -146,7 +147,7 @@ use { hir_def::path::Path, hir_expand::{ name::AsName, - span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, + span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, }, }; @@ -808,7 +809,7 @@ impl Module { } fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) { - let id = macro_id_to_def_id(db.upcast(), m.id); + let id = db.macro_def(m.id); if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { if let Some(e) = expander.mac.err() { let Some(ast) = id.ast_id().left() else { @@ -2784,9 +2785,13 @@ impl AsAssocItem for DefWithBody { } } -fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem> +fn as_assoc_item<'db, ID, DEF, CTOR, AST>( + db: &(dyn HirDatabase + 'db), + ctor: CTOR, + id: ID, +) -> Option<AssocItem> where - ID: Lookup<Data = AssocItemLoc<AST>>, + ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<AST>>, DEF: From<ID>, CTOR: FnOnce(DEF) -> AssocItem, AST: ItemTreeNode, @@ -3520,7 +3525,7 @@ impl Impl { let src = self.source(db)?; let macro_file = src.file_id.macro_file()?; - let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let loc = macro_file.macro_call_id.lookup(db.upcast()); let (derive_attr, derive_index) = match loc.kind { MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { let module_id = self.id.lookup(db.upcast()).container; diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index dcf8ba27a68..95f6cb9bd42 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -13,7 +13,6 @@ use either::Either; use hir_def::{ hir::Expr, lower::LowerCtx, - macro_id_to_def_id, nameres::MacroSubNs, resolver::{self, HasResolver, Resolver, TypeNs}, type_ref::Mutability, @@ -341,9 +340,7 @@ impl<'db> SemanticsImpl<'db> { let macro_call = InFile::new(file_id, actual_macro_call); let krate = resolver.krate(); let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { - resolver - .resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang)) - .map(|(it, _)| macro_id_to_def_id(self.db.upcast(), it)) + resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang)) })?; hir_expand::db::expand_speculative( self.db.upcast(), @@ -512,8 +509,7 @@ impl<'db> SemanticsImpl<'db> { } /// Descend the token into its macro call if it is part of one, returning the tokens in the - /// expansion that it is associated with. If `offset` points into the token's range, it will - /// be considered for the mapping in case of inline format args. + /// expansion that it is associated with. pub fn descend_into_macros( &self, mode: DescendPreference, @@ -674,7 +670,7 @@ impl<'db> SemanticsImpl<'db> { _ => 0, }; // FIXME: here, the attribute's text range is used to strip away all - // entries from the start of the attribute "list" up the the invoking + // entries from the start of the attribute "list" up the invoking // attribute. But in // ``` // mod foo { @@ -850,7 +846,7 @@ impl<'db> SemanticsImpl<'db> { /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. 
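The `as_assoc_item` signature below changes its `Lookup` bound to pin the lookup's database to `dyn DefDatabase + 'db` via a lifetime-parameterized associated type. A compilable toy of that bound shape, under the assumption that `Lookup` exposes its database as a generic associated type (hypothetical `Database`/`ItemId` types; the real trait is salsa-generated and shaped differently in detail):

```rust
trait Database {
    fn name(&self) -> &'static str;
}

// Toy `Lookup`: the database is a lifetime-parameterized associated type,
// so callers can constrain it to a concrete `dyn Trait`.
trait Lookup {
    type Database<'db>: ?Sized + 'db;
    type Data;
    fn lookup<'db>(self, db: &Self::Database<'db>) -> Self::Data;
}

struct ItemId(u32);

impl Lookup for ItemId {
    type Database<'db> = dyn Database + 'db;
    type Data = String;
    fn lookup<'db>(self, db: &Self::Database<'db>) -> String {
        format!("{}#{}", db.name(), self.0)
    }
}

// Mirrors the new bound: generic over any id whose lookups run against
// `dyn Database` for this lifetime.
fn describe<'db, L>(db: &(dyn Database + 'db), id: L) -> L::Data
where
    L: Lookup<Database<'db> = dyn Database + 'db>,
{
    id.lookup(db)
}

struct Db;
impl Database for Db {
    fn name(&self) -> &'static str {
        "db"
    }
}

fn main() {
    println!("{}", describe(&Db, ItemId(1)));
}
```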
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> { - self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map( + self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map( |InRealFile { file_id, value }| { self.cache(find_root(value.syntax()), file_id.into()); value @@ -948,10 +944,10 @@ impl<'db> SemanticsImpl<'db> { pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> { let analyze = self.analyze(ty.syntax())?; let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id); - let ty = hir_ty::TyLoweringContext::new( + let ty = hir_ty::TyLoweringContext::new_maybe_unowned( self.db, &analyze.resolver, - analyze.resolver.module().into(), + analyze.resolver.type_owner(), ) .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone())); Some(Type::new_with_resolver(self.db, &analyze.resolver, ty)) diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 73db6f8f0b8..54b4d81012f 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -16,7 +16,6 @@ use hir_def::{ hir::{BindingId, ExprId, Pat, PatId}, lang_item::LangItem, lower::LowerCtx, - macro_id_to_def_id, nameres::MacroSubNs, path::{ModPath, Path, PathKind}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, @@ -771,9 +770,7 @@ impl SourceAnalyzer { ) -> Option<MacroFileId> { let krate = self.resolver.krate(); let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { - self.resolver - .resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang)) - .map(|(it, _)| macro_id_to_def_id(db.upcast(), it)) + self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang)) })?; // why the 64? Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) @@ -1040,8 +1037,9 @@ fn resolve_hir_path_( let types = || { let (ty, unresolved) = match path.type_anchor() { Some(type_ref) => { - let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into()) - .lower_ty_ext(type_ref); + let (_, res) = + TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner()) + .lower_ty_ext(type_ref); res.map(|ty_ns| (ty_ns, path.segments().first())) } None => { @@ -1162,9 +1160,40 @@ fn resolve_hir_path_qualifier( resolver: &Resolver, path: &Path, ) -> Option<PathResolution> { - resolver - .resolve_path_in_type_ns_fully(db.upcast(), &path) - .map(|ty| match ty { + (|| { + let (ty, unresolved) = match path.type_anchor() { + Some(type_ref) => { + let (_, res) = + TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner()) + .lower_ty_ext(type_ref); + res.map(|ty_ns| (ty_ns, path.segments().first())) + } + None => { + let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?; + match remaining_idx { + Some(remaining_idx) => { + if remaining_idx + 1 == path.segments().len() { + Some((ty, path.segments().last())) + } else { + None + } + } + None => Some((ty, None)), + } + } + }?; + + // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type + // within the trait's associated types. 
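The rewrite of `resolve_hir_path_qualifier` below resolves a trailing unresolved segment against the trait's associated types, which is what lets qualified paths through a generic alias resolve. That scenario, adapted as plain Rust from the `generic_alias_with_qualified_path` test added earlier in this diff:

```rust
type Wrap<T> = T;

struct S;

trait Schematic {
    type Props;
}

impl Schematic for S {
    type Props = X;
}

enum X {
    A { cool: u32, stuff: u32 },
    B,
}

fn main() {
    // The qualifier `Wrap::<<S as Schematic>::Props>` must be resolved through
    // the alias and the trait's associated type before `A` can be found.
    let wrapped = Wrap::<<S as Schematic>::Props>::A { cool: 100, stuff: 100 };

    if let Wrap::<<S as Schematic>::Props>::A { cool, .. } = &wrapped {
        let _: &u32 = cool; // match ergonomics bind `cool` as `&u32`
    }
    let _ = X::B;
}
```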
+ if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { + if let Some(type_alias_id) = + db.trait_data(trait_id).associated_type_by_name(unresolved.name) + { + return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); + } + } + + let res = match ty { TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()), TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => { @@ -1175,11 +1204,28 @@ fn resolve_hir_path_qualifier( TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()), TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()), - }) - .or_else(|| { - resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()?) - .take_types() - .map(|it| PathResolution::Def(it.into())) - }) + }; + match unresolved { + Some(unresolved) => resolver + .generic_def() + .and_then(|def| { + hir_ty::associated_type_shorthand_candidates( + db, + def, + res.in_type_ns()?, + |name, id| (name == unresolved.name).then_some(id), + ) + }) + .map(TypeAlias::from) + .map(Into::into) + .map(PathResolution::Def), + None => Some(res), + } + })() + .or_else(|| { + resolver + .resolve_module_path_in_items(db.upcast(), path.mod_path()?) + .take_types() + .map(|it| PathResolution::Def(it.into())) + }) } diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index a2a30edeb03..ad214e1b80e 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -2,6 +2,7 @@ use base_db::FileRange; use hir_def::{ + db::DefDatabase, item_scope::ItemInNs, src::{HasChildSource, HasSource}, AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId, @@ -274,9 +275,9 @@ impl<'a> SymbolCollector<'a> { } } - fn push_decl<L>(&mut self, id: L, is_assoc: bool) + fn push_decl<'db, L>(&mut self, id: L, is_assoc: bool) where - L: Lookup + Into<ModuleDefId>, + L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>, <L as Lookup>::Data: HasSource, <<L as Lookup>::Data as HasSource>::Value: HasName, { diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml index a622ec1a953..69d98fd78fd 100644 --- a/crates/ide-assists/Cargo.toml +++ b/crates/ide-assists/Cargo.toml @@ -31,6 +31,7 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true sourcegen.workspace = true [features] diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs index f508c42c53e..1f785b5d0a8 100644 --- a/crates/ide-assists/src/handlers/auto_import.rs +++ b/crates/ide-assists/src/handlers/auto_import.rs @@ -281,11 +281,8 @@ mod tests { use super::*; use hir::Semantics; - use ide_db::{ - assists::AssistResolveStrategy, - base_db::{fixture::WithFixture, FileRange}, - RootDatabase, - }; + use ide_db::{assists::AssistResolveStrategy, base_db::FileRange, RootDatabase}; + use test_fixture::WithFixture; use crate::tests::{ check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target, diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index 0f2d1057c0a..b7b00e7ed06 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -16,11 +16,14 @@ use syntax::{ edit_in_place::{AttrsOwnerEdit, Indent}, make, HasName, }, - ted, AstNode, NodeOrToken, SyntaxKind, 
SyntaxNode, T, + AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, }; use text_edit::TextRange; -use crate::assist_context::{AssistContext, Assists}; +use crate::{ + assist_context::{AssistContext, Assists}, + utils, +}; // Assist: bool_to_enum // @@ -73,7 +76,7 @@ pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let usages = definition.usages(&ctx.sema).all(); add_enum_def(edit, ctx, &usages, target_node, &target_module); - replace_usages(edit, ctx, &usages, definition, &target_module); + replace_usages(edit, ctx, usages, definition, &target_module); }, ) } @@ -169,8 +172,8 @@ fn replace_bool_expr(edit: &mut SourceChangeBuilder, expr: ast::Expr) { /// Converts an expression of type `bool` to one of the new enum type. fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr { - let true_expr = make::expr_path(make::path_from_text("Bool::True")).clone_for_update(); - let false_expr = make::expr_path(make::path_from_text("Bool::False")).clone_for_update(); + let true_expr = make::expr_path(make::path_from_text("Bool::True")); + let false_expr = make::expr_path(make::path_from_text("Bool::False")); if let ast::Expr::Literal(literal) = &expr { match literal.kind() { @@ -184,7 +187,6 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr { make::tail_only_block_expr(true_expr), Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))), ) - .clone_for_update() } } @@ -192,21 +194,19 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr { fn replace_usages( edit: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, - usages: &UsageSearchResult, + usages: UsageSearchResult, target_definition: Definition, target_module: &hir::Module, ) { - for (file_id, references) in usages.iter() { - edit.edit_file(*file_id); + for (file_id, references) in usages { + edit.edit_file(file_id); - let refs_with_imports = - augment_references_with_imports(edit, ctx, references, target_module); + let refs_with_imports = augment_references_with_imports(ctx, references, target_module); refs_with_imports.into_iter().rev().for_each( - |FileReferenceWithImport { range, old_name, new_name, import_data }| { + |FileReferenceWithImport { range, name, import_data }| { // replace the usages in patterns and expressions - if let Some(ident_pat) = old_name.syntax().ancestors().find_map(ast::IdentPat::cast) - { + if let Some(ident_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) { cov_mark::hit!(replaces_record_pat_shorthand); let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local); @@ -214,36 +214,35 @@ fn replace_usages( replace_usages( edit, ctx, - &def.usages(&ctx.sema).all(), + def.usages(&ctx.sema).all(), target_definition, target_module, ) } - } else if let Some(initializer) = find_assignment_usage(&new_name) { + } else if let Some(initializer) = find_assignment_usage(&name) { cov_mark::hit!(replaces_assignment); replace_bool_expr(edit, initializer); - } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&new_name) { + } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&name) { cov_mark::hit!(replaces_negation); edit.replace( prefix_expr.syntax().text_range(), format!("{} == Bool::False", inner_expr), ); - } else if let Some((record_field, initializer)) = old_name + } else if let Some((record_field, initializer)) = name .as_name_ref() .and_then(ast::RecordExprField::for_field_name) .and_then(|record_field| ctx.sema.resolve_record_field(&record_field)) .and_then(|(got_field, _, _)| { - find_record_expr_usage(&new_name, 
got_field, target_definition) + find_record_expr_usage(&name, got_field, target_definition) }) { cov_mark::hit!(replaces_record_expr); - let record_field = edit.make_mut(record_field); let enum_expr = bool_expr_to_enum_expr(initializer); - record_field.replace_expr(enum_expr); - } else if let Some(pat) = find_record_pat_field_usage(&old_name) { + utils::replace_record_field_expr(ctx, edit, record_field, enum_expr); + } else if let Some(pat) = find_record_pat_field_usage(&name) { match pat { ast::Pat::IdentPat(ident_pat) => { cov_mark::hit!(replaces_record_pat); @@ -253,7 +252,7 @@ fn replace_usages( replace_usages( edit, ctx, - &def.usages(&ctx.sema).all(), + def.usages(&ctx.sema).all(), target_definition, target_module, ) @@ -270,40 +269,44 @@ fn replace_usages( } _ => (), } - } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&new_name) - { + } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&name) { edit.replace(ty_annotation.syntax().text_range(), "Bool"); replace_bool_expr(edit, initializer); - } else if let Some(receiver) = find_method_call_expr_usage(&new_name) { + } else if let Some(receiver) = find_method_call_expr_usage(&name) { edit.replace( receiver.syntax().text_range(), format!("({} == Bool::True)", receiver), ); - } else if new_name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() { + } else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() { // for any other usage in an expression, replace it with a check that it is the true variant - if let Some((record_field, expr)) = new_name - .as_name_ref() - .and_then(ast::RecordExprField::for_field_name) - .and_then(|record_field| { - record_field.expr().map(|expr| (record_field, expr)) - }) + if let Some((record_field, expr)) = + name.as_name_ref().and_then(ast::RecordExprField::for_field_name).and_then( + |record_field| record_field.expr().map(|expr| (record_field, expr)), + ) { - record_field.replace_expr( + utils::replace_record_field_expr( + ctx, + edit, + record_field, make::expr_bin_op( expr, ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }), make::expr_path(make::path_from_text("Bool::True")), - ) - .clone_for_update(), + ), ); } else { - edit.replace(range, format!("{} == Bool::True", new_name.text())); + edit.replace(range, format!("{} == Bool::True", name.text())); } } // add imports across modules where needed if let Some((import_scope, path)) = import_data { - insert_use(&import_scope, path, &ctx.config.insert_use); + let scope = match import_scope.clone() { + ImportScope::File(it) => ImportScope::File(edit.make_mut(it)), + ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)), + ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)), + }; + insert_use(&scope, path, &ctx.config.insert_use); } }, ) @@ -312,37 +315,31 @@ fn replace_usages( struct FileReferenceWithImport { range: TextRange, - old_name: ast::NameLike, - new_name: ast::NameLike, + name: ast::NameLike, import_data: Option<(ImportScope, ast::Path)>, } fn augment_references_with_imports( - edit: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, - references: &[FileReference], + references: Vec<FileReference>, target_module: &hir::Module, ) -> Vec<FileReferenceWithImport> { let mut visited_modules = FxHashSet::default(); references - .iter() + .into_iter() .filter_map(|FileReference { range, name, .. 
}| { let name = name.clone().into_name_like()?; - ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module())) + ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module())) }) .map(|(range, name, ref_module)| { - let old_name = name.clone(); - let new_name = edit.make_mut(name.clone()); - // if the referenced module is not the same as the target one and has not been seen before, add an import let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module && !visited_modules.contains(&ref_module) { visited_modules.insert(ref_module); - let import_scope = - ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema); + let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema); let path = ref_module .find_use_path_prefixed( ctx.sema.db, @@ -360,7 +357,7 @@ fn augment_references_with_imports( None }; - FileReferenceWithImport { range, old_name, new_name, import_data } + FileReferenceWithImport { range, name, import_data } }) .collect() } @@ -405,13 +402,12 @@ fn find_record_expr_usage( let record_field = ast::RecordExprField::for_field_name(name_ref)?; let initializer = record_field.expr()?; - if let Definition::Field(expected_field) = target_definition { - if got_field != expected_field { - return None; + match target_definition { + Definition::Field(expected_field) if got_field == expected_field => { + Some((record_field, initializer)) } + _ => None, } - - Some((record_field, initializer)) } fn find_record_pat_field_usage(name: &ast::NameLike) -> Option<ast::Pat> { @@ -466,12 +462,9 @@ fn add_enum_def( let indent = IndentLevel::from_node(&insert_before); enum_def.reindent_to(indent); - ted::insert_all( - ted::Position::before(&edit.make_syntax_mut(insert_before)), - vec![ - enum_def.syntax().clone().into(), - make::tokens::whitespace(&format!("\n\n{indent}")).into(), - ], + edit.insert( + insert_before.text_range().start(), + format!("{}\n\n{indent}", enum_def.syntax().text()), ); } @@ -801,6 +794,78 @@ fn main() { } #[test] + fn local_var_init_struct_usage() { + check_assist( + bool_to_enum, + r#" +struct Foo { + foo: bool, +} + +fn main() { + let $0foo = true; + let s = Foo { foo }; +} +"#, + r#" +struct Foo { + foo: bool, +} + +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn main() { + let foo = Bool::True; + let s = Foo { foo: foo == Bool::True }; +} +"#, + ) + } + + #[test] + fn local_var_init_struct_usage_in_macro() { + check_assist( + bool_to_enum, + r#" +struct Struct { + boolean: bool, +} + +macro_rules! identity { + ($body:expr) => { + $body + } +} + +fn new() -> Struct { + let $0boolean = true; + identity![Struct { boolean }] +} +"#, + r#" +struct Struct { + boolean: bool, +} + +macro_rules! identity { + ($body:expr) => { + $body + } +} + +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn new() -> Struct { + let boolean = Bool::True; + identity![Struct { boolean: boolean == Bool::True }] +} +"#, + ) + } + + #[test] fn field_struct_basic() { cov_mark::check!(replaces_record_expr); check_assist( @@ -1322,6 +1387,46 @@ fn main() { } #[test] + fn field_in_macro() { + check_assist( + bool_to_enum, + r#" +struct Struct { + $0boolean: bool, +} + +fn boolean(x: Struct) { + let Struct { boolean } = x; +} + +macro_rules! 
identity { ($body:expr) => { $body } } + +fn new() -> Struct { + identity!(Struct { boolean: true }) +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +struct Struct { + boolean: Bool, +} + +fn boolean(x: Struct) { + let Struct { boolean } = x; +} + +macro_rules! identity { ($body:expr) => { $body } } + +fn new() -> Struct { + identity!(Struct { boolean: Bool::True }) +} +"#, + ) + } + + #[test] fn field_non_bool() { cov_mark::check!(not_applicable_non_bool_field); check_assist_not_applicable( diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 017853a4a20..435d7c4a537 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -216,7 +216,7 @@ fn edit_field_references( edit.edit_file(file_id); for r in refs { if let Some(name_ref) = r.name.as_name_ref() { - edit.replace(name_ref.syntax().text_range(), name.text()); + edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text()); } } } diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs index e7c884dcb70..874b81d3b63 100644 --- a/crates/ide-assists/src/handlers/extract_variable.rs +++ b/crates/ide-assists/src/handlers/extract_variable.rs @@ -1,12 +1,8 @@ use hir::TypeInfo; -use stdx::format_to; use syntax::{ - ast::{self, AstNode}, - NodeOrToken, - SyntaxKind::{ - BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, MATCH_GUARD, - PATH_EXPR, RETURN_EXPR, - }, + ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName}, + ted, NodeOrToken, + SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR}, SyntaxNode, }; @@ -66,98 +62,140 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op .as_ref() .map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_))); - let reference_modifier = match ty.filter(|_| needs_adjust) { - Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ", - Some(receiver_type) if receiver_type.is_reference() => "&", - _ => "", - }; - - let var_modifier = match parent { - Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => "mut ", - _ => "", - }; - let anchor = Anchor::from(&to_extract)?; - let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone(); let target = to_extract.syntax().text_range(); acc.add( AssistId("extract_variable", AssistKind::RefactorExtract), "Extract into variable", target, move |edit| { - let field_shorthand = - match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) { - Some(field) => field.name_ref(), - None => None, - }; - - let mut buf = String::new(); - - let var_name = match &field_shorthand { - Some(it) => it.to_string(), - None => suggest_name::for_variable(&to_extract, &ctx.sema), + let field_shorthand = to_extract + .syntax() + .parent() + .and_then(ast::RecordExprField::cast) + .filter(|field| field.name_ref().is_some()); + + let (var_name, expr_replace) = match field_shorthand { + Some(field) => (field.to_string(), field.syntax().clone()), + None => ( + suggest_name::for_variable(&to_extract, &ctx.sema), + to_extract.syntax().clone(), + ), }; - let expr_range = match &field_shorthand { - Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()), - None => 
to_extract.syntax().text_range(), + + let ident_pat = match parent { + Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => { + make::ident_pat(false, true, make::name(&var_name)) + } + _ => make::ident_pat(false, false, make::name(&var_name)), }; - match anchor { - Anchor::Before(_) | Anchor::Replace(_) => { - format_to!(buf, "let {var_modifier}{var_name} = {reference_modifier}") + let to_extract = match ty.as_ref().filter(|_| needs_adjust) { + Some(receiver_type) if receiver_type.is_mutable_reference() => { + make::expr_ref(to_extract, true) } - Anchor::WrapInBlock(_) => { - format_to!(buf, "{{ let {var_name} = {reference_modifier}") + Some(receiver_type) if receiver_type.is_reference() => { + make::expr_ref(to_extract, false) } + _ => to_extract, }; - format_to!(buf, "{to_extract}"); - if let Anchor::Replace(stmt) = anchor { - cov_mark::hit!(test_extract_var_expr_stmt); - if stmt.semicolon_token().is_none() { - buf.push(';'); - } - match ctx.config.snippet_cap { - Some(cap) => { - let snip = buf.replace( - &format!("let {var_modifier}{var_name}"), - &format!("let {var_modifier}$0{var_name}"), - ); - edit.replace_snippet(cap, expr_range, snip) + let expr_replace = edit.make_syntax_mut(expr_replace); + let let_stmt = + make::let_stmt(ident_pat.into(), None, Some(to_extract)).clone_for_update(); + let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update(); + + match anchor { + Anchor::Before(place) => { + let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token()); + let indent_to = IndentLevel::from_node(&place); + let insert_place = edit.make_syntax_mut(place); + + // Adjust ws to insert depending on if this is all inline or on separate lines + let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with("\n")) { + format!("\n{indent_to}") + } else { + format!(" ") + }; + + ted::insert_all_raw( + ted::Position::before(insert_place), + vec![ + let_stmt.syntax().clone().into(), + make::tokens::whitespace(&trailing_ws).into(), + ], + ); + + ted::replace(expr_replace, name_expr.syntax()); + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() { + if let Some(name) = ident_pat.name() { + edit.add_tabstop_before(cap, name); + } + } } - None => edit.replace(expr_range, buf), } - return; - } + Anchor::Replace(stmt) => { + cov_mark::hit!(test_extract_var_expr_stmt); - buf.push(';'); - - // We want to maintain the indent level, - // but we do not want to duplicate possible - // extra newlines in the indent block - let text = indent.text(); - if text.starts_with('\n') { - buf.push('\n'); - buf.push_str(text.trim_start_matches('\n')); - } else { - buf.push_str(text); - } + let stmt_replace = edit.make_mut(stmt); + ted::replace(stmt_replace.syntax(), let_stmt.syntax()); - edit.replace(expr_range, var_name.clone()); - let offset = anchor.syntax().text_range().start(); - match ctx.config.snippet_cap { - Some(cap) => { - let snip = buf.replace( - &format!("let {var_modifier}{var_name}"), - &format!("let {var_modifier}$0{var_name}"), - ); - edit.insert_snippet(cap, offset, snip) + if let Some(cap) = ctx.config.snippet_cap { + if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() { + if let Some(name) = ident_pat.name() { + edit.add_tabstop_before(cap, name); + } + } + } } - None => edit.insert(offset, buf), - } + Anchor::WrapInBlock(to_wrap) => { + let indent_to = to_wrap.indent_level(); + + let block = if to_wrap.syntax() == &expr_replace { + // Since `expr_replace` is the same that 
needs to be wrapped in a block,
+                        // we can just directly replace it with a block
+                        let block =
+                            make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update();
+                        ted::replace(expr_replace, block.syntax());
+
+                        block
+                    } else {
+                        // `expr_replace` is a descendant of `to_wrap`, so both steps need to be
+                        // handled separately, otherwise we wrap the wrong expression
+                        let to_wrap = edit.make_mut(to_wrap);
+
+                        // Replace the target expr first so that we don't need to find where
+                        // `expr_replace` is in the wrapped `to_wrap`
+                        ted::replace(expr_replace, name_expr.syntax());
+
+                        // Wrap `to_wrap` in a block
+                        let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone()))
+                            .clone_for_update();
+                        ted::replace(to_wrap.syntax(), block.syntax());
+
+                        block
+                    };
+
+                    if let Some(cap) = ctx.config.snippet_cap {
+                        // Adding a tabstop to `name` requires finding the let stmt again, since
+                        // the existing `let_stmt` is not actually added to the tree
+                        let pat = block.statements().find_map(|stmt| {
+                            let ast::Stmt::LetStmt(let_stmt) = stmt else { return None };
+                            let_stmt.pat()
+                        });
+
+                        if let Some(ast::Pat::IdentPat(ident_pat)) = pat {
+                            if let Some(name) = ident_pat.name() {
+                                edit.add_tabstop_before(cap, name);
+                            }
+                        }
+                    }

-            if let Anchor::WrapInBlock(_) = anchor {
-                edit.insert(anchor.syntax().text_range().end(), " }");
+                    // fixup indentation of block
+                    block.indent(indent_to);
+                }
             }
         },
     )
@@ -181,7 +219,7 @@ fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
 enum Anchor {
     Before(SyntaxNode),
     Replace(ast::ExprStmt),
-    WrapInBlock(SyntaxNode),
+    WrapInBlock(ast::Expr),
 }

 impl Anchor {
@@ -204,16 +242,16 @@ impl Anchor {
         }

         if let Some(parent) = node.parent() {
-            if parent.kind() == CLOSURE_EXPR {
+            if let Some(parent) = ast::ClosureExpr::cast(parent.clone()) {
                 cov_mark::hit!(test_extract_var_in_closure_no_block);
-                return Some(Anchor::WrapInBlock(node));
+                return parent.body().map(Anchor::WrapInBlock);
             }
-            if parent.kind() == MATCH_ARM {
+            if let Some(parent) = ast::MatchArm::cast(parent) {
                 if node.kind() == MATCH_GUARD {
                     cov_mark::hit!(test_extract_var_in_match_guard);
                 } else {
                     cov_mark::hit!(test_extract_var_in_match_arm_no_block);
-                    return Some(Anchor::WrapInBlock(node));
+                    return parent.expr().map(Anchor::WrapInBlock);
                 }
             }
         }
@@ -229,13 +267,6 @@ impl Anchor {
             None
         })
     }
-
-    fn syntax(&self) -> &SyntaxNode {
-        match self {
-            Anchor::Before(it) | Anchor::WrapInBlock(it) => it,
-            Anchor::Replace(stmt) => stmt.syntax(),
-        }
-    }
 }

 #[cfg(test)]
@@ -502,7 +533,10 @@ fn main() {
 fn main() {
     let x = true;
     let tuple = match x {
-        true => { let $0var_name = 2 + 2; (var_name, true) }
+        true => {
+            let $0var_name = 2 + 2;
+            (var_name, true)
+        }
         _ => (0, false)
     };
 }
@@ -579,7 +613,10 @@ fn main() {
 "#,
         r#"
 fn main() {
-    let lambda = |x: u32| { let $0var_name = x * 2; var_name };
+    let lambda = |x: u32| {
+        let $0var_name = x * 2;
+        var_name
+    };
 }
 "#,
     );
diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs
index 1a1e992e28a..2aaf9d0679d 100644
--- a/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -114,7 +114,7 @@ fn add_variant_to_accumulator(
     parent: PathParent,
 ) -> Option<()> {
     let db = ctx.db();
-    let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+    let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node_rooted(db)?;

     acc.add(
         AssistId("generate_enum_variant",
AssistKind::Generate), diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index a113c817f7e..5bb200e84a4 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -8,20 +8,21 @@ use ide_db::{ famous_defs::FamousDefs, helpers::is_editable_crate, path_transform::PathTransform, + source_change::SourceChangeBuilder, FxHashMap, FxHashSet, RootDatabase, SnippetCap, }; +use itertools::Itertools; use stdx::to_lower_snake_case; use syntax::{ ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - make, AstNode, CallExpr, HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds, + self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, CallExpr, HasArgList, + HasGenericParams, HasModuleItem, HasTypeBounds, }, - SyntaxKind, SyntaxNode, TextRange, TextSize, + ted, SyntaxKind, SyntaxNode, TextRange, T, }; use crate::{ - utils::{convert_reference_type, find_struct_impl, render_snippet, Cursor}, + utils::{convert_reference_type, find_struct_impl}, AssistContext, AssistId, AssistKind, Assists, }; @@ -65,7 +66,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let fn_name = &*name_ref.text(); - let TargetInfo { target_module, adt_name, target, file, insert_offset } = + let TargetInfo { target_module, adt_name, target, file } = fn_target_info(ctx, path, &call, fn_name)?; if let Some(m) = target_module { @@ -77,16 +78,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?; let text_range = call.syntax().text_range(); let label = format!("Generate {} function", function_builder.fn_name); - add_func_to_accumulator( - acc, - ctx, - text_range, - function_builder, - insert_offset, - file, - adt_name, - label, - ) + add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label) } struct TargetInfo { @@ -94,7 +86,6 @@ struct TargetInfo { adt_name: Option<hir::Name>, target: GeneratedFunctionTarget, file: FileId, - insert_offset: TextSize, } impl TargetInfo { @@ -103,9 +94,8 @@ impl TargetInfo { adt_name: Option<hir::Name>, target: GeneratedFunctionTarget, file: FileId, - insert_offset: TextSize, ) -> Self { - Self { target_module, adt_name, target, file, insert_offset } + Self { target_module, adt_name, target, file } } } @@ -156,7 +146,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?; - let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; + let target = get_method_target(ctx, &impl_, &adt)?; let function_builder = FunctionBuilder::from_method_call( ctx, @@ -169,16 +159,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let text_range = call.syntax().text_range(); let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; let label = format!("Generate {} method", function_builder.fn_name); - add_func_to_accumulator( - acc, - ctx, - text_range, - function_builder, - insert_offset, - file, - adt_name, - label, - ) + add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label) } fn add_func_to_accumulator( @@ -186,23 +167,28 @@ fn add_func_to_accumulator( ctx: &AssistContext<'_>, text_range: TextRange, function_builder: FunctionBuilder, - insert_offset: TextSize, file: FileId, adt_name: Option<hir::Name>, label: String, 
) -> Option<()> { - acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| { - let indent = IndentLevel::from_node(function_builder.target.syntax()); - let function_template = function_builder.render(adt_name.is_some()); - let mut func = function_template.to_string(ctx.config.snippet_cap); + acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |edit| { + edit.edit_file(file); + + let target = function_builder.target.clone(); + let function_template = function_builder.render(); + let func = function_template.to_ast(ctx.config.snippet_cap, edit); + if let Some(name) = adt_name { + let name = make::ty_path(make::ext::ident_path(&format!("{}", name.display(ctx.db())))); + // FIXME: adt may have generic params. - func = format!("\n{indent}impl {} {{\n{func}\n{indent}}}", name.display(ctx.db())); - } - builder.edit_file(file); - match ctx.config.snippet_cap { - Some(cap) => builder.insert_snippet(cap, insert_offset, func), - None => builder.insert(insert_offset, func), + let impl_ = make::impl_(None, None, name, None, None).clone_for_update(); + + func.indent(IndentLevel(1)); + impl_.get_or_create_assoc_item_list().add_item(func.into()); + target.insert_impl_at(edit, impl_); + } else { + target.insert_fn_at(edit, func); } }) } @@ -220,36 +206,33 @@ fn get_adt_source( } struct FunctionTemplate { - leading_ws: String, fn_def: ast::Fn, ret_type: Option<ast::RetType>, should_focus_return_type: bool, - trailing_ws: String, tail_expr: ast::Expr, } impl FunctionTemplate { - fn to_string(&self, cap: Option<SnippetCap>) -> String { - let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } = - self; - - let f = match cap { - Some(cap) => { - let cursor = if *should_focus_return_type { - // Focus the return type if there is one - match ret_type { - Some(ret_type) => ret_type.syntax(), - None => tail_expr.syntax(), + fn to_ast(&self, cap: Option<SnippetCap>, edit: &mut SourceChangeBuilder) -> ast::Fn { + let Self { fn_def, ret_type, should_focus_return_type, tail_expr } = self; + + if let Some(cap) = cap { + if *should_focus_return_type { + // Focus the return type if there is one + match ret_type { + Some(ret_type) => { + edit.add_placeholder_snippet(cap, ret_type.clone()); } - } else { - tail_expr.syntax() - }; - render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor)) + None => { + edit.add_placeholder_snippet(cap, tail_expr.clone()); + } + } + } else { + edit.add_placeholder_snippet(cap, tail_expr.clone()); } - None => fn_def.to_string(), - }; + } - format!("{leading_ws}{f}{trailing_ws}") + fn_def.clone() } } @@ -356,7 +339,7 @@ impl FunctionBuilder { }) } - fn render(self, is_method: bool) -> FunctionTemplate { + fn render(self) -> FunctionTemplate { let placeholder_expr = make::ext::expr_todo(); let fn_body = make::block_expr(vec![], Some(placeholder_expr)); let visibility = match self.visibility { @@ -364,7 +347,7 @@ impl FunctionBuilder { Visibility::Crate => Some(make::visibility_pub_crate()), Visibility::Pub => Some(make::visibility_pub()), }; - let mut fn_def = make::fn_( + let fn_def = make::fn_( visibility, self.fn_name, self.generic_param_list, @@ -375,34 +358,10 @@ impl FunctionBuilder { self.is_async, false, // FIXME : const and unsafe are not handled yet. 
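            // (Editor's aside, not part of the commit.) `FunctionTemplate::to_ast`
            // above shows the snippet mechanism these hunks migrate to: rather than
            // rendering the function to a string and splicing `$0` into it, handlers
            // register AST nodes with the `SourceChangeBuilder`, and the final edit
            // materializes the tabstops. A minimal sketch, assuming `edit`, `ctx`,
            // and a node such as `ret_type` are in scope as in `to_ast`:
            //
            //     if let Some(cap) = ctx.config.snippet_cap {
            //         // Rendered as a `${0:...}` placeholder over the node's range.
            //         edit.add_placeholder_snippet(cap, ret_type.clone());
            //     }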
false, - ); - let leading_ws; - let trailing_ws; - - match self.target { - GeneratedFunctionTarget::BehindItem(it) => { - let mut indent = IndentLevel::from_node(&it); - if is_method { - indent = indent + 1; - leading_ws = format!("{indent}"); - } else { - leading_ws = format!("\n\n{indent}"); - } - - fn_def = fn_def.indent(indent); - trailing_ws = String::new(); - } - GeneratedFunctionTarget::InEmptyItemList(it) => { - let indent = IndentLevel::from_node(&it); - let leading_indent = indent + 1; - leading_ws = format!("\n{leading_indent}"); - fn_def = fn_def.indent(leading_indent); - trailing_ws = format!("\n{indent}"); - } - }; + ) + .clone_for_update(); FunctionTemplate { - leading_ws, ret_type: fn_def.ret_type(), // PANIC: we guarantee we always create a function body with a tail expr tail_expr: fn_def @@ -412,7 +371,6 @@ impl FunctionBuilder { .expect("function body should have a tail expression"), should_focus_return_type: self.should_focus_return_type, fn_def, - trailing_ws, } } } @@ -456,40 +414,37 @@ fn get_fn_target_info( target_module: Option<Module>, call: CallExpr, ) -> Option<TargetInfo> { - let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?; - Some(TargetInfo::new(target_module, None, target, file, insert_offset)) + let (target, file) = get_fn_target(ctx, target_module, call)?; + Some(TargetInfo::new(target_module, None, target, file)) } fn get_fn_target( ctx: &AssistContext<'_>, target_module: Option<Module>, call: CallExpr, -) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> { +) -> Option<(GeneratedFunctionTarget, FileId)> { let mut file = ctx.file_id(); let target = match target_module { Some(target_module) => { - let module_source = target_module.definition_source(ctx.db()); - let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?; + let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module); file = in_file; target } None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?, }; - Some((target.clone(), file, get_insert_offset(&target))) + Some((target.clone(), file)) } fn get_method_target( ctx: &AssistContext<'_>, impl_: &Option<ast::Impl>, adt: &Adt, -) -> Option<(GeneratedFunctionTarget, TextSize)> { +) -> Option<GeneratedFunctionTarget> { let target = match impl_ { - Some(impl_) => next_space_for_fn_in_impl(impl_)?, - None => { - GeneratedFunctionTarget::BehindItem(adt.source(ctx.sema.db)?.syntax().value.clone()) - } + Some(impl_) => GeneratedFunctionTarget::InImpl(impl_.clone()), + None => GeneratedFunctionTarget::AfterItem(adt.source(ctx.sema.db)?.syntax().value.clone()), }; - Some((target.clone(), get_insert_offset(&target))) + Some(target) } fn assoc_fn_target_info( @@ -505,36 +460,120 @@ fn assoc_fn_target_info( return None; } let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; - let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; + let target = get_method_target(ctx, &impl_, &adt)?; let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; - Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset)) -} - -fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { - match target { - GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(), - GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'), - } + Some(TargetInfo::new(target_module, adt_name, target, file)) } #[derive(Clone)] enum GeneratedFunctionTarget { - BehindItem(SyntaxNode), + 
AfterItem(SyntaxNode), InEmptyItemList(SyntaxNode), + InImpl(ast::Impl), } impl GeneratedFunctionTarget { fn syntax(&self) -> &SyntaxNode { match self { - GeneratedFunctionTarget::BehindItem(it) => it, + GeneratedFunctionTarget::AfterItem(it) => it, GeneratedFunctionTarget::InEmptyItemList(it) => it, + GeneratedFunctionTarget::InImpl(it) => it.syntax(), } } fn parent(&self) -> SyntaxNode { match self { - GeneratedFunctionTarget::BehindItem(it) => it.parent().expect("item without parent"), + GeneratedFunctionTarget::AfterItem(it) => it.parent().expect("item without parent"), GeneratedFunctionTarget::InEmptyItemList(it) => it.clone(), + GeneratedFunctionTarget::InImpl(it) => it.syntax().clone(), + } + } + + fn insert_impl_at(&self, edit: &mut SourceChangeBuilder, impl_: ast::Impl) { + match self { + GeneratedFunctionTarget::AfterItem(item) => { + let item = edit.make_syntax_mut(item.clone()); + let position = if item.parent().is_some() { + ted::Position::after(&item) + } else { + ted::Position::first_child_of(&item) + }; + + let indent = IndentLevel::from_node(&item); + let leading_ws = make::tokens::whitespace(&format!("\n{indent}")); + impl_.indent(indent); + + ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]); + } + GeneratedFunctionTarget::InEmptyItemList(item_list) => { + let item_list = edit.make_syntax_mut(item_list.clone()); + let insert_after = + item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']); + let position = match insert_after { + Some(child) => ted::Position::after(child), + None => ted::Position::first_child_of(&item_list), + }; + + let indent = IndentLevel::from_node(&item_list); + let leading_indent = indent + 1; + let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}")); + impl_.indent(indent); + + ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]); + } + GeneratedFunctionTarget::InImpl(_) => { + unreachable!("can't insert an impl inside an impl") + } + } + } + + fn insert_fn_at(&self, edit: &mut SourceChangeBuilder, func: ast::Fn) { + match self { + GeneratedFunctionTarget::AfterItem(item) => { + let item = edit.make_syntax_mut(item.clone()); + let position = if item.parent().is_some() { + ted::Position::after(&item) + } else { + ted::Position::first_child_of(&item) + }; + + let indent = IndentLevel::from_node(&item); + let leading_ws = make::tokens::whitespace(&format!("\n\n{indent}")); + func.indent(indent); + + ted::insert_all_raw( + position, + vec![leading_ws.into(), func.syntax().clone().into()], + ); + } + GeneratedFunctionTarget::InEmptyItemList(item_list) => { + let item_list = edit.make_syntax_mut(item_list.clone()); + let insert_after = + item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']); + let position = match insert_after { + Some(child) => ted::Position::after(child), + None => ted::Position::first_child_of(&item_list), + }; + + let indent = IndentLevel::from_node(&item_list); + let leading_indent = indent + 1; + let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}")); + let trailing_ws = make::tokens::whitespace(&format!("\n{indent}")); + func.indent(leading_indent); + + ted::insert_all( + position, + vec![leading_ws.into(), func.syntax().clone().into(), trailing_ws.into()], + ); + } + GeneratedFunctionTarget::InImpl(impl_) => { + let impl_ = edit.make_mut(impl_.clone()); + + let leading_indent = impl_.indent_level() + 1; + func.indent(leading_indent); + + 
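                // (Editor's aside, not part of the commit.) Both `insert_*_at`
                // methods lean on the same `ted` idiom: whitespace is an ordinary
                // token in the syntax tree, so blank lines and indentation are
                // spliced in together with the generated node. Sketch, assuming a
                // mutable `item` already inside the edited file and a generated
                // `func`, mirroring the `AfterItem` arm above:
                //
                //     let indent = IndentLevel::from_node(&item);
                //     ted::insert_all_raw(
                //         ted::Position::after(&item),
                //         vec![
                //             make::tokens::whitespace(&format!("\n\n{indent}")).into(),
                //             func.syntax().clone().into(),
                //         ],
                //     );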
impl_.get_or_create_assoc_item_list().add_item(func.into()); + } } } } @@ -1026,43 +1065,40 @@ fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option<Generate } last_ancestor = Some(next_ancestor); } - last_ancestor.map(GeneratedFunctionTarget::BehindItem) + last_ancestor.map(GeneratedFunctionTarget::AfterItem) } fn next_space_for_fn_in_module( - db: &dyn hir::db::ExpandDatabase, - module_source: &hir::InFile<hir::ModuleSource>, -) -> Option<(FileId, GeneratedFunctionTarget)> { - let file = module_source.file_id.original_file(db); + db: &dyn hir::db::HirDatabase, + target_module: hir::Module, +) -> (FileId, GeneratedFunctionTarget) { + let module_source = target_module.definition_source(db); + let file = module_source.file_id.original_file(db.upcast()); let assist_item = match &module_source.value { hir::ModuleSource::SourceFile(it) => match it.items().last() { - Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()), - None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()), + Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()), + None => GeneratedFunctionTarget::AfterItem(it.syntax().clone()), }, hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) { - Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()), - None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()), + Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()), + None => { + let item_list = + it.item_list().expect("module definition source should have an item list"); + GeneratedFunctionTarget::InEmptyItemList(item_list.syntax().clone()) + } }, hir::ModuleSource::BlockExpr(it) => { if let Some(last_item) = it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last() { - GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()) + GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()) } else { GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone()) } } }; - Some((file, assist_item)) -} -fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarget> { - let assoc_item_list = impl_.assoc_item_list()?; - if let Some(last_item) = assoc_item_list.assoc_items().last() { - Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())) - } else { - Some(GeneratedFunctionTarget::InEmptyItemList(assoc_item_list.syntax().clone())) - } + (file, assist_item) } #[derive(Clone, Copy)] diff --git a/crates/ide-assists/src/handlers/introduce_named_generic.rs b/crates/ide-assists/src/handlers/introduce_named_generic.rs index b0d35c02d67..e90a1ed79b4 100644 --- a/crates/ide-assists/src/handlers/introduce_named_generic.rs +++ b/crates/ide-assists/src/handlers/introduce_named_generic.rs @@ -18,7 +18,7 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists}; // ``` pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let impl_trait_type = ctx.find_node_at_offset::<ast::ImplTraitType>()?; - let param = impl_trait_type.syntax().parent().and_then(ast::Param::cast)?; + let param = impl_trait_type.syntax().ancestors().find_map(|node| ast::Param::cast(node))?; let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?; let type_bound_list = impl_trait_type.type_bound_list()?; @@ -149,4 +149,22 @@ fn foo< r#"fn foo<$0F: Foo + Bar>(bar: F) {}"#, ); } + + #[test] + fn replace_impl_with_mut() { + check_assist( + 
introduce_named_generic, + r#"fn f(iter: &mut $0impl Iterator<Item = i32>) {}"#, + r#"fn f<$0I: Iterator<Item = i32>>(iter: &mut I) {}"#, + ); + } + + #[test] + fn replace_impl_inside() { + check_assist( + introduce_named_generic, + r#"fn f(x: &mut Vec<$0impl Iterator<Item = i32>>) {}"#, + r#"fn f<$0I: Iterator<Item = i32>>(x: &mut Vec<I>) {}"#, + ); + } } diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs index 6ed9bd85fcc..67fea772c79 100644 --- a/crates/ide-assists/src/handlers/promote_local_to_const.rs +++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs @@ -11,7 +11,10 @@ use syntax::{ ted, AstNode, WalkEvent, }; -use crate::assist_context::{AssistContext, Assists}; +use crate::{ + assist_context::{AssistContext, Assists}, + utils, +}; // Assist: promote_local_to_const // @@ -79,15 +82,13 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) let name_ref = make::name_ref(&name); for usage in usages { - let Some(usage) = usage.name.as_name_ref().cloned() else { continue }; - if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage) { - let record_field = edit.make_mut(record_field); - let name_expr = - make::expr_path(make::path_from_text(&name)).clone_for_update(); - record_field.replace_expr(name_expr); + let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue }; + if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) { + let name_expr = make::expr_path(make::path_from_text(&name)); + utils::replace_record_field_expr(ctx, edit, record_field, name_expr); } else { - let usage = edit.make_mut(usage); - ted::replace(usage.syntax(), name_ref.clone_for_update().syntax()); + let usage_range = usage.range; + edit.replace(usage_range, name_ref.syntax().text()); } } } @@ -213,6 +214,76 @@ fn main() { } #[test] + fn usage_in_macro() { + check_assist( + promote_local_to_const, + r" +macro_rules! identity { + ($body:expr) => { + $body + } +} + +fn baz() -> usize { + let $0foo = 2; + identity![foo] +} +", + r" +macro_rules! identity { + ($body:expr) => { + $body + } +} + +fn baz() -> usize { + const $0FOO: usize = 2; + identity![FOO] +} +", + ) + } + + #[test] + fn usage_shorthand_in_macro() { + check_assist( + promote_local_to_const, + r" +struct Foo { + foo: usize, +} + +macro_rules! identity { + ($body:expr) => { + $body + }; +} + +fn baz() -> Foo { + let $0foo = 2; + identity![Foo { foo }] +} +", + r" +struct Foo { + foo: usize, +} + +macro_rules! 
identity { + ($body:expr) => { + $body + }; +} + +fn baz() -> Foo { + const $0FOO: usize = 2; + identity![Foo { foo: FOO }] +} +", + ) + } + + #[test] fn not_applicable_non_const_meth_call() { cov_mark::check!(promote_local_non_const); check_assist_not_applicable( diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs index ee44064e7c5..859ed1476c4 100644 --- a/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -423,7 +423,7 @@ mod z { struct X(); struct Y(); mod z { - use super::{X}; + use super::X; fn w() { let x = X(); @@ -495,7 +495,7 @@ struct X(); mod y { struct Y(); mod z { - use crate::{X}; + use crate::X; fn f() { let x = X(); } @@ -526,7 +526,7 @@ struct X(); mod y { struct Y(); mod z { - use crate::{y::Y}; + use crate::y::Y; fn f() { let y = Y(); } @@ -537,6 +537,184 @@ mod y { } #[test] + fn remove_unused_auto_remove_brace_nested() { + check_assist( + remove_unused_imports, + r#" +mod a { + pub struct A(); +} +mod b { + struct F(); + mod c { + $0use {{super::{{ + {d::{{{{{{{S, U}}}}}}}}, + {{{{e::{H, L, {{{R}}}}}}}}, + F, super::a::A + }}}};$0 + fn f() { + let f = F(); + let l = L(); + let a = A(); + let s = S(); + let h = H(); + } + } + + mod d { + pub struct S(); + pub struct U(); + } + + mod e { + pub struct H(); + pub struct L(); + pub struct R(); + } +} +"#, + r#" +mod a { + pub struct A(); +} +mod b { + struct F(); + mod c { + use super::{ + d::S, + e::{H, L}, + F, super::a::A + }; + fn f() { + let f = F(); + let l = L(); + let a = A(); + let s = S(); + let h = H(); + } + } + + mod d { + pub struct S(); + pub struct U(); + } + + mod e { + pub struct H(); + pub struct L(); + pub struct R(); + } +} +"#, + ); + } + + #[test] + fn remove_comma_after_auto_remove_brace() { + check_assist( + remove_unused_imports, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + } +} + +$0use m::{ + x::{A, B}, + y::C, +};$0 + +fn main() { + B; +} +"#, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + } +} + +use m:: + x::B +; + +fn main() { + B; +} +"#, + ); + check_assist( + remove_unused_imports, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + pub struct D; + } + pub mod z { + pub struct E; + pub struct F; + } +} + +$0use m::{ + x::{A, B}, + y::{C, D,}, + z::{E, F}, +};$0 + +fn main() { + B; + C; + F; +} +"#, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + pub struct D; + } + pub mod z { + pub struct E; + pub struct F; + } +} + +use m::{ + x::B, + y::C, + z::F, +}; + +fn main() { + B; + C; + F; +} +"#, + ); + } + + #[test] fn remove_nested_all_unused() { check_assist( remove_unused_imports, diff --git a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs index b1daaea1ed1..09759019baa 100644 --- a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs +++ b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs @@ -1,4 +1,7 @@ -use syntax::ast::{self, AstNode}; +use syntax::{ + ast::{self, make, AstNode}, + ted, +}; use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists}; @@ -42,19 +45,34 @@ pub(crate) fn replace_is_method_with_if_let_method( suggest_name::for_variable(&receiver, &ctx.sema) }; - let target = 
call_expr.syntax().text_range(); - let (assist_id, message, text) = if name_ref.text() == "is_some" { ("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some") } else { ("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok") }; - acc.add(AssistId(assist_id, AssistKind::RefactorRewrite), message, target, |edit| { - let var_name = format!("${{0:{}}}", var_name); - let replacement = format!("let {}({}) = {}", text, var_name, receiver); - edit.replace(target, replacement); - }) + acc.add( + AssistId(assist_id, AssistKind::RefactorRewrite), + message, + call_expr.syntax().text_range(), + |edit| { + let call_expr = edit.make_mut(call_expr); + + let var_pat = make::ident_pat(false, false, make::name(&var_name)); + let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]); + let let_expr = make::expr_let(pat.into(), receiver).clone_for_update(); + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() { + if let Some(first_var) = pat.fields().next() { + edit.add_placeholder_snippet(cap, first_var); + } + } + } + + ted::replace(call_expr.syntax(), let_expr.syntax()); + }, + ) } _ => return None, } diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index 25b3d6d9da9..95b9eb52948 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -5,13 +5,14 @@ mod sourcegen; use expect_test::expect; use hir::Semantics; use ide_db::{ - base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}, + base_db::{FileId, FileRange, SourceDatabaseExt}, imports::insert_use::{ImportGranularity, InsertUseConfig}, source_change::FileSystemEdit, RootDatabase, SnippetCap, }; use stdx::{format_to, trim_indent}; use syntax::TextRange; +use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; use crate::{ @@ -504,16 +505,33 @@ pub fn test_some_range(a: int) -> bool { TextEdit { indels: [ Indel { - insert: "let $0var_name = 5;\n ", - delete: 45..45, + insert: "let", + delete: 45..47, }, Indel { insert: "var_name", - delete: 59..60, + delete: 48..60, + }, + Indel { + insert: "=", + delete: 61..81, + }, + Indel { + insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }", + delete: 82..108, }, ], }, - None, + Some( + SnippetEdit( + [ + ( + 0, + 49..49, + ), + ], + ), + ), ), }, file_system_edits: [], @@ -566,16 +584,33 @@ pub fn test_some_range(a: int) -> bool { TextEdit { indels: [ Indel { - insert: "let $0var_name = 5;\n ", - delete: 45..45, + insert: "let", + delete: 45..47, }, Indel { insert: "var_name", - delete: 59..60, + delete: 48..60, + }, + Indel { + insert: "=", + delete: 61..81, + }, + Indel { + insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }", + delete: 82..108, }, ], }, - None, + Some( + SnippetEdit( + [ + ( + 0, + 49..49, + ), + ], + ), + ), ), }, file_system_edits: [], diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index f51e99a914e..927a8e3c19a 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -813,3 +813,21 @@ fn test_required_hashes() { assert_eq!(3, required_hashes("#ab\"##c")); assert_eq!(5, required_hashes("#ab\"##\"####c")); } + +/// Replaces the record expression, handling field shorthands including inside macros. 
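// (Editor's note, not part of the commit.) For a shorthand field the helper below
// keeps the original path and appends `: <initializer>` after it, because the
// shorthand *is* the binding being rewritten; only a plain `field: expr` form is
// replaced outright. Illustration, reusing the `bool_to_enum` tests earlier in
// this diff:
//
//     Struct { boolean }            // shorthand: insert after `boolean`, giving
//                                   // Struct { boolean: boolean == Bool::True }
//     Struct { boolean: expr }      // plain field: replace `expr` in place
//
// Mapping nodes through `ctx.sema.original_range` is what makes the edit land in
// the real file even when the record expression sits inside a macro invocation.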
+pub(crate) fn replace_record_field_expr( + ctx: &AssistContext<'_>, + edit: &mut SourceChangeBuilder, + record_field: ast::RecordExprField, + initializer: ast::Expr, +) { + if let Some(ast::Expr::PathExpr(path_expr)) = record_field.expr() { + // replace field shorthand + let file_range = ctx.sema.original_range(path_expr.syntax()); + edit.insert(file_range.range.end(), format!(": {}", initializer.syntax().text())) + } else if let Some(expr) = record_field.expr() { + // just replace expr + let file_range = ctx.sema.original_range(expr.syntax()); + edit.replace(file_range.range, initializer.syntax().text()); + } +} diff --git a/crates/ide-assists/src/utils/suggest_name.rs b/crates/ide-assists/src/utils/suggest_name.rs index 16704d598ef..2f1b47b4b4b 100644 --- a/crates/ide-assists/src/utils/suggest_name.rs +++ b/crates/ide-assists/src/utils/suggest_name.rs @@ -275,7 +275,8 @@ fn from_field_name(expr: &ast::Expr) -> Option<String> { #[cfg(test)] mod tests { - use ide_db::base_db::{fixture::WithFixture, FileRange}; + use ide_db::base_db::FileRange; + use test_fixture::WithFixture; use super::*; diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml index 60f90a41b96..f7fec3cf773 100644 --- a/crates/ide-completion/Cargo.toml +++ b/crates/ide-completion/Cargo.toml @@ -35,3 +35,4 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs index 466f0b1fb7f..9155caa2e0b 100644 --- a/crates/ide-completion/src/completions/attribute.rs +++ b/crates/ide-completion/src/completions/attribute.rs @@ -26,6 +26,7 @@ mod cfg; mod derive; mod lint; mod repr; +mod macro_use; pub(crate) use self::derive::complete_derive_path; @@ -35,6 +36,7 @@ pub(crate) fn complete_known_attribute_input( ctx: &CompletionContext<'_>, &colon_prefix: &bool, fake_attribute_under_caret: &ast::Attr, + extern_crate: Option<&ast::ExternCrate>, ) -> Option<()> { let attribute = fake_attribute_under_caret; let name_ref = match attribute.path() { @@ -66,6 +68,9 @@ pub(crate) fn complete_known_attribute_input( lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints); } "cfg" => cfg::complete_cfg(acc, ctx), + "macro_use" => { + macro_use::complete_macro_use(acc, ctx, extern_crate, &parse_tt_as_comma_sep_paths(tt)?) + } _ => (), } Some(()) diff --git a/crates/ide-completion/src/completions/attribute/macro_use.rs b/crates/ide-completion/src/completions/attribute/macro_use.rs new file mode 100644 index 00000000000..f45f9cba258 --- /dev/null +++ b/crates/ide-completion/src/completions/attribute/macro_use.rs @@ -0,0 +1,35 @@ +//! 
Completion for macros in `#[macro_use(...)]` +use hir::ModuleDef; +use ide_db::SymbolKind; +use syntax::ast; + +use crate::{context::CompletionContext, item::CompletionItem, Completions}; + +pub(super) fn complete_macro_use( + acc: &mut Completions, + ctx: &CompletionContext<'_>, + extern_crate: Option<&ast::ExternCrate>, + existing_imports: &[ast::Path], +) { + let Some(extern_crate) = extern_crate else { return }; + let Some(extern_crate) = ctx.sema.to_def(extern_crate) else { return }; + let Some(krate) = extern_crate.resolved_crate(ctx.db) else { return }; + + for mod_def in krate.root_module().declarations(ctx.db) { + if let ModuleDef::Macro(mac) = mod_def { + let mac_name = mac.name(ctx.db); + let Some(mac_name) = mac_name.as_str() else { continue }; + + let existing_import = existing_imports + .iter() + .filter_map(|p| p.as_single_name_ref()) + .find(|n| n.text() == mac_name); + if existing_import.is_some() { + continue; + } + + let item = CompletionItem::new(SymbolKind::Macro, ctx.source_range(), mac_name); + item.add_to(acc, ctx.db); + } + } +} diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 613a35dcb10..53a1c8405c2 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -27,6 +27,8 @@ pub(crate) fn complete_dot( } let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. }); + let is_method_acces_with_parens = + matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); complete_fields( acc, @@ -35,6 +37,7 @@ pub(crate) fn complete_dot( |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), is_field_access, + is_method_acces_with_parens, ); complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); @@ -83,6 +86,7 @@ pub(crate) fn complete_undotted_self( }, |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty), true, + false, ); complete_methods(ctx, &ty, |func| { acc.add_method( @@ -106,12 +110,14 @@ fn complete_fields( mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type), mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type), is_field_access: bool, + is_method_acess_with_parens: bool, ) { let mut seen_names = FxHashSet::default(); for receiver in receiver.autoderef(ctx.db) { for (field, ty) in receiver.fields(ctx.db) { if seen_names.insert(field.name(ctx.db)) - && (is_field_access || ty.is_fn() || ty.is_closure()) + && (is_field_access + || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure()))) { named_field(acc, field, ty); } @@ -120,7 +126,8 @@ fn complete_fields( // Tuples are always the last type in a deref chain, so just check if the name is // already seen without inserting into the hashset. if !seen_names.contains(&hir::Name::new_tuple_field(i)) - && (is_field_access || ty.is_fn() || ty.is_closure()) + && (is_field_access + || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure()))) { // Tuple fields are always public (tuple struct fields are handled above). 
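                // (Editor's aside, not part of the commit.) The new parens flag
                // narrows when callable fields are offered, e.g. given
                // `struct Foo { baz: fn() }`:
                //
                //     foo.ba$0      // field position: `baz` completed as a field
                //     foo.ba$0()    // method position with parens: `baz` offered,
                //                   // since `(foo.baz)()` is a sensible rewrite
                //     foo.ba$0::<>  // method position without parens: `baz`
                //                   // suppressed, as the test below demonstrates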
tuple_index(acc, i, ty); @@ -1236,4 +1243,24 @@ fn foo() { "#, ) } + + #[test] + fn test_fn_field_dot_access_method_has_parens_false() { + check( + r#" +struct Foo { baz: fn() } +impl Foo { + fn bar<T>(self, t: T): T { t } +} + +fn baz() { + let foo = Foo{ baz: || {} }; + foo.ba$0::<>; +} +"#, + expect![[r#" + me bar(…) fn(self, T) + "#]], + ); + } } diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 0da7ba6d000..108b040de6b 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -371,6 +371,7 @@ pub(super) enum CompletionAnalysis { UnexpandedAttrTT { colon_prefix: bool, fake_attribute_under_caret: Option<ast::Attr>, + extern_crate: Option<ast::ExternCrate>, }, } @@ -693,7 +694,7 @@ impl<'a> CompletionContext<'a> { let krate = scope.krate(); let module = scope.module(); - let toolchain = db.crate_graph()[krate.into()].channel; + let toolchain = db.crate_graph()[krate.into()].channel(); // `toolchain == None` means we're in some detached files. Since we have no information on // the toolchain being used, let's just allow unstable items to be listed. let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None); diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 1e6b2f319aa..2d62c45174f 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -1,7 +1,7 @@ //! Module responsible for analyzing the code surrounding the cursor for completion. use std::iter; -use hir::{HasSource, Semantics, Type, TypeInfo, Variant}; +use hir::{Semantics, Type, TypeInfo, Variant}; use ide_db::{active_parameter::ActiveParameter, RootDatabase}; use syntax::{ algo::{find_node_at_offset, non_trivia_sibling}, @@ -254,11 +254,13 @@ fn analyze( { let colon_prefix = previous_non_trivia_token(self_token.clone()) .map_or(false, |it| T![:] == it.kind()); + CompletionAnalysis::UnexpandedAttrTT { fake_attribute_under_caret: fake_ident_token .parent_ancestors() .find_map(ast::Attr::cast), colon_prefix, + extern_crate: p.ancestors().find_map(ast::ExternCrate::cast), } } else { return None; @@ -740,13 +742,13 @@ fn classify_name_ref( match sema.resolve_path(&segment.parent_path().top_path())? 
{ hir::PathResolution::Def(def) => match def { hir::ModuleDef::Function(func) => { - func.source(sema.db)?.value.generic_param_list() + sema.source(func)?.value.generic_param_list() } hir::ModuleDef::Adt(adt) => { - adt.source(sema.db)?.value.generic_param_list() + sema.source(adt)?.value.generic_param_list() } hir::ModuleDef::Variant(variant) => { - variant.parent_enum(sema.db).source(sema.db)?.value.generic_param_list() + sema.source(variant.parent_enum(sema.db))?.value.generic_param_list() } hir::ModuleDef::Trait(trait_) => { if let ast::GenericArg::AssocTypeArg(arg) = &arg { @@ -772,14 +774,14 @@ fn classify_name_ref( return None; } else { in_trait = Some(trait_); - trait_.source(sema.db)?.value.generic_param_list() + sema.source(trait_)?.value.generic_param_list() } } hir::ModuleDef::TraitAlias(trait_) => { - trait_.source(sema.db)?.value.generic_param_list() + sema.source(trait_)?.value.generic_param_list() } hir::ModuleDef::TypeAlias(ty_) => { - ty_.source(sema.db)?.value.generic_param_list() + sema.source(ty_)?.value.generic_param_list() } _ => None, }, @@ -788,7 +790,7 @@ fn classify_name_ref( }, ast::MethodCallExpr(call) => { let func = sema.resolve_method_call(&call)?; - func.source(sema.db)?.value.generic_param_list() + sema.source(func)?.value.generic_param_list() }, ast::AssocTypeArg(arg) => { let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?; @@ -805,7 +807,7 @@ fn classify_name_ref( }, _ => None, })?; - assoc_ty.source(sema.db)?.value.generic_param_list() + sema.source(*assoc_ty)?.value.generic_param_list() } _ => None, }, diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 37a2828e8dc..ff324e7a56d 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -211,12 +211,14 @@ pub fn completions( CompletionAnalysis::UnexpandedAttrTT { colon_prefix, fake_attribute_under_caret: Some(attr), + extern_crate, } => { completions::attribute::complete_known_attribute_input( acc, ctx, colon_prefix, attr, + extern_crate.as_ref(), ); } CompletionAnalysis::UnexpandedAttrTT { .. } | CompletionAnalysis::String { .. } => (), diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index f28afacc586..f13754e2ded 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -26,12 +26,13 @@ mod visibility; use expect_test::Expect; use hir::PrefixKind; use ide_db::{ - base_db::{fixture::ChangeFixture, FileLoader, FilePosition}, + base_db::{FileLoader, FilePosition}, imports::insert_use::{ImportGranularity, InsertUseConfig}, RootDatabase, SnippetCap, }; use itertools::Itertools; use stdx::{format_to, trim_indent}; +use test_fixture::ChangeFixture; use test_utils::assert_eq_text; use crate::{ diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs index d8c134c533b..351abe9850b 100644 --- a/crates/ide-completion/src/tests/attribute.rs +++ b/crates/ide-completion/src/tests/attribute.rs @@ -1067,3 +1067,82 @@ mod repr { ); } } + +mod macro_use { + use super::*; + + #[test] + fn completes_macros() { + check( + r#" +//- /dep.rs crate:dep +#[macro_export] +macro_rules! foo { + () => {}; +} + +#[macro_export] +macro_rules! bar { + () => {}; +} + +//- /main.rs crate:main deps:dep +#[macro_use($0)] +extern crate dep; +"#, + expect![[r#" + ma bar + ma foo + "#]], + ) + } + + #[test] + fn only_completes_exported_macros() { + check( + r#" +//- /dep.rs crate:dep +#[macro_export] +macro_rules! 
foo { + () => {}; +} + +macro_rules! bar { + () => {}; +} + +//- /main.rs crate:main deps:dep +#[macro_use($0)] +extern crate dep; +"#, + expect![[r#" + ma foo + "#]], + ) + } + + #[test] + fn does_not_completes_already_imported_macros() { + check( + r#" +//- /dep.rs crate:dep +#[macro_export] +macro_rules! foo { + () => {}; +} + +#[macro_export] +macro_rules! bar { + () => {}; +} + +//- /main.rs crate:main deps:dep +#[macro_use(foo, $0)] +extern crate dep; +"#, + expect![[r#" + ma bar + "#]], + ) + } +} diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml index 4a2e770f193..a3b1dc5b604 100644 --- a/crates/ide-db/Cargo.toml +++ b/crates/ide-db/Cargo.toml @@ -16,11 +16,11 @@ cov-mark = "2.0.0-pre.1" tracing.workspace = true rayon.workspace = true fst = { version = "0.4.7", default-features = false } -rustc-hash = "1.1.0" +rustc-hash.workspace = true once_cell = "1.17.0" either.workspace = true itertools.workspace = true -arrayvec = "0.7.2" +arrayvec.workspace = true indexmap.workspace = true memchr = "2.6.4" triomphe.workspace = true @@ -34,6 +34,7 @@ profile.workspace = true stdx.workspace = true syntax.workspace = true text-edit.workspace = true +span.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. hir.workspace = true @@ -47,4 +48,5 @@ xshell.workspace = true # local deps test-utils.workspace = true +test-fixture.workspace = true sourcegen.workspace = true diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 343be870c9e..db6cd128e83 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -5,13 +5,13 @@ use base_db::{ debug::{DebugQueryTable, TableEntry}, Database, Durability, Query, QueryTable, }, - Change, SourceRootId, + SourceRootId, }; use profile::{memory_usage, Bytes}; use rustc_hash::FxHashSet; use triomphe::Arc; -use crate::{symbol_index::SymbolsDatabase, RootDatabase}; +use crate::{symbol_index::SymbolsDatabase, Change, RootDatabase}; impl RootDatabase { pub fn request_cancellation(&mut self) { @@ -23,7 +23,7 @@ impl RootDatabase { let _p = profile::span("RootDatabase::apply_change"); self.request_cancellation(); tracing::trace!("apply_change {:?}", change); - if let Some(roots) = &change.roots { + if let Some(roots) = &change.source_change.roots { let mut local_roots = FxHashSet::default(); let mut library_roots = FxHashSet::default(); for (idx, root) in roots.iter().enumerate() { @@ -87,7 +87,6 @@ impl RootDatabase { // SourceDatabase base_db::ParseQuery base_db::CrateGraphQuery - base_db::ProcMacrosQuery // SourceDatabaseExt base_db::FileTextQuery @@ -104,6 +103,7 @@ impl RootDatabase { hir::db::MacroArgQuery hir::db::ParseMacroExpansionQuery hir::db::RealSpanMapQuery + hir::db::ProcMacrosQuery // DefDatabase hir::db::FileItemTreeQuery diff --git a/crates/ide-db/src/documentation.rs b/crates/ide-db/src/documentation.rs index 26f3cd28a27..cc8e8431708 100644 --- a/crates/ide-db/src/documentation.rs +++ b/crates/ide-db/src/documentation.rs @@ -138,15 +138,13 @@ pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> { for doc in docs { // str::lines doesn't yield anything for the empty string if !doc.is_empty() { - buf.extend(Itertools::intersperse( - doc.lines().map(|line| { - line.char_indices() - .nth(indent) - .map_or(line, |(offset, _)| &line[offset..]) - .trim_end() - }), - "\n", - )); + // We don't trim trailing whitespace from doc comments as multiple trailing spaces + // 
indicate a hard line break in Markdown.
+            let lines = doc.lines().map(|line| {
+                line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..])
+            });
+
+            buf.extend(Itertools::intersperse(lines, "\n"));
         }
         buf.push('\n');
     }
diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs
index 01d2f1970c3..a3abce89642 100644
--- a/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/crates/ide-db/src/imports/insert_use/tests.rs
@@ -1,6 +1,6 @@
-use base_db::fixture::WithFixture;
 use hir::PrefixKind;
 use stdx::trim_indent;
+use test_fixture::WithFixture;
 use test_utils::{assert_eq_text, CURSOR_MARKER};

 use super::*;
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index fefc05e5355..128971994f6 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -43,6 +43,8 @@ pub mod syntax_helpers {
     pub use parser::LexedStr;
 }

+pub use hir::Change;
+
 use std::{fmt, mem::ManuallyDrop};

 use base_db::{
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index d2b6a732689..7f28965885a 100644
--- a/crates/ide-db/src/rename.rs
+++ b/crates/ide-db/src/rename.rs
@@ -22,9 +22,10 @@
 //! Our current behavior is ¯\_(ツ)_/¯.
 use std::fmt;

-use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange};
+use base_db::{AnchoredPathBuf, FileId, FileRange};
 use either::Either;
 use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics};
+use span::SyntaxContextId;
 use stdx::{never, TupleExt};
 use syntax::{
     ast::{self, HasName},
@@ -515,7 +516,7 @@ fn source_edit_from_def(
     if let Definition::Local(local) = def {
         let mut file_id = None;
         for source in local.sources(sema.db) {
-            let source = match source.source.clone().original_ast_node(sema.db) {
+            let source = match source.source.clone().original_ast_node_rooted(sema.db) {
                 Some(source) => source,
                 None => match source
                     .source
@@ -559,7 +560,7 @@ fn source_edit_from_def(
             }
         } else {
             // Foo { ref mut field } -> Foo { field: ref mut new_name }
             //      ^ insert `field: `
             //      ^^^^^ replace this with `new_name`
             edit.insert(
                 pat.syntax().text_range().start(),
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index be8566b759c..24d08500f13 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -378,9 +378,9 @@ impl Query {

 #[cfg(test)]
 mod tests {
-    use base_db::fixture::WithFixture;
     use expect_test::expect_file;
     use hir::symbols::SymbolCollector;
+    use test_fixture::WithFixture;

     use super::*;
diff --git a/crates/ide-db/src/traits.rs b/crates/ide-db/src/traits.rs
index 9abbc344142..bbdfd81d653 100644
--- a/crates/ide-db/src/traits.rs
+++ b/crates/ide-db/src/traits.rs
@@ -113,10 +113,11 @@ fn assoc_item_of_trait(

 #[cfg(test)]
 mod tests {
-    use base_db::{fixture::ChangeFixture, FilePosition};
+    use base_db::FilePosition;
     use expect_test::{expect, Expect};
     use hir::Semantics;
     use syntax::ast::{self, AstNode};
+    use test_fixture::ChangeFixture;

     use crate::RootDatabase;
diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml
index f4055024cc3..fd77badbd65 100644
--- a/crates/ide-diagnostics/Cargo.toml
+++ b/crates/ide-diagnostics/Cargo.toml
@@ -32,6 +32,7 @@ expect-test = "1.4.0"

 # local deps
 test-utils.workspace = true
+test-fixture.workspace = true
 sourcegen.workspace = true
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index 82001439146..c202264bb56 100644 --- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -76,4 +76,24 @@ impl Marker for Foo { "#, ) } + + #[test] + fn dont_work_for_negative_impl() { + check_diagnostics( + r#" +trait Marker { + const FLAG: bool = false; + fn boo(); + fn foo () {} +} +struct Foo; +impl !Marker for Foo { + type T = i32; + const FLAG: bool = true; + fn bar() {} + fn boo() {} +} + "#, + ) + } } diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 6541bf60579..579386c72ef 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -94,7 +94,7 @@ use syntax::{ }; // FIXME: Make this an enum -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum DiagnosticCode { RustcHardError(&'static str), RustcLint(&'static str), @@ -198,7 +198,7 @@ impl Diagnostic { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Severity { Error, Warning, diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 48e0363c9ca..67912a3a03e 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -3,12 +3,11 @@ mod sourcegen; use expect_test::Expect; use ide_db::{ - assists::AssistResolveStrategy, - base_db::{fixture::WithFixture, SourceDatabaseExt}, - LineIndexDatabase, RootDatabase, + assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase, }; use itertools::Itertools; use stdx::trim_indent; +use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_annotations, MiniCore}; use crate::{DiagnosticsConfig, ExprFillDefaultMode, Severity}; diff --git a/crates/ide-ssr/Cargo.toml b/crates/ide-ssr/Cargo.toml index 56b29f92b82..9ea7beaf985 100644 --- a/crates/ide-ssr/Cargo.toml +++ b/crates/ide-ssr/Cargo.toml @@ -31,3 +31,4 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs index 424ba3d7fd5..7c7d146cb4a 100644 --- a/crates/ide-ssr/src/tests.rs +++ b/crates/ide-ssr/src/tests.rs @@ -65,8 +65,8 @@ fn parser_undefined_placeholder_in_replacement() { /// `code` may optionally contain a cursor marker `$0`. If it doesn't, then the position will be /// the start of the file. If there's a second cursor marker, then we'll return a single range. 
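// (Editor's note, not part of the commit.) The `use` change in the hunk below is
// one instance of a PR-wide migration: the fixture machinery moved out of
// `base_db::fixture` into a dedicated `test-fixture` crate. The mechanical edit
// repeated across test modules is:
//
//     - use ide_db::base_db::fixture::WithFixture;
//     + use test_fixture::WithFixture;
//
// with `ChangeFixture` and the `WORKSPACE` source root re-exported from the same
// crate, as the surrounding hunks show.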
pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) { - use ide_db::base_db::fixture::WithFixture; use ide_db::symbol_index::SymbolsDatabase; + use test_fixture::{WithFixture, WORKSPACE}; let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { ide_db::RootDatabase::with_range_or_offset(code) } else { @@ -86,7 +86,7 @@ pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Ve } } let mut local_roots = FxHashSet::default(); - local_roots.insert(ide_db::base_db::fixture::WORKSPACE); + local_roots.insert(WORKSPACE); db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); (db, position, selections) } diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 0943574ec1b..daff8bdac12 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" crossbeam-channel = "0.5.5" -arrayvec = "0.7.4" +arrayvec.workspace = true either.workspace = true itertools.workspace = true tracing.workspace = true @@ -50,6 +50,7 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true [features] in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"] diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs index d7f82b4af3e..f49c5af0af1 100644 --- a/crates/ide/src/annotations.rs +++ b/crates/ide/src/annotations.rs @@ -3,8 +3,9 @@ use ide_db::{ base_db::{FileId, FilePosition, FileRange}, defs::Definition, helpers::visit_file_defs, - RootDatabase, + FxHashSet, RootDatabase, }; +use itertools::Itertools; use syntax::{ast::HasName, AstNode, TextRange}; use crate::{ @@ -23,13 +24,13 @@ mod fn_references; // and running/debugging binaries. 
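// (Editor's note, not part of the commit.) The annotations change below swaps the
// `Vec` accumulator for an `FxHashSet`, which is why `Annotation` and
// `AnnotationKind` gain `Hash`/`PartialEq`/`Eq` derives: duplicate annotations
// collapse, and a deterministic order is restored on the way out with
//
//     annotations.into_iter().sorted_by_key(|a| (a.range.start(), a.range.end())).collect()
//
// That reordering is also what reshuffles the expect-test blocks that follow.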
// // image::https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png[] -#[derive(Debug)] +#[derive(Debug, Hash, PartialEq, Eq)] pub struct Annotation { pub range: TextRange, pub kind: AnnotationKind, } -#[derive(Debug)] +#[derive(Debug, Hash, PartialEq, Eq)] pub enum AnnotationKind { Runnable(Runnable), HasImpls { pos: FilePosition, data: Option<Vec<NavigationTarget>> }, @@ -56,7 +57,7 @@ pub(crate) fn annotations( config: &AnnotationConfig, file_id: FileId, ) -> Vec<Annotation> { - let mut annotations = Vec::default(); + let mut annotations = FxHashSet::default(); if config.annotate_runnables { for runnable in runnables(db, file_id) { @@ -66,7 +67,7 @@ pub(crate) fn annotations( let range = runnable.nav.focus_or_full_range(); - annotations.push(Annotation { range, kind: AnnotationKind::Runnable(runnable) }); + annotations.insert(Annotation { range, kind: AnnotationKind::Runnable(runnable) }); } } @@ -99,13 +100,13 @@ pub(crate) fn annotations( }) .for_each(|range| { let (annotation_range, target_position) = mk_ranges(range); - annotations.push(Annotation { + annotations.insert(Annotation { range: annotation_range, kind: AnnotationKind::HasReferences { pos: target_position, data: None, }, - }) + }); }) } if config.annotate_references || config.annotate_impls { @@ -131,14 +132,14 @@ pub(crate) fn annotations( }; let (annotation_range, target_pos) = mk_ranges(range); if config.annotate_impls && !matches!(def, Definition::Const(_)) { - annotations.push(Annotation { + annotations.insert(Annotation { range: annotation_range, kind: AnnotationKind::HasImpls { pos: target_pos, data: None }, }); } if config.annotate_references { - annotations.push(Annotation { + annotations.insert(Annotation { range: annotation_range, kind: AnnotationKind::HasReferences { pos: target_pos, data: None }, }); @@ -149,7 +150,7 @@ pub(crate) fn annotations( node: InFile<T>, source_file_id: FileId, ) -> Option<(TextRange, Option<TextRange>)> { - if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) { + if let Some(InRealFile { file_id, value }) = node.original_ast_node_rooted(db) { if file_id == source_file_id { return Some(( value.syntax().text_range(), @@ -171,7 +172,7 @@ pub(crate) fn annotations( })); } - annotations + annotations.into_iter().sorted_by_key(|a| (a.range.start(), a.range.end())).collect() } pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation { @@ -253,25 +254,6 @@ fn main() { expect![[r#" [ Annotation { - range: 53..57, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 50..85, - focus_range: 53..57, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - ), - }, - Annotation { range: 6..10, kind: HasReferences { pos: FilePosition { @@ -308,6 +290,25 @@ fn main() { }, Annotation { range: 53..57, + kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 50..85, + focus_range: 53..57, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, + Annotation { + range: 53..57, kind: HasReferences { pos: FilePosition { file_id: FileId( @@ -338,27 +339,8 @@ fn main() { expect![[r#" [ Annotation { - range: 17..21, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 14..48, - focus_range: 17..21, - name: "main", - kind: Function, - }, - kind: Bin, - 
cfg: None, - }, - ), - }, - Annotation { range: 7..11, - kind: HasImpls { + kind: HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -366,13 +348,20 @@ fn main() { offset: 7, }, data: Some( - [], + [ + FileRange { + file_id: FileId( + 0, + ), + range: 41..45, + }, + ], ), }, }, Annotation { range: 7..11, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, @@ -380,19 +369,31 @@ fn main() { offset: 7, }, data: Some( - [ - FileRange { - file_id: FileId( - 0, - ), - range: 41..45, - }, - ], + [], ), }, }, Annotation { range: 17..21, + kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 14..48, + focus_range: 17..21, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, + Annotation { + range: 17..21, kind: HasReferences { pos: FilePosition { file_id: FileId( @@ -427,27 +428,8 @@ fn main() { expect![[r#" [ Annotation { - range: 69..73, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 66..100, - focus_range: 69..73, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - ), - }, - Annotation { range: 7..11, - kind: HasImpls { + kind: HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -456,14 +438,17 @@ fn main() { }, data: Some( [ - NavigationTarget { + FileRange { file_id: FileId( 0, ), - full_range: 36..64, - focus_range: 57..61, - name: "impl", - kind: Impl, + range: 57..61, + }, + FileRange { + file_id: FileId( + 0, + ), + range: 93..97, }, ], ), @@ -471,7 +456,7 @@ fn main() { }, Annotation { range: 7..11, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, @@ -480,17 +465,14 @@ fn main() { }, data: Some( [ - FileRange { - file_id: FileId( - 0, - ), - range: 57..61, - }, - FileRange { + NavigationTarget { file_id: FileId( 0, ), - range: 93..97, + full_range: 36..64, + focus_range: 57..61, + name: "impl", + kind: Impl, }, ], ), @@ -555,6 +537,25 @@ fn main() { ), }, }, + Annotation { + range: 69..73, + kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 66..100, + focus_range: 69..73, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, ] "#]], ); @@ -623,27 +624,8 @@ fn main() { expect![[r#" [ Annotation { - range: 61..65, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 58..95, - focus_range: 61..65, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - ), - }, - Annotation { range: 7..11, - kind: HasImpls { + kind: HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -652,14 +634,17 @@ fn main() { }, data: Some( [ - NavigationTarget { + FileRange { file_id: FileId( 0, ), - full_range: 14..56, - focus_range: 19..23, - name: "impl", - kind: Impl, + range: 19..23, + }, + FileRange { + file_id: FileId( + 0, + ), + range: 74..78, }, ], ), @@ -667,7 +652,7 @@ fn main() { }, Annotation { range: 7..11, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, @@ -676,17 +661,14 @@ fn main() { }, data: Some( [ - FileRange { - file_id: FileId( - 0, - ), - range: 19..23, - }, - FileRange { + NavigationTarget { file_id: FileId( 0, ), - range: 74..78, + full_range: 14..56, + focus_range: 19..23, + name: "impl", + kind: Impl, }, ], ), @@ -727,6 +709,25 @@ fn main() { ), }, }, + Annotation { + range: 61..65, + 
kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 58..95, + focus_range: 61..65, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, ] "#]], ); @@ -747,6 +748,20 @@ mod tests { [ Annotation { range: 3..7, + kind: HasReferences { + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 3, + }, + data: Some( + [], + ), + }, + }, + Annotation { + range: 3..7, kind: Runnable( Runnable { use_name_in_title: false, @@ -812,20 +827,6 @@ mod tests { }, ), }, - Annotation { - range: 3..7, - kind: HasReferences { - pos: FilePosition { - file_id: FileId( - 0, - ), - offset: 3, - }, - data: Some( - [], - ), - }, - }, ] "#]], ); @@ -877,7 +878,7 @@ struct Foo; [ Annotation { range: 0..71, - kind: HasImpls { + kind: HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -891,7 +892,7 @@ struct Foo; }, Annotation { range: 0..71, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index 9760f9daf0a..a36082bafcf 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -492,7 +492,7 @@ fn get_doc_base_urls( let Some(krate) = def.krate(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() }; let crate_data = &db.crate_graph()[krate.into()]; - let channel = crate_data.channel.map_or("nightly", ReleaseChannel::as_str); + let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str(); let (web_base, local_base) = match &crate_data.origin { // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs index 2e5903c0602..3b19b85c4bc 100644 --- a/crates/ide/src/fixture.rs +++ b/crates/ide/src/fixture.rs @@ -1,5 +1,5 @@ //! Utilities for creating `Analysis` instances for tests. -use ide_db::base_db::fixture::ChangeFixture; +use test_fixture::ChangeFixture; use test_utils::{extract_annotations, RangeOrOffset}; use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 7491879a67f..e0beba8fb38 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -4,7 +4,7 @@ use crate::{ doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget, RangeInfo, TryToNav, }; -use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics}; +use hir::{AsAssocItem, AssocItem, DescendPreference, ModuleDef, Semantics}; use ide_db::{ base_db::{AnchoredPath, FileId, FileLoader}, defs::{Definition, IdentClass}, @@ -73,10 +73,15 @@ pub(crate) fn goto_definition( .into_iter() .filter_map(|token| { let parent = token.parent()?; + if let Some(tt) = ast::TokenTree::cast(parent.clone()) { if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) { return Some(vec![x]); } + + if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.clone()) { + return Some(vec![x]); + } } Some( IdentClass::classify_node(sema, &parent)? 
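The two added calls above wire `#[macro_use(...)]` identifiers into `try_lookup_macro_def_in_macro_use`, defined in the next hunk: it climbs from the token to the enclosing `extern crate`, resolves that crate, and scans its root module's declarations for an exported macro with a matching name. In source terms (hypothetical two-crate layout, mirroring the tests further down):

```rust
// crate `main`
#[macro_use(foo)] // go-to-definition on `foo` here now resolves...
extern crate mac;

// crate `mac`
#[macro_export]
macro_rules! foo { // ...to this definition in `mac`'s root module.
    () => {};
}
```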
@@ -140,6 +145,27 @@ fn try_lookup_include_path( }) } +fn try_lookup_macro_def_in_macro_use( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, +) -> Option<NavigationTarget> { + let extern_crate = token.parent()?.ancestors().find_map(ast::ExternCrate::cast)?; + let extern_crate = sema.to_def(&extern_crate)?; + let krate = extern_crate.resolved_crate(sema.db)?; + + for mod_def in krate.root_module().declarations(sema.db) { + if let ModuleDef::Macro(mac) = mod_def { + if mac.name(sema.db).as_str() == Some(token.text()) { + if let Some(nav) = mac.try_to_nav(sema.db) { + return Some(nav.call_site); + } + } + } + } + + None +} + /// finds the trait definition of an impl'd item, except function /// e.g. /// ```rust @@ -2081,4 +2107,47 @@ fn test() { "#, ); } + + #[test] + fn goto_macro_def_from_macro_use() { + check( + r#" +//- /main.rs crate:main deps:mac +#[macro_use(foo$0)] +extern crate mac; + +//- /mac.rs crate:mac +#[macro_export] +macro_rules! foo { + //^^^ + () => {}; +} + "#, + ); + + check( + r#" +//- /main.rs crate:main deps:mac +#[macro_use(foo, bar$0, baz)] +extern crate mac; + +//- /mac.rs crate:mac +#[macro_export] +macro_rules! foo { + () => {}; +} + +#[macro_export] +macro_rules! bar { + //^^^ + () => {}; +} + +#[macro_export] +macro_rules! baz { + () => {}; +} + "#, + ); + } } diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 6384db39d7c..c1a4a7b1fc7 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -4,7 +4,6 @@ use ide_db::{ helpers::pick_best_token, RootDatabase, }; -use itertools::Itertools; use syntax::{ast, AstNode, SyntaxKind::*, T}; use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; @@ -34,10 +33,10 @@ pub(crate) fn goto_implementation( })?; let range = original_token.text_range(); let navs = - sema.descend_into_macros(DescendPreference::None, original_token) - .into_iter() - .filter_map(|token| token.parent().and_then(ast::NameLike::cast)) - .filter_map(|node| match &node { + sema.descend_into_macros_single(DescendPreference::SameText, original_token) + .parent() + .and_then(ast::NameLike::cast) + .and_then(|node| match &node { ast::NameLike::Name(name) => { NameClass::classify(&sema, name).and_then(|class| match class { NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), @@ -52,8 +51,7 @@ pub(crate) fn goto_implementation( }), ast::NameLike::Lifetime(_) => None, }) - .unique() - .filter_map(|def| { + .and_then(|def| { let navs = match def { Definition::Trait(trait_) => impls_for_trait(&sema, trait_), Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)), @@ -75,8 +73,7 @@ pub(crate) fn goto_implementation( }; Some(navs) }) - .flatten() - .collect(); + .unwrap_or_default(); Some(RangeInfo { range, info: navs }) } diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index e3548f3f0cb..6ff16b9e2f7 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -67,6 +67,7 @@ use std::ffi::OsStr; use cfg::CfgOptions; use fetch_crates::CrateInfo; +use hir::Change; use ide_db::{ base_db::{ salsa::{self, ParallelDatabase}, @@ -122,7 +123,7 @@ pub use ide_completion::{ }; pub use ide_db::{ base_db::{ - Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, + Cancelled, CrateGraph, CrateId, Edition, FileChange, FileId, FilePosition, FileRange, SourceRoot, SourceRootId, }, documentation::Documentation, @@ -133,7 +134,9 @@ pub use ide_db::{ symbol_index::Query, RootDatabase, SymbolKind, }; -pub use 
ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity}; +pub use ide_diagnostics::{ + Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity, +}; pub use ide_ssr::SsrError; pub use syntax::{TextRange, TextSize}; pub use text_edit::{Indel, TextEdit}; @@ -181,7 +184,7 @@ impl AnalysisHost { /// Applies changes to the current state of the world. If there are /// outstanding snapshots, they will be canceled. pub fn apply_change(&mut self, change: Change) { - self.db.apply_change(change) + self.db.apply_change(change); } /// NB: this clears the database diff --git a/crates/ide/src/markdown_remove.rs b/crates/ide/src/markdown_remove.rs index 718868c8747..fa01875e204 100644 --- a/crates/ide/src/markdown_remove.rs +++ b/crates/ide/src/markdown_remove.rs @@ -6,6 +6,7 @@ use pulldown_cmark::{Event, Parser, Tag}; /// Currently limited in styling, i.e. no ascii tables or lists pub(crate) fn remove_markdown(markdown: &str) -> String { let mut out = String::new(); + out.reserve_exact(markdown.len()); let parser = Parser::new(markdown); for event in parser { @@ -13,10 +14,7 @@ pub(crate) fn remove_markdown(markdown: &str) -> String { Event::Text(text) | Event::Code(text) => out.push_str(&text), Event::SoftBreak => out.push(' '), Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => out.push('\n'), - Event::End(Tag::Paragraph) => { - out.push('\n'); - out.push('\n'); - } + Event::End(Tag::Paragraph) => out.push_str("\n\n"), Event::Start(_) | Event::End(_) | Event::Html(_) @@ -25,7 +23,10 @@ pub(crate) fn remove_markdown(markdown: &str) -> String { } } - if let Some(p) = out.rfind(|c| c != '\n') { + if let Some(mut p) = out.rfind(|c| c != '\n') { + while !out.is_char_boundary(p + 1) { + p += 1; + } out.drain(p + 1..); } @@ -153,4 +154,10 @@ book] or the [Reference]. 
For more information on the various types of functions and how they're used, consult the Rust book or the Reference."#]].assert_eq(&res); } + + #[test] + fn on_char_boundary() { + expect!["a┘"].assert_eq(&remove_markdown("```text\na┘\n```")); + expect!["وقار"].assert_eq(&remove_markdown("```\nوقار\n```\n")); + } } diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index f85700daf1f..bf6ad47a495 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -1,5 +1,6 @@ +use hir::{db::ExpandDatabase, ProcMacros}; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, ProcMacros, SourceDatabase}, + base_db::{salsa::Durability, CrateGraph, SourceDatabase}, FxHashMap, RootDatabase, }; use triomphe::Arc; @@ -39,7 +40,7 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.is_proc_macro, data.origin.clone(), data.target_layout.clone(), - data.channel, + data.toolchain.clone(), ); new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); map.insert(old_id, new_id); diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index 990376a4965..483fb76d91c 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -646,8 +646,9 @@ mod tests { use std::iter; use expect_test::{expect, Expect}; - use ide_db::base_db::{fixture::ChangeFixture, FilePosition}; + use ide_db::base_db::FilePosition; use stdx::format_to; + use test_fixture::ChangeFixture; use crate::RootDatabase; diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs index d8d81869a2f..f0d18fdefa7 100644 --- a/crates/ide/src/ssr.rs +++ b/crates/ide/src/ssr.rs @@ -59,10 +59,11 @@ mod tests { use expect_test::expect; use ide_assists::{Assist, AssistResolveStrategy}; use ide_db::{ - base_db::{fixture::WithFixture, salsa::Durability, FileRange}, + base_db::{salsa::Durability, FileRange}, symbol_index::SymbolsDatabase, FxHashSet, RootDatabase, }; + use test_fixture::WithFixture; use triomphe::Arc; use super::ssr_assists; @@ -70,7 +71,7 @@ mod tests { fn get_assists(ra_fixture: &str, resolve: AssistResolveStrategy) -> Vec<Assist> { let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture); let mut local_roots = FxHashSet::default(); - local_roots.insert(ide_db::base_db::fixture::WORKSPACE); + local_roots.insert(test_fixture::WORKSPACE); db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); ssr_assists(&db, &resolve, FileRange { file_id, range: range_or_offset.into() }) } diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index e7f97ebe6f7..b2b305c1d38 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -10,7 +10,7 @@ use ide_db::{ debug::{DebugQueryTable, TableEntry}, Query, QueryTable, }, - CrateId, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId, + CrateData, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId, }, symbol_index::ModuleSymbolsQuery, }; @@ -54,25 +54,54 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String { format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db))); if let Some(file_id) = file_id { - format_to!(buf, "\nFile info:\n"); + format_to!(buf, "\nCrates for file {}:\n", file_id.index()); let crates = crate::parent_module::crates_for(db, file_id); if crates.is_empty() { format_to!(buf, "Does not belong to any crate"); } let crate_graph = db.crate_graph(); - for krate in crates { - let display_crate = |krate: 
CrateId| match &crate_graph[krate].display_name { - Some(it) => format!("{it}({})", krate.into_raw()), - None => format!("{}", krate.into_raw()), - }; - format_to!(buf, "Crate: {}\n", display_crate(krate)); - format_to!(buf, "Enabled cfgs: {:?}\n", crate_graph[krate].cfg_options); - let deps = crate_graph[krate] - .dependencies + for crate_id in crates { + let CrateData { + root_file_id, + edition, + version, + display_name, + cfg_options, + potential_cfg_options, + env, + dependencies, + origin, + is_proc_macro, + target_layout, + toolchain, + } = &crate_graph[crate_id]; + format_to!( + buf, + "Crate: {}\n", + match display_name { + Some(it) => format!("{it}({})", crate_id.into_raw()), + None => format!("{}", crate_id.into_raw()), + } + ); + format_to!(buf, " Root module file id: {}\n", root_file_id.index()); + format_to!(buf, " Edition: {}\n", edition); + format_to!(buf, " Version: {}\n", version.as_deref().unwrap_or("n/a")); + format_to!(buf, " Enabled cfgs: {:?}\n", cfg_options); + format_to!(buf, " Potential cfgs: {:?}\n", potential_cfg_options); + format_to!(buf, " Env: {:?}\n", env); + format_to!(buf, " Origin: {:?}\n", origin); + format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro); + format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout); + format_to!( + buf, + " Workspace Toolchain: {}\n", + toolchain.as_ref().map_or_else(|| "n/a".into(), |v| v.to_string()) + ); + let deps = dependencies .iter() - .map(|dep| format!("{}={:?}", dep.name, dep.crate_id)) + .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw())) .format(", "); - format_to!(buf, "Dependencies: {}\n", deps); + format_to!(buf, " Dependencies: {}\n", deps); } } diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml index d9184b0fb6f..3b0c2559482 100644 --- a/crates/intern/Cargo.toml +++ b/crates/intern/Cargo.toml @@ -16,5 +16,5 @@ doctest = false # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap.workspace = true hashbrown.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true triomphe.workspace = true diff --git a/crates/load-cargo/Cargo.toml b/crates/load-cargo/Cargo.toml index 31b9f6c76d0..ae7c7e2ac75 100644 --- a/crates/load-cargo/Cargo.toml +++ b/crates/load-cargo/Cargo.toml @@ -12,7 +12,7 @@ authors.workspace = true [dependencies] anyhow.workspace = true -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true itertools.workspace = true tracing.workspace = true @@ -23,3 +23,6 @@ project-model.workspace = true tt.workspace = true vfs.workspace = true vfs-notify.workspace = true +span.workspace = true + +hir-expand.workspace = true diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index db9654220dd..556ed73a04c 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -5,17 +5,19 @@ use std::{collections::hash_map::Entry, mem, path::Path, sync}; use crossbeam_channel::{unbounded, Receiver}; -use ide::{AnalysisHost, Change, SourceRoot}; +use hir_expand::proc_macro::{ + ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacroLoadResult, + ProcMacros, +}; +use ide::{AnalysisHost, SourceRoot}; use ide_db::{ - base_db::{ - span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacroKind, ProcMacroLoadResult, ProcMacros, - }, - FxHashMap, + base_db::{CrateGraph, Env}, + Change, FxHashMap, }; use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{CargoConfig, 
PackageRoot, ProjectManifest, ProjectWorkspace}; +use span::Span; use tt::DelimSpan; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; @@ -374,13 +376,13 @@ struct Expander(proc_macro_api::ProcMacro); impl ProcMacroExpander for Expander { fn expand( &self, - subtree: &tt::Subtree<SpanData>, - attrs: Option<&tt::Subtree<SpanData>>, + subtree: &tt::Subtree<Span>, + attrs: Option<&tt::Subtree<Span>>, env: &Env, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, - ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> { + def_site: Span, + call_site: Span, + mixed_site: Span, + ) -> Result<tt::Subtree<Span>, ProcMacroExpansionError> { let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) { Ok(Ok(subtree)) => Ok(subtree), @@ -397,13 +399,13 @@ struct IdentityExpander; impl ProcMacroExpander for IdentityExpander { fn expand( &self, - subtree: &tt::Subtree<SpanData>, - _: Option<&tt::Subtree<SpanData>>, + subtree: &tt::Subtree<Span>, + _: Option<&tt::Subtree<Span>>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result<tt::Subtree<Span>, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -415,13 +417,13 @@ struct EmptyExpander; impl ProcMacroExpander for EmptyExpander { fn expand( &self, - _: &tt::Subtree<SpanData>, - _: Option<&tt::Subtree<SpanData>>, + _: &tt::Subtree<Span>, + _: Option<&tt::Subtree<Span>>, _: &Env, - call_site: SpanData, - _: SpanData, - _: SpanData, - ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> { + call_site: Span, + _: Span, + _: Span, + ) -> Result<tt::Subtree<Span>, ProcMacroExpansionError> { Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site })) } } diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index adab1003d10..77f48a57f90 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml @@ -13,7 +13,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -rustc-hash = "1.1.0" +rustc-hash.workspace = true smallvec.workspace = true tracing.workspace = true @@ -22,6 +22,7 @@ syntax.workspace = true parser.workspace = true tt.workspace = true stdx.workspace = true +span.workspace = true [dev-dependencies] test-utils.workspace = true diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index f503aecce2c..6c3917b37f1 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -20,7 +20,10 @@ fn benchmark_parse_macro_rules() { let rules = macro_rules_fixtures_tt(); let hash: usize = { let _pt = bench("mbe parse macro rules"); - rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it, true).rules.len()).sum() + rules + .values() + .map(|it| DeclarativeMacro::parse_macro_rules(it, true, true).rules.len()) + .sum() }; assert_eq!(hash, 1144); } @@ -38,7 +41,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(&tt, |_| ()); + let res = rules[&id].expand(&tt, |_| (), true, DUMMY); assert!(res.err.is_none()); res.value.token_trees.len() }) @@ -50,7 +53,7 @@ fn benchmark_expand_macro_rules() { fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> { macro_rules_fixtures_tt() .into_iter() - .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) + .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true, true))) .collect() 
} @@ -64,8 +67,11 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData> .filter_map(ast::MacroRules::cast) .map(|rule| { let id = rule.name().unwrap().to_string(); - let def_tt = - syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap); + let def_tt = syntax_node_to_token_tree( + rule.token_tree().unwrap().syntax(), + DummyTestSpanMap, + DUMMY, + ); (id, def_tt) }) .collect() @@ -105,7 +111,7 @@ fn invocation_fixtures( for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } - if it.expand(&subtree, |_| ()).err.is_none() { + if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() { res.push((name.clone(), subtree)); break; } @@ -199,7 +205,7 @@ fn invocation_fixtures( }); parent.token_trees.push(subtree.into()); } - Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } => {} + Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. } => {} }; // Simple linear congruential generator for deterministic result diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 0e755f69bf7..60483809dc1 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -16,6 +16,8 @@ pub(crate) fn expand_rules<S: Span>( input: &tt::Subtree<S>, marker: impl Fn(&mut S) + Copy, is_2021: bool, + new_meta_vars: bool, + call_site: S, ) -> ExpandResult<tt::Subtree<S>> { let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None; for rule in rules { @@ -25,8 +27,13 @@ pub(crate) fn expand_rules<S: Span>( // If we find a rule that applies without errors, we're done. // Unconditionally returning the transcription here makes the // `test_repeat_bad_var` test fail. - let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &new_match.bindings, marker); + let ExpandResult { value, err: transcribe_err } = transcriber::transcribe( + &rule.rhs, + &new_match.bindings, + marker, + new_meta_vars, + call_site, + ); if transcribe_err.is_none() { return ExpandResult::ok(value); } @@ -45,11 +52,14 @@ pub(crate) fn expand_rules<S: Span>( if let Some((match_, rule)) = match_ { // if we got here, there was no match without errors let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &match_.bindings, marker); + transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site); ExpandResult { value, err: match_.err.or(transcribe_err) } } else { ExpandResult::new( - tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }, + tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(call_site), + token_trees: vec![], + }, ExpandError::NoMatchingRule, ) } @@ -121,6 +131,7 @@ enum Binding<S> { #[derive(Debug, Clone, PartialEq, Eq)] enum Fragment<S> { + Empty, /// token fragments are just copy-pasted into the output Tokens(tt::TokenTree<S>), /// Expr ast fragments are surrounded with `()` on insertion to preserve diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 012b02a3f87..40b4c7cdd65 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -63,7 +63,7 @@ use std::rc::Rc; use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; -use tt::Span; +use tt::{DelimSpan, Span}; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, @@ -74,11 +74,7 @@ use crate::{ impl<S: Span> Bindings<S> { fn push_optional(&mut self, name: &SmolStr) { - // FIXME: Do we have a better way to represent an empty token ? 
- // Insert an empty subtree for empty token - let tt = - tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into(); - self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); + self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty)); } fn push_empty(&mut self, name: &SmolStr) { @@ -387,6 +383,7 @@ fn match_loop_inner<'t, S: Span>( eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>, error_items: &mut SmallVec<[MatchState<'t, S>; 1]>, is_2021: bool, + delim_span: tt::DelimSpan<S>, ) { macro_rules! try_push { ($items: expr, $it:expr) => { @@ -474,7 +471,7 @@ fn match_loop_inner<'t, S: Span>( cur_items.push(new_item); } cur_items.push(MatchState { - dot: tokens.iter_delimited(None), + dot: tokens.iter_delimited(delim_span), stack: Default::default(), up: Some(Box::new(item)), sep: separator.clone(), @@ -489,7 +486,7 @@ fn match_loop_inner<'t, S: Span>( if let Ok(subtree) = src.clone().expect_subtree() { if subtree.delimiter.kind == delimiter.kind { item.stack.push(item.dot); - item.dot = tokens.iter_delimited(Some(*delimiter)); + item.dot = tokens.iter_delimited_with(*delimiter); cur_items.push(item); } } @@ -497,7 +494,7 @@ fn match_loop_inner<'t, S: Span>( OpDelimited::Op(Op::Var { kind, name, .. }) => { if let &Some(kind) = kind { let mut fork = src.clone(); - let match_res = match_meta_var(kind, &mut fork, is_2021); + let match_res = match_meta_var(kind, &mut fork, is_2021, delim_span); match match_res.err { None => { // Some meta variables are optional (e.g. vis) @@ -588,7 +585,9 @@ fn match_loop_inner<'t, S: Span>( item.is_error = true; error_items.push(item); } - OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. }) => { + OpDelimited::Op( + Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. 
}, + ) => { stdx::never!("metavariable expression in lhs found"); } OpDelimited::Open => { @@ -609,6 +608,7 @@ fn match_loop_inner<'t, S: Span>( } fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> { + let span = src.delimiter.delim_span(); let mut src = TtIter::new(src); let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new(); let mut res = Match::default(); @@ -617,7 +617,7 @@ fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: let mut bindings_builder = BindingsBuilder::default(); let mut cur_items = smallvec![MatchState { - dot: pattern.iter_delimited(None), + dot: pattern.iter_delimited(span), stack: Default::default(), up: None, sep: None, @@ -648,6 +648,7 @@ fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: &mut eof_items, &mut error_items, is_2021, + span, ); stdx::always!(cur_items.is_empty()); @@ -761,12 +762,13 @@ fn match_meta_var<S: Span>( kind: MetaVarKind, input: &mut TtIter<'_, S>, is_2021: bool, + delim_span: DelimSpan<S>, ) -> ExpandResult<Option<Fragment<S>>> { let fragment = match kind { MetaVarKind::Path => { - return input - .expect_fragment(parser::PrefixEntryPoint::Path) - .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path)); + return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| { + it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) + }); } MetaVarKind::Ty => parser::PrefixEntryPoint::Ty, MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop, @@ -795,7 +797,7 @@ fn match_meta_var<S: Span>( return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| { tt.map(|tt| match tt { tt::TokenTree::Leaf(leaf) => tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), token_trees: vec![leaf.into()], }, tt::TokenTree::Subtree(mut s) => { @@ -829,7 +831,7 @@ fn match_meta_var<S: Span>( match neg { None => lit.into(), Some(neg) => tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(*literal.span()), token_trees: vec![neg, lit.into()], }), } @@ -851,18 +853,21 @@ fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &Meta Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens), Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens), Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => {} - Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } => { + Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. 
} => { stdx::never!("metavariable expression in lhs found"); } } } } impl<S: Span> MetaTemplate<S> { - fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> { + fn iter_delimited_with(&self, delimiter: tt::Delimiter<S>) -> OpDelimitedIter<'_, S> { + OpDelimitedIter { inner: &self.0, idx: 0, delimited: delimiter } + } + fn iter_delimited(&self, span: tt::DelimSpan<S>) -> OpDelimitedIter<'_, S> { OpDelimitedIter { inner: &self.0, idx: 0, - delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE), + delimited: tt::Delimiter::invisible_delim_spanned(span), } } } @@ -958,11 +963,13 @@ impl<S: Span> TtIter<'_, S> { self.expect_lifetime() } else { let puncts = self.expect_glued_punct()?; + let delimiter = tt::Delimiter { + open: puncts.first().unwrap().span, + close: puncts.last().unwrap().span, + kind: tt::DelimiterKind::Invisible, + }; let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); - Ok(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), - token_trees, - })) + Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees })) } } else { self.next().ok_or(()).cloned() @@ -977,7 +984,11 @@ impl<S: Span> TtIter<'_, S> { let ident = self.expect_ident_or_underscore()?; Ok(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter { + open: punct.span, + close: ident.span, + kind: tt::DelimiterKind::Invisible, + }, token_trees: vec![ tt::Leaf::Punct(*punct).into(), tt::Leaf::Ident(ident.clone()).into(), diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 7a3e8653c28..6e79cdaa0b9 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -59,12 +59,12 @@ impl<S: Span> Bindings<S> { token_trees: token_trees.clone(), }; Ok(match f { - Fragment::Tokens(_) => unreachable!(), + Fragment::Tokens(_) | Fragment::Empty => unreachable!(), Fragment::Expr(_) => Fragment::Expr, Fragment::Path(_) => Fragment::Path, }(subtree)) } - Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()), + Binding::Fragment(it @ (Fragment::Tokens(_) | Fragment::Empty)) => Ok(it.clone()), // emit some reasonable default expansion for missing bindings, // this gives better recovery than emitting the `$fragment-name` verbatim Binding::Missing(it) => Ok({ @@ -87,10 +87,7 @@ impl<S: Span> Bindings<S> { })), // FIXME: Meta and Item should get proper defaults MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { - Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, - token_trees: vec![], - })) + Fragment::Empty } MetaVarKind::Path | MetaVarKind::Ty @@ -131,8 +128,10 @@ pub(super) fn transcribe<S: Span>( template: &MetaTemplate<S>, bindings: &Bindings<S>, marker: impl Fn(&mut S) + Copy, + new_meta_vars: bool, + call_site: S, ) -> ExpandResult<tt::Subtree<S>> { - let mut ctx = ExpandCtx { bindings, nesting: Vec::new() }; + let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site }; let mut arena: Vec<tt::TokenTree<S>> = Vec::new(); expand_subtree(&mut ctx, template, None, &mut arena, marker) } @@ -152,6 +151,8 @@ struct NestingState { struct ExpandCtx<'a, S> { bindings: &'a Bindings<S>, nesting: Vec<NestingState>, + new_meta_vars: bool, + call_site: S, } fn expand_subtree<S: Span>( @@ -206,13 +207,13 @@ fn expand_subtree<S: Span>( Op::Var { name, id, .. 
} => { let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker); err = err.or(e); - push_fragment(arena, fragment); + push_fragment(ctx, arena, fragment); } Op::Repeat { tokens: subtree, kind, separator } => { let ExpandResult { value: fragment, err: e } = expand_repeat(ctx, subtree, *kind, separator, arena, marker); err = err.or(e); - push_fragment(arena, fragment) + push_fragment(ctx, arena, fragment) } Op::Ignore { name, id } => { // Expand the variable, but ignore the result. This registers the repetition count. @@ -225,8 +226,20 @@ fn expand_subtree<S: Span>( arena.push( tt::Leaf::Literal(tt::Literal { text: index.to_string().into(), - // FIXME - span: S::DUMMY, + span: ctx.call_site, + }) + .into(), + ); + } + Op::Length { depth } => { + let length = ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |_nest| { + // FIXME: to be implemented + 0 + }); + arena.push( + tt::Leaf::Literal(tt::Literal { + text: length.to_string().into(), + span: ctx.call_site, }) .into(), ); @@ -268,7 +281,13 @@ fn expand_subtree<S: Span>( } } - let c = match count(ctx, binding, 0, *depth) { + let res = if ctx.new_meta_vars { + count(ctx, binding, 0, depth.unwrap_or(0)) + } else { + count_old(ctx, binding, 0, *depth) + }; + + let c = match res { Ok(c) => c, Err(e) => { // XXX: It *might* make sense to emit a dummy integer value like `0` here. @@ -285,8 +304,7 @@ fn expand_subtree<S: Span>( arena.push( tt::Leaf::Literal(tt::Literal { text: c.to_string().into(), - // FIXME - span: S::DUMMY, + span: ctx.call_site, }) .into(), ); @@ -297,7 +315,7 @@ fn expand_subtree<S: Span>( let tts = arena.drain(start_elements..).collect(); ExpandResult { value: tt::Subtree { - delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible), + delimiter: delimiter.unwrap_or_else(|| tt::Delimiter::invisible_spanned(ctx.call_site)), token_trees: tts, }, err, @@ -330,7 +348,7 @@ fn expand_var<S: Span>( // ``` // We just treat it a normal tokens let tt = tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(id), token_trees: vec![ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) .into(), @@ -342,10 +360,8 @@ fn expand_var<S: Span>( } Err(e) => ExpandResult { value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan { - // FIXME - open: S::DUMMY, - // FIXME - close: S::DUMMY, + open: ctx.call_site, + close: ctx.call_site, }))), err: Some(e), }, @@ -389,7 +405,7 @@ fn expand_repeat<S: Span>( return ExpandResult { value: Fragment::Tokens( tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(ctx.call_site), token_trees: vec![], } .into(), @@ -403,7 +419,7 @@ fn expand_repeat<S: Span>( continue; } - t.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + t.delimiter.kind = tt::DelimiterKind::Invisible; push_subtree(&mut buf, t); if let Some(sep) = separator { @@ -437,7 +453,11 @@ fn expand_repeat<S: Span>( // Check if it is a single token subtree without any delimiter // e.g {Delimiter:None> ['>'] /Delimiter:None>} - let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into(); + let tt = tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(ctx.call_site), + token_trees: buf, + } + .into(); if RepeatKind::OneOrMore == kind && counter == 0 { return ExpandResult { @@ -448,14 +468,19 @@ fn expand_repeat<S: Span>( ExpandResult { value: Fragment::Tokens(tt), err } } -fn push_fragment<S: Span>(buf: &mut 
Vec<tt::TokenTree<S>>, fragment: Fragment<S>) { +fn push_fragment<S: Span>( + ctx: &ExpandCtx<'_, S>, + buf: &mut Vec<tt::TokenTree<S>>, + fragment: Fragment<S>, +) { match fragment { Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), Fragment::Expr(sub) => { push_subtree(buf, sub); } - Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt), + Fragment::Path(tt) => fix_up_and_push_path_tt(ctx, buf, tt), Fragment::Tokens(tt) => buf.push(tt), + Fragment::Empty => (), } } @@ -469,7 +494,11 @@ fn push_subtree<S>(buf: &mut Vec<tt::TokenTree<S>>, tt: tt::Subtree<S>) { /// Inserts the path separator `::` between an identifier and its following generic /// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why /// we need this fixup. -fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt::Subtree<S>) { +fn fix_up_and_push_path_tt<S: Span>( + ctx: &ExpandCtx<'_, S>, + buf: &mut Vec<tt::TokenTree<S>>, + subtree: tt::Subtree<S>, +) { stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible)); let mut prev_was_ident = false; // Note that we only need to fix up the top-level `TokenTree`s because the @@ -486,8 +515,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Joint, - // FIXME - span: S::DUMMY, + span: ctx.call_site, }) .into(), ); @@ -495,8 +523,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Alone, - // FIXME - span: S::DUMMY, + span: ctx.call_site, }) .into(), ); @@ -512,14 +539,33 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt fn count<S>( ctx: &ExpandCtx<'_, S>, binding: &Binding<S>, + depth_curr: usize, + depth_max: usize, +) -> Result<usize, CountError> { + match binding { + Binding::Nested(bs) => { + if depth_curr == depth_max { + Ok(bs.len()) + } else { + bs.iter().map(|b| count(ctx, b, depth_curr + 1, depth_max)).sum() + } + } + Binding::Empty => Ok(0), + Binding::Fragment(_) | Binding::Missing(_) => Ok(1), + } +} + +fn count_old<S>( + ctx: &ExpandCtx<'_, S>, + binding: &Binding<S>, our_depth: usize, count_depth: Option<usize>, ) -> Result<usize, CountError> { match binding { Binding::Nested(bs) => match count_depth { - None => bs.iter().map(|b| count(ctx, b, our_depth + 1, None)).sum(), + None => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, None)).sum(), Some(0) => Ok(bs.len()), - Some(d) => bs.iter().map(|b| count(ctx, b, our_depth + 1, Some(d - 1))).sum(), + Some(d) => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, Some(d - 1))).sum(), }, Binding::Empty => Ok(0), Binding::Fragment(_) | Binding::Missing(_) => { diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 9331798589f..2622d7eac10 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -16,7 +16,6 @@ mod to_parser_input; #[cfg(test)] mod benchmark; -mod token_map; use stdx::impl_from; use tt::Span; @@ -30,15 +29,12 @@ use crate::{ // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces pub use ::parser::TopEntryPoint; -pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext}; - -pub use crate::{ - syntax_bridge::{ - parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, - syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, - SpanMapper, - }, - token_map::SpanMap, +pub use tt::{Delimiter, 
DelimiterKind, Punct}; + +pub use crate::syntax_bridge::{ + parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, + syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, + SpanMapper, }; pub use crate::syntax_bridge::dummy_test_span_utils::*; @@ -151,7 +147,12 @@ impl<S: Span> DeclarativeMacro<S> { } /// The old, `macro_rules! m {}` flavor. - pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> { + pub fn parse_macro_rules( + tt: &tt::Subtree<S>, + is_2021: bool, + // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) + new_meta_vars: bool, + ) -> DeclarativeMacro<S> { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. @@ -160,7 +161,7 @@ impl<S: Span> DeclarativeMacro<S> { let mut err = None; while src.len() > 0 { - let rule = match Rule::parse(&mut src, true) { + let rule = match Rule::parse(&mut src, true, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -187,7 +188,12 @@ impl<S: Span> DeclarativeMacro<S> { } /// The new, unstable `macro m {}` flavor. - pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> { + pub fn parse_macro2( + tt: &tt::Subtree<S>, + is_2021: bool, + // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) + new_meta_vars: bool, + ) -> DeclarativeMacro<S> { let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; @@ -195,7 +201,7 @@ impl<S: Span> DeclarativeMacro<S> { if tt::DelimiterKind::Brace == tt.delimiter.kind { cov_mark::hit!(parse_macro_def_rules); while src.len() > 0 { - let rule = match Rule::parse(&mut src, true) { + let rule = match Rule::parse(&mut src, true, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -214,7 +220,7 @@ impl<S: Span> DeclarativeMacro<S> { } } else { cov_mark::hit!(parse_macro_def_simple); - match Rule::parse(&mut src, false) { + match Rule::parse(&mut src, false, new_meta_vars) { Ok(rule) => { if src.len() != 0 { err = Some(Box::new(ParseError::expected("remaining tokens in macro def"))); @@ -245,13 +251,19 @@ impl<S: Span> DeclarativeMacro<S> { &self, tt: &tt::Subtree<S>, marker: impl Fn(&mut S) + Copy, + new_meta_vars: bool, + call_site: S, ) -> ExpandResult<tt::Subtree<S>> { - expander::expand_rules(&self.rules, &tt, marker, self.is_2021) + expander::expand_rules(&self.rules, &tt, marker, self.is_2021, new_meta_vars, call_site) } } impl<S: Span> Rule<S> { - fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result<Self, ParseError> { + fn parse( + src: &mut TtIter<'_, S>, + expect_arrow: bool, + new_meta_vars: bool, + ) -> Result<Self, ParseError> { let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; if expect_arrow { src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; @@ -260,7 +272,7 @@ impl<S: Span> Rule<S> { let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; let lhs = MetaTemplate::parse_pattern(lhs)?; - let rhs = MetaTemplate::parse_template(rhs)?; + let rhs = MetaTemplate::parse_template(rhs, new_meta_vars)?; Ok(crate::Rule { lhs, rhs }) } diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 00ba35377a4..afdbbef2314 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -25,23 +25,26 @@ pub(crate) struct MetaTemplate<S>(pub(crate) Box<[Op<S>]>); 
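The `new_meta_vars` flag that `parse_macro_rules`/`parse_macro2` now take (and thread into `MetaTemplate` below) selects between the two metavariable-expression syntaxes: on toolchains newer than 1.76 the variable carries a leading `$` inside `count`/`ignore` (hence the added `expect_dollar` calls), and a `length` expression is parsed alongside `count`/`index`/`ignore` (though, per the transcriber FIXME earlier in this diff, it expands to 0 for now). An illustrative macro, assuming a recent nightly with the `macro_metavar_expr` feature:

```rust
#![feature(macro_metavar_expr)]

macro_rules! how_many {
    ($($x:ident),*) => {
        // New syntax (new_meta_vars == true): `$` before the variable name.
        // Toolchains up to 1.76 instead expect the old form `${count(x)}`.
        ${count($x)}
    };
}

fn main() {
    assert_eq!(how_many!(a, b, c), 3);
}
```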
impl<S: Span> MetaTemplate<S> { pub(crate) fn parse_pattern(pattern: &tt::Subtree<S>) -> Result<Self, ParseError> { - MetaTemplate::parse(pattern, Mode::Pattern) + MetaTemplate::parse(pattern, Mode::Pattern, false) } - pub(crate) fn parse_template(template: &tt::Subtree<S>) -> Result<Self, ParseError> { - MetaTemplate::parse(template, Mode::Template) + pub(crate) fn parse_template( + template: &tt::Subtree<S>, + new_meta_vars: bool, + ) -> Result<Self, ParseError> { + MetaTemplate::parse(template, Mode::Template, new_meta_vars) } pub(crate) fn iter(&self) -> impl Iterator<Item = &Op<S>> { self.0.iter() } - fn parse(tt: &tt::Subtree<S>, mode: Mode) -> Result<Self, ParseError> { + fn parse(tt: &tt::Subtree<S>, mode: Mode, new_meta_vars: bool) -> Result<Self, ParseError> { let mut src = TtIter::new(tt); let mut res = Vec::new(); while let Some(first) = src.peek_n(0) { - let op = next_op(first, &mut src, mode)?; + let op = next_op(first, &mut src, mode, new_meta_vars)?; res.push(op); } @@ -51,12 +54,35 @@ impl<S: Span> MetaTemplate<S> { #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum Op<S> { - Var { name: SmolStr, kind: Option<MetaVarKind>, id: S }, - Ignore { name: SmolStr, id: S }, - Index { depth: usize }, - Count { name: SmolStr, depth: Option<usize> }, - Repeat { tokens: MetaTemplate<S>, kind: RepeatKind, separator: Option<Separator<S>> }, - Subtree { tokens: MetaTemplate<S>, delimiter: tt::Delimiter<S> }, + Var { + name: SmolStr, + kind: Option<MetaVarKind>, + id: S, + }, + Ignore { + name: SmolStr, + id: S, + }, + Index { + depth: usize, + }, + Length { + depth: usize, + }, + Count { + name: SmolStr, + // FIXME: `usize`` once we drop support for 1.76 + depth: Option<usize>, + }, + Repeat { + tokens: MetaTemplate<S>, + kind: RepeatKind, + separator: Option<Separator<S>>, + }, + Subtree { + tokens: MetaTemplate<S>, + delimiter: tt::Delimiter<S>, + }, Literal(tt::Literal<S>), Punct(SmallVec<[tt::Punct<S>; 3]>), Ident(tt::Ident<S>), @@ -122,6 +148,7 @@ fn next_op<S: Span>( first_peeked: &tt::TokenTree<S>, src: &mut TtIter<'_, S>, mode: Mode, + new_meta_vars: bool, ) -> Result<Op<S>, ParseError> { let res = match first_peeked { tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => { @@ -135,14 +162,14 @@ fn next_op<S: Span>( tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => { let (separator, kind) = parse_repeat(src)?; - let tokens = MetaTemplate::parse(subtree, mode)?; + let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?; Op::Repeat { tokens, separator, kind } } tt::DelimiterKind::Brace => match mode { Mode::Template => { - parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| { - ParseError::unexpected("invalid metavariable expression") - })? + parse_metavar_expr(new_meta_vars, &mut TtIter::new(subtree)).map_err( + |()| ParseError::unexpected("invalid metavariable expression"), + )? 
} Mode::Pattern => { return Err(ParseError::unexpected( @@ -206,7 +233,7 @@ fn next_op<S: Span>( tt::TokenTree::Subtree(subtree) => { src.next().expect("first token already peeked"); - let tokens = MetaTemplate::parse(subtree, mode)?; + let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?; Op::Subtree { tokens, delimiter: subtree.delimiter } } }; @@ -287,7 +314,7 @@ fn parse_repeat<S: Span>( Err(ParseError::InvalidRepeat) } -fn parse_metavar_expr<S: Span>(src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> { +fn parse_metavar_expr<S: Span>(new_meta_vars: bool, src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> { let func = src.expect_ident()?; let args = src.expect_subtree()?; @@ -299,14 +326,19 @@ fn parse_metavar_expr<S: Span>(src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> { let op = match &*func.text { "ignore" => { + if new_meta_vars { + args.expect_dollar()?; + } let ident = args.expect_ident()?; Op::Ignore { name: ident.text.clone(), id: ident.span } } "index" => Op::Index { depth: parse_depth(&mut args)? }, + "length" => Op::Length { depth: parse_depth(&mut args)? }, "count" => { + if new_meta_vars { + args.expect_dollar()?; + } let ident = args.expect_ident()?; - // `${count(t)}` and `${count(t,)}` have different meanings. Not sure if this is a bug - // but that's how it's implemented in rustc as of this writing. See rust-lang/rust#111904. let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None }; Op::Count { name: ident.text.clone(), depth } } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index b89bfd74a6e..8fa04ab983f 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -1,6 +1,7 @@ //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. use rustc_hash::{FxHashMap, FxHashSet}; +use span::{SpanAnchor, SpanData, SpanMap}; use stdx::{never, non_empty_vec::NonEmptyVec}; use syntax::{ ast::{self, make::tokens::doc_comment}, @@ -10,10 +11,10 @@ use syntax::{ }; use tt::{ buffer::{Cursor, TokenBuffer}, - Span, SpanData, SyntaxContext, + Span, }; -use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap}; +use crate::{to_parser_input::to_parser_input, tt_iter::TtIter}; #[cfg(test)] mod tests; @@ -36,66 +37,70 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM { /// Dummy things for testing where spans don't matter. 
pub(crate) mod dummy_test_span_utils { + use super::*; - pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>; - pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY; + pub type DummyTestSpanData = span::SpanData<DummyTestSyntaxContext>; + pub const DUMMY: DummyTestSpanData = span::SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: span::SpanAnchor { + file_id: span::FileId::BOGUS, + ast_id: span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: DummyTestSyntaxContext, + }; - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] - pub struct DummyTestSpanAnchor; - impl tt::SpanAnchor for DummyTestSpanAnchor { - const DUMMY: Self = DummyTestSpanAnchor; - } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct DummyTestSyntaxContext; - impl SyntaxContext for DummyTestSyntaxContext { - const DUMMY: Self = DummyTestSyntaxContext; - } pub struct DummyTestSpanMap; - impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap { - fn span_for( - &self, - range: syntax::TextRange, - ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> { - tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext } + impl SpanMapper<span::SpanData<DummyTestSyntaxContext>> for DummyTestSpanMap { + fn span_for(&self, range: syntax::TextRange) -> span::SpanData<DummyTestSyntaxContext> { + span::SpanData { + range, + anchor: span::SpanAnchor { + file_id: span::FileId::BOGUS, + ast_id: span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: DummyTestSyntaxContext, + } } } } /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the /// subtree's spans. -pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>( +pub fn syntax_node_to_token_tree<Ctx, SpanMap>( node: &SyntaxNode, map: SpanMap, -) -> tt::Subtree<SpanData<Anchor, Ctx>> + span: SpanData<Ctx>, +) -> tt::Subtree<SpanData<Ctx>> where - SpanData<Anchor, Ctx>: Span, - Anchor: Copy, - Ctx: SyntaxContext, - SpanMap: SpanMapper<SpanData<Anchor, Ctx>>, + SpanData<Ctx>: Span, + Ctx: Copy, + SpanMap: SpanMapper<SpanData<Ctx>>, { - let mut c = Converter::new(node, map, Default::default(), Default::default()); + let mut c = Converter::new(node, map, Default::default(), Default::default(), span); convert_tokens(&mut c) } /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the /// subtree's spans. Additionally using the append and remove parameters, the additional tokens can /// be injected or hidden from the output. -pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>( +pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>( node: &SyntaxNode, map: SpanMap, - append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>, + append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>, remove: FxHashSet<SyntaxNode>, -) -> tt::Subtree<SpanData<Anchor, Ctx>> + call_site: SpanData<Ctx>, +) -> tt::Subtree<SpanData<Ctx>> where - SpanMap: SpanMapper<SpanData<Anchor, Ctx>>, - SpanData<Anchor, Ctx>: Span, - Anchor: Copy, - Ctx: SyntaxContext, + SpanMap: SpanMapper<SpanData<Ctx>>, + SpanData<Ctx>: Span, + Ctx: Copy, { - let mut c = Converter::new(node, map, append, remove); + let mut c = Converter::new(node, map, append, remove, call_site); convert_tokens(&mut c) } @@ -113,14 +118,13 @@ where /// Converts a [`tt::Subtree`] back to a [`SyntaxNode`]. /// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans. 
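The rewritten dummy utilities above show the shape of the migration the rest of this file repeats: the generic `Anchor` parameter on `tt::SpanData<Anchor, Ctx>` is gone, replaced by the `span` crate's `SpanData<Ctx>`, whose anchor is always a concrete `span::SpanAnchor`. Condensed into one helper (field names and placeholder constants taken verbatim from this diff):

```rust
// `Ctx` stays caller-chosen; the anchor is no longer a type parameter but a
// plain struct naming a file and an erased AST id. `FileId::BOGUS` and
// `ROOT_ERASED_FILE_AST_ID` are the placeholder values the dummy map uses.
fn dummy_span<Ctx>(range: syntax::TextRange, ctx: Ctx) -> span::SpanData<Ctx> {
    span::SpanData {
        range,
        anchor: span::SpanAnchor {
            file_id: span::FileId::BOGUS,
            ast_id: span::ROOT_ERASED_FILE_AST_ID,
        },
        ctx,
    }
}
```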
-pub fn token_tree_to_syntax_node<Anchor, Ctx>( - tt: &tt::Subtree<SpanData<Anchor, Ctx>>, +pub fn token_tree_to_syntax_node<Ctx>( + tt: &tt::Subtree<SpanData<Ctx>>, entry_point: parser::TopEntryPoint, -) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>) +) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>) where - SpanData<Anchor, Ctx>: Span, - Anchor: Copy, - Ctx: SyntaxContext, + SpanData<Ctx>: Span, + Ctx: Copy, { let buffer = match tt { tt::Subtree { @@ -150,21 +154,20 @@ where /// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided /// anchor with the given context. -pub fn parse_to_token_tree<Anchor, Ctx>( - anchor: Anchor, +pub fn parse_to_token_tree<Ctx>( + anchor: SpanAnchor, ctx: Ctx, text: &str, -) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>> +) -> Option<tt::Subtree<SpanData<Ctx>>> where - SpanData<Anchor, Ctx>: Span, - Anchor: Copy, - Ctx: SyntaxContext, + SpanData<Ctx>: Span, + Ctx: Copy, { let lexed = parser::LexedStr::new(text); if lexed.errors().next().is_some() { return None; } - let mut conv = RawConverter { lexed, pos: 0, anchor, ctx }; + let mut conv = RawConverter { lexed, anchor, pos: 0, ctx }; Some(convert_tokens(&mut conv)) } @@ -182,7 +185,11 @@ where } /// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> { +pub fn parse_exprs_with_sep<S: Span>( + tt: &tt::Subtree<S>, + sep: char, + span: S, +) -> Vec<tt::Subtree<S>> { if tt.token_trees.is_empty() { return Vec::new(); } @@ -195,7 +202,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt:: res.push(match expanded.value { None => break, - Some(tt) => tt.subtree_or_wrap(), + Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }), }); let mut fork = iter.clone(); @@ -207,7 +214,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt:: if iter.peek_n(0).is_some() { res.push(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(span), token_trees: iter.cloned().collect(), }); } @@ -220,7 +227,10 @@ where C: TokenConverter<S>, S: Span, { - let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }; + let entry = tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(conv.call_site()), + token_trees: vec![], + }; let mut stack = NonEmptyVec::new(entry); while let Some((token, abs_range)) = conv.bump() { @@ -401,9 +411,20 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { text = &text[0..text.len() - 2]; } - // Quote the string + let mut num_of_hashes = 0; + let mut count = 0; + for ch in text.chars() { + count = match ch { + '"' => 1, + '#' if count > 0 => count + 1, + _ => 0, + }; + num_of_hashes = num_of_hashes.max(count); + } + + // Quote raw string with delimiters // Note that `tt::Literal` expect an escaped string - let text = format!("\"{}\"", text.escape_debug()); + let text = format!("r{delim}\"{text}\"{delim}", delim = "#".repeat(num_of_hashes)); text.into() } @@ -450,10 +471,10 @@ fn convert_doc_comment<S: Copy>( } /// A raw token (straight from lexer) converter -struct RawConverter<'a, Anchor, Ctx> { +struct RawConverter<'a, Ctx> { lexed: parser::LexedStr<'a>, pos: usize, - anchor: Anchor, + anchor: SpanAnchor, ctx: Ctx, } /// A raw token (straight from lexer) converter that gives every token the same span. 
@@ -485,18 +506,20 @@ trait TokenConverter<S>: Sized { fn peek(&self) -> Option<Self::Token>; fn span_for(&self, range: TextRange) -> S; + + fn call_site(&self) -> S; } -impl<Anchor, S, Ctx> SrcToken<RawConverter<'_, Anchor, Ctx>, S> for usize { - fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind { +impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize { + fn kind(&self, ctx: &RawConverter<'_, Ctx>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option<char> { + fn to_char(&self, ctx: &RawConverter<'_, Ctx>) -> Option<char> { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_, Ctx>) -> SmolStr { ctx.lexed.text(*self).into() } } @@ -515,18 +538,17 @@ impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize { } } -impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>> - for RawConverter<'_, Anchor, Ctx> +impl<Ctx: Copy> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx> where - SpanData<Anchor, Ctx>: Span, + SpanData<Ctx>: Span, { type Token = usize; fn convert_doc_comment( &self, &token: &usize, - span: SpanData<Anchor, Ctx>, - ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> { + span: SpanData<Ctx>, + ) -> Option<Vec<tt::TokenTree<SpanData<Ctx>>>> { let text = self.lexed.text(token); convert_doc_comment(&doc_comment(text), span) } @@ -550,9 +572,13 @@ where Some(self.pos) } - fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> { + fn span_for(&self, range: TextRange) -> SpanData<Ctx> { SpanData { range, anchor: self.anchor, ctx: self.ctx } } + + fn call_site(&self) -> SpanData<Ctx> { + SpanData { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx } + } } impl<S> TokenConverter<S> for StaticRawConverter<'_, S> @@ -588,6 +614,10 @@ where fn span_for(&self, _: TextRange) -> S { self.span } + + fn call_site(&self) -> S { + self.span + } } struct Converter<SpanMap, S> { @@ -600,6 +630,7 @@ struct Converter<SpanMap, S> { map: SpanMap, append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>, remove: FxHashSet<SyntaxNode>, + call_site: S, } impl<SpanMap, S> Converter<SpanMap, S> { @@ -608,6 +639,7 @@ impl<SpanMap, S> Converter<SpanMap, S> { map: SpanMap, append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>, remove: FxHashSet<SyntaxNode>, + call_site: S, ) -> Self { let mut this = Converter { current: None, @@ -617,6 +649,7 @@ impl<SpanMap, S> Converter<SpanMap, S> { map, append, remove, + call_site, current_leafs: vec![], }; let first = this.next_token(); @@ -776,24 +809,27 @@ where fn span_for(&self, range: TextRange) -> S { self.map.span_for(range) } + fn call_site(&self) -> S { + self.call_site + } } -struct TtTreeSink<'a, Anchor, Ctx> +struct TtTreeSink<'a, Ctx> where - SpanData<Anchor, Ctx>: Span, + SpanData<Ctx>: Span, { buf: String, - cursor: Cursor<'a, SpanData<Anchor, Ctx>>, + cursor: Cursor<'a, SpanData<Ctx>>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: SpanMap<SpanData<Anchor, Ctx>>, + token_map: SpanMap<SpanData<Ctx>>, } -impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx> +impl<'a, Ctx> TtTreeSink<'a, Ctx> where - SpanData<Anchor, Ctx>: Span, + SpanData<Ctx>: Span, { - fn new(cursor: Cursor<'a, SpanData<Anchor, Ctx>>) -> Self { + fn new(cursor: Cursor<'a, SpanData<Ctx>>) -> Self { TtTreeSink { buf: String::new(), cursor, @@ -803,7 +839,7 @@ where } } - fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>) { + fn 
finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>) { self.token_map.finish(); (self.inner.finish(), self.token_map) } @@ -821,9 +857,9 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { Some(&texts[idx..texts.len() - (1 - idx)]) } -impl<Anchor, Ctx> TtTreeSink<'_, Anchor, Ctx> +impl<Ctx> TtTreeSink<'_, Ctx> where - SpanData<Anchor, Ctx>: Span, + SpanData<Ctx>: Span, { /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index bd8187a148a..e5569138dbf 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -7,11 +7,11 @@ use tt::{ Leaf, Punct, Spacing, }; -use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap}; +use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMMY}; fn check_punct_spacing(fixture: &str) { let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); - let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap); + let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY); let mut annotations: HashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 40e8a2385f4..71513ef4391 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -51,6 +51,13 @@ impl<'a, S: Span> TtIter<'a, S> { } } + pub(crate) fn expect_dollar(&mut self) -> Result<(), ()> { + match self.expect_leaf()? { + tt::Leaf::Punct(tt::Punct { char: '$', .. }) => Ok(()), + _ => Err(()), + } + } + pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident<S>, ()> { match self.expect_leaf()? { tt::Leaf::Ident(it) if it.text != "_" => Ok(it), @@ -169,10 +176,10 @@ impl<'a, S: Span> TtIter<'a, S> { } self.inner = self.inner.as_slice()[res.len()..].iter(); - let res = match res.len() { - 0 | 1 => res.pop(), - _ => Some(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + let res = match &*res { + [] | [_] => res.pop(), + [first, ..] 
=> Some(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(first.first_span()), token_trees: res, })), }; diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 2cbbc9489a2..209cbb945d7 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -33,6 +33,7 @@ tt.workspace = true stdx.workspace = true profile.workspace = true text-size.workspace = true +span.workspace = true # Ideally this crate would not depend on salsa things, but we need span information here which wraps # InternIds for the syntax context base-db.workspace = true diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index f697ecd3518..a87becd63e2 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -11,16 +11,19 @@ pub mod msg; mod process; mod version; -use base_db::span::SpanData; use indexmap::IndexSet; use paths::AbsPathBuf; +use span::Span; use std::{fmt, io, sync::Mutex}; use triomphe::Arc; use serde::{Deserialize, Serialize}; use crate::{ - msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS}, + msg::{ + deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro, + ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT, + }, process::ProcMacroProcessSrv, }; @@ -136,13 +139,13 @@ impl ProcMacro { pub fn expand( &self, - subtree: &tt::Subtree<SpanData>, - attr: Option<&tt::Subtree<SpanData>>, + subtree: &tt::Subtree<Span>, + attr: Option<&tt::Subtree<Span>>, env: Vec<(String, String)>, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, - ) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> { + def_site: Span, + call_site: Span, + mixed_site: Span, + ) -> Result<Result<tt::Subtree<Span>, PanicMessage>, ServerError> { let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); let current_dir = env .iter() @@ -166,6 +169,11 @@ impl ProcMacro { call_site, mixed_site, }, + span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { + serialize_span_data_index_map(&span_data_table) + } else { + Vec::new() + }, }; let response = self @@ -178,9 +186,14 @@ impl ProcMacro { msg::Response::ExpandMacro(it) => { Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table))) } - msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) 
=> { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + msg::Response::ExpandMacroExtended(it) => Ok(it.map(|resp| { + FlatTree::to_subtree_resolved( + resp.tree, + version, + &deserialize_span_data_index_map(&resp.span_data_table), + ) + })), + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } } diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 1d3e45aff38..557ddba5c78 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -10,28 +10,63 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use crate::ProcMacroKind; -pub use crate::msg::flat::{FlatTree, TokenId}; +pub use crate::msg::flat::{ + deserialize_span_data_index_map, serialize_span_data_index_map, FlatTree, SpanDataIndexMap, + TokenId, +}; // The versions of the server protocol pub const NO_VERSION_CHECK_VERSION: u32 = 0; pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; pub const HAS_GLOBAL_SPANS: u32 = 3; +pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4; -pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS; +pub const CURRENT_API_VERSION: u32 = RUST_ANALYZER_SPAN_SUPPORT; #[derive(Debug, Serialize, Deserialize)] pub enum Request { + /// Since [`NO_VERSION_CHECK_VERSION`] ListMacros { dylib_path: PathBuf }, + /// Since [`NO_VERSION_CHECK_VERSION`] ExpandMacro(ExpandMacro), + /// Since [`VERSION_CHECK_VERSION`] ApiVersionCheck {}, + /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] + SetConfig(ServerConfig), +} + +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)] +pub enum SpanMode { + #[default] + Id, + RustAnalyzer, } #[derive(Debug, Serialize, Deserialize)] pub enum Response { + /// Since [`NO_VERSION_CHECK_VERSION`] ListMacros(Result<Vec<(String, ProcMacroKind)>, String>), + /// Since [`NO_VERSION_CHECK_VERSION`] ExpandMacro(Result<FlatTree, PanicMessage>), + /// Since [`NO_VERSION_CHECK_VERSION`] ApiVersionCheck(u32), + /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] + SetConfig(ServerConfig), + /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] + ExpandMacroExtended(Result<ExpandMacroExtended, PanicMessage>), +} + +#[derive(Debug, Serialize, Deserialize, Default)] +#[serde(default)] +pub struct ServerConfig { + pub span_mode: SpanMode, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ExpandMacroExtended { + pub tree: FlatTree, + pub span_data_table: Vec<u32>, } #[derive(Debug, Serialize, Deserialize)] @@ -64,9 +99,12 @@ pub struct ExpandMacro { #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] #[serde(default)] pub has_global_spans: ExpnGlobals, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + pub span_data_table: Vec<u32>, } -#[derive(Default, Debug, Serialize, Deserialize)] +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)] pub struct ExpnGlobals { #[serde(skip_serializing)] #[serde(default)] @@ -136,29 +174,27 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { #[cfg(test)] mod tests { - use base_db::{ - span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId}, - FileId, - }; + use base_db::FileId; use la_arena::RawIdx; + use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; use text_size::{TextRange, TextSize}; use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree}; use super::*; - fn fixture_token_tree() -> Subtree<SpanData> { + fn fixture_token_tree() -> Subtree<Span> { let anchor = SpanAnchor { file_id: 
FileId::from_raw(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)), }; let mut subtree = Subtree { delimiter: Delimiter { - open: SpanData { + open: Span { range: TextRange::empty(TextSize::new(0)), anchor, ctx: SyntaxContextId::ROOT, }, - close: SpanData { + close: Span { range: TextRange::empty(TextSize::new(13)), anchor, ctx: SyntaxContextId::ROOT, @@ -170,7 +206,7 @@ mod tests { subtree.token_trees.push(TokenTree::Leaf( Ident { text: "struct".into(), - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(0), TextSize::of("struct")), anchor, ctx: SyntaxContextId::ROOT, @@ -181,7 +217,7 @@ mod tests { subtree.token_trees.push(TokenTree::Leaf( Ident { text: "Foo".into(), - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(5), TextSize::of("Foo")), anchor, ctx: SyntaxContextId::ROOT, @@ -192,7 +228,7 @@ mod tests { subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { text: "Foo".into(), - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), anchor, ctx: SyntaxContextId::ROOT, @@ -200,7 +236,7 @@ mod tests { }))); subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { char: '@', - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(11), TextSize::of('@')), anchor, ctx: SyntaxContextId::ROOT, @@ -209,12 +245,12 @@ mod tests { }))); subtree.token_trees.push(TokenTree::Subtree(Subtree { delimiter: Delimiter { - open: SpanData { + open: Span { range: TextRange::at(TextSize::new(12), TextSize::of('{')), anchor, ctx: SyntaxContextId::ROOT, }, - close: SpanData { + close: Span { range: TextRange::at(TextSize::new(13), TextSize::of('}')), anchor, ctx: SyntaxContextId::ROOT, @@ -243,6 +279,7 @@ mod tests { call_site: 0, mixed_site: 0, }, + span_data_table: Vec::new(), }; let json = serde_json::to_string(&task).unwrap(); diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 5835718628e..8dfaba52625 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -37,13 +37,46 @@ use std::collections::{HashMap, VecDeque}; -use base_db::span::SpanData; use indexmap::IndexSet; +use la_arena::RawIdx; use serde::{Deserialize, Serialize}; +use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; +use text_size::TextRange; use crate::msg::ENCODE_CLOSE_SPAN_VERSION; -type SpanDataIndexMap = IndexSet<SpanData>; +pub type SpanDataIndexMap = IndexSet<Span>; + +pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> { + map.iter() + .flat_map(|span| { + [ + span.anchor.file_id.index(), + span.anchor.ast_id.into_raw().into_u32(), + span.range.start().into(), + span.range.end().into(), + span.ctx.into_u32(), + ] + }) + .collect() +} + +pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { + debug_assert!(map.len() % 5 == 0); + map.chunks_exact(5) + .map(|span| { + let &[file_id, ast_id, start, end, e] = span else { unreachable!() }; + Span { + anchor: SpanAnchor { + file_id: FileId::from_raw(file_id), + ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)), + }, + range: TextRange::new(start.into(), end.into()), + ctx: SyntaxContextId::from_u32(e), + } + }) + .collect() +} #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct TokenId(pub u32); @@ -54,9 +87,7 @@ impl std::fmt::Debug for TokenId { } } -impl tt::Span for TokenId { - const DUMMY: Self = TokenId(!0); -} +impl tt::Span for TokenId {} #[derive(Serialize, Deserialize, Debug)] pub struct FlatTree { @@ -93,7 +124,7 @@ 
struct IdentRepr { impl FlatTree { pub fn new( - subtree: &tt::Subtree<SpanData>, + subtree: &tt::Subtree<Span>, version: u32, span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { @@ -158,7 +189,7 @@ impl FlatTree { self, version: u32, span_data_table: &SpanDataIndexMap, - ) -> tt::Subtree<SpanData> { + ) -> tt::Subtree<Span> { Reader { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { read_vec(self.subtree, SubtreeRepr::read_with_close_span) @@ -281,13 +312,13 @@ impl IdentRepr { } } -trait Span: Copy { +trait InternableSpan: Copy { type Table; fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId; fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self; } -impl Span for TokenId { +impl InternableSpan for TokenId { type Table = (); fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId { token_id @@ -297,8 +328,8 @@ impl Span for TokenId { id } } -impl Span for SpanData { - type Table = IndexSet<SpanData>; +impl InternableSpan for Span { + type Table = IndexSet<Span>; fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { TokenId(table.insert_full(span).0 as u32) } @@ -307,7 +338,7 @@ impl Span for SpanData { } } -struct Writer<'a, 'span, S: Span> { +struct Writer<'a, 'span, S: InternableSpan> { work: VecDeque<(usize, &'a tt::Subtree<S>)>, string_table: HashMap<&'a str, u32>, span_data_table: &'span mut S::Table, @@ -320,7 +351,7 @@ struct Writer<'a, 'span, S: Span> { text: Vec<String>, } -impl<'a, 'span, S: Span> Writer<'a, 'span, S> { +impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> { fn write(&mut self, root: &'a tt::Subtree<S>) { self.enqueue(root); while let Some((idx, subtree)) = self.work.pop_front() { @@ -393,7 +424,7 @@ impl<'a, 'span, S: Span> Writer<'a, 'span, S> { } } -struct Reader<'span, S: Span> { +struct Reader<'span, S: InternableSpan> { subtree: Vec<SubtreeRepr>, literal: Vec<LiteralRepr>, punct: Vec<PunctRepr>, @@ -403,7 +434,7 @@ struct Reader<'span, S: Span> { span_data_table: &'span S::Table, } -impl<'span, S: Span> Reader<'span, S> { +impl<'span, S: InternableSpan> Reader<'span, S> { pub(crate) fn read(self) -> tt::Subtree<S> { let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()]; let read_span = |id| S::span_for_token_id(self.span_data_table, id); diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 9a20fa63ed7..3494164c067 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -9,7 +9,7 @@ use paths::{AbsPath, AbsPathBuf}; use stdx::JodChild; use crate::{ - msg::{Message, Request, Response, CURRENT_API_VERSION}, + msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT}, ProcMacroKind, ServerError, }; @@ -19,6 +19,7 @@ pub(crate) struct ProcMacroProcessSrv { stdin: ChildStdin, stdout: BufReader<ChildStdout>, version: u32, + mode: SpanMode, } impl ProcMacroProcessSrv { @@ -27,7 +28,13 @@ impl ProcMacroProcessSrv { let mut process = Process::run(process_path.clone(), null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); - io::Result::Ok(ProcMacroProcessSrv { _process: process, stdin, stdout, version: 0 }) + io::Result::Ok(ProcMacroProcessSrv { + _process: process, + stdin, + stdout, + version: 0, + mode: SpanMode::Id, + }) }; let mut srv = create_srv(true)?; tracing::info!("sending version check"); @@ -43,6 +50,11 @@ impl ProcMacroProcessSrv { tracing::info!("got version {v}"); srv = create_srv(false)?; srv.version = v; + if srv.version > 
RUST_ANALYZER_SPAN_SUPPORT { + if let Ok(mode) = srv.enable_rust_analyzer_spans() { + srv.mode = mode; + } + } Ok(srv) } Err(e) => { @@ -62,9 +74,19 @@ impl ProcMacroProcessSrv { match response { Response::ApiVersionCheck(version) => Ok(version), - Response::ExpandMacro { .. } | Response::ListMacros { .. } => { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), + } + } + + fn enable_rust_analyzer_spans(&mut self) -> Result<SpanMode, ServerError> { + let request = Request::SetConfig(crate::msg::ServerConfig { + span_mode: crate::msg::SpanMode::RustAnalyzer, + }); + let response = self.send_task(request)?; + + match response { + Response::SetConfig(crate::msg::ServerConfig { span_mode }) => Ok(span_mode), + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } @@ -78,9 +100,7 @@ impl ProcMacroProcessSrv { match response { Response::ListMacros(it) => Ok(it), - Response::ExpandMacro { .. } | Response::ApiVersionCheck { .. } => { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index 50ce586fc42..000a526e9f9 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -39,10 +39,22 @@ fn run() -> io::Result<()> { msg::Request::ListMacros { dylib_path } => { msg::Response::ListMacros(srv.list_macros(&dylib_path)) } - msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)), + msg::Request::ExpandMacro(task) => match srv.span_mode() { + msg::SpanMode::Id => msg::Response::ExpandMacro(srv.expand(task).map(|(it, _)| it)), + msg::SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended( + srv.expand(task).map(|(tree, span_data_table)| msg::ExpandMacroExtended { + tree, + span_data_table, + }), + ), + }, msg::Request::ApiVersionCheck {} => { msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION) } + msg::Request::SetConfig(config) => { + srv.set_span_mode(config.span_mode); + msg::Response::SetConfig(config) + } }; write_response(res)? 
} diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index 99993f16e27..bef2c30e9fb 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -26,13 +26,15 @@ stdx.workspace = true tt.workspace = true mbe.workspace = true paths.workspace = true +base-db.workspace = true +span.workspace = true proc-macro-api.workspace = true [dev-dependencies] expect-test = "1.4.0" # used as proc macro test targets -proc-macro-test.workspace = true +proc-macro-test.path = "./proc-macro-test" [features] -sysroot-abi = [] +sysroot-abi = ["proc-macro-test/sysroot-abi"] diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml new file mode 100644 index 00000000000..55be6bc23bb --- /dev/null +++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "proc-macro-test" +version = "0.0.0" +publish = false + +edition = "2021" +license = "MIT OR Apache-2.0" + +[lib] +doctest = false + +[build-dependencies] +cargo_metadata = "0.18.1" + +# local deps +toolchain = { path = "../../toolchain", version = "0.0.0" } + +[features] +sysroot-abi = [] diff --git a/crates/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs index 7827157865a..7299147686d 100644 --- a/crates/proc-macro-test/build.rs +++ b/crates/proc-macro-srv/proc-macro-test/build.rs @@ -70,6 +70,9 @@ fn main() { // instance to use the same target directory. .arg("--target-dir") .arg(&target_dir); + if cfg!(feature = "sysroot-abi") { + cmd.args(["--features", "sysroot-abi"]); + } if let Ok(target) = std::env::var("TARGET") { cmd.args(["--target", &target]); diff --git a/crates/proc-macro-test/imp/.gitignore b/crates/proc-macro-srv/proc-macro-test/imp/.gitignore index 2c96eb1b651..2c96eb1b651 100644 --- a/crates/proc-macro-test/imp/.gitignore +++ b/crates/proc-macro-srv/proc-macro-test/imp/.gitignore diff --git a/crates/proc-macro-test/imp/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml index 2a36737cef0..dc94fcd61a4 100644 --- a/crates/proc-macro-test/imp/Cargo.toml +++ b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml @@ -9,8 +9,11 @@ publish = false doctest = false proc-macro = true -[workspace] - [dependencies] # this crate should not have any dependencies, since it uses its own workspace, # and its own `Cargo.lock` + +[features] +sysroot-abi = [] + +[workspace] diff --git a/crates/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index 32510fba2f8..03241b16be6 100644 --- a/crates/proc-macro-test/imp/src/lib.rs +++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -1,6 +1,8 @@ //! Exports a few trivial procedural macros for testing. 
+#![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![warn(rust_2018_idioms, unused_lifetimes)] +#![feature(proc_macro_span, proc_macro_def_site)] use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; @@ -49,6 +51,29 @@ pub fn fn_like_mk_idents(_args: TokenStream) -> TokenStream { TokenStream::from_iter(trees) } +#[proc_macro] +pub fn fn_like_span_join(args: TokenStream) -> TokenStream { + let args = &mut args.into_iter(); + let first = args.next().unwrap(); + let second = args.next().unwrap(); + TokenStream::from(TokenTree::from(Ident::new_raw( + "joined", + first.span().join(second.span()).unwrap(), + ))) +} + +#[proc_macro] +pub fn fn_like_span_ops(args: TokenStream) -> TokenStream { + let args = &mut args.into_iter(); + let mut first = args.next().unwrap(); + first.set_span(Span::def_site()); + let mut second = args.next().unwrap(); + second.set_span(second.span().resolved_at(Span::def_site())); + let mut third = args.next().unwrap(); + third.set_span(third.span().start()); + TokenStream::from_iter(vec![first, second, third]) +} + #[proc_macro_attribute] pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream { item diff --git a/crates/proc-macro-test/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/src/lib.rs index 739c6ec6f44..739c6ec6f44 100644 --- a/crates/proc-macro-test/src/lib.rs +++ b/crates/proc-macro-srv/proc-macro-test/src/lib.rs diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index f20e6832f6e..52b4cced5f5 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -11,7 +11,10 @@ use libloading::Library; use memmap2::Mmap; use object::Object; use paths::AbsPath; -use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind}; +use proc_macro::bridge; +use proc_macro_api::{read_dylib_info, ProcMacroKind}; + +use crate::ProcMacroSrvSpan; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; @@ -147,15 +150,18 @@ impl Expander { Ok(Expander { inner: library }) } - pub fn expand( + pub fn expand<S: ProcMacroSrvSpan>( &self, macro_name: &str, - macro_body: &crate::tt::Subtree, - attributes: Option<&crate::tt::Subtree>, - def_site: TokenId, - call_site: TokenId, - mixed_site: TokenId, - ) -> Result<crate::tt::Subtree, String> { + macro_body: tt::Subtree<S>, + attributes: Option<tt::Subtree<S>>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result<tt::Subtree<S>, String> + where + <S::Server as bridge::server::Types>::TokenStream: Default, + { let result = self .inner .proc_macros diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 56529f71d85..7cd6df2df86 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -32,36 +32,67 @@ use std::{ }; use proc_macro_api::{ - msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION}, + msg::{ + self, deserialize_span_data_index_map, serialize_span_data_index_map, ExpnGlobals, + SpanMode, TokenId, CURRENT_API_VERSION, + }, ProcMacroKind, }; +use span::Span; -mod tt { - pub use proc_macro_api::msg::TokenId; +use crate::server::TokenStream; - pub use ::tt::*; +// see `build.rs` +include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); - pub type Subtree = ::tt::Subtree<TokenId>; - pub type TokenTree = ::tt::TokenTree<TokenId>; - pub type Delimiter = ::tt::Delimiter<TokenId>; - pub type Leaf = ::tt::Leaf<TokenId>; - pub type Literal = ::tt::Literal<TokenId>; - pub type Punct = ::tt::Punct<TokenId>; - pub type Ident = ::tt::Ident<TokenId>; +trait 
ProcMacroSrvSpan: tt::Span { + type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; } -// see `build.rs` -include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); +impl ProcMacroSrvSpan for TokenId { + type Server = server::token_id::TokenIdServer; + + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site } + } +} +impl ProcMacroSrvSpan for Span { + type Server = server::rust_analyzer_span::RaSpanServer; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { + interner: &server::SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + tracked_env_vars: Default::default(), + tracked_paths: Default::default(), + } + } +} #[derive(Default)] pub struct ProcMacroSrv { expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, + span_mode: SpanMode, } const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; impl ProcMacroSrv { - pub fn expand(&mut self, task: msg::ExpandMacro) -> Result<msg::FlatTree, msg::PanicMessage> { + pub fn set_span_mode(&mut self, span_mode: SpanMode) { + self.span_mode = span_mode; + } + + pub fn span_mode(&self) -> SpanMode { + self.span_mode + } + + pub fn expand( + &mut self, + task: msg::ExpandMacro, + ) -> Result<(msg::FlatTree, Vec<u32>), msg::PanicMessage> { + let span_mode = self.span_mode; let expander = self.expander(task.lib.as_ref()).map_err(|err| { debug_assert!(false, "should list macros before asking to expand"); msg::PanicMessage(format!("failed to load macro: {err}")) @@ -71,10 +102,10 @@ impl ProcMacroSrv { for (k, v) in &task.env { env::set_var(k, v); } - let prev_working_dir = match task.current_dir { + let prev_working_dir = match &task.current_dir { Some(dir) => { let prev_working_dir = std::env::current_dir().ok(); - if let Err(err) = std::env::set_current_dir(&dir) { + if let Err(err) = std::env::set_current_dir(dir) { eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}") } prev_working_dir @@ -83,38 +114,15 @@ impl ProcMacroSrv { }; let ExpnGlobals { def_site, call_site, mixed_site, .. 
} = task.has_global_spans; - let def_site = TokenId(def_site as u32); - let call_site = TokenId(call_site as u32); - let mixed_site = TokenId(mixed_site as u32); - - let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); - let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); - let result = thread::scope(|s| { - let thread = thread::Builder::new() - .stack_size(EXPANDER_STACK_SIZE) - .name(task.macro_name.clone()) - .spawn_scoped(s, || { - expander - .expand( - &task.macro_name, - ¯o_body, - attributes.as_ref(), - def_site, - call_site, - mixed_site, - ) - .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) - }); - let res = match thread { - Ok(handle) => handle.join(), - Err(e) => std::panic::resume_unwind(Box::new(e)), - }; - - match res { - Ok(res) => res, - Err(e) => std::panic::resume_unwind(e), + + let result = match span_mode { + SpanMode::Id => { + expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![])) } - }); + SpanMode::RustAnalyzer => { + expand_ra_span(task, expander, def_site, call_site, mixed_site) + } + }; prev_env.rollback(); @@ -155,6 +163,98 @@ impl ProcMacroSrv { } } +fn expand_id( + task: msg::ExpandMacro, + expander: &dylib::Expander, + def_site: usize, + call_site: usize, + mixed_site: usize, +) -> Result<msg::FlatTree, String> { + let def_site = TokenId(def_site as u32); + let call_site = TokenId(call_site as u32); + let mixed_site = TokenId(mixed_site as u32); + + let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); + let result = thread::scope(|s| { + let thread = thread::Builder::new() + .stack_size(EXPANDER_STACK_SIZE) + .name(task.macro_name.clone()) + .spawn_scoped(s, || { + expander + .expand( + &task.macro_name, + macro_body, + attributes, + def_site, + call_site, + mixed_site, + ) + .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) + }); + let res = match thread { + Ok(handle) => handle.join(), + Err(e) => std::panic::resume_unwind(Box::new(e)), + }; + + match res { + Ok(res) => res, + Err(e) => std::panic::resume_unwind(e), + } + }); + result +} + +fn expand_ra_span( + task: msg::ExpandMacro, + expander: &dylib::Expander, + def_site: usize, + call_site: usize, + mixed_site: usize, +) -> Result<(msg::FlatTree, Vec<u32>), String> { + let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table); + + let def_site = span_data_table[def_site]; + let call_site = span_data_table[call_site]; + let mixed_site = span_data_table[mixed_site]; + + let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); + let attributes = + task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + let result = thread::scope(|s| { + let thread = thread::Builder::new() + .stack_size(EXPANDER_STACK_SIZE) + .name(task.macro_name.clone()) + .spawn_scoped(s, || { + expander + .expand( + &task.macro_name, + macro_body, + attributes, + def_site, + call_site, + mixed_site, + ) + .map(|it| { + ( + msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table), + serialize_span_data_index_map(&span_data_table), + ) + }) + }); + let res = match thread { + Ok(handle) => handle.join(), + Err(e) => std::panic::resume_unwind(Box::new(e)), + }; + + match res { + Ok(res) => res, + Err(e) => std::panic::resume_unwind(e), + } + }); + result +} + pub struct PanicMessage { message: Option<String>, } 
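// A minimal self-contained sketch of the `span_data_table` wire format
// introduced above: `serialize_span_data_index_map` flattens every `Span` in
// the index map into five `u32`s (anchor file id, anchor AST id, range start,
// range end, syntax context), and `deserialize_span_data_index_map` rebuilds
// the table from those quintuples, as consumed by `expand_ra_span`. `FlatSpan`
// below is a hypothetical stand-in for the real `span::Span` type.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct FlatSpan {
    file_id: u32, // span.anchor.file_id.index()
    ast_id: u32,  // span.anchor.ast_id.into_raw().into_u32()
    start: u32,   // span.range.start()
    end: u32,     // span.range.end()
    ctx: u32,     // span.ctx.into_u32()
}

fn serialize(table: &[FlatSpan]) -> Vec<u32> {
    // Mirrors serialize_span_data_index_map: five words per span, concatenated.
    table.iter().flat_map(|s| [s.file_id, s.ast_id, s.start, s.end, s.ctx]).collect()
}

fn deserialize(words: &[u32]) -> Vec<FlatSpan> {
    // Mirrors deserialize_span_data_index_map: chunk the stream back into spans.
    debug_assert!(words.len() % 5 == 0);
    words
        .chunks_exact(5)
        .map(|w| FlatSpan { file_id: w[0], ast_id: w[1], start: w[2], end: w[3], ctx: w[4] })
        .collect()
}

fn main() {
    // One span covering bytes 0..6 of its anchor node, in the root context.
    let table = vec![FlatSpan { file_id: 0, ast_id: 0, start: 0, end: 6, ctx: 0 }];
    let wire = serialize(&table);
    assert_eq!(wire, [0, 0, 0, 6, 0]);
    assert_eq!(deserialize(&wire), table);
}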
diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 716b85d096d..3fe968c81ca 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -2,9 +2,9 @@ use libloading::Library; use proc_macro::bridge; -use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo}; +use proc_macro_api::{ProcMacroKind, RustCInfo}; -use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt}; +use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan}; pub(crate) struct ProcMacros { exported_macros: Vec<bridge::client::ProcMacro>, @@ -40,19 +40,19 @@ impl ProcMacros { Err(LoadProcMacroDylibError::AbiMismatch(info.version_string)) } - pub(crate) fn expand( + pub(crate) fn expand<S: ProcMacroSrvSpan>( &self, macro_name: &str, - macro_body: &tt::Subtree, - attributes: Option<&tt::Subtree>, - def_site: TokenId, - call_site: TokenId, - mixed_site: TokenId, - ) -> Result<tt::Subtree, crate::PanicMessage> { - let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone()); + macro_body: tt::Subtree<S>, + attributes: Option<tt::Subtree<S>>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result<tt::Subtree<S>, crate::PanicMessage> { + let parsed_body = crate::server::TokenStream::with_subtree(macro_body); - let parsed_attributes = attributes.map_or(crate::server::TokenStream::new(), |attr| { - crate::server::TokenStream::with_subtree(attr.clone()) + let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| { + crate::server::TokenStream::with_subtree(attr) }); for proc_macro in &self.exported_macros { @@ -62,12 +62,7 @@ impl ProcMacros { { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_body, false, ); @@ -78,12 +73,7 @@ impl ProcMacros { bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_body, false, ); @@ -94,13 +84,7 @@ impl ProcMacros { bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_attributes, parsed_body, false, @@ -113,7 +97,7 @@ impl ProcMacros { } } - Err(bridge::PanicMessage::String("Nothing to expand".to_string()).into()) + Err(bridge::PanicMessage::String(format!("proc-macro `{macro_name}` is missing")).into()) } pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index 54430e0d190..1854322ddb5 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -8,222 +8,18 @@ //! //! 
FIXME: No span and source file information is implemented yet -use proc_macro::bridge::{self, server}; +use proc_macro::bridge; mod token_stream; -use proc_macro_api::msg::TokenId; pub use token_stream::TokenStream; -use token_stream::TokenStreamBuilder; +pub mod token_id; +pub mod rust_analyzer_span; mod symbol; pub use symbol::*; +use tt::Spacing; -use std::{ - iter, - ops::{Bound, Range}, -}; - -use crate::tt; - -type Group = tt::Subtree; -type TokenTree = tt::TokenTree; -#[allow(unused)] -type Punct = tt::Punct; -type Spacing = tt::Spacing; -#[allow(unused)] -type Literal = tt::Literal; -type Span = tt::TokenId; - -#[derive(Clone)] -pub struct SourceFile { - // FIXME stub -} - -pub struct FreeFunctions; - -pub struct RustAnalyzer { - // FIXME: store span information here. - pub(crate) interner: SymbolInternerRef, - pub call_site: TokenId, - pub def_site: TokenId, - pub mixed_site: TokenId, -} - -impl server::Types for RustAnalyzer { - type FreeFunctions = FreeFunctions; - type TokenStream = TokenStream; - type SourceFile = SourceFile; - type Span = Span; - type Symbol = Symbol; -} - -impl server::FreeFunctions for RustAnalyzer { - fn track_env_var(&mut self, _var: &str, _value: Option<&str>) { - // FIXME: track env var accesses - // https://github.com/rust-lang/rust/pull/71858 - } - fn track_path(&mut self, _path: &str) {} - - fn literal_from_str( - &mut self, - s: &str, - ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> { - // FIXME: keep track of LitKind and Suffix - Ok(bridge::Literal { - kind: bridge::LitKind::Err, - symbol: Symbol::intern(self.interner, s), - suffix: None, - span: self.call_site, - }) - } - - fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) { - // FIXME handle diagnostic - } -} - -impl server::TokenStream for RustAnalyzer { - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } - fn from_str(&mut self, src: &str) -> Self::TokenStream { - Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") - } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { - stream.to_string() - } - fn from_token_tree( - &mut self, - tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>, - ) -> Self::TokenStream { - match tree { - bridge::TokenTree::Group(group) => { - let group = Group { - delimiter: delim_to_internal(group.delimiter, group.span), - token_trees: match group.stream { - Some(stream) => stream.into_iter().collect(), - None => Vec::new(), - }, - }; - let tree = TokenTree::from(group); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Ident(ident) => { - let text = ident.sym.text(self.interner); - let text = - if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; - let ident: tt::Ident = tt::Ident { text, span: ident.span }; - let leaf = tt::Leaf::from(ident); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Literal(literal) => { - let literal = LiteralFormatter(literal); - let text = literal.with_stringify_parts(self.interner, |parts| { - ::tt::SmolStr::from_iter(parts.iter().copied()) - }); - - let literal = tt::Literal { text, span: literal.0.span }; - let leaf = tt::Leaf::from(literal); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Punct(p) => { - let punct = tt::Punct { - char: p.ch as char, - spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, - span: p.span, - }; - let leaf = 
tt::Leaf::from(punct); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - } - } - - fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> { - Ok(self_.clone()) - } - - fn concat_trees( - &mut self, - base: Option<Self::TokenStream>, - trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for tree in trees { - builder.push(self.from_token_tree(tree)); - } - builder.build() - } - - fn concat_streams( - &mut self, - base: Option<Self::TokenStream>, - streams: Vec<Self::TokenStream>, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for stream in streams { - builder.push(stream); - } - builder.build() - } - - fn into_trees( - &mut self, - stream: Self::TokenStream, - ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> { - stream - .into_iter() - .map(|tree| match tree { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(bridge::Ident { - sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), - is_raw: ident.text.starts_with("r#"), - span: ident.span, - }) - } - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { - bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Err, - symbol: Symbol::intern(self.interner, &lit.text), - // FIXME: handle suffixes - suffix: None, - span: lit.span, - }) - } - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { - bridge::TokenTree::Punct(bridge::Punct { - ch: punct.char as u8, - joint: punct.spacing == Spacing::Joint, - span: punct.span, - }) - } - tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { - delimiter: delim_to_external(subtree.delimiter), - stream: if subtree.token_trees.is_empty() { - None - } else { - Some(subtree.token_trees.into_iter().collect()) - }, - span: bridge::DelimSpan::from_single(subtree.delimiter.open), - }), - }) - .collect() - } -} - -fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan<Span>) -> tt::Delimiter { +fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> { let kind = match d { proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace, @@ -233,7 +29,7 @@ fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan<Span>) -> tt::Delimiter { open: span.open, close: span.close, kind } } -fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter { +fn delim_to_external<S>(d: tt::Delimiter<S>) -> proc_macro::Delimiter { match d.kind { tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis, tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace, @@ -258,121 +54,9 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { } } -impl server::SourceFile for RustAnalyzer { - // FIXME these are all stubs - fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { - true - } - fn path(&mut self, _file: &Self::SourceFile) -> String { - String::new() - } - fn is_real(&mut self, _file: &Self::SourceFile) -> bool { - true - } -} - -impl server::Span for RustAnalyzer { - fn debug(&mut self, span: Self::Span) -> String { - format!("{:?}", span.0) - } - fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile 
{ - SourceFile {} - } - fn save_span(&mut self, _span: Self::Span) -> usize { - // FIXME stub - 0 - } - fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { - // FIXME stub - self.call_site - } - /// Recent feature, not yet in the proc_macro - /// - /// See PR: - /// https://github.com/rust-lang/rust/pull/55780 - fn source_text(&mut self, _span: Self::Span) -> Option<String> { - None - } - - fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> { - // FIXME handle span - None - } - fn source(&mut self, span: Self::Span) -> Self::Span { - // FIXME handle span - span - } - fn byte_range(&mut self, _span: Self::Span) -> Range<usize> { - // FIXME handle span - Range { start: 0, end: 0 } - } - fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> { - // Just return the first span again, because some macros will unwrap the result. - Some(first) - } - fn subspan( - &mut self, - span: Self::Span, - _start: Bound<usize>, - _end: Bound<usize>, - ) -> Option<Self::Span> { - // Just return the span again, because some macros will unwrap the result. - Some(span) - } - fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { - // FIXME handle span - self.call_site - } - - fn end(&mut self, _self_: Self::Span) -> Self::Span { - self.call_site - } +struct LiteralFormatter<S>(bridge::Literal<S, Symbol>); - fn start(&mut self, _self_: Self::Span) -> Self::Span { - self.call_site - } - - fn line(&mut self, _span: Self::Span) -> usize { - // FIXME handle line - 0 - } - - fn column(&mut self, _span: Self::Span) -> usize { - // FIXME handle column - 0 - } -} - -impl server::Symbol for RustAnalyzer { - fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> { - // FIXME: nfc-normalize and validate idents - Ok(<Self as server::Server>::intern_symbol(string)) - } -} - -impl server::Server for RustAnalyzer { - fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> { - bridge::ExpnGlobals { - def_site: self.def_site, - call_site: self.call_site, - mixed_site: self.mixed_site, - } - } - - fn intern_symbol(ident: &str) -> Self::Symbol { - // FIXME: should be `self.interner` once the proc-macro api allows it. - Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) - } - - fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { - // FIXME: should be `self.interner` once the proc-macro api allows it. - f(symbol.text(&SYMBOL_INTERNER).as_str()) - } -} - -struct LiteralFormatter(bridge::Literal<tt::TokenId, Symbol>); - -impl LiteralFormatter { +impl<S> LiteralFormatter<S> { /// Invokes the callback with a `&[&str]` consisting of each part of the /// literal's representation. 
This is done to allow the `ToString` and /// `Display` implementations to borrow references to symbol values, and @@ -423,66 +107,3 @@ impl LiteralFormatter { f(symbol.as_str(), suffix.as_str()) } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ra_server_to_string() { - let s = TokenStream { - token_trees: vec![ - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "struct".into(), - span: tt::TokenId(0), - })), - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "T".into(), - span: tt::TokenId(0), - })), - tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId(0), - close: tt::TokenId(0), - kind: tt::DelimiterKind::Brace, - }, - token_trees: vec![], - }), - ], - }; - - assert_eq!(s.to_string(), "struct T {}"); - } - - #[test] - fn test_ra_server_from_str() { - let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId(0), - close: tt::TokenId(0), - kind: tt::DelimiterKind::Parenthesis, - }, - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "a".into(), - span: tt::TokenId(0), - }))], - }); - - let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); - assert_eq!(t1.token_trees.len(), 1); - assert_eq!(t1.token_trees[0], subtree_paren_a); - - let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); - assert_eq!(t2.token_trees.len(), 2); - assert_eq!(t2.token_trees[0], subtree_paren_a); - - let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); - assert_eq!( - underscore.token_trees[0], - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "_".into(), - span: tt::TokenId(0), - })) - ); - } -} diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs new file mode 100644 index 00000000000..bcf3600d273 --- /dev/null +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -0,0 +1,411 @@ +//! proc-macro server backend based on rust-analyzer's internal span representation +//! This backend is used solely by rust-analyzer as it ties into rust-analyzer internals. +//! +//! It is an unfortunate result of how the proc-macro API works that we need to look into the +//! concrete representation of the spans, and as such, RustRover cannot make use of this unless they +//! change their representation to be compatible with rust-analyzer's.
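// A simplified standalone model of the join semantics implemented by this
// server's `Span` impl further down in this file: spans anchored to different
// AST nodes cannot be joined without resolving AstIds, and differing hygiene
// contexts only merge when one side is the root context (the fixup-span
// special case is omitted here). `MiniSpan` and its plain `u32` fields are
// hypothetical stand-ins for `span::Span`, `SpanAnchor`, and `SyntaxContextId`.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MiniSpan {
    anchor: u32, // stands in for SpanAnchor { file_id, ast_id }
    start: u32,
    end: u32,
    ctx: u32, // stands in for SyntaxContextId; 0 = root
}

fn join(first: MiniSpan, second: MiniSpan) -> Option<MiniSpan> {
    // Anchors differ: joining would require resolving AstIds, which the
    // server cannot do without talking back to the client.
    if first.anchor != second.anchor {
        return None;
    }
    // Contexts differ: prefer the span whose context is not the root.
    if first.ctx != second.ctx {
        if first.ctx == 0 {
            return Some(second);
        } else if second.ctx == 0 {
            return Some(first);
        }
    }
    // Same anchor: cover both ranges, keeping the second span's context.
    Some(MiniSpan {
        anchor: second.anchor,
        start: first.start.min(second.start),
        end: first.end.max(second.end),
        ctx: second.ctx,
    })
}

fn main() {
    let a = MiniSpan { anchor: 1, start: 0, end: 3, ctx: 0 };
    let b = MiniSpan { anchor: 1, start: 5, end: 9, ctx: 0 };
    assert_eq!(join(a, b), Some(MiniSpan { anchor: 1, start: 0, end: 9, ctx: 0 }));
    assert_eq!(join(a, MiniSpan { anchor: 2, ..b }), None);
}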
+use std::{ + collections::{HashMap, HashSet}, + iter, + ops::{Bound, Range}, +}; + +use ::tt::{TextRange, TextSize}; +use proc_macro::bridge::{self, server}; +use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; + +use crate::server::{ + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, +}; +mod tt { + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree<super::Span>; + pub type TokenTree = ::tt::TokenTree<super::Span>; + pub type Leaf = ::tt::Leaf<super::Span>; + pub type Literal = ::tt::Literal<super::Span>; + pub type Punct = ::tt::Punct<super::Span>; + pub type Ident = ::tt::Ident<super::Span>; +} + +type TokenStream = crate::server::TokenStream<Span>; + +#[derive(Clone)] +pub struct SourceFile; +pub struct FreeFunctions; + +pub struct RaSpanServer { + pub(crate) interner: SymbolInternerRef, + // FIXME: Report this back to the caller to track as dependencies + pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>, + // FIXME: Report this back to the caller to track as dependencies + pub tracked_paths: HashSet<Box<str>>, + pub call_site: Span, + pub def_site: Span, + pub mixed_site: Span, +} + +impl server::Types for RaSpanServer { + type FreeFunctions = FreeFunctions; + type TokenStream = TokenStream; + type SourceFile = SourceFile; + type Span = Span; + type Symbol = Symbol; +} + +impl server::FreeFunctions for RaSpanServer { + fn injected_env_var(&mut self, _: &str) -> Option<std::string::String> { + None + } + + fn track_env_var(&mut self, var: &str, value: Option<&str>) { + self.tracked_env_vars.insert(var.into(), value.map(Into::into)); + } + fn track_path(&mut self, path: &str) { + self.tracked_paths.insert(path.into()); + } + + fn literal_from_str( + &mut self, + s: &str, + ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> { + // FIXME: keep track of LitKind and Suffix + Ok(bridge::Literal { + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, s), + suffix: None, + span: self.call_site, + }) + } + + fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) { + // FIXME handle diagnostic + } +} + +impl server::TokenStream for RaSpanServer { + fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { + stream.is_empty() + } + fn from_str(&mut self, src: &str) -> Self::TokenStream { + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { + stream.to_string() + } + fn from_token_tree( + &mut self, + tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>, + ) -> Self::TokenStream { + match tree { + bridge::TokenTree::Group(group) => { + let group = tt::Subtree { + delimiter: delim_to_internal(group.delimiter, group.span), + token_trees: match group.stream { + Some(stream) => stream.into_iter().collect(), + None => Vec::new(), + }, + }; + let tree = tt::TokenTree::from(group); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Ident(ident) => { + let text = ident.sym.text(self.interner); + let text = + if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; + let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let leaf = tt::Leaf::from(ident); + let tree = tt::TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Literal(literal) => { + let literal = LiteralFormatter(literal); + let text = literal.with_stringify_parts(self.interner, |parts| { + 
::tt::SmolStr::from_iter(parts.iter().copied()) + }); + + let literal = tt::Literal { text, span: literal.0.span }; + let leaf: tt::Leaf = tt::Leaf::from(literal); + let tree = tt::TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Punct(p) => { + let punct = tt::Punct { + char: p.ch as char, + spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone }, + span: p.span, + }; + let leaf = tt::Leaf::from(punct); + let tree = tt::TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + } + } + + fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> { + // FIXME: requires db, more importantly this requires name resolution so we would need to + // eagerly expand this proc-macro, but we can't know that this proc-macro is eager until we + // expand it ... + // This calls for some kind of marker that a proc-macro wants to access this eager API, + // otherwise we need to treat every proc-macro eagerly / or not support this. + Ok(self_.clone()) + } + + fn concat_trees( + &mut self, + base: Option<Self::TokenStream>, + trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for tree in trees { + builder.push(self.from_token_tree(tree)); + } + builder.build() + } + + fn concat_streams( + &mut self, + base: Option<Self::TokenStream>, + streams: Vec<Self::TokenStream>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for stream in streams { + builder.push(stream); + } + builder.build() + } + + fn into_trees( + &mut self, + stream: Self::TokenStream, + ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> { + stream + .into_iter() + .map(|tree| match tree { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(bridge::Ident { + sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), + span: ident.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + bridge::TokenTree::Literal(bridge::Literal { + // FIXME: handle literal kinds + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, &lit.text), + // FIXME: handle suffixes + suffix: None, + span: lit.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + bridge::TokenTree::Punct(bridge::Punct { + ch: punct.char as u8, + joint: punct.spacing == tt::Spacing::Joint, + span: punct.span, + }) + } + tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { + delimiter: delim_to_external(subtree.delimiter), + stream: if subtree.token_trees.is_empty() { + None + } else { + Some(subtree.token_trees.into_iter().collect()) + }, + span: bridge::DelimSpan::from_single(subtree.delimiter.open), + }), + }) + .collect() + } +} + +impl server::SourceFile for RaSpanServer { + // FIXME these are all stubs + fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { + true + } + fn path(&mut self, _file: &Self::SourceFile) -> String { + String::new() + } + fn is_real(&mut self, _file: &Self::SourceFile) -> bool { + true + } +} + +impl server::Span for RaSpanServer { + fn debug(&mut self, span: Self::Span) -> String { + format!("{:?}", span) + } + fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { + // FIXME stub, requires db + SourceFile {} + } + 
fn save_span(&mut self, _span: Self::Span) -> usize { + // FIXME stub, requires builtin quote! implementation + 0 + } + fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { + // FIXME stub, requires builtin quote! implementation + self.call_site + } + /// Recent feature, not yet in the proc_macro + /// + /// See PR: + /// https://github.com/rust-lang/rust/pull/55780 + fn source_text(&mut self, _span: Self::Span) -> Option<String> { + // FIXME requires db, needs special handling wrt fixup spans + None + } + + fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> { + // FIXME requires db, looks up the parent call site + None + } + fn source(&mut self, span: Self::Span) -> Self::Span { + // FIXME requires db, returns the top level call site + span + } + fn byte_range(&mut self, span: Self::Span) -> Range<usize> { + // FIXME requires db to resolve the ast id, THIS IS NOT INCREMENTAL + Range { start: span.range.start().into(), end: span.range.end().into() } + } + fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> { + // We can't modify the span range for fixup spans, those are meaningful to fixup, so just + // prefer the non-fixup span. + if first.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return Some(second); + } + if second.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return Some(first); + } + // FIXME: Once we can talk back to the client, implement a "long join" request for anchors + // that differ in [AstId]s as joining those spans requires resolving the AstIds. + if first.anchor != second.anchor { + return None; + } + // Differing context, we can't merge these so prefer the one that's root + if first.ctx != second.ctx { + if first.ctx.is_root() { + return Some(second); + } else if second.ctx.is_root() { + return Some(first); + } + } + Some(Span { + range: first.range.cover(second.range), + anchor: second.anchor, + ctx: second.ctx, + }) + } + fn subspan( + &mut self, + span: Self::Span, + start: Bound<usize>, + end: Bound<usize>, + ) -> Option<Self::Span> { + // We can't modify the span range for fixup spans, those are meaningful to fixup. + if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return Some(span); + } + let length = span.range.len().into(); + + let start: u32 = match start { + Bound::Included(lo) => lo, + Bound::Excluded(lo) => lo.checked_add(1)?, + Bound::Unbounded => 0, + } + .try_into() + .ok()?; + + let end: u32 = match end { + Bound::Included(hi) => hi.checked_add(1)?, + Bound::Excluded(hi) => hi, + Bound::Unbounded => span.range.len().into(), + } + .try_into() + .ok()?; + + // Bounds check the values, preventing addition overflow and OOB spans. + let span_start = span.range.start().into(); + if (u32::MAX - start) < span_start + || (u32::MAX - end) < span_start + || start >= end + || end > length + { + return None; + } + + Some(Span { + range: TextRange::new(TextSize::from(start), TextSize::from(end)) + span.range.start(), + ..span + }) + } + + fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span { + Span { ctx: at.ctx, ..span } + } + + fn end(&mut self, span: Self::Span) -> Self::Span { + // We can't modify the span range for fixup spans, those are meaningful to fixup. + if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return span; + } + Span { range: TextRange::empty(span.range.end()), ..span } + } + + fn start(&mut self, span: Self::Span) -> Self::Span { + // We can't modify the span range for fixup spans, those are meaningful to fixup. 
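+        // (Fixup spans are marked with `FIXUP_ERASED_FILE_AST_ID_MARKER` as their
+        // anchor's `ast_id`; their ranges encode fixup bookkeeping rather than real
+        // source offsets, hence the pass-through here and in `end`/`join`/`subspan`.)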
+ if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return span; + } + Span { range: TextRange::empty(span.range.start()), ..span } + } + + fn line(&mut self, _span: Self::Span) -> usize { + // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL + 0 + } + + fn column(&mut self, _span: Self::Span) -> usize { + // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL + 0 + } +} + +impl server::Symbol for RaSpanServer { + fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> { + // FIXME: nfc-normalize and validate idents + Ok(<Self as server::Server>::intern_symbol(string)) + } +} + +impl server::Server for RaSpanServer { + fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> { + bridge::ExpnGlobals { + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, + } + } + + fn intern_symbol(ident: &str) -> Self::Symbol { + // FIXME: should be `self.interner` once the proc-macro api allows it. + Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + } + + fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { + // FIXME: should be `self.interner` once the proc-macro api allows it. + f(symbol.text(&SYMBOL_INTERNER).as_str()) + } +} diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs new file mode 100644 index 00000000000..12526ad4f3a --- /dev/null +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -0,0 +1,380 @@ +//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span. +//! This backend is rather inflexible, used by RustRover and older rust-analyzer versions. +use std::{ + iter, + ops::{Bound, Range}, +}; + +use proc_macro::bridge::{self, server}; + +use crate::server::{ + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, +}; +mod tt { + pub use proc_macro_api::msg::TokenId; + + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree<TokenId>; + pub type TokenTree = ::tt::TokenTree<TokenId>; + pub type Leaf = ::tt::Leaf<TokenId>; + pub type Literal = ::tt::Literal<TokenId>; + pub type Punct = ::tt::Punct<TokenId>; + pub type Ident = ::tt::Ident<TokenId>; +} +type Group = tt::Subtree; +type TokenTree = tt::TokenTree; +#[allow(unused)] +type Punct = tt::Punct; +type Spacing = tt::Spacing; +#[allow(unused)] +type Literal = tt::Literal; +type Span = tt::TokenId; +type TokenStream = crate::server::TokenStream<Span>; + +#[derive(Clone)] +pub struct SourceFile; +pub struct FreeFunctions; + +pub struct TokenIdServer { + pub(crate) interner: SymbolInternerRef, + pub call_site: Span, + pub def_site: Span, + pub mixed_site: Span, +} + +impl server::Types for TokenIdServer { + type FreeFunctions = FreeFunctions; + type TokenStream = TokenStream; + type SourceFile = SourceFile; + type Span = Span; + type Symbol = Symbol; +} + +impl server::FreeFunctions for TokenIdServer { + fn injected_env_var(&mut self, _: &str) -> Option<std::string::String> { + None + } + fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {} + fn track_path(&mut self, _path: &str) {} + fn literal_from_str( + &mut self, + s: &str, + ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> { + // FIXME: keep track of LitKind and Suffix + Ok(bridge::Literal { + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, s), + suffix: None, + span: self.call_site, + }) + } + + fn emit_diagnostic(&mut self, _: 
bridge::Diagnostic<Self::Span>) {} +} + +impl server::TokenStream for TokenIdServer { + fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { + stream.is_empty() + } + fn from_str(&mut self, src: &str) -> Self::TokenStream { + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { + stream.to_string() + } + fn from_token_tree( + &mut self, + tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>, + ) -> Self::TokenStream { + match tree { + bridge::TokenTree::Group(group) => { + let group = Group { + delimiter: delim_to_internal(group.delimiter, group.span), + token_trees: match group.stream { + Some(stream) => stream.into_iter().collect(), + None => Vec::new(), + }, + }; + let tree = TokenTree::from(group); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Ident(ident) => { + let text = ident.sym.text(self.interner); + let text = + if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; + let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let leaf = tt::Leaf::from(ident); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Literal(literal) => { + let literal = LiteralFormatter(literal); + let text = literal.with_stringify_parts(self.interner, |parts| { + ::tt::SmolStr::from_iter(parts.iter().copied()) + }); + + let literal = tt::Literal { text, span: literal.0.span }; + let leaf = tt::Leaf::from(literal); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Punct(p) => { + let punct = tt::Punct { + char: p.ch as char, + spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, + span: p.span, + }; + let leaf = tt::Leaf::from(punct); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + } + } + + fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> { + Ok(self_.clone()) + } + + fn concat_trees( + &mut self, + base: Option<Self::TokenStream>, + trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for tree in trees { + builder.push(self.from_token_tree(tree)); + } + builder.build() + } + + fn concat_streams( + &mut self, + base: Option<Self::TokenStream>, + streams: Vec<Self::TokenStream>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for stream in streams { + builder.push(stream); + } + builder.build() + } + + fn into_trees( + &mut self, + stream: Self::TokenStream, + ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> { + stream + .into_iter() + .map(|tree| match tree { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(bridge::Ident { + sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), + span: ident.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + bridge::TokenTree::Literal(bridge::Literal { + // FIXME: handle literal kinds + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, &lit.text), + // FIXME: handle suffixes + suffix: None, + span: lit.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + bridge::TokenTree::Punct(bridge::Punct { + 
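+                        // `Punct` characters are single ASCII punctuation characters,
+                        // so the narrowing `char as u8` cast below is lossless in practice.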
ch: punct.char as u8, + joint: punct.spacing == Spacing::Joint, + span: punct.span, + }) + } + tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { + delimiter: delim_to_external(subtree.delimiter), + stream: if subtree.token_trees.is_empty() { + None + } else { + Some(subtree.token_trees.into_iter().collect()) + }, + span: bridge::DelimSpan::from_single(subtree.delimiter.open), + }), + }) + .collect() + } +} + +impl server::SourceFile for TokenIdServer { + fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { + true + } + fn path(&mut self, _file: &Self::SourceFile) -> String { + String::new() + } + fn is_real(&mut self, _file: &Self::SourceFile) -> bool { + true + } +} + +impl server::Span for TokenIdServer { + fn debug(&mut self, span: Self::Span) -> String { + format!("{:?}", span.0) + } + fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { + SourceFile {} + } + fn save_span(&mut self, _span: Self::Span) -> usize { + 0 + } + fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { + self.call_site + } + /// Recent feature, not yet in the proc_macro + /// + /// See PR: + /// https://github.com/rust-lang/rust/pull/55780 + fn source_text(&mut self, _span: Self::Span) -> Option<String> { + None + } + + fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> { + None + } + fn source(&mut self, span: Self::Span) -> Self::Span { + span + } + fn byte_range(&mut self, _span: Self::Span) -> Range<usize> { + Range { start: 0, end: 0 } + } + fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> { + // Just return the first span again, because some macros will unwrap the result. + Some(first) + } + fn subspan( + &mut self, + span: Self::Span, + _start: Bound<usize>, + _end: Bound<usize>, + ) -> Option<Self::Span> { + // Just return the span again, because some macros will unwrap the result. 
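+        // (Unlike `RaSpanServer::subspan`, a bare `TokenId` carries no byte range that
+        // could be sliced, so returning the input span unchanged is the best this
+        // backend can do.)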
+ Some(span) + } + fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { + self.call_site + } + + fn end(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn start(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn line(&mut self, _span: Self::Span) -> usize { + 0 + } + + fn column(&mut self, _span: Self::Span) -> usize { + 0 + } +} + +impl server::Symbol for TokenIdServer { + fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> { + // FIXME: nfc-normalize and validate idents + Ok(<Self as server::Server>::intern_symbol(string)) + } +} + +impl server::Server for TokenIdServer { + fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> { + bridge::ExpnGlobals { + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, + } + } + + fn intern_symbol(ident: &str) -> Self::Symbol { + Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + } + + fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { + f(symbol.text(&SYMBOL_INTERNER).as_str()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ra_server_to_string() { + let s = TokenStream { + token_trees: vec![ + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "struct".into(), + span: tt::TokenId(0), + })), + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "T".into(), + span: tt::TokenId(0), + })), + tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: tt::TokenId(0), + close: tt::TokenId(0), + kind: tt::DelimiterKind::Brace, + }, + token_trees: vec![], + }), + ], + }; + + assert_eq!(s.to_string(), "struct T {}"); + } + + #[test] + fn test_ra_server_from_str() { + let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: tt::TokenId(0), + close: tt::TokenId(0), + kind: tt::DelimiterKind::Parenthesis, + }, + token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "a".into(), + span: tt::TokenId(0), + }))], + }); + + let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); + assert_eq!(t1.token_trees.len(), 1); + assert_eq!(t1.token_trees[0], subtree_paren_a); + + let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); + assert_eq!(t2.token_trees.len(), 2); + assert_eq!(t2.token_trees[0], subtree_paren_a); + + let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); + assert_eq!( + underscore.token_trees[0], + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "_".into(), + span: tt::TokenId(0), + })) + ); + } +} diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 36be8825038..8f669a30494 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -1,20 +1,24 @@ //! 
TokenStream implementation used by sysroot ABI -use proc_macro_api::msg::TokenId; +use tt::TokenTree; -use crate::tt::{self, TokenTree}; +#[derive(Debug, Clone)] +pub struct TokenStream<S> { + pub(super) token_trees: Vec<TokenTree<S>>, +} -#[derive(Debug, Default, Clone)] -pub struct TokenStream { - pub(super) token_trees: Vec<TokenTree>, +impl<S> Default for TokenStream<S> { + fn default() -> Self { + Self { token_trees: vec![] } + } } -impl TokenStream { +impl<S> TokenStream<S> { pub(crate) fn new() -> Self { - TokenStream::default() + TokenStream { token_trees: vec![] } } - pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self { + pub(crate) fn with_subtree(subtree: tt::Subtree<S>) -> Self { if subtree.delimiter.kind != tt::DelimiterKind::Invisible { TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } } else { @@ -22,7 +26,10 @@ impl TokenStream { } } - pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree { + pub(crate) fn into_subtree(self, call_site: S) -> tt::Subtree<S> + where + S: Copy, + { tt::Subtree { delimiter: tt::Delimiter { open: call_site, @@ -39,37 +46,37 @@ impl TokenStream { } /// Creates a token stream containing a single token tree. -impl From<TokenTree> for TokenStream { - fn from(tree: TokenTree) -> TokenStream { +impl<S> From<TokenTree<S>> for TokenStream<S> { + fn from(tree: TokenTree<S>) -> TokenStream<S> { TokenStream { token_trees: vec![tree] } } } /// Collects a number of token trees into a single stream. -impl FromIterator<TokenTree> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self { +impl<S> FromIterator<TokenTree<S>> for TokenStream<S> { + fn from_iter<I: IntoIterator<Item = TokenTree<S>>>(trees: I) -> Self { trees.into_iter().map(TokenStream::from).collect() } } /// A "flattening" operation on token streams, collects token trees /// from multiple token streams into a single stream. -impl FromIterator<TokenStream> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self { +impl<S> FromIterator<TokenStream<S>> for TokenStream<S> { + fn from_iter<I: IntoIterator<Item = TokenStream<S>>>(streams: I) -> Self { let mut builder = TokenStreamBuilder::new(); streams.into_iter().for_each(|stream| builder.push(stream)); builder.build() } } -impl Extend<TokenTree> for TokenStream { - fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) { +impl<S> Extend<TokenTree<S>> for TokenStream<S> { + fn extend<I: IntoIterator<Item = TokenTree<S>>>(&mut self, trees: I) { self.extend(trees.into_iter().map(TokenStream::from)); } } -impl Extend<TokenStream> for TokenStream { - fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { +impl<S> Extend<TokenStream<S>> for TokenStream<S> { + fn extend<I: IntoIterator<Item = TokenStream<S>>>(&mut self, streams: I) { for item in streams { for tkn in item { match tkn { @@ -87,22 +94,21 @@ impl Extend<TokenStream> for TokenStream { } } -pub(super) struct TokenStreamBuilder { - acc: TokenStream, +pub(super) struct TokenStreamBuilder<S> { + acc: TokenStream<S>, } /// pub(super)lic implementation details for the `TokenStream` type, such as iterators. pub(super) mod token_stream { - use proc_macro_api::msg::TokenId; - use super::{tt, TokenStream, TokenTree}; + use super::{TokenStream, TokenTree}; /// An iterator over `TokenStream`'s `TokenTree`s. /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, /// and returns whole groups as token trees. 
- impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = std::vec::IntoIter<TokenTree>; + impl<S> IntoIterator for TokenStream<S> { + type Item = TokenTree<S>; + type IntoIter = std::vec::IntoIter<TokenTree<S>>; fn into_iter(self) -> Self::IntoIter { self.token_trees.into_iter() @@ -119,71 +125,34 @@ pub(super) mod token_stream { /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. #[rustfmt::skip] - impl /*FromStr for*/ TokenStream { + impl<S: tt::Span> /*FromStr for*/ TokenStream<S> { // type Err = LexError; - pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result<TokenStream, LexError> { + pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, LexError> { let subtree = mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?; - let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site); Ok(TokenStream::with_subtree(subtree)) } } - impl ToString for TokenStream { + impl<S> ToString for TokenStream<S> { fn to_string(&self) -> String { ::tt::pretty(&self.token_trees) } } - - fn subtree_replace_token_ids_with_call_site( - subtree: tt::Subtree, - call_site: TokenId, - ) -> tt::Subtree { - tt::Subtree { - delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter }, - token_trees: subtree - .token_trees - .into_iter() - .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site)) - .collect(), - } - } - - fn token_tree_replace_token_ids_with_call_site( - tt: tt::TokenTree, - call_site: TokenId, - ) -> tt::TokenTree { - match tt { - tt::TokenTree::Leaf(leaf) => { - tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site)) - } - tt::TokenTree::Subtree(subtree) => { - tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site)) - } - } - } - - fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf { - match leaf { - tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }), - tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }), - tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }), - } - } } -impl TokenStreamBuilder { - pub(super) fn new() -> TokenStreamBuilder { +impl<S> TokenStreamBuilder<S> { + pub(super) fn new() -> TokenStreamBuilder<S> { TokenStreamBuilder { acc: TokenStream::new() } } - pub(super) fn push(&mut self, stream: TokenStream) { + pub(super) fn push(&mut self, stream: TokenStream<S>) { self.acc.extend(stream.into_iter()) } - pub(super) fn build(self) -> TokenStream { + pub(super) fn build(self) -> TokenStream<S> { self.acc } } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index b04e3ca19ac..87d832cc76f 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -8,7 +8,7 @@ use expect_test::expect; #[test] fn test_derive_empty() { - assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]); + assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"], expect!["SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"]); } #[test] @@ -23,6 +23,13 @@ fn test_derive_error() { SUBTREE () 1 1 LITERAL "#[derive(DeriveError)] struct S ;" 1 PUNCH ; [alone] 1"##]], + 
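+        // New second snapshot: the same expansion rendered with full `SpanData` spans
+        // (range + anchor + syntax context) instead of bare `TokenId`s.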
expect![[r##" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "#[derive(DeriveError)] struct S ;" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]], ); } @@ -40,6 +47,15 @@ fn test_fn_like_macro_noop() { LITERAL 1 1 PUNCH , [alone] 1 SUBTREE [] 1 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 0 SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 8..9, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 1 SpanData { range: 10..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE [] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 14..15, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -53,6 +69,11 @@ fn test_fn_like_macro_clone_ident_subtree() { IDENT ident 1 PUNCH , [alone] 1 SUBTREE [] 1 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE [] SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -64,6 +85,41 @@ fn test_fn_like_macro_clone_raw_ident() { expect![[r#" SUBTREE $$ 1 1 IDENT r#async 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT r#async SpanData { range: 0..7, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], + ); +} + +#[test] +fn test_fn_like_fn_like_span_join() { + assert_expand( + "fn_like_span_join", + "foo bar", + expect![[r#" + SUBTREE $$ 1 1 + IDENT r#joined 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT r#joined SpanData { range: 0..11, anchor: SpanAnchor(FileId(42), 2), ctx: 
SyntaxContextId(0) }"#]], + ); +} + +#[test] +fn test_fn_like_fn_like_span_ops() { + assert_expand( + "fn_like_span_ops", + "set_def_site resolved_at_def_site start_span", + expect![[r#" + SUBTREE $$ 1 1 + IDENT set_def_site 0 + IDENT resolved_at_def_site 1 + IDENT start_span 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT set_def_site SpanData { range: 0..150, anchor: SpanAnchor(FileId(41), 1), ctx: SyntaxContextId(0) } + IDENT resolved_at_def_site SpanData { range: 13..33, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT start_span SpanData { range: 34..34, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -81,6 +137,15 @@ fn test_fn_like_mk_literals() { LITERAL 3.14 1 LITERAL 123i64 1 LITERAL 123 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL b"byte_string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 'c' SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 3.14f64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 3.14 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 123i64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 123 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -93,6 +158,10 @@ fn test_fn_like_mk_idents() { SUBTREE $$ 1 1 IDENT standard 1 IDENT r#raw 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT standard SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT r#raw SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -113,6 +182,18 @@ fn test_fn_like_macro_clone_literals() { LITERAL 3.14f32 1 PUNCH , [alone] 1 LITERAL "hello bridge" 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 2_u32 SpanData { range: 6..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH - [alone] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 4i64 SpanData { range: 14..18, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH 
, [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -132,6 +213,13 @@ fn test_attr_macro() { SUBTREE () 1 1 LITERAL "#[attr_error(some arguments)] mod m {}" 1 PUNCH ; [alone] 1"##]], + expect![[r##" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "#[attr_error(some arguments)] mod m {}" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]], ); } @@ -147,6 +235,8 @@ fn list_test_macros() { fn_like_clone_tokens [FuncLike] fn_like_mk_literals [FuncLike] fn_like_mk_idents [FuncLike] + fn_like_span_join [FuncLike] + fn_like_span_ops [FuncLike] attr_noop [Attr] attr_panic [Attr] attr_error [Attr] diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index c12096d140c..9a1311d9550 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -2,47 +2,96 @@ use expect_test::Expect; use proc_macro_api::msg::TokenId; +use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; +use tt::TextRange; use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv}; -fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> { - // This is a bit strange. We need to parse a string into a token stream into - // order to create a tt::SubTree from it in fixtures. `into_subtree` is - // implemented by all the ABIs we have so we arbitrarily choose one ABI to - // write a `parse_string` function for and use that. The tests don't really - // care which ABI we're using as the `into_subtree` function isn't part of - // the ABI and shouldn't change between ABI versions. 
- crate::server::TokenStream::from_str(code, call_site).ok() +fn parse_string(call_site: TokenId, src: &str) -> crate::server::TokenStream<TokenId> { + crate::server::TokenStream::with_subtree( + mbe::parse_to_token_tree_static_span(call_site, src).unwrap(), + ) } -pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) { - assert_expand_impl(macro_name, ra_fixture, None, expect); +fn parse_string_spanned( + anchor: SpanAnchor, + call_site: SyntaxContextId, + src: &str, +) -> crate::server::TokenStream<Span> { + crate::server::TokenStream::with_subtree( + mbe::parse_to_token_tree(anchor, call_site, src).unwrap(), + ) } -pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) { - assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect); +pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect, expect_s: Expect) { + assert_expand_impl(macro_name, ra_fixture, None, expect, expect_s); } -fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) { +pub fn assert_expand_attr( + macro_name: &str, + ra_fixture: &str, + attr_args: &str, + expect: Expect, + expect_s: Expect, +) { + assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect, expect_s); +} + +fn assert_expand_impl( + macro_name: &str, + input: &str, + attr: Option<&str>, + expect: Expect, + expect_s: Expect, +) { + let path = proc_macro_test_dylib_path(); + let expander = dylib::Expander::new(&path).unwrap(); + let def_site = TokenId(0); let call_site = TokenId(1); let mixed_site = TokenId(2); - let path = proc_macro_test_dylib_path(); - let expander = dylib::Expander::new(&path).unwrap(); - let fixture = parse_string(input, call_site).unwrap(); - let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site)); + let input_ts = parse_string(call_site, input); + let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site)); let res = expander .expand( macro_name, - &fixture.into_subtree(call_site), - attr.as_ref(), + input_ts.into_subtree(call_site), + attr_ts, def_site, call_site, mixed_site, ) .unwrap(); expect.assert_eq(&format!("{res:?}")); + + let def_site = Span { + range: TextRange::new(0.into(), 150.into()), + anchor: SpanAnchor { + file_id: FileId::from_raw(41), + ast_id: ErasedFileAstId::from_raw(From::from(1)), + }, + ctx: SyntaxContextId::ROOT, + }; + let call_site = Span { + range: TextRange::new(0.into(), 100.into()), + anchor: SpanAnchor { + file_id: FileId::from_raw(42), + ast_id: ErasedFileAstId::from_raw(From::from(2)), + }, + ctx: SyntaxContextId::ROOT, + }; + let mixed_site = call_site; + + let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, input); + let attr = attr.map(|attr| { + parse_string_spanned(call_site.anchor, call_site.ctx, attr).into_subtree(call_site) + }); + + let res = expander + .expand(macro_name, fixture.into_subtree(call_site), attr, def_site, call_site, mixed_site) + .unwrap(); + expect_s.assert_eq(&format!("{res:?}")); } pub(crate) fn list() -> Vec<String> { diff --git a/crates/proc-macro-test/Cargo.toml b/crates/proc-macro-test/Cargo.toml deleted file mode 100644 index 12d7c07d3ed..00000000000 --- a/crates/proc-macro-test/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "proc-macro-test" -version = "0.0.0" -publish = false - -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true - -[lib] -doctest = false - -[build-dependencies] 
-cargo_metadata.workspace = true - -proc-macro-test-impl = { path = "imp", version = "0.0.0" } - -# local deps -toolchain.workspace = true diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml index 3e48de6456b..a36c9aca52d 100644 --- a/crates/project-model/Cargo.toml +++ b/crates/project-model/Cargo.toml @@ -14,8 +14,8 @@ doctest = false [dependencies] anyhow.workspace = true cargo_metadata.workspace = true -rustc-hash = "1.1.0" -semver = "1.0.14" +rustc-hash.workspace = true +semver.workspace = true serde_json.workspace = true serde.workspace = true tracing.workspace = true diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index ca3d6e0596c..d89c4598afc 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -330,6 +330,7 @@ impl CargoWorkspace { cargo_metadata::Edition::E2015 => Edition::Edition2015, cargo_metadata::Edition::E2018 => Edition::Edition2018, cargo_metadata::Edition::E2021 => Edition::Edition2021, + cargo_metadata::Edition::_E2024 => Edition::Edition2024, _ => { tracing::error!("Unsupported edition `{:?}`", edition); Edition::CURRENT diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index 931eba11576..cf3231498f3 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -213,6 +213,8 @@ enum EditionData { Edition2018, #[serde(rename = "2021")] Edition2021, + #[serde(rename = "2024")] + Edition2024, } impl From<EditionData> for Edition { @@ -221,6 +223,7 @@ impl From<EditionData> for Edition { EditionData::Edition2015 => Edition::Edition2015, EditionData::Edition2018 => Edition::Edition2018, EditionData::Edition2021 => Edition::Edition2021, + EditionData::Edition2024 => Edition::Edition2024, } } } diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 9333570354a..4057493fa3a 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -7,7 +7,7 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr, use anyhow::{format_err, Context}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult, + Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; @@ -619,7 +619,7 @@ impl ProjectWorkspace { sysroot.as_ref().ok(), extra_env, Err("rust-project.json projects have no target layout set".into()), - toolchain.as_ref().and_then(|it| ReleaseChannel::from_str(it.pre.as_str())), + toolchain.clone(), ) } ProjectWorkspace::Cargo { @@ -644,7 +644,7 @@ impl ProjectWorkspace { Ok(it) => Ok(Arc::from(it.as_str())), Err(it) => Err(Arc::from(it.as_str())), }, - toolchain.as_ref().and_then(|it| ReleaseChannel::from_str(it.pre.as_str())), + toolchain.as_ref(), ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { detached_files_to_crate_graph( @@ -733,7 +733,7 @@ fn project_json_to_crate_graph( sysroot: Option<&Sysroot>, extra_env: &FxHashMap<String, String>, target_layout: TargetLayoutLoadResult, - channel: Option<ReleaseChannel>, + toolchain: Option<Version>, ) -> (CrateGraph, ProcMacroPaths) { let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let (crate_graph, proc_macros) = &mut res; @@ -744,7 +744,7 
@@ fn project_json_to_crate_graph( rustc_cfg.clone(), target_layout.clone(), load, - channel, + toolchain.as_ref(), ) }); @@ -807,7 +807,7 @@ fn project_json_to_crate_graph( CrateOrigin::Local { repo: None, name: None } }, target_layout.clone(), - channel, + toolchain.clone(), ); if *is_proc_macro { if let Some(path) = proc_macro_dylib_path.clone() { @@ -853,7 +853,7 @@ fn cargo_to_crate_graph( forced_cfg: Option<CfgOptions>, build_scripts: &WorkspaceBuildScripts, target_layout: TargetLayoutLoadResult, - channel: Option<ReleaseChannel>, + toolchain: Option<&Version>, ) -> (CrateGraph, ProcMacroPaths) { let _p = profile::span("cargo_to_crate_graph"); let mut res = (CrateGraph::default(), ProcMacroPaths::default()); @@ -866,7 +866,7 @@ fn cargo_to_crate_graph( rustc_cfg.clone(), target_layout.clone(), load, - channel, + toolchain, ), None => (SysrootPublicDeps::default(), None), }; @@ -950,7 +950,7 @@ fn cargo_to_crate_graph( is_proc_macro, target_layout.clone(), false, - channel, + toolchain.cloned(), ); if kind == TargetKind::Lib { lib_tgt = Some((crate_id, name.clone())); @@ -1038,7 +1038,7 @@ fn cargo_to_crate_graph( rustc_build_scripts }, target_layout, - channel, + toolchain, ); } } @@ -1117,7 +1117,7 @@ fn handle_rustc_crates( override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, target_layout: TargetLayoutLoadResult, - channel: Option<ReleaseChannel>, + toolchain: Option<&Version>, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -1172,7 +1172,7 @@ fn handle_rustc_crates( rustc_workspace[tgt].is_proc_macro, target_layout.clone(), true, - channel, + toolchain.cloned(), ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -1248,7 +1248,7 @@ fn add_target_crate_root( is_proc_macro: bool, target_layout: TargetLayoutLoadResult, rustc_crate: bool, - channel: Option<ReleaseChannel>, + toolchain: Option<Version>, ) -> CrateId { let edition = pkg.edition; let potential_cfg_options = if pkg.features.is_empty() { @@ -1304,7 +1304,7 @@ fn add_target_crate_root( CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() } }, target_layout, - channel, + toolchain, ); if is_proc_macro { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { @@ -1346,7 +1346,7 @@ fn sysroot_to_crate_graph( rustc_cfg: Vec<CfgFlag>, target_layout: TargetLayoutLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option<FileId>, - channel: Option<ReleaseChannel>, + toolchain: Option<&Version>, ) -> (SysrootPublicDeps, Option<CrateId>) { let _p = profile::span("sysroot_to_crate_graph"); let cfg_options = create_cfg_options(rustc_cfg.clone()); @@ -1357,7 +1357,7 @@ fn sysroot_to_crate_graph( rustc_cfg, cfg_options, target_layout, - channel, + toolchain, crate_graph, sysroot, ), @@ -1380,7 +1380,7 @@ fn sysroot_to_crate_graph( false, CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)), target_layout.clone(), - channel, + toolchain.cloned(), ); Some((krate, crate_id)) }) @@ -1412,7 +1412,7 @@ fn handle_hack_cargo_workspace( rustc_cfg: Vec<CfgFlag>, cfg_options: CfgOptions, target_layout: Result<Arc<str>, Arc<str>>, - channel: Option<ReleaseChannel>, + toolchain: Option<&Version>, crate_graph: &mut CrateGraph, sysroot: &Sysroot, ) -> FxHashMap<SysrootCrate, CrateId> { @@ -1426,7 +1426,7 @@ fn handle_hack_cargo_workspace( Some(cfg_options), &WorkspaceBuildScripts::default(), target_layout, - channel, + toolchain, ); 
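+    // Fold the crate graph and proc-macro paths built for the hack cargo workspace
+    // into the sysroot's crate graph.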
crate_graph.extend(cg, &mut pm); for crate_name in ["std", "alloc", "core"] { diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index e98f016ca7d..d8d9e559e5c 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -62,7 +62,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -135,7 +135,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -208,7 +208,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -281,7 +281,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -350,6 +350,6 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index e98f016ca7d..d8d9e559e5c 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -62,7 +62,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -135,7 +135,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -208,7 +208,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -281,7 +281,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -350,6 +350,6 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index 7ecd53572e2..e0ba5ed498f 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -61,7 +61,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -133,7 +133,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -205,7 +205,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -277,7 +277,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -346,6 +346,6 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: 
None, + toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index 581a6afc148..e35f0fc7327 100644 --- a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -39,7 +39,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -72,7 +72,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -105,7 +105,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -138,7 +138,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -188,7 +188,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 5: CrateData { root_file_id: FileId( @@ -221,7 +221,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 6: CrateData { root_file_id: FileId( @@ -319,7 +319,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 7: CrateData { root_file_id: FileId( @@ -352,7 +352,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 8: CrateData { root_file_id: FileId( @@ -385,7 +385,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 9: CrateData { root_file_id: FileId( @@ -418,7 +418,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 10: CrateData { root_file_id: FileId( @@ -495,6 +495,6 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, } \ No newline at end of file diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 408c1fb6f39..a9bf4d8c336 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -24,12 +24,12 @@ crossbeam-channel = "0.5.5" dissimilar.workspace = true itertools.workspace = true scip = "0.3.1" -lsp-types = { version = "=0.94.0", features = ["proposed"] } +lsp-types = { version = "=0.95.0", features = ["proposed"] } parking_lot = "0.12.1" xflags = "0.3.0" oorandom = "11.1.3" rayon.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true serde_json = { workspace = true, features = ["preserve_order"] } serde.workspace = true num_cpus = "1.15.0" @@ -42,6 +42,7 @@ tracing-tree.workspace = true triomphe.workspace = true nohash-hasher.workspace = true always-assert = "0.1.2" +walkdir = "2.3.2" cfg.workspace = true flycheck.workspace = true @@ -75,6 +76,7 @@ expect-test = "1.4.0" xshell.workspace = true test-utils.workspace = true +test-fixture.workspace = true sourcegen.workspace = true mbe.workspace = true diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 29bd02f92da..6f40a4c88ed 100644 --- 
a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -87,6 +87,7 @@ fn main() -> anyhow::Result<()> { flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?, flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?, + flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?, } Ok(()) } @@ -171,7 +172,15 @@ fn run_server() -> anyhow::Result<()> { let (connection, io_threads) = Connection::stdio(); - let (initialize_id, initialize_params) = connection.initialize_start()?; + let (initialize_id, initialize_params) = match connection.initialize_start() { + Ok(it) => it, + Err(e) => { + if e.channel_is_disconnected() { + io_threads.join()?; + } + return Err(e.into()); + } + }; tracing::info!("InitializeParams: {}", initialize_params); let lsp_types::InitializeParams { root_uri, @@ -239,7 +248,12 @@ fn run_server() -> anyhow::Result<()> { let initialize_result = serde_json::to_value(initialize_result).unwrap(); - connection.initialize_finish(initialize_id, initialize_result)?; + if let Err(e) = connection.initialize_finish(initialize_id, initialize_result) { + if e.channel_is_disconnected() { + io_threads.join()?; + } + return Err(e.into()); + } if !config.has_linked_projects() && config.detached_files().is_empty() { config.rediscover_workspaces(); diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index 8c9261ab05e..94eab97e8fc 100644 --- a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs @@ -157,6 +157,8 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { "ssr": true, "workspaceSymbolScopeKindFiltering": true, })), + diagnostic_provider: None, + inline_completion_provider: None, } } diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 728bade0d0a..0190ca3cab8 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -209,7 +209,7 @@ mod tests { use super::*; use cfg::CfgExpr; - use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; + use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use syntax::{ ast::{self, AstNode}, SmolStr, @@ -219,7 +219,7 @@ mod tests { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY); CfgExpr::parse(&tt) }; diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index 64646b33ad4..de00c4192b4 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs @@ -10,6 +10,7 @@ mod ssr; mod lsif; mod scip; mod run_tests; +mod rustc_tests; mod progress_report; diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index fe5022f8606..5633c0c488a 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -98,6 +98,15 @@ xflags::xflags! { required path: PathBuf } + /// Run unit tests of the project using mir interpreter + cmd rustc-tests { + /// Directory with Cargo.toml. + required rustc_repo: PathBuf + + /// Only run tests with filter as substring + optional --filter path: String + } + cmd diagnostics { /// Directory with Cargo.toml. 
required path: PathBuf @@ -159,6 +168,7 @@ pub enum RustAnalyzerCmd { Highlight(Highlight), AnalysisStats(AnalysisStats), RunTests(RunTests), + RustcTests(RustcTests), Diagnostics(Diagnostics), Ssr(Ssr), Search(Search), @@ -212,6 +222,12 @@ pub struct RunTests { } #[derive(Debug)] +pub struct RustcTests { + pub rustc_repo: PathBuf, + pub filter: Option<String>, +} + +#[derive(Debug)] pub struct Diagnostics { pub path: PathBuf, diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs new file mode 100644 index 00000000000..b8f6138161e --- /dev/null +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -0,0 +1,236 @@ +//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter. + +use std::{ + cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf, +}; + +use hir::{Change, Crate}; +use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; +use profile::StopWatch; +use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; + +use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; +use triomphe::Arc; +use vfs::{AbsPathBuf, FileId}; +use walkdir::WalkDir; + +use crate::cli::{flags, report_metric, Result}; + +struct Tester { + host: AnalysisHost, + root_file: FileId, + pass_count: u64, + ignore_count: u64, + fail_count: u64, + stopwatch: StopWatch, +} + +fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode { + thread_local! { + static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new()); + } + LEAK_STORE.with_borrow_mut(|s| match s.get(code) { + Some(c) => *c, + None => { + let v = DiagnosticCode::RustcHardError(format!("E{code}").leak()); + s.insert(code.to_owned(), v); + v + } + }) +} + +fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> { + let text = read_to_string(p).unwrap(); + let mut result = HashMap::new(); + { + let mut text = &*text; + while let Some(p) = text.find("error[E") { + text = &text[p + 7..]; + let code = string_to_diagnostic_code_leaky(&text[..4]); + *result.entry(code).or_insert(0) += 1; + } + } + result +} + +impl Tester { + fn new() -> Result<Self> { + let tmp_file = AbsPathBuf::assert("/tmp/ra-rustc-test.rs".into()); + std::fs::write(&tmp_file, "")?; + let mut cargo_config = CargoConfig::default(); + cargo_config.sysroot = Some(RustLibSource::Discover); + let workspace = ProjectWorkspace::DetachedFiles { + files: vec![tmp_file.clone()], + sysroot: Ok( + Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env).unwrap() + ), + rustc_cfg: vec![], + }; + let load_cargo_config = LoadCargoConfig { + load_out_dirs_from_check: false, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, + prefill_caches: false, + }; + let (host, _vfs, _proc_macro) = + load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?; + let db = host.raw_database(); + let krates = Crate::all(db); + let root_crate = krates.iter().cloned().find(|krate| krate.origin(db).is_local()).unwrap(); + let root_file = root_crate.root_file(db); + Ok(Self { + host, + root_file, + pass_count: 0, + ignore_count: 0, + fail_count: 0, + stopwatch: StopWatch::start(), + }) + } + + fn test(&mut self, p: PathBuf) { + if p.parent().unwrap().file_name().unwrap() == "auxiliary" { + // These are not tests + return; + } + if IGNORED_TESTS.iter().any(|ig| p.file_name().is_some_and(|x| x == *ig)) { + println!("{p:?} IGNORE"); + self.ignore_count += 1; + return; + } + let 
stderr_path = p.with_extension("stderr");
+        let expected = if stderr_path.exists() {
+            detect_errors_from_rustc_stderr_file(stderr_path)
+        } else {
+            HashMap::new()
+        };
+        let text = read_to_string(&p).unwrap();
+        let mut change = Change::new();
+        // Ignore unstable tests, since they move too fast and we do not intend to support all of them.
+        let mut ignore_test = text.contains("#![feature");
+        // Ignore tests with extern crates, as this infra doesn't support them yet.
+        ignore_test |= text.contains("// aux-build:") || text.contains("// aux-crate:");
+        // Similarly, ignore tests with extern modules.
+        ignore_test |= text.contains("mod ");
+        // These should work, but they don't, and I don't know why, so ignore them.
+        ignore_test |= text.contains("extern crate proc_macro");
+        let should_have_no_error = text.contains("// check-pass")
+            || text.contains("// build-pass")
+            || text.contains("// run-pass");
+        change.change_file(self.root_file, Some(Arc::from(text)));
+        self.host.apply_change(change);
+        let diagnostic_config = DiagnosticsConfig::test_sample();
+        let diags = self
+            .host
+            .analysis()
+            .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file)
+            .unwrap();
+        let mut actual = HashMap::new();
+        for diag in diags {
+            if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) {
+                continue;
+            }
+            if !should_have_no_error && !SUPPORTED_DIAGNOSTICS.contains(&diag.code) {
+                continue;
+            }
+            *actual.entry(diag.code).or_insert(0) += 1;
+        }
+        // Ignore tests with diagnostics that we don't emit.
+        ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k));
+        if ignore_test {
+            println!("{p:?} IGNORE");
+            self.ignore_count += 1;
+        } else if actual == expected {
+            println!("{p:?} PASS");
+            self.pass_count += 1;
+        } else {
+            println!("{p:?} FAIL");
+            println!("actual (r-a) = {:?}", actual);
+            println!("expected (rustc) = {:?}", expected);
+            self.fail_count += 1;
+        }
+    }
+
+    fn report(&mut self) {
+        println!(
+            "Pass count = {}, Fail count = {}, Ignore count = {}",
+            self.pass_count, self.fail_count, self.ignore_count
+        );
+        println!("Testing time and memory = {}", self.stopwatch.elapsed());
+        report_metric("rustc failed tests", self.fail_count, "#");
+        report_metric("rustc testing time", self.stopwatch.elapsed().time.as_millis() as u64, "ms");
+    }
+}
+
+/// These tests break rust-analyzer (either by panicking or hanging), so we should ignore them.
+const IGNORED_TESTS: &[&str] = &[
+    "trait-with-missing-associated-type-restriction.rs", // #15646
+    "trait-with-missing-associated-type-restriction-fixable.rs", // #15646
+    "resolve-self-in-impl.rs",
+    "basic.rs", // ../rust/tests/ui/associated-type-bounds/return-type-notation/basic.rs
+    "issue-26056.rs",
+    "float-field.rs",
+    "invalid_operator_trait.rs",
+    "type-alias-impl-trait-assoc-dyn.rs",
+    "deeply-nested_closures.rs", // exponential time
+    "hang-on-deeply-nested-dyn.rs", // exponential time
+    "dyn-rpit-and-let.rs", // unexpected free variable with depth `^1.0` with outer binder ^0
+    "issue-16098.rs", // Huge recursion limit for macros?
+ "issue-83471.rs", // crates/hir-ty/src/builder.rs:78:9: assertion failed: self.remaining() > 0 +]; + +const SUPPORTED_DIAGNOSTICS: &[DiagnosticCode] = &[ + DiagnosticCode::RustcHardError("E0023"), + DiagnosticCode::RustcHardError("E0046"), + DiagnosticCode::RustcHardError("E0063"), + DiagnosticCode::RustcHardError("E0107"), + DiagnosticCode::RustcHardError("E0117"), + DiagnosticCode::RustcHardError("E0133"), + DiagnosticCode::RustcHardError("E0210"), + DiagnosticCode::RustcHardError("E0268"), + DiagnosticCode::RustcHardError("E0308"), + DiagnosticCode::RustcHardError("E0384"), + DiagnosticCode::RustcHardError("E0407"), + DiagnosticCode::RustcHardError("E0432"), + DiagnosticCode::RustcHardError("E0451"), + DiagnosticCode::RustcHardError("E0507"), + DiagnosticCode::RustcHardError("E0583"), + DiagnosticCode::RustcHardError("E0559"), + DiagnosticCode::RustcHardError("E0616"), + DiagnosticCode::RustcHardError("E0618"), + DiagnosticCode::RustcHardError("E0624"), + DiagnosticCode::RustcHardError("E0774"), + DiagnosticCode::RustcHardError("E0767"), + DiagnosticCode::RustcHardError("E0777"), +]; + +impl flags::RustcTests { + pub fn run(self) -> Result<()> { + let mut tester = Tester::new()?; + let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui")); + for i in walk_dir { + let i = i?; + let p = i.into_path(); + if let Some(f) = &self.filter { + if !p.as_os_str().to_string_lossy().contains(f) { + continue; + } + } + if p.extension().map_or(true, |x| x != "rs") { + continue; + } + if let Err(e) = std::panic::catch_unwind({ + let tester = AssertUnwindSafe(&mut tester); + let p = p.clone(); + move || { + let tester = tester; + tester.0.test(p); + } + }) { + println!("panic detected at test {:?}", p); + std::panic::resume_unwind(e); + } + } + tester.report(); + Ok(()) + } +} diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 30e11402cd8..95c8798d43c 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -278,8 +278,8 @@ fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> { mod test { use super::*; use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize}; - use ide_db::base_db::fixture::ChangeFixture; use scip::symbol::format_symbol; + use test_fixture::ChangeFixture; fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) { let mut host = AnalysisHost::default(); diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 90d1d6b0555..e057c7d2861 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -105,6 +105,9 @@ config_data! { /// ``` /// . cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null", + /// Rerun proc-macros building/build-scripts running when proc-macro + /// or build-script sources change and are saved. + cargo_buildScripts_rebuildOnSave: bool = "false", /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to /// avoid checking unnecessary things. cargo_buildScripts_useRustcWrapper: bool = "true", @@ -164,15 +167,15 @@ config_data! { /// Specifies the working directory for running checks. /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. // FIXME: Ideally we would support this in some way - /// This falls back to "root" if `#rust-analyzer.cargo.check.invocationStrategy#` is set to `once`. + /// This falls back to "root" if `#rust-analyzer.check.invocationStrategy#` is set to `once`. 
/// - "root": run checks in the project's root directory. - /// This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#` + /// This config only has an effect when `#rust-analyzer.check.overrideCommand#` /// is set. check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", /// Specifies the invocation strategy to use when running the check command. /// If `per_workspace` is set, the command will be executed for each workspace. /// If `once` is set, the command will be executed once. - /// This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#` + /// This config only has an effect when `#rust-analyzer.check.overrideCommand#` /// is set. check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", /// Whether to pass `--no-default-features` to Cargo. Defaults to @@ -191,8 +194,8 @@ config_data! { /// If there are multiple linked projects/workspaces, this command is invoked for /// each of them, with the working directory being the workspace root /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten - /// by changing `#rust-analyzer.cargo.check.invocationStrategy#` and - /// `#rust-analyzer.cargo.check.invocationLocation#`. + /// by changing `#rust-analyzer.check.invocationStrategy#` and + /// `#rust-analyzer.check.invocationLocation#`. /// /// An example command would be: /// @@ -1354,6 +1357,7 @@ impl Config { } } + // FIXME: This should be an AbsolutePathBuf fn target_dir_from_config(&self) -> Option<PathBuf> { self.data.rust_analyzerTargetDir.as_ref().and_then(|target_dir| match target_dir { TargetDirectory::UseSubdirectory(yes) if *yes => { @@ -1368,6 +1372,10 @@ impl Config { self.data.checkOnSave } + pub fn script_rebuild_on_save(&self) -> bool { + self.data.cargo_buildScripts_rebuildOnSave + } + pub fn runnables(&self) -> RunnablesConfig { RunnablesConfig { override_cargo: self.data.runnables_command.clone(), diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 0f31fe16054..f57a27305f0 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -7,7 +7,8 @@ use std::time::Instant; use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; -use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId}; +use hir::Change; +use ide::{Analysis, AnalysisHost, Cancellable, FileId}; use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase}; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs index f9070d27353..7e6219991b6 100644 --- a/crates/rust-analyzer/src/handlers/notification.rs +++ b/crates/rust-analyzer/src/handlers/notification.rs @@ -130,6 +130,13 @@ pub(crate) fn handle_did_save_text_document( state: &mut GlobalState, params: DidSaveTextDocumentParams, ) -> anyhow::Result<()> { + if state.config.script_rebuild_on_save() && state.proc_macro_changed { + // reset the flag + state.proc_macro_changed = false; + // rebuild the proc macros + state.fetch_build_data_queue.request_op("ScriptRebuildOnSave".to_owned(), ()); + } + if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { // Re-fetch workspaces if a workspace related file has changed if let Some(abs_path) = vfs_path.as_path() { diff --git a/crates/rust-analyzer/src/handlers/request.rs 
b/crates/rust-analyzer/src/handlers/request.rs index d8a590c8088..6b7bc944d52 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -453,7 +453,7 @@ pub(crate) fn handle_document_symbol( pub(crate) fn handle_workspace_symbol( snap: GlobalStateSnapshot, params: WorkspaceSymbolParams, -) -> anyhow::Result<Option<Vec<SymbolInformation>>> { +) -> anyhow::Result<Option<lsp_types::WorkspaceSymbolResponse>> { let _p = profile::span("handle_workspace_symbol"); let config = snap.config.workspace_symbol(); @@ -479,7 +479,7 @@ pub(crate) fn handle_workspace_symbol( res = exec_query(&snap, query)?; } - return Ok(Some(res)); + return Ok(Some(lsp_types::WorkspaceSymbolResponse::Nested(res))); fn decide_search_scope_and_kind( params: &WorkspaceSymbolParams, @@ -519,13 +519,12 @@ pub(crate) fn handle_workspace_symbol( fn exec_query( snap: &GlobalStateSnapshot, query: Query, - ) -> anyhow::Result<Vec<SymbolInformation>> { + ) -> anyhow::Result<Vec<lsp_types::WorkspaceSymbol>> { let mut res = Vec::new(); for nav in snap.analysis.symbol_search(query)? { let container_name = nav.container_name.as_ref().map(|v| v.to_string()); - #[allow(deprecated)] - let info = SymbolInformation { + let info = lsp_types::WorkspaceSymbol { name: match &nav.alias { Some(alias) => format!("{} (alias for {})", alias, nav.name), None => format!("{}", nav.name), @@ -534,10 +533,11 @@ pub(crate) fn handle_workspace_symbol( .kind .map(to_proto::symbol_kind) .unwrap_or(lsp_types::SymbolKind::VARIABLE), + // FIXME: Set deprecation tags: None, - location: to_proto::location_from_nav(snap, nav)?, container_name, - deprecated: None, + location: lsp_types::OneOf::Left(to_proto::location_from_nav(snap, nav)?), + data: None, }; res.push(info); } diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 41ff17f5e43..d94f7cefa60 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -10,7 +10,8 @@ //! in release mode in VS Code. There's however "rust-analyzer: Copy Run Command Line" //! which you can use to paste the command in terminal and add `--release` manually. 
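Stepping back to the `workspace/symbol` hunks above: the handler now answers with the nested `WorkspaceSymbol` shape instead of the deprecated flat `SymbolInformation` list. As a reading aid, a minimal sketch of the new response shape, assuming `lsp-types` 0.95; the file URL and symbol are made up for illustration:

```rust
use lsp_types::{
    Location, OneOf, Position, Range, SymbolKind, Url, WorkspaceSymbol, WorkspaceSymbolResponse,
};

// Build the nested response shape the handler now returns. `location` is a
// `OneOf`: `Left` carries a full `Location`, `Right` a URL-only fallback.
fn example_response() -> WorkspaceSymbolResponse {
    let location = Location::new(
        Url::parse("file:///demo/src/lib.rs").unwrap(), // hypothetical file
        Range::new(Position::new(3, 7), Position::new(3, 11)),
    );
    WorkspaceSymbolResponse::Nested(vec![WorkspaceSymbol {
        name: "entry (alias for main)".to_owned(),
        kind: SymbolKind::FUNCTION,
        tags: None, // per the FIXME above, deprecation tags are not set yet
        container_name: None,
        location: OneOf::Left(location),
        data: None,
    }])
}
```

Since `location_from_nav` now uses `focus_or_full_range()`, the range in a real response points at the symbol's name (its focus range) when one is available, rather than at the whole item.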
-use ide::{CallableSnippets, Change, CompletionConfig, FilePosition, TextSize}; +use hir::Change; +use ide::{CallableSnippets, CompletionConfig, FilePosition, TextSize}; use ide_db::{ imports::insert_use::{ImportGranularity, InsertUseConfig}, SnippetCap, diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs index ad56899163d..35c8fad3741 100644 --- a/crates/rust-analyzer/src/lsp/ext.rs +++ b/crates/rust-analyzer/src/lsp/ext.rs @@ -627,7 +627,7 @@ pub enum WorkspaceSymbol {} impl Request for WorkspaceSymbol { type Params = WorkspaceSymbolParams; - type Result = Option<Vec<lsp_types::SymbolInformation>>; + type Result = Option<lsp_types::WorkspaceSymbolResponse>; const METHOD: &'static str = "workspace/symbol"; } diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index dae560c5de1..7f3c3aa7a15 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -857,7 +857,7 @@ pub(crate) fn location_from_nav( ) -> Cancellable<lsp_types::Location> { let url = url(snap, nav.file_id); let line_index = snap.file_line_index(nav.file_id)?; - let range = range(&line_index, nav.full_range); + let range = range(&line_index, nav.focus_or_full_range()); let loc = lsp_types::Location::new(url, range); Ok(loc) } diff --git a/crates/rust-analyzer/src/lsp/utils.rs b/crates/rust-analyzer/src/lsp/utils.rs index b388b317599..a4417e4d4a1 100644 --- a/crates/rust-analyzer/src/lsp/utils.rs +++ b/crates/rust-analyzer/src/lsp/utils.rs @@ -171,30 +171,19 @@ pub(crate) fn apply_document_changes( file_contents: impl FnOnce() -> String, mut content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>, ) -> String { - // Skip to the last full document change, as it invalidates all previous changes anyways. - let mut start = content_changes - .iter() - .rev() - .position(|change| change.range.is_none()) - .map(|idx| content_changes.len() - idx - 1) - .unwrap_or(0); - - let mut text: String = match content_changes.get_mut(start) { - // peek at the first content change as an optimization - Some(lsp_types::TextDocumentContentChangeEvent { range: None, text, .. }) => { - let text = mem::take(text); - start += 1; - - // The only change is a full document update - if start == content_changes.len() { - return text; + // If at least one of the changes is a full document change, use the last + // of them as the starting point and ignore all previous changes. 
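+ // (`rposition` finds the last full-text change; everything before it is stale + // and is dropped, while the incremental edits recorded after it are replayed + // on top of the text taken from that change.)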
+ let (mut text, content_changes) = + match content_changes.iter().rposition(|change| change.range.is_none()) { + Some(idx) => { + let text = mem::take(&mut content_changes[idx].text); + (text, &content_changes[idx + 1..]) } - text - } - Some(_) => file_contents(), - // we received no content changes - None => return file_contents(), - }; + None => (file_contents(), &content_changes[..]), + }; + if content_changes.is_empty() { + return text; + } let mut line_index = LineIndex { // the index will be overwritten in the bottom loop's first iteration diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 7ab528f4975..cc7cb276ad0 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -16,10 +16,9 @@ use std::{iter, mem}; use flycheck::{FlycheckConfig, FlycheckHandle}; -use hir::db::DefDatabase; -use ide::Change; +use hir::{db::DefDatabase, Change, ProcMacros}; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros}, + base_db::{salsa::Durability, CrateGraph, ProcMacroPaths}, FxHashMap, }; use itertools::Itertools; diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index ec8e5c6dd96..78411e2d58d 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -832,7 +832,7 @@ fn main() { } #[test] -#[cfg(feature = "sysroot-abi")] +#[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn resolve_proc_macro() { use expect_test::expect; if skip_slow_tests() { diff --git a/crates/sourcegen/Cargo.toml b/crates/sourcegen/Cargo.toml index 0514af8e783..8e3e426ae13 100644 --- a/crates/sourcegen/Cargo.toml +++ b/crates/sourcegen/Cargo.toml @@ -2,6 +2,7 @@ name = "sourcegen" version = "0.0.0" description = "TBD" +publish = false authors.workspace = true edition.workspace = true diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml new file mode 100644 index 00000000000..8b078f9df13 --- /dev/null +++ b/crates/span/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "span" +version = "0.0.0" +rust-version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true + + +[dependencies] +la-arena.workspace = true +rust-analyzer-salsa.workspace = true + + +# local deps +vfs.workspace = true +syntax.workspace = true +stdx.workspace = true diff --git a/crates/base-db/src/span.rs b/crates/span/src/lib.rs index 3464f4cb6d1..7617acde64a 100644 --- a/crates/base-db/src/span.rs +++ b/crates/span/src/lib.rs @@ -1,10 +1,28 @@ //! File and span related types. -// FIXME: This should probably be moved into its own crate. 
+// FIXME: This should be moved into its own crate to get rid of the dependency inversion, base-db +// has no business depending on tt, tt should depend on a span crate only (which unfortunately will have +// to depend on salsa) use std::fmt; use salsa::InternId; -use tt::SyntaxContext; -use vfs::FileId; + +mod map; + +pub use crate::map::{RealSpanMap, SpanMap}; +pub use syntax::{TextRange, TextSize}; +pub use vfs::FileId; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct FilePosition { + pub file_id: FileId, + pub offset: TextSize, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct FileRange { + pub file_id: FileId, + pub range: TextRange, +} pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>; @@ -12,7 +30,33 @@ pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>; pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0)); -pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>; +/// FileId used as the span for syntax node fixups. Any Span containing this file id is to be +/// considered fake. +pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId = + // we pick the second to last for this in case we ever consider making this a NonMaxU32, this + // is required to be stable for the proc-macro-server + la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(!0 - 1)); + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub struct SpanData<Ctx> { + /// The text range of this span, relative to the anchor. + /// We need the anchor for incrementality, as storing absolute ranges will require + /// recomputation on every change in a file at all times. + pub range: TextRange, + pub anchor: SpanAnchor, + /// The syntax context of the span. + pub ctx: Ctx, +} +impl Span { + #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"] + pub const DUMMY: Self = SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, + ctx: SyntaxContextId::ROOT, + }; +} + +pub type Span = SpanData<SyntaxContextId>; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SyntaxContextId(InternId); @@ -33,7 +77,15 @@ impl fmt::Debug for SyntaxContextId { } } } -crate::impl_intern_key!(SyntaxContextId); + +impl salsa::InternKey for SyntaxContextId { + fn from_intern_id(v: salsa::InternId) -> Self { + SyntaxContextId(v) + } + fn as_intern_id(&self) -> salsa::InternId { + self.0 + } +} impl fmt::Display for SyntaxContextId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -41,9 +93,6 @@ impl fmt::Display for SyntaxContextId { } } -impl SyntaxContext for SyntaxContextId { - const DUMMY: Self = Self::ROOT; -} // inherent trait impls please tyvm impl SyntaxContextId { pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) }); @@ -55,6 +104,14 @@ impl SyntaxContextId { pub fn is_root(self) -> bool { self == Self::ROOT } + + pub fn into_u32(self) -> u32 { + self.0.as_u32() + } + + pub fn from_u32(u32: u32) -> Self { + Self(InternId::from(u32)) + } } #[derive(Copy, Clone, PartialEq, Eq, Hash)] @@ -69,10 +126,6 @@ impl fmt::Debug for SpanAnchor { } } -impl tt::SpanAnchor for SpanAnchor { - const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }; -} - /// Input to the analyzer is a set of files, where each file is identified by /// `FileId` and contains source code.
However, another source of source code in /// Rust are macros: each macro can be thought of as producing a "temporary @@ -90,6 +143,7 @@ impl tt::SpanAnchor for SpanAnchor { /// The two variants are encoded in a single u32 which are differentiated by the MSB. /// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a /// `MacroCallId`. +// FIXME: Give this a better fitting name #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct HirFileId(u32); @@ -120,7 +174,15 @@ pub struct MacroFileId { /// `println!("Hello, {}", world)`. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct MacroCallId(salsa::InternId); -crate::impl_intern_key!(MacroCallId); + +impl salsa::InternKey for MacroCallId { + fn from_intern_id(v: salsa::InternId) -> Self { + MacroCallId(v) + } + fn as_intern_id(&self) -> salsa::InternId { + self.0 + } +} impl MacroCallId { pub fn as_file(self) -> HirFileId { @@ -151,21 +213,26 @@ impl fmt::Debug for HirFileIdRepr { impl From<FileId> for HirFileId { fn from(id: FileId) -> Self { - assert!(id.index() < Self::MAX_FILE_ID); + _ = Self::ASSERT_MAX_FILE_ID_IS_SAME; + assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index()); HirFileId(id.index()) } } impl From<MacroFileId> for HirFileId { fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self { + _ = Self::ASSERT_MAX_FILE_ID_IS_SAME; let id = id.as_u32(); - assert!(id < Self::MAX_FILE_ID); + assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id); HirFileId(id | Self::MACRO_FILE_TAG_MASK) } } impl HirFileId { - const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; + const ASSERT_MAX_FILE_ID_IS_SAME: () = + [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize]; + + const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; const MACRO_FILE_TAG_MASK: u32 = 1 << 31; #[inline] diff --git a/crates/mbe/src/token_map.rs b/crates/span/src/map.rs index 7d15812f8cb..d69df91b63e 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/span/src/map.rs @@ -1,18 +1,23 @@ -//! Mapping between `TokenId`s and the token's position in macro definitions or inputs. +//! A map that maps a span to every position in a file. Usually maps a span to some range of positions. +//! Allows bidirectional lookup. use std::hash::Hash; use stdx::{always, itertools::Itertools}; use syntax::{TextRange, TextSize}; -use tt::Span; +use vfs::FileId; + +use crate::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; /// Maps absolute text ranges for the corresponding file to the relevant span data. #[derive(Debug, PartialEq, Eq, Clone, Hash)] -pub struct SpanMap<S: Span> { +pub struct SpanMap<S> { spans: Vec<(TextSize, S)>, + // FIXME: Should be + // spans: Vec<(TextSize, crate::SyntaxContextId)>, } -impl<S: Span> SpanMap<S> { +impl<S: Copy> SpanMap<S> { /// Creates a new empty [`SpanMap`]. pub fn empty() -> Self { Self { spans: Vec::new() } @@ -44,7 +49,10 @@ impl<S: Span> SpanMap<S> { /// Returns all [`TextRange`]s that correspond to the given span. /// /// Note this does a linear search through the entire backing vector. - pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ { + pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ + where + S: Eq, + { // FIXME: This should ignore the syntax context! 
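// (Each stored pair is `(end_offset, span)`, so an entry covers the file from // the previous entry's end offset up to its own; that is what lets this linear // scan rebuild a `TextRange` for every entry whose span matches.)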
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { if s != span { @@ -74,3 +82,50 @@ impl<S: Span> SpanMap<S> { self.spans.iter().copied() } } + +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct RealSpanMap { + file_id: FileId, + /// Invariant: Sorted vec over TextSize + // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? + pairs: Box<[(TextSize, ErasedFileAstId)]>, + end: TextSize, +} + +impl RealSpanMap { + /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id). + pub fn absolute(file_id: FileId) -> Self { + RealSpanMap { + file_id, + pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]), + end: TextSize::new(!0), + } + } + + pub fn from_file( + file_id: FileId, + pairs: Box<[(TextSize, ErasedFileAstId)]>, + end: TextSize, + ) -> Self { + Self { file_id, pairs, end } + } + + pub fn span_for_range(&self, range: TextRange) -> Span { + assert!( + range.end() <= self.end, + "range {range:?} goes beyond the end of the file {:?}", + self.end + ); + let start = range.start(); + let idx = self + .pairs + .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less)) + .unwrap_err(); + let (offset, ast_id) = self.pairs[idx - 1]; + Span { + range: range - offset, + anchor: SpanAnchor { file_id: self.file_id, ast_id }, + ctx: SyntaxContextId::ROOT, + } + } +} diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 7a7c0d267fe..1cb9a4aedc5 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -17,7 +17,7 @@ cov-mark = "2.0.0-pre.1" either.workspace = true itertools.workspace = true rowan = "0.15.15" -rustc-hash = "1.1.0" +rustc-hash.workspace = true once_cell = "1.17.0" indexmap.workspace = true smol_str.workspace = true diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 37d8212042d..916bbbad409 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -414,6 +414,7 @@ impl ast::UseTree { u.remove_recursive(); } } + u.remove_unnecessary_braces(); } } diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index f81dff8840c..a253827495f 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -11,7 +11,7 @@ use rowan::{GreenNodeData, GreenTokenData}; use crate::{ ast::{self, support, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, SyntaxNode}, - NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, + ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, }; impl ast::Lifetime { @@ -323,6 +323,10 @@ impl ast::UseTree { pub fn is_simple_path(&self) -> bool { self.use_tree_list().is_none() && self.star_token().is_none() } + + pub fn parent_use_tree_list(&self) -> Option<ast::UseTreeList> { + self.syntax().parent().and_then(ast::UseTreeList::cast) + } } impl ast::UseTreeList { @@ -340,6 +344,34 @@ impl ast::UseTreeList { .find_map(ast::Comment::cast) .is_some() } + + pub fn comma(&self) -> impl Iterator<Item = SyntaxToken> { + self.syntax() + .children_with_tokens() + .filter_map(|it| it.into_token().filter(|it| it.kind() == T![,])) + } + + /// Remove the unnecessary braces in the current `UseTreeList` + pub fn remove_unnecessary_braces(mut self) { + let remove_brace_in_use_tree_list = |u: &ast::UseTreeList| { + let use_tree_count = u.use_trees().count(); + if use_tree_count == 1 { + u.l_curly_token().map(ted::remove); + u.r_curly_token().map(ted::remove); + u.comma().for_each(ted::remove); + } + }; + + // take `use
crate::{{{{A}}}}` for example + // the below removes the innermost {}, got `use crate::{{{A}}}` + remove_brace_in_use_tree_list(&self); + + // the below removes other unnecessary {}, got `use crate::A` + while let Some(parent_use_tree_list) = self.parent_use_tree().parent_use_tree_list() { + remove_brace_in_use_tree_list(&parent_use_tree_list); + self = parent_use_tree_list; + } + } } impl ast::Impl { diff --git a/crates/test-fixture/Cargo.toml b/crates/test-fixture/Cargo.toml new file mode 100644 index 00000000000..ff921aa83d3 --- /dev/null +++ b/crates/test-fixture/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "test-fixture" +version = "0.0.0" +rust-version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true +publish = false + +[dependencies] +hir-expand.workspace = true +test-utils.workspace = true +tt.workspace = true +cfg.workspace = true +base-db.workspace = true +rustc-hash.workspace = true +span.workspace = true +stdx.workspace = true diff --git a/crates/base-db/src/fixture.rs b/crates/test-fixture/src/lib.rs index bfdd21555f0..1a042b2dea2 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/test-fixture/src/lib.rs @@ -1,27 +1,30 @@ //! A set of high-level utility fixture methods to use in tests. -use std::{mem, str::FromStr, sync}; +use std::{mem, ops::Not, str::FromStr, sync}; +use base_db::{ + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, + Edition, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, + VfsPath, +}; use cfg::CfgOptions; +use hir_expand::{ + change::Change, + db::ExpandDatabase, + proc_macro::{ + ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacros, + }, +}; use rustc_hash::FxHashMap; +use span::{FileId, FilePosition, FileRange, Span}; use test_utils::{ extract_range_or_offset, Fixture, FixtureWithProjectMeta, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER, }; -use triomphe::Arc; use tt::{Leaf, Subtree, TokenTree}; -use vfs::{file_set::FileSet, VfsPath}; - -use crate::{ - input::{CrateName, CrateOrigin, LangCrateOrigin}, - span::SpanData, - Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env, - FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId, -}; -pub const WORKSPACE: SourceRootId = SourceRootId(0); +pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0); -pub trait WithFixture: Default + SourceDatabaseExt + 'static { +pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { #[track_caller] fn with_single_file(ra_fixture: &str) -> (Self, FileId) { let fixture = ChangeFixture::parse(ra_fixture); @@ -80,6 +83,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static { let fixture = ChangeFixture::parse(ra_fixture); let mut db = Self::default(); fixture.change.apply(&mut db); + + let (file_id, range_or_offset) = fixture .file_position .expect("Could not find file position in fixture.
Did you forget to add an `$0`?"); @@ -95,7 +99,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static { } } -impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {} +impl<DB: ExpandDatabase + SourceDatabaseExt + Default + 'static> WithFixture for DB {} pub struct ChangeFixture { pub file_position: Option<(FileId, RangeOrOffset)>, @@ -116,13 +120,11 @@ impl ChangeFixture { ) -> ChangeFixture { let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = FixtureWithProjectMeta::parse(ra_fixture); - let toolchain = toolchain - .map(|it| { - ReleaseChannel::from_str(&it) - .unwrap_or_else(|| panic!("unknown release channel found: {it}")) - }) - .unwrap_or(ReleaseChannel::Stable); - let mut change = Change::new(); + let toolchain = Some({ + let channel = toolchain.as_deref().unwrap_or("stable"); + Version::parse(&format!("1.76.0-{channel}")).unwrap() + }); + let mut source_change = FileChange::new(); let mut files = Vec::new(); let mut crate_graph = CrateGraph::default(); @@ -187,9 +189,9 @@ impl ChangeFixture { origin, meta.target_data_layout .as_deref() - .map(Arc::from) + .map(From::from) .ok_or_else(|| "target_data_layout unset".into()), - Some(toolchain), + toolchain.clone(), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none(), "multiple crates with same name: {}", crate_name); @@ -206,7 +208,7 @@ impl ChangeFixture { default_target_data_layout = meta.target_data_layout; } - change.change_file(file_id, Some(Arc::from(text))); + source_change.change_file(file_id, Some(text.into())); let path = VfsPath::new_virtual_path(meta.path); file_set.insert(file_id, path); files.push(file_id); @@ -229,7 +231,7 @@ impl ChangeFixture { default_target_data_layout .map(|it| it.into()) .ok_or_else(|| "target_data_layout unset".into()), - Some(toolchain), + toolchain.clone(), ); } else { for (from, to, prelude) in crate_deps { @@ -261,7 +263,7 @@ impl ChangeFixture { fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string())); roots.push(SourceRoot::new_library(fs)); - change.change_file(core_file, Some(Arc::from(mini_core.source_code()))); + source_change.change_file(core_file, Some(mini_core.source_code().into())); let all_crates = crate_graph.crates_in_topological_order(); @@ -276,7 +278,7 @@ impl ChangeFixture { false, CrateOrigin::Lang(LangCrateOrigin::Core), target_layout.clone(), - Some(toolchain), + toolchain.clone(), ); for krate in all_crates { @@ -306,7 +308,7 @@ impl ChangeFixture { ); roots.push(SourceRoot::new_library(fs)); - change.change_file(proc_lib_file, Some(Arc::from(source))); + source_change.change_file(proc_lib_file, Some(source.into())); let all_crates = crate_graph.crates_in_topological_order(); @@ -321,7 +323,7 @@ impl ChangeFixture { true, CrateOrigin::Local { repo: None, name: None }, target_layout, - Some(toolchain), + toolchain, ); proc_macros.insert(proc_macros_crate, Ok(proc_macro)); @@ -344,11 +346,17 @@ impl ChangeFixture { SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)), }; roots.push(root); - change.set_roots(roots); - change.set_crate_graph(crate_graph); - change.set_proc_macros(proc_macros); - - ChangeFixture { file_position, files, change } + source_change.set_roots(roots); + source_change.set_crate_graph(crate_graph); + + ChangeFixture { + file_position, + files, + change: Change { + source_change, + proc_macros: proc_macros.is_empty().not().then(|| proc_macros), + }, + } } } @@ -364,7 +372,7 @@ pub fn identity(_attr: TokenStream, item: 
TokenStream) -> TokenStream { .into(), ProcMacro { name: "identity".into(), - kind: crate::ProcMacroKind::Attr, + kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityProcMacroExpander), }, ), @@ -378,7 +386,7 @@ pub fn derive_identity(item: TokenStream) -> TokenStream { .into(), ProcMacro { name: "DeriveIdentity".into(), - kind: crate::ProcMacroKind::CustomDerive, + kind: ProcMacroKind::CustomDerive, expander: sync::Arc::new(IdentityProcMacroExpander), }, ), @@ -392,7 +400,7 @@ pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream { .into(), ProcMacro { name: "input_replace".into(), - kind: crate::ProcMacroKind::Attr, + kind: ProcMacroKind::Attr, expander: sync::Arc::new(AttributeInputReplaceProcMacroExpander), }, ), @@ -406,7 +414,7 @@ pub fn mirror(input: TokenStream) -> TokenStream { .into(), ProcMacro { name: "mirror".into(), - kind: crate::ProcMacroKind::FuncLike, + kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(MirrorProcMacroExpander), }, ), @@ -420,7 +428,7 @@ pub fn shorten(input: TokenStream) -> TokenStream { .into(), ProcMacro { name: "shorten".into(), - kind: crate::ProcMacroKind::FuncLike, + kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(ShortenProcMacroExpander), }, ), @@ -539,13 +547,13 @@ struct IdentityProcMacroExpander; impl ProcMacroExpander for IdentityProcMacroExpander { fn expand( &self, - subtree: &Subtree<SpanData>, - _: Option<&Subtree<SpanData>>, + subtree: &Subtree<Span>, + _: Option<&Subtree<Span>>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result<Subtree<Span>, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -556,13 +564,13 @@ struct AttributeInputReplaceProcMacroExpander; impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander { fn expand( &self, - _: &Subtree<SpanData>, - attrs: Option<&Subtree<SpanData>>, + _: &Subtree<Span>, + attrs: Option<&Subtree<Span>>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result<Subtree<Span>, ProcMacroExpansionError> { attrs .cloned() .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into())) @@ -574,14 +582,14 @@ struct MirrorProcMacroExpander; impl ProcMacroExpander for MirrorProcMacroExpander { fn expand( &self, - input: &Subtree<SpanData>, - _: Option<&Subtree<SpanData>>, + input: &Subtree<Span>, + _: Option<&Subtree<Span>>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { - fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> { + _: Span, + _: Span, + _: Span, + ) -> Result<Subtree<Span>, ProcMacroExpansionError> { + fn traverse(input: &Subtree<Span>) -> Subtree<Span> { let mut token_trees = vec![]; for tt in input.token_trees.iter().rev() { let tt = match tt { @@ -604,16 +612,16 @@ struct ShortenProcMacroExpander; impl ProcMacroExpander for ShortenProcMacroExpander { fn expand( &self, - input: &Subtree<SpanData>, - _: Option<&Subtree<SpanData>>, + input: &Subtree<Span>, + _: Option<&Subtree<Span>>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result<Subtree<Span>, ProcMacroExpansionError> { return Ok(traverse(input)); - fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> { + fn traverse(input: &Subtree<Span>) -> Subtree<Span> { let 
token_trees = input .token_trees .iter() @@ -625,7 +633,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander { Subtree { delimiter: input.delimiter, token_trees } } - fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> { + fn modify_leaf(leaf: &Leaf<Span>) -> Leaf<Span> { let mut leaf = leaf.clone(); match &mut leaf { Leaf::Literal(it) => { diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index 438b599ffaa..8b3924dd86b 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -15,7 +15,7 @@ doctest = false # Avoid adding deps here, this crate is widely used in tests it should compile fast! dissimilar = "1.0.7" text-size.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true stdx.workspace = true profile.workspace = true diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index f766747d707..1f3136404c6 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -1381,6 +1381,7 @@ mod macros { // region:assert #[macro_export] #[rustc_builtin_macro] + #[allow_internal_unstable(core_panic, edition_panic, generic_assert_internals)] macro_rules! assert { ($($arg:tt)*) => { /* compiler built-in */ @@ -1389,6 +1390,7 @@ mod macros { // endregion:assert // region:fmt + #[allow_internal_unstable(fmt_internals, const_fmt_arguments_new)] #[macro_export] #[rustc_builtin_macro] macro_rules! const_format_args { @@ -1396,6 +1398,7 @@ mod macros { ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; } + #[allow_internal_unstable(fmt_internals)] #[macro_export] #[rustc_builtin_macro] macro_rules! format_args { @@ -1403,6 +1406,7 @@ mod macros { ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; } + #[allow_internal_unstable(fmt_internals)] #[macro_export] #[rustc_builtin_macro] macro_rules! format_args_nl { diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml index 57222449790..e2c0f7d9c79 100644 --- a/crates/tt/Cargo.toml +++ b/crates/tt/Cargo.toml @@ -16,3 +16,6 @@ smol_str.workspace = true text-size.workspace = true stdx.workspace = true + +# FIXME: Remove this dependency once the `Span` trait is gone (that is once Span::DUMMY has been removed) +span.workspace = true diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 481d575403a..b3b0eeda75a 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -11,46 +11,10 @@ use stdx::impl_from; pub use smol_str::SmolStr; pub use text_size::{TextRange, TextSize}; -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub struct SpanData<Anchor, Ctx> { - /// The text range of this span, relative to the anchor. - /// We need the anchor for incrementality, as storing absolute ranges will require - /// recomputation on every change in a file at all times. - pub range: TextRange, - pub anchor: Anchor, - /// The syntax context of the span. - pub ctx: Ctx, -} - -impl<Anchor: SpanAnchor, Ctx: SyntaxContext> Span for SpanData<Anchor, Ctx> { - #[allow(deprecated)] - const DUMMY: Self = SpanData { - range: TextRange::empty(TextSize::new(0)), - anchor: Anchor::DUMMY, - ctx: Ctx::DUMMY, - }; -} - -pub trait Span: std::fmt::Debug + Copy + Sized + Eq { - // FIXME: Should not exist. Dummy spans will always be wrong if they leak somewhere. Instead, - // the call site or def site spans should be used in relevant places, its just that we don't - // expose those everywhere in the yet. 
- const DUMMY: Self; -} +pub trait Span: std::fmt::Debug + Copy + Sized + Eq {} -// FIXME: Should not exist -pub trait SpanAnchor: - std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash -{ - #[deprecated(note = "this should not exist")] - const DUMMY: Self; -} - -// FIXME: Should not exist -pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq { - #[deprecated(note = "this should not exist")] - const DUMMY: Self; -} +impl<Ctx> Span for span::SpanData<Ctx> where span::SpanData<Ctx>: std::fmt::Debug + Copy + Sized + Eq +{} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TokenTree<S> { @@ -66,15 +30,7 @@ impl<S: Span> TokenTree<S> { }) } - pub fn subtree_or_wrap(self) -> Subtree<S> { - match self { - TokenTree::Leaf(_) => { - Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] } - } - TokenTree::Subtree(s) => s, - } - } - pub fn subtree_or_wrap2(self, span: DelimSpan<S>) -> Subtree<S> { + pub fn subtree_or_wrap(self, span: DelimSpan<S>) -> Subtree<S> { match self { TokenTree::Leaf(_) => Subtree { delimiter: Delimiter::invisible_delim_spanned(span), @@ -83,6 +39,13 @@ impl<S: Span> TokenTree<S> { TokenTree::Subtree(s) => s, } } + + pub fn first_span(&self) -> S { + match self { + TokenTree::Leaf(l) => *l.span(), + TokenTree::Subtree(s) => s.delimiter.open, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -134,11 +97,6 @@ pub struct DelimSpan<S> { pub close: S, } -impl<S: Span> DelimSpan<S> { - // FIXME should not exist - pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY }; -} - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct Delimiter<S> { pub open: S, @@ -147,15 +105,6 @@ pub struct Delimiter<S> { } impl<S: Span> Delimiter<S> { - // FIXME should not exist - pub const DUMMY_INVISIBLE: Self = - Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible }; - - // FIXME should not exist - pub const fn dummy_invisible() -> Self { - Self::DUMMY_INVISIBLE - } - pub const fn invisible_spanned(span: S) -> Self { Delimiter { open: span, close: span, kind: DelimiterKind::Invisible } } diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml index 11409f2eb81..af4cc034c3c 100644 --- a/crates/vfs/Cargo.toml +++ b/crates/vfs/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -rustc-hash = "1.1.0" +rustc-hash.workspace = true fst = "0.4.7" indexmap.workspace = true nohash-hasher.workspace = true diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index 8ffda5d78d1..ef5b10ee9db 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -61,13 +61,17 @@ pub use paths::{AbsPath, AbsPathBuf}; /// Most functions in rust-analyzer use this when they need to refer to a file. #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct FileId(u32); +// pub struct FileId(NonMaxU32); impl FileId { /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics! + // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages. 
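// (`BOGUS` below is the dummy file id the FIXME above refers to, and // `MAX_FILE_ID` leaves the topmost bit free because `HirFileId` packs a file // id into 31 bits and tags macro files with the MSB, per `span/src/lib.rs`.)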
pub const BOGUS: FileId = FileId(0xe4e4e); + pub const MAX_FILE_ID: u32 = 0x7fff_ffff; #[inline] - pub fn from_raw(raw: u32) -> FileId { + pub const fn from_raw(raw: u32) -> FileId { + assert!(raw <= Self::MAX_FILE_ID); FileId(raw) } diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index b66c9c943a1..3251dd75268 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ <!--- -lsp/ext.rs hash: 121482ee911854da +lsp/ext.rs hash: dff0b009e82ef06a If you need to change the above hash to make the test pass, please check if you need to adjust this doc as well and ping this issue: diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 8a2d0808443..c3f249e0ce2 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -71,6 +71,12 @@ cargo check --quiet --workspace --message-format=json --all-targets ``` . -- +[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `false`):: ++ +-- +Rebuild proc-macros and rerun build scripts when their +sources change and are saved. +-- [[rust-analyzer.cargo.buildScripts.useRustcWrapper]]rust-analyzer.cargo.buildScripts.useRustcWrapper (default: `true`):: + -- @@ -179,9 +185,9 @@ For example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables` -- Specifies the working directory for running checks. - "workspace": run checks for workspaces in the corresponding workspaces' root directories. - This falls back to "root" if `#rust-analyzer.cargo.check.invocationStrategy#` is set to `once`. + This falls back to "root" if `#rust-analyzer.check.invocationStrategy#` is set to `once`. - "root": run checks in the project's root directory. -This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#` +This config only has an effect when `#rust-analyzer.check.overrideCommand#` is set. -- [[rust-analyzer.check.invocationStrategy]]rust-analyzer.check.invocationStrategy (default: `"per_workspace"`):: @@ -190,7 +196,7 @@ is set. Specifies the invocation strategy to use when running the check command. If `per_workspace` is set, the command will be executed for each workspace. If `once` is set, the command will be executed once. -This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#` +This config only has an effect when `#rust-analyzer.check.overrideCommand#` is set. -- [[rust-analyzer.check.noDefaultFeatures]]rust-analyzer.check.noDefaultFeatures (default: `null`):: @@ -215,8 +221,8 @@ Cargo, you might also want to change If there are multiple linked projects/workspaces, this command is invoked for each of them, with the working directory being the workspace root (i.e., the folder containing the `Cargo.toml`). This can be overwritten -by changing `#rust-analyzer.cargo.check.invocationStrategy#` and -`#rust-analyzer.cargo.check.invocationLocation#`. +by changing `#rust-analyzer.check.invocationStrategy#` and +`#rust-analyzer.check.invocationLocation#`.
An example command would be: diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json index 1c94f13d745..8b9d5d4a746 100644 --- a/editors/code/package-lock.json +++ b/editors/code/package-lock.json @@ -21,7 +21,7 @@ "@types/vscode": "~1.78.1", "@typescript-eslint/eslint-plugin": "^6.0.0", "@typescript-eslint/parser": "^6.0.0", - "@vscode/test-electron": "^2.3.3", + "@vscode/test-electron": "^2.3.8", "@vscode/vsce": "^2.19.0", "esbuild": "^0.18.12", "eslint": "^8.44.0", @@ -763,15 +763,15 @@ } }, "node_modules/@vscode/test-electron": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.3.tgz", - "integrity": "sha512-hgXCkDP0ibboF1K6seqQYyHAzCURgTwHS/6QU7slhwznDLwsRwg9bhfw1CZdyUEw8vvCmlrKWnd7BlQnI0BC4w==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.8.tgz", + "integrity": "sha512-b4aZZsBKtMGdDljAsOPObnAi7+VWIaYl3ylCz1jTs+oV6BZ4TNHcVNC3xUn0azPeszBmwSBDQYfFESIaUQnrOg==", "dev": true, "dependencies": { "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", "jszip": "^3.10.1", - "semver": "^7.3.8" + "semver": "^7.5.2" }, "engines": { "node": ">=16" } }, diff --git a/editors/code/package.json b/editors/code/package.json index cfaf4213277..27ed8ac502b 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -57,7 +57,7 @@ "@types/vscode": "~1.78.1", "@typescript-eslint/eslint-plugin": "^6.0.0", "@typescript-eslint/parser": "^6.0.0", - "@vscode/test-electron": "^2.3.3", + "@vscode/test-electron": "^2.3.8", "@vscode/vsce": "^2.19.0", "esbuild": "^0.18.12", "eslint": "^8.44.0", @@ -586,6 +586,11 @@ "type": "string" } }, + "rust-analyzer.cargo.buildScripts.rebuildOnSave": { + "markdownDescription": "Rebuild proc-macros and rerun build scripts when their\nsources change and are saved.", + "default": false, + "type": "boolean" + }, "rust-analyzer.cargo.buildScripts.useRustcWrapper": { "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.", "default": true, @@ -731,7 +736,7 @@ "uniqueItems": true }, "rust-analyzer.check.invocationLocation": { "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.check.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.", "default": "workspace", "type": "string", "enum": [ @@ -744,7 +749,7 @@ ] }, "rust-analyzer.check.invocationStrategy": { "markdownDescription": "Specifies the invocation strategy
to use when running the check command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.", "default": "per_workspace", "type": "string", "enum": [ @@ -765,7 +770,7 @@ ] }, "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.check.invocationStrategy#` and\n`#rust-analyzer.cargo.check.invocationLocation#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). 
This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", @@ -1690,11 +1695,14 @@ "name": "rust-panic", "patterns": [ { - "regexp": "^thread '.*' panicked at '(.*)', (.*):(\\d*):(\\d*)$", - "message": 1, - "file": 2, - "line": 3, - "column": 4 + "regexp": "^thread '.*' panicked at (.*):(\\d*):(\\d*):$", + "file": 1, + "line": 2, + "column": 3 + }, + { + "regexp": "(.*)", + "message": 1 } ] } diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts index 06034e16480..d9c6b6ac456 100644 --- a/editors/code/src/debug.ts +++ b/editors/code/src/debug.ts @@ -135,8 +135,10 @@ async function getDebugConfiguration( let sourceFileMap = debugOptions.sourceFileMap; if (sourceFileMap === "auto") { // let's try to use the default toolchain - const commitHash = await getRustcId(wsFolder); - const sysroot = await getSysroot(wsFolder); + const [commitHash, sysroot] = await Promise.all([ + getRustcId(wsFolder), + getSysroot(wsFolder), + ]); const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust"); sourceFileMap = {}; sourceFileMap[`/rustc/${commitHash}/`] = rustlib; diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts index 778cbc5762a..fc3f1acce54 100644 --- a/editors/code/src/run.ts +++ b/editors/code/src/run.ts @@ -43,7 +43,7 @@ export async function selectRunnable( return; } - // clear the list before we hook up listeners to to avoid invoking them + // clear the list before we hook up listeners to avoid invoking them // if the user happens to accept the placeholder item quickPick.items = []; diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml index 2a70aedbe8e..116b376b0b0 100644 --- a/lib/lsp-server/Cargo.toml +++ b/lib/lsp-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lsp-server" -version = "0.7.5" +version = "0.7.6" description = "Generic LSP server scaffold." 
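# (0.7.6 pairs with the new `ProtocolError::channel_is_disconnected` API and # the disconnect-aware initialization/shutdown handling added below.)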
license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server" @@ -10,8 +10,8 @@ edition = "2021" log = "0.4.17" serde_json = "1.0.108" serde = { version = "1.0.192", features = ["derive"] } -crossbeam-channel = "0.5.6" +crossbeam-channel = "0.5.8" [dev-dependencies] -lsp-types = "=0.94" +lsp-types = "=0.95" ctrlc = "3.4.1" diff --git a/lib/lsp-server/examples/goto_def.rs b/lib/lsp-server/examples/goto_def.rs index 2f270afbbf1..71f66254069 100644 --- a/lib/lsp-server/examples/goto_def.rs +++ b/lib/lsp-server/examples/goto_def.rs @@ -64,7 +64,15 @@ fn main() -> Result<(), Box<dyn Error + Sync + Send>> { ..Default::default() }) .unwrap(); - let initialization_params = connection.initialize(server_capabilities)?; + let initialization_params = match connection.initialize(server_capabilities) { + Ok(it) => it, + Err(e) => { + if e.channel_is_disconnected() { + io_threads.join()?; + } + return Err(e.into()); + } + }; main_loop(connection, initialization_params)?; io_threads.join()?; diff --git a/lib/lsp-server/src/error.rs b/lib/lsp-server/src/error.rs index 755b3fd9596..ebdd153b5b3 100644 --- a/lib/lsp-server/src/error.rs +++ b/lib/lsp-server/src/error.rs @@ -3,7 +3,22 @@ use std::fmt; use crate::{Notification, Request}; #[derive(Debug, Clone, PartialEq)] -pub struct ProtocolError(pub(crate) String); +pub struct ProtocolError(String, bool); + +impl ProtocolError { + pub(crate) fn new(msg: impl Into<String>) -> Self { + ProtocolError(msg.into(), false) + } + + pub(crate) fn disconnected() -> ProtocolError { + ProtocolError("disconnected channel".into(), true) + } + + /// Whether this error occured due to a disconnected channel. + pub fn channel_is_disconnected(&self) -> bool { + self.1 + } +} impl std::error::Error for ProtocolError {} diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs index 2797a6b60de..6b732d47029 100644 --- a/lib/lsp-server/src/lib.rs +++ b/lib/lsp-server/src/lib.rs @@ -17,7 +17,7 @@ use std::{ net::{TcpListener, TcpStream, ToSocketAddrs}, }; -use crossbeam_channel::{Receiver, RecvTimeoutError, Sender}; +use crossbeam_channel::{Receiver, RecvError, RecvTimeoutError, Sender}; pub use crate::{ error::{ExtractError, ProtocolError}, @@ -158,11 +158,7 @@ impl Connection { Err(RecvTimeoutError::Timeout) => { continue; } - Err(e) => { - return Err(ProtocolError(format!( - "expected initialize request, got error: {e}" - ))) - } + Err(RecvTimeoutError::Disconnected) => return Err(ProtocolError::disconnected()), }; match msg { @@ -181,12 +177,14 @@ impl Connection { continue; } msg => { - return Err(ProtocolError(format!("expected initialize request, got {msg:?}"))); + return Err(ProtocolError::new(format!( + "expected initialize request, got {msg:?}" + ))); } }; } - return Err(ProtocolError(String::from( + return Err(ProtocolError::new(String::from( "Initialization has been aborted during initialization", ))); } @@ -201,12 +199,10 @@ impl Connection { self.sender.send(resp.into()).unwrap(); match &self.receiver.recv() { Ok(Message::Notification(n)) if n.is_initialized() => Ok(()), - Ok(msg) => { - Err(ProtocolError(format!(r#"expected initialized notification, got: {msg:?}"#))) - } - Err(e) => { - Err(ProtocolError(format!("expected initialized notification, got error: {e}",))) - } + Ok(msg) => Err(ProtocolError::new(format!( + r#"expected initialized notification, got: {msg:?}"# + ))), + Err(RecvError) => Err(ProtocolError::disconnected()), } } @@ -231,10 +227,8 @@ impl Connection { Err(RecvTimeoutError::Timeout) => 
{ continue; } - Err(e) => { - return Err(ProtocolError(format!( - "expected initialized notification, got error: {e}", - ))); + Err(RecvTimeoutError::Disconnected) => { + return Err(ProtocolError::disconnected()); } }; @@ -243,14 +237,14 @@ impl Connection { return Ok(()); } msg => { - return Err(ProtocolError(format!( + return Err(ProtocolError::new(format!( r#"expected initialized notification, got: {msg:?}"# ))); } } } - return Err(ProtocolError(String::from( + return Err(ProtocolError::new(String::from( "Initialization has been aborted during initialization", ))); } @@ -359,9 +353,18 @@ impl Connection { match &self.receiver.recv_timeout(std::time::Duration::from_secs(30)) { Ok(Message::Notification(n)) if n.is_exit() => (), Ok(msg) => { - return Err(ProtocolError(format!("unexpected message during shutdown: {msg:?}"))) + return Err(ProtocolError::new(format!( + "unexpected message during shutdown: {msg:?}" + ))) + } + Err(RecvTimeoutError::Timeout) => { + return Err(ProtocolError::new(format!("timed out waiting for exit notification"))) + } + Err(RecvTimeoutError::Disconnected) => { + return Err(ProtocolError::new(format!( + "channel disconnected waiting for exit notification" + ))) } - Err(e) => return Err(ProtocolError(format!("unexpected error during shutdown: {e}"))), } Ok(true) } @@ -426,7 +429,7 @@ mod tests { initialize_start_test(TestCase { test_messages: vec![notification_msg.clone()], - expected_resp: Err(ProtocolError(format!( + expected_resp: Err(ProtocolError::new(format!( "expected initialize request, got {:?}", notification_msg ))), diff --git a/lib/lsp-server/src/msg.rs b/lib/lsp-server/src/msg.rs index 730ad51f424..ba318dd1690 100644 --- a/lib/lsp-server/src/msg.rs +++ b/lib/lsp-server/src/msg.rs @@ -264,12 +264,12 @@ fn read_msg_text(inp: &mut dyn BufRead) -> io::Result<Option<String>> { let mut parts = buf.splitn(2, ": "); let header_name = parts.next().unwrap(); let header_value = - parts.next().ok_or_else(|| invalid_data!("malformed header: {:?}", buf))?; + parts.next().ok_or_else(|| invalid_data(format!("malformed header: {:?}", buf)))?; if header_name.eq_ignore_ascii_case("Content-Length") { size = Some(header_value.parse::<usize>().map_err(invalid_data)?); } } - let size: usize = size.ok_or_else(|| invalid_data!("no Content-Length"))?; + let size: usize = size.ok_or_else(|| invalid_data("no Content-Length".to_string()))?; let mut buf = buf.into_bytes(); buf.resize(size, 0); inp.read_exact(&mut buf)?; diff --git a/lib/lsp-server/src/stdio.rs b/lib/lsp-server/src/stdio.rs index e487b9b4622..cea199d0293 100644 --- a/lib/lsp-server/src/stdio.rs +++ b/lib/lsp-server/src/stdio.rs @@ -15,8 +15,7 @@ pub(crate) fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThread let writer = thread::spawn(move || { let stdout = stdout(); let mut stdout = stdout.lock(); - writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout))?; - Ok(()) + writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout)) }); let (reader_sender, reader_receiver) = bounded::<Message>(0); let reader = thread::spawn(move || { diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index e52cbfca3e6..092ab8c593c 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -110,6 +110,7 @@ pub struct PublishReleaseNotes { #[derive(Debug)] pub enum MeasurementType { Build, + RustcTests, AnalyzeSelf, AnalyzeRipgrep, AnalyzeWebRender, @@ -122,6 +123,7 @@ impl FromStr for MeasurementType { fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "build" => Ok(Self::Build), + 
"rustc_tests" => Ok(Self::RustcTests), "self" => Ok(Self::AnalyzeSelf), "ripgrep-13.0.0" => Ok(Self::AnalyzeRipgrep), "webrender-2022" => Ok(Self::AnalyzeWebRender), @@ -135,6 +137,7 @@ impl AsRef<str> for MeasurementType { fn as_ref(&self) -> &str { match self { Self::Build => "build", + Self::RustcTests => "rustc_tests", Self::AnalyzeSelf => "self", Self::AnalyzeRipgrep => "ripgrep-13.0.0", Self::AnalyzeWebRender => "webrender-2022", diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 59d41d8e4b8..845928432c4 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -36,6 +36,9 @@ impl flags::Metrics { MeasurementType::Build => { metrics.measure_build(sh)?; } + MeasurementType::RustcTests => { + metrics.measure_rustc_tests(sh)?; + } MeasurementType::AnalyzeSelf => { metrics.measure_analysis_stats_self(sh)?; } @@ -50,6 +53,7 @@ impl flags::Metrics { } None => { metrics.measure_build(sh)?; + metrics.measure_rustc_tests(sh)?; metrics.measure_analysis_stats_self(sh)?; metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeRipgrep.as_ref())?; metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeWebRender.as_ref())?; @@ -78,6 +82,19 @@ impl Metrics { self.report("build", time.as_millis() as u64, "ms".into()); Ok(()) } + + fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> { + eprintln!("\nMeasuring rustc tests"); + + cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?; + + let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?; + for (metric, value, unit) in parse_metrics(&output) { + self.report(metric, value, unit.into()); + } + Ok(()) + } + fn measure_analysis_stats_self(&mut self, sh: &Shell) -> anyhow::Result<()> { self.measure_analysis_stats_path(sh, "self", ".") } diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs index 67538414840..817863336dd 100644 --- a/xtask/src/release/changelog.rs +++ b/xtask/src/release/changelog.rs @@ -30,34 +30,52 @@ pub(crate) fn get_changelog( // we don't use an HTTPS client or JSON parser to keep the build times low let pr = pr_num.to_string(); - let pr_json = - cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}").read()?; + let cmd = &cmd!(sh, "curl --fail -s -H {accept} -H {authorization} {pr_url}/{pr}"); + let pr_json = match cmd.read() { + Ok(pr_json) => pr_json, + Err(e) => { + // most likely a rust-lang/rust PR + eprintln!("Cannot get info for #{pr}: {e}"); + continue; + } + }; + let pr_title = cmd!(sh, "jq .title").stdin(&pr_json).read()?; let pr_title = unescape(&pr_title[1..pr_title.len() - 1]); let pr_comment = cmd!(sh, "jq .body").stdin(pr_json).read()?; - let comments_json = - cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}/comments").read()?; - let pr_comments = cmd!(sh, "jq .[].body").stdin(comments_json).read()?; - - let l = iter::once(pr_comment.as_str()) - .chain(pr_comments.lines()) - .rev() - .find_map(|it| { - let it = unescape(&it[1..it.len() - 1]); - it.lines().find_map(parse_changelog_line) - }) - .into_iter() - .next() - .unwrap_or_else(|| parse_title_line(&pr_title)); - let s = match l.kind { + let cmd = + &cmd!(sh, "curl --fail -s -H {accept} -H {authorization} {pr_url}/{pr}/comments"); + let pr_info = match cmd.read() { + Ok(comments_json) => { + let pr_comments = cmd!(sh, "jq .[].body").stdin(comments_json).read()?; + + iter::once(pr_comment.as_str()) + .chain(pr_comments.lines()) + .rev() + .find_map(|it| { + let it = unescape(&it[1..it.len() - 1]); + 
diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs
index 67538414840..817863336dd 100644
--- a/xtask/src/release/changelog.rs
+++ b/xtask/src/release/changelog.rs
@@ -30,34 +30,52 @@ pub(crate) fn get_changelog(
                 // we don't use an HTTPS client or JSON parser to keep the build times low
                 let pr = pr_num.to_string();
-                let pr_json =
-                    cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}").read()?;
+                let cmd = &cmd!(sh, "curl --fail -s -H {accept} -H {authorization} {pr_url}/{pr}");
+                let pr_json = match cmd.read() {
+                    Ok(pr_json) => pr_json,
+                    Err(e) => {
+                        // most likely a rust-lang/rust PR
+                        eprintln!("Cannot get info for #{pr}: {e}");
+                        continue;
+                    }
+                };
+
                 let pr_title = cmd!(sh, "jq .title").stdin(&pr_json).read()?;
                 let pr_title = unescape(&pr_title[1..pr_title.len() - 1]);
                 let pr_comment = cmd!(sh, "jq .body").stdin(pr_json).read()?;
 
-                let comments_json =
-                    cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}/comments").read()?;
-                let pr_comments = cmd!(sh, "jq .[].body").stdin(comments_json).read()?;
-
-                let l = iter::once(pr_comment.as_str())
-                    .chain(pr_comments.lines())
-                    .rev()
-                    .find_map(|it| {
-                        let it = unescape(&it[1..it.len() - 1]);
-                        it.lines().find_map(parse_changelog_line)
-                    })
-                    .into_iter()
-                    .next()
-                    .unwrap_or_else(|| parse_title_line(&pr_title));
-                let s = match l.kind {
+                let cmd =
+                    &cmd!(sh, "curl --fail -s -H {accept} -H {authorization} {pr_url}/{pr}/comments");
+                let pr_info = match cmd.read() {
+                    Ok(comments_json) => {
+                        let pr_comments = cmd!(sh, "jq .[].body").stdin(comments_json).read()?;
+
+                        iter::once(pr_comment.as_str())
+                            .chain(pr_comments.lines())
+                            .rev()
+                            .find_map(|it| {
+                                let it = unescape(&it[1..it.len() - 1]);
+                                it.lines().find_map(parse_changelog_line)
+                            })
+                            .into_iter()
+                            .next()
+                    }
+                    Err(e) => {
+                        eprintln!("Cannot get comments for #{pr}: {e}");
+                        None
+                    }
+                };
+
+                let pr_info = pr_info.unwrap_or_else(|| parse_title_line(&pr_title));
+                let s = match pr_info.kind {
                     PrKind::Feature => &mut features,
                     PrKind::Fix => &mut fixes,
                     PrKind::Internal => &mut internal,
                     PrKind::Other => &mut others,
                     PrKind::Skip => continue,
                 };
-                writeln!(s, "* pr:{pr_num}[] {}", l.message.as_deref().unwrap_or(&pr_title)).unwrap();
+                writeln!(s, "* pr:{pr_num}[] {}", pr_info.message.as_deref().unwrap_or(&pr_title))
+                    .unwrap();
             }
         }
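With curl invoked with --fail, an HTTP error now surfaces as an Err from cmd.read() instead of an error page being piped into jq, which is what lets the loop skip PR numbers that belong to rust-lang/rust rather than rust-analyzer. For orientation, the loop also leans on two types defined outside the hunk; their shapes can be reconstructed from the call sites (pr_info.kind, pr_info.message.as_deref()), with everything beyond the visible variant names being an assumption:

    // Reconstructed from usage above; parse_changelog_line presumably
    // returns Option<PrInfo> and parse_title_line a PrInfo.
    enum PrKind {
        Feature,
        Fix,
        Internal,
        Other,
        Skip,
    }

    struct PrInfo {
        message: Option<String>,
        kind: PrKind,
    }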
