| author | bors <bors@rust-lang.org> | 2024-08-01 13:13:38 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2024-08-01 13:13:38 +0000 |
| commit | e60ebb2f2c1facba87e7971798f3cbdfd309cd23 | |
| tree | 33e8a29052698c8a839016e6794e46773e37357a | |
| parent | c0e32983f5b06a6f7d8cc776ccac71de6512ed6d | |
| parent | 9ec4844925a37aae5665491b43ca526d47501d48 | |
Auto merge of #128490 - lnicola:sync-from-ra, r=lnicola
Subtree update of `rust-analyzer`

r? `@ghost`
955 files changed, 15238 insertions, 7886 deletions
diff --git a/src/tools/rust-analyzer/.github/workflows/ci.yaml b/src/tools/rust-analyzer/.github/workflows/ci.yaml index 87a1729d2b4..6d3e488bb08 100644 --- a/src/tools/rust-analyzer/.github/workflows/ci.yaml +++ b/src/tools/rust-analyzer/.github/workflows/ci.yaml @@ -15,7 +15,7 @@ env: CARGO_NET_RETRY: 10 CI: 1 RUST_BACKTRACE: short - RUSTFLAGS: "-D warnings -W unreachable-pub -W bare-trait-objects" + RUSTFLAGS: "-D warnings -D elided_lifetimes_in_paths -D explicit_outlives_requirements -D unsafe_op_in_unsafe_fn -D unused_extern_crates -D unused_lifetimes -D unreachable_pub" RUSTUP_MAX_RETRIES: 10 jobs: diff --git a/src/tools/rust-analyzer/.typos.toml b/src/tools/rust-analyzer/.typos.toml index c2e8b265218..e7e764ce035 100644 --- a/src/tools/rust-analyzer/.typos.toml +++ b/src/tools/rust-analyzer/.typos.toml @@ -14,6 +14,8 @@ extend-ignore-re = [ "\\w*\\.{3,4}\\w*", '"flate2"', "raison d'être", + "inout", + "optin" ] [default.extend-words] diff --git a/src/tools/rust-analyzer/.vscode/launch.json b/src/tools/rust-analyzer/.vscode/launch.json index c353737a35a..e83c03796a4 100644 --- a/src/tools/rust-analyzer/.vscode/launch.json +++ b/src/tools/rust-analyzer/.vscode/launch.json @@ -18,7 +18,8 @@ "args": [ // "--user-data-dir=${workspaceFolder}/target/code", "--disable-extensions", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" @@ -36,7 +37,8 @@ "runtimeExecutable": "${execPath}", "args": [ "--disable-extensions", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" @@ -57,7 +59,8 @@ "runtimeExecutable": "${execPath}", "args": [ "--disable-extensions", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" @@ -79,7 +82,8 @@ "runtimeExecutable": "${execPath}", "args": [ "--disable-extension", "rust-lang.rust-analyzer", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index e9ebe26f42c..b98a1195d8b 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] @@ -28,9 +28,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.83" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "arbitrary" @@ -52,16 +52,16 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" 
-version = "0.3.71" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.32.2", + "object 0.35.0", "rustc-demangle", ] @@ -70,6 +70,7 @@ name = "base-db" version = "0.0.0" dependencies = [ "cfg", + "intern", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "lz4_flex", "rustc-hash", @@ -103,9 +104,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "camino" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c" +checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" dependencies = [ "serde", ] @@ -135,9 +136,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" +checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" [[package]] name = "cfg" @@ -146,6 +147,7 @@ dependencies = [ "arbitrary", "derive_arbitrary", "expect-test", + "intern", "mbe", "oorandom", "rustc-hash", @@ -230,18 +232,18 @@ checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a" [[package]] name = "crc32fast" -version = "1.4.0" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" -version = "0.5.12" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ "crossbeam-utils", ] @@ -267,9 +269,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "ctrlc" @@ -364,9 +366,9 @@ checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" [[package]] name = "either" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" +checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" [[package]] name = "ena" @@ -429,6 +431,7 @@ dependencies = [ "crossbeam-channel", "paths", "process-wrap", + "project-model", "rustc-hash", "serde", "serde_json", @@ -474,9 +477,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "hashbrown" @@ -835,6 +838,7 @@ dependencies = [ 
"dashmap", "hashbrown", "rustc-hash", + "sptr", "triomphe", ] @@ -913,9 +917,9 @@ dependencies = [ [[package]] name = "libmimalloc-sys" -version = "0.1.37" +version = "0.1.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81eb4061c0582dedea1cbc7aff2240300dd6982e0239d1c99e65c1dbf4a30ba7" +checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6" dependencies = [ "cc", "libc", @@ -968,6 +972,7 @@ dependencies = [ "crossbeam-channel", "hir-expand", "ide-db", + "intern", "itertools", "paths", "proc-macro-api", @@ -1044,6 +1049,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "cov-mark", + "intern", "parser", "rustc-hash", "smallvec", @@ -1081,18 +1087,18 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.41" +version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f41a2280ded0da56c8cf898babb86e8f10651a34adcfff190ae9a1159c6908d" +checksum = "e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176" dependencies = [ "libmimalloc-sys", ] [[package]] name = "miniz_oxide" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" dependencies = [ "adler", ] @@ -1157,9 +1163,9 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.49.0" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c073d3c1930d0751774acf49e66653acecb416c3a54c6ec095a9b11caddb5a68" +checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14" dependencies = [ "windows-sys 0.48.0", ] @@ -1182,18 +1188,18 @@ dependencies = [ [[package]] name = "object" -version = "0.32.2" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d" dependencies = [ "memchr", ] [[package]] name = "object" -version = "0.33.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d" +checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" dependencies = [ "memchr", ] @@ -1218,9 +1224,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "parking_lot" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -1323,6 +1329,7 @@ version = "0.0.0" dependencies = [ "base-db", "indexmap", + "intern", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", "rustc-hash", @@ -1341,6 +1348,7 @@ version = "0.0.0" dependencies = [ "base-db", "expect-test", + "intern", "libloading", "mbe", "memmap2", @@ -1372,9 +1380,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.82" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" +checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" dependencies 
= [ "unicode-ident", ] @@ -1411,6 +1419,7 @@ dependencies = [ "cargo_metadata", "cfg", "expect-test", + "intern", "itertools", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", @@ -1651,6 +1660,7 @@ dependencies = [ "ide", "ide-db", "ide-ssr", + "intern", "itertools", "load-cargo", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1791,18 +1801,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", @@ -1834,9 +1844,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" dependencies = [ "serde", ] @@ -1858,9 +1868,9 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smol_str" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6845563ada680337a52d43bb0b29f396f2d911616f6573012645b9e3d048a49" +checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead" dependencies = [ "serde", ] @@ -1886,6 +1896,12 @@ dependencies = [ ] [[package]] +name = "sptr" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" + +[[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1907,9 +1923,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.63" +version = "2.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf5be731623ca1a1fb7d8be6f261a3be6d3e2337b8a1f97be944d020c8fcb704" +checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" dependencies = [ "proc-macro2", "quote", @@ -1958,6 +1974,7 @@ dependencies = [ "base-db", "cfg", "hir-expand", + "intern", "rustc-hash", "span", "stdx", @@ -1993,18 +2010,18 @@ checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "thiserror" -version = "1.0.60" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.60" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", @@ -2088,9 +2105,9 @@ checksum = 
"1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" +checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" dependencies = [ "serde", "serde_spanned", @@ -2100,18 +2117,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.12" +version = "0.22.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3328d4f68a705b2a4498da1d580585d39a6510f98318a2cec3018a7ec61ddef" +checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" dependencies = [ "indexmap", "serde", @@ -2185,9 +2202,9 @@ dependencies = [ [[package]] name = "tracing-tree" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65139ecd2c3f6484c3b99bc01c77afe21e95473630747c7aca525e78b0666675" +checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe" dependencies = [ "nu-ansi-term", "tracing-core", @@ -2197,9 +2214,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" +checksum = "1b2cb4fbb9995eeb36ac86fadf24031ccd58f99d6b4b2d7b911db70bddb80d90" dependencies = [ "serde", "stable_deref_trait", @@ -2210,7 +2227,8 @@ name = "tt" version = "0.0.0" dependencies = [ "arrayvec", - "smol_str", + "intern", + "ra-ap-rustc_lexer", "stdx", "text-size", ] @@ -2538,9 +2556,9 @@ checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" -version = "0.6.8" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c52e9c97a68071b23e836c9380edae937f17b9c4667bd021973efc689f618d" +checksum = "56c52728401e1dc672a56e81e593e912aa54c78f40246869f78359a2bf24d29d" dependencies = [ "memchr", ] diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index d4c3b7a3bfb..c2f601a91bc 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -50,7 +50,7 @@ debug = 2 [workspace.dependencies] # local crates base-db = { path = "./crates/base-db", version = "0.0.0" } -cfg = { path = "./crates/cfg", version = "0.0.0" } +cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] } flycheck = { path = "./crates/flycheck", version = "0.0.0" } hir = { path = "./crates/hir", version = "0.0.0" } hir-def = { path = "./crates/hir-def", version = "0.0.0" } @@ -163,14 +163,14 @@ xshell = "0.2.5" dashmap = { version = "=5.5.3", features = ["raw-api"] } [workspace.lints.rust] -bare_trait_objects = "warn" +# remember to update RUSTFLAGS in ci.yml if you add something here + elided_lifetimes_in_paths = "warn" -ellipsis_inclusive_range_patterns = "warn" explicit_outlives_requirements = "warn" +unsafe_op_in_unsafe_fn = "warn" unused_extern_crates = "warn" unused_lifetimes = "warn" unreachable_pub = "warn" -semicolon_in_expressions_from_macros = 
"warn" [workspace.lints.clippy] # FIXME Remove the tidy test once the lint table is stable diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml index 4ab99fc33c4..1b1ee034cac 100644 --- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml @@ -27,6 +27,7 @@ stdx.workspace = true syntax.workspace = true vfs.workspace = true span.workspace = true +intern.workspace = true [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 1d172ab9e40..460581f4a6c 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -9,16 +9,14 @@ use std::{fmt, mem, ops}; use cfg::CfgOptions; +use intern::Symbol; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::{FxHashMap, FxHashSet}; -use span::Edition; -use syntax::SmolStr; +use span::{Edition, EditionedFileId}; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; -// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, -// then the crate for the proc-macro hasn't been build yet as the build data is missing. -pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>; +pub type ProcMacroPaths = FxHashMap<CrateId, Result<(String, AbsPathBuf), String>>; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct SourceRootId(pub u32); @@ -99,8 +97,8 @@ impl fmt::Debug for CrateGraph { pub type CrateId = Idx<CrateData>; -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct CrateName(SmolStr); +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CrateName(Symbol); impl CrateName { /// Creates a crate name, checking for dashes in the string provided. @@ -110,16 +108,16 @@ impl CrateName { if name.contains('-') { Err(name) } else { - Ok(Self(SmolStr::new(name))) + Ok(Self(Symbol::intern(name))) } } /// Creates a crate name, unconditionally replacing the dashes with underscores. pub fn normalize_dashes(name: &str) -> CrateName { - Self(SmolStr::new(name.replace('-', "_"))) + Self(Symbol::intern(&name.replace('-', "_"))) } - pub fn as_smol_str(&self) -> &SmolStr { + pub fn symbol(&self) -> &Symbol { &self.0 } } @@ -133,7 +131,7 @@ impl fmt::Display for CrateName { impl ops::Deref for CrateName { type Target = str; fn deref(&self) -> &str { - &self.0 + self.0.as_str() } } @@ -141,11 +139,11 @@ impl ops::Deref for CrateName { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum CrateOrigin { /// Crates that are from the rustc workspace. - Rustc { name: String }, + Rustc { name: Symbol }, /// Crates that are workspace members. - Local { repo: Option<String>, name: Option<String> }, + Local { repo: Option<String>, name: Option<Symbol> }, /// Crates that are non member libraries. - Library { repo: Option<String>, name: String }, + Library { repo: Option<String>, name: Symbol }, /// Crates that are provided by the language, like std, core, proc-macro, ... Lang(LangCrateOrigin), } @@ -201,16 +199,16 @@ impl fmt::Display for LangCrateOrigin { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateDisplayName { // The name we use to display various paths (with `_`). crate_name: CrateName, // The name as specified in Cargo.toml (with `-`). 
- canonical_name: String, + canonical_name: Symbol, } impl CrateDisplayName { - pub fn canonical_name(&self) -> &str { + pub fn canonical_name(&self) -> &Symbol { &self.canonical_name } pub fn crate_name(&self) -> &CrateName { @@ -220,7 +218,7 @@ impl CrateDisplayName { impl From<CrateName> for CrateDisplayName { fn from(crate_name: CrateName) -> CrateDisplayName { - let canonical_name = crate_name.to_string(); + let canonical_name = crate_name.0.clone(); CrateDisplayName { crate_name, canonical_name } } } @@ -239,9 +237,9 @@ impl ops::Deref for CrateDisplayName { } impl CrateDisplayName { - pub fn from_canonical_name(canonical_name: String) -> CrateDisplayName { - let crate_name = CrateName::normalize_dashes(&canonical_name); - CrateDisplayName { crate_name, canonical_name } + pub fn from_canonical_name(canonical_name: &str) -> CrateDisplayName { + let crate_name = CrateName::normalize_dashes(canonical_name); + CrateDisplayName { crate_name, canonical_name: Symbol::intern(canonical_name) } } } @@ -662,6 +660,10 @@ impl CrateData { fn add_dep(&mut self, dep: Dependency) { self.dependencies.push(dep) } + + pub fn root_file_id(&self) -> EditionedFileId { + EditionedFileId::new(self.root_file_id, self.edition) + } } impl Extend<(String, String)> for Env { diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index 96fbbc317d4..f319f98537b 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -6,8 +6,10 @@ mod input; use std::panic; use salsa::Durability; +use span::EditionedFileId; use syntax::{ast, Parse, SourceFile, SyntaxError}; use triomphe::Arc; +use vfs::FileId; pub use crate::{ change::FileChange, @@ -18,8 +20,7 @@ pub use crate::{ }, }; pub use salsa::{self, Cancelled}; -pub use span::{FilePosition, FileRange}; -pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath}; +pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath}; pub use semver::{BuildMetadata, Prerelease, Version, VersionReq}; @@ -41,9 +42,9 @@ pub trait Upcast<T: ?Sized> { fn upcast(&self) -> &T; } -pub const DEFAULT_FILE_TEXT_LRU_CAP: usize = 16; -pub const DEFAULT_PARSE_LRU_CAP: usize = 128; -pub const DEFAULT_BORROWCK_LRU_CAP: usize = 2024; +pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16; +pub const DEFAULT_PARSE_LRU_CAP: u16 = 128; +pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024; pub trait FileLoader { /// Text of the file. @@ -58,10 +59,11 @@ pub trait FileLoader { #[salsa::query_group(SourceDatabaseStorage)] pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// Parses the file into the syntax tree. - fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>; + #[salsa::lru] + fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>; /// Returns the set of errors obtained from parsing the file including validation errors. - fn parse_errors(&self, file_id: FileId) -> Option<Arc<[SyntaxError]>>; + fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>; /// The crate graph. 
#[salsa::input] @@ -82,14 +84,14 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) } -fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { +fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> { let _p = tracing::info_span!("parse", ?file_id).entered(); + let (file_id, edition) = file_id.unpack(); let text = db.file_text(file_id); - // FIXME: Edition based parsing - SourceFile::parse(&text, span::Edition::CURRENT) + SourceFile::parse(&text, edition) } -fn parse_errors(db: &dyn SourceDatabase, file_id: FileId) -> Option<Arc<[SyntaxError]>> { +fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> { let errors = db.parse(file_id).errors(); match &*errors { [] => None, @@ -104,6 +106,7 @@ pub trait SourceDatabaseExt: SourceDatabase { #[salsa::input] fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; + #[salsa::lru] fn file_text(&self, file_id: FileId) -> Arc<str>; /// Path to a file, relative to the root of its source root. diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index 9b3a5026ac8..faf93f62c6a 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -15,7 +15,8 @@ doctest = false rustc-hash.workspace = true # locals deps -tt.workspace = true +tt = { workspace = true, optional = true } +intern.workspace = true [dev-dependencies] expect-test = "1.4.1" diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs index b7dbb7b5fdd..35c0c89c70c 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs @@ -2,20 +2,20 @@ //! //! See: <https://doc.rust-lang.org/reference/conditional-compilation.html#conditional-compilation> -use std::{fmt, slice::Iter as SliceIter}; +use std::fmt; -use tt::SmolStr; +use intern::Symbol; /// A simple configuration value passed in from the outside. -#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum CfgAtom { /// eg. `#[cfg(test)]` - Flag(SmolStr), + Flag(Symbol), /// eg. `#[cfg(target_os = "linux")]` /// /// Note that a key can have multiple values that are all considered "active" at the same time. /// For example, `#[cfg(target_feature = "sse")]` and `#[cfg(target_feature = "sse2")]`. 
- KeyValue { key: SmolStr, value: SmolStr }, + KeyValue { key: Symbol, value: Symbol }, } impl fmt::Display for CfgAtom { @@ -32,8 +32,8 @@ impl fmt::Display for CfgAtom { pub enum CfgExpr { Invalid, Atom(CfgAtom), - All(Vec<CfgExpr>), - Any(Vec<CfgExpr>), + All(Box<[CfgExpr]>), + Any(Box<[CfgExpr]>), Not(Box<CfgExpr>), } @@ -44,6 +44,7 @@ impl From<CfgAtom> for CfgExpr { } impl CfgExpr { + #[cfg(feature = "tt")] pub fn parse<S>(tt: &tt::Subtree<S>) -> CfgExpr { next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid) } @@ -63,10 +64,14 @@ impl CfgExpr { } } } -fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> { + +#[cfg(feature = "tt")] +fn next_cfg_expr<S>(it: &mut std::slice::Iter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> { + use intern::sym; + let name = match it.next() { None => return None, - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(), Some(_) => return Some(CfgExpr::Invalid), }; @@ -77,10 +82,7 @@ fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => { it.next(); it.next(); - // FIXME: escape? raw string? - let value = - SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"')); - CfgAtom::KeyValue { key: name, value }.into() + CfgAtom::KeyValue { key: name, value: literal.symbol.clone() }.into() } _ => return Some(CfgExpr::Invalid), } @@ -88,11 +90,13 @@ fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> Some(tt::TokenTree::Subtree(subtree)) => { it.next(); let mut sub_it = subtree.token_trees.iter(); - let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)).collect(); - match name.as_str() { - "all" => CfgExpr::All(subs), - "any" => CfgExpr::Any(subs), - "not" => CfgExpr::Not(Box::new(subs.pop().unwrap_or(CfgExpr::Invalid))), + let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)); + match name { + s if s == sym::all => CfgExpr::All(subs.collect()), + s if s == sym::any => CfgExpr::Any(subs.collect()), + s if s == sym::not => { + CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid))) + } _ => CfgExpr::Invalid, } } @@ -112,11 +116,11 @@ fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> impl arbitrary::Arbitrary<'_> for CfgAtom { fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> { if u.arbitrary()? 
{ - Ok(CfgAtom::Flag(String::arbitrary(u)?.into())) + Ok(CfgAtom::Flag(Symbol::intern(<_>::arbitrary(u)?))) } else { Ok(CfgAtom::KeyValue { - key: String::arbitrary(u)?.into(), - value: String::arbitrary(u)?.into(), + key: Symbol::intern(<_>::arbitrary(u)?), + value: Symbol::intern(<_>::arbitrary(u)?), }) } } diff --git a/src/tools/rust-analyzer/crates/cfg/src/dnf.rs b/src/tools/rust-analyzer/crates/cfg/src/dnf.rs index fd80e1ebe68..f3ebca04650 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/dnf.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/dnf.rs @@ -27,7 +27,7 @@ struct Literal { } impl DnfExpr { - pub fn new(expr: CfgExpr) -> Self { + pub fn new(expr: &CfgExpr) -> Self { let builder = Builder { expr: DnfExpr { conjunctions: Vec::new() } }; builder.lower(expr) @@ -66,9 +66,9 @@ impl DnfExpr { } } - res.enabled.sort_unstable(); + res.enabled.sort_unstable_by(compare); res.enabled.dedup(); - res.disabled.sort_unstable(); + res.disabled.sort_unstable_by(compare); res.disabled.dedup(); Some(res) } @@ -114,14 +114,25 @@ impl DnfExpr { }; // Undo the FxHashMap randomization for consistent output. - diff.enable.sort_unstable(); - diff.disable.sort_unstable(); + diff.enable.sort_unstable_by(compare); + diff.disable.sort_unstable_by(compare); Some(diff) }) } } +fn compare(a: &CfgAtom, b: &CfgAtom) -> std::cmp::Ordering { + match (a, b) { + (CfgAtom::Flag(a), CfgAtom::Flag(b)) => a.as_str().cmp(b.as_str()), + (CfgAtom::Flag(_), CfgAtom::KeyValue { .. }) => std::cmp::Ordering::Less, + (CfgAtom::KeyValue { .. }, CfgAtom::Flag(_)) => std::cmp::Ordering::Greater, + (CfgAtom::KeyValue { key, value }, CfgAtom::KeyValue { key: key2, value: value2 }) => { + key.as_str().cmp(key2.as_str()).then(value.as_str().cmp(value2.as_str())) + } + } +} + impl fmt::Display for DnfExpr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.conjunctions.len() != 1 { @@ -143,9 +154,9 @@ impl fmt::Display for DnfExpr { } impl Conjunction { - fn new(parts: Vec<CfgExpr>) -> Self { + fn new(parts: Box<[CfgExpr]>) -> Self { let mut literals = Vec::new(); - for part in parts { + for part in parts.into_vec() { match part { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => { literals.push(Literal::new(part)); @@ -221,27 +232,28 @@ struct Builder { } impl Builder { - fn lower(mut self, expr: CfgExpr) -> DnfExpr { + fn lower(mut self, expr: &CfgExpr) -> DnfExpr { let expr = make_nnf(expr); let expr = make_dnf(expr); match expr { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => { - self.expr.conjunctions.push(Conjunction::new(vec![expr])); + self.expr.conjunctions.push(Conjunction::new(Box::new([expr]))); } CfgExpr::All(conj) => { self.expr.conjunctions.push(Conjunction::new(conj)); } - CfgExpr::Any(mut disj) => { + CfgExpr::Any(disj) => { + let mut disj = disj.into_vec(); disj.reverse(); while let Some(conj) = disj.pop() { match conj { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::All(_) | CfgExpr::Not(_) => { - self.expr.conjunctions.push(Conjunction::new(vec![conj])); + self.expr.conjunctions.push(Conjunction::new(Box::new([conj]))); } CfgExpr::Any(inner_disj) => { // Flatten. 
- disj.extend(inner_disj.into_iter().rev()); + disj.extend(inner_disj.into_vec().into_iter().rev()); } } } @@ -255,11 +267,11 @@ impl Builder { fn make_dnf(expr: CfgExpr) -> CfgExpr { match expr { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => expr, - CfgExpr::Any(e) => flatten(CfgExpr::Any(e.into_iter().map(make_dnf).collect())), + CfgExpr::Any(e) => flatten(CfgExpr::Any(e.into_vec().into_iter().map(make_dnf).collect())), CfgExpr::All(e) => { - let e = e.into_iter().map(make_dnf).collect::<Vec<_>>(); + let e = e.into_vec().into_iter().map(make_dnf).collect::<Vec<_>>(); - flatten(CfgExpr::Any(distribute_conj(&e))) + flatten(CfgExpr::Any(distribute_conj(&e).into_boxed_slice())) } } } @@ -270,7 +282,7 @@ fn distribute_conj(conj: &[CfgExpr]) -> Vec<CfgExpr> { match rest { [head, tail @ ..] => match head { CfgExpr::Any(disj) => { - for part in disj { + for part in disj.iter() { with.push(part.clone()); go(out, with, tail); with.pop(); @@ -284,7 +296,7 @@ fn distribute_conj(conj: &[CfgExpr]) -> Vec<CfgExpr> { }, _ => { // Turn accumulated parts into a new conjunction. - out.push(CfgExpr::All(with.clone())); + out.push(CfgExpr::All(with.clone().into_boxed_slice())); } } } @@ -297,25 +309,27 @@ fn distribute_conj(conj: &[CfgExpr]) -> Vec<CfgExpr> { out } -fn make_nnf(expr: CfgExpr) -> CfgExpr { +fn make_nnf(expr: &CfgExpr) -> CfgExpr { match expr { - CfgExpr::Invalid | CfgExpr::Atom(_) => expr, - CfgExpr::Any(expr) => CfgExpr::Any(expr.into_iter().map(make_nnf).collect()), - CfgExpr::All(expr) => CfgExpr::All(expr.into_iter().map(make_nnf).collect()), - CfgExpr::Not(operand) => match *operand { - CfgExpr::Invalid | CfgExpr::Atom(_) => CfgExpr::Not(operand.clone()), // Original negated expr - CfgExpr::Not(expr) => { - // Remove double negation. - make_nnf(*expr) - } - // Convert negated conjunction/disjunction using DeMorgan's Law. - CfgExpr::Any(inner) => CfgExpr::All( - inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(), - ), - CfgExpr::All(inner) => CfgExpr::Any( - inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(), - ), - }, + CfgExpr::Invalid | CfgExpr::Atom(_) => expr.clone(), + CfgExpr::Any(expr) => CfgExpr::Any(expr.iter().map(make_nnf).collect()), + CfgExpr::All(expr) => CfgExpr::All(expr.iter().map(make_nnf).collect()), + CfgExpr::Not(operand) => make_nnf_neg(operand), + } +} + +fn make_nnf_neg(operand: &CfgExpr) -> CfgExpr { + match operand { + // Original negated expr + CfgExpr::Invalid => CfgExpr::Not(Box::new(CfgExpr::Invalid)), // Original negated expr + // Original negated expr + CfgExpr::Atom(atom) => CfgExpr::Not(Box::new(CfgExpr::Atom(atom.clone()))), + // Remove double negation. + CfgExpr::Not(expr) => make_nnf(expr), + // Convert negated conjunction/disjunction using DeMorgan's Law. + CfgExpr::Any(inner) => CfgExpr::All(inner.iter().map(make_nnf_neg).collect()), + // Convert negated conjunction/disjunction using DeMorgan's Law. 
+ CfgExpr::All(inner) => CfgExpr::Any(inner.iter().map(make_nnf_neg).collect()), } } @@ -324,20 +338,22 @@ fn flatten(expr: CfgExpr) -> CfgExpr { match expr { CfgExpr::All(inner) => CfgExpr::All( inner - .into_iter() + .iter() .flat_map(|e| match e { - CfgExpr::All(inner) => inner, - _ => vec![e], + CfgExpr::All(inner) => inner.as_ref(), + _ => std::slice::from_ref(e), }) + .cloned() .collect(), ), CfgExpr::Any(inner) => CfgExpr::Any( inner - .into_iter() + .iter() .flat_map(|e| match e { - CfgExpr::Any(inner) => inner, - _ => vec![e], + CfgExpr::Any(inner) => inner.as_ref(), + _ => std::slice::from_ref(e), }) + .cloned() .collect(), ), _ => expr, diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index 8b30286a0a8..6d46dfb9994 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -8,7 +8,8 @@ mod tests; use std::fmt; use rustc_hash::FxHashSet; -use tt::SmolStr; + +use intern::Symbol; pub use cfg_expr::{CfgAtom, CfgExpr}; pub use dnf::DnfExpr; @@ -48,11 +49,11 @@ impl CfgOptions { cfg.fold(&|atom| self.enabled.contains(atom)) } - pub fn insert_atom(&mut self, key: SmolStr) { + pub fn insert_atom(&mut self, key: Symbol) { self.enabled.insert(CfgAtom::Flag(key)); } - pub fn insert_key_value(&mut self, key: SmolStr, value: SmolStr) { + pub fn insert_key_value(&mut self, key: Symbol, value: Symbol) { self.enabled.insert(CfgAtom::KeyValue { key, value }); } @@ -66,19 +67,16 @@ impl CfgOptions { } } - pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> { + pub fn get_cfg_keys(&self) -> impl Iterator<Item = &Symbol> { self.enabled.iter().map(|it| match it { CfgAtom::Flag(key) => key, CfgAtom::KeyValue { key, .. } => key, }) } - pub fn get_cfg_values<'a>( - &'a self, - cfg_key: &'a str, - ) -> impl Iterator<Item = &'a SmolStr> + 'a { + pub fn get_cfg_values<'a>(&'a self, cfg_key: &'a str) -> impl Iterator<Item = &'a Symbol> + 'a { self.enabled.iter().filter_map(move |it| match it { - CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value), + CfgAtom::KeyValue { key, value } if cfg_key == key.as_str() => Some(value), _ => None, }) } diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs index dddaf2cce18..597023a792b 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs @@ -1,5 +1,6 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; +use intern::Symbol; use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; use syntax::{ast, AstNode, Edition}; @@ -28,7 +29,7 @@ fn check_dnf(input: &str, expect: Expect) { DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); - let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); + let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); expect.assert_eq(&actual); } @@ -42,7 +43,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); - let dnf = DnfExpr::new(cfg); + let dnf = DnfExpr::new(&cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); expect.assert_eq(&why_inactive); } @@ -58,40 +59,51 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); - let dnf = DnfExpr::new(cfg); + let dnf = DnfExpr::new(&cfg); let hints = 
dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>(); assert_eq!(hints, expected_hints); } #[test] fn test_cfg_expr_parser() { - assert_parse_result("#![cfg(foo)]", CfgAtom::Flag("foo".into()).into()); - assert_parse_result("#![cfg(foo,)]", CfgAtom::Flag("foo".into()).into()); + assert_parse_result("#![cfg(foo)]", CfgAtom::Flag(Symbol::intern("foo")).into()); + assert_parse_result("#![cfg(foo,)]", CfgAtom::Flag(Symbol::intern("foo")).into()); assert_parse_result( "#![cfg(not(foo))]", - CfgExpr::Not(Box::new(CfgAtom::Flag("foo".into()).into())), + CfgExpr::Not(Box::new(CfgAtom::Flag(Symbol::intern("foo")).into())), ); assert_parse_result("#![cfg(foo(bar))]", CfgExpr::Invalid); // Only take the first - assert_parse_result(r#"#![cfg(foo, bar = "baz")]"#, CfgAtom::Flag("foo".into()).into()); + assert_parse_result( + r#"#![cfg(foo, bar = "baz")]"#, + CfgAtom::Flag(Symbol::intern("foo")).into(), + ); assert_parse_result( r#"#![cfg(all(foo, bar = "baz"))]"#, - CfgExpr::All(vec![ - CfgAtom::Flag("foo".into()).into(), - CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(), - ]), + CfgExpr::All( + vec![ + CfgAtom::Flag(Symbol::intern("foo")).into(), + CfgAtom::KeyValue { key: Symbol::intern("bar"), value: Symbol::intern("baz") } + .into(), + ] + .into_boxed_slice(), + ), ); assert_parse_result( r#"#![cfg(any(not(), all(), , bar = "baz",))]"#, - CfgExpr::Any(vec![ - CfgExpr::Not(Box::new(CfgExpr::Invalid)), - CfgExpr::All(vec![]), - CfgExpr::Invalid, - CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(), - ]), + CfgExpr::Any( + vec![ + CfgExpr::Not(Box::new(CfgExpr::Invalid)), + CfgExpr::All(Box::new([])), + CfgExpr::Invalid, + CfgAtom::KeyValue { key: Symbol::intern("bar"), value: Symbol::intern("baz") } + .into(), + ] + .into_boxed_slice(), + ), ); } @@ -167,7 +179,7 @@ fn hints() { check_enable_hints("#![cfg(all(a, b))]", &opts, &["enable a and b"]); - opts.insert_atom("test".into()); + opts.insert_atom(Symbol::intern("test")); check_enable_hints("#![cfg(test)]", &opts, &[]); check_enable_hints("#![cfg(not(test))]", &opts, &["disable test"]); @@ -180,7 +192,7 @@ fn hints_impossible() { check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]); - opts.insert_atom("test".into()); + opts.insert_atom(Symbol::intern("test")); check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]); } @@ -188,8 +200,8 @@ fn hints_impossible() { #[test] fn why_inactive() { let mut opts = CfgOptions::default(); - opts.insert_atom("test".into()); - opts.insert_atom("test2".into()); + opts.insert_atom(Symbol::intern("test")); + opts.insert_atom(Symbol::intern("test2")); check_why_inactive("#![cfg(a)]", &opts, expect![["a is disabled"]]); check_why_inactive("#![cfg(not(test))]", &opts, expect![["test is enabled"]]); @@ -231,6 +243,6 @@ fn proptest() { let mut u = Unstructured::new(&buf); let cfg = CfgExpr::arbitrary(&mut u).unwrap(); - DnfExpr::new(cfg); + DnfExpr::new(&cfg); } } diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml index d81a5fe3400..bb3a94c8da6 100644 --- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml +++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml @@ -24,6 +24,7 @@ process-wrap.workspace = true paths.workspace = true stdx.workspace = true toolchain.workspace = true +project-model.workspace = true [lints] workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs 
b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs index 778def5d2be..3dd2a91d8fd 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs +++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs @@ -20,10 +20,11 @@ pub use cargo_metadata::diagnostic::{ use toolchain::Tool; mod command; +pub mod project_json; mod test_runner; use command::{CommandHandle, ParseFromLine}; -pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState}; +pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState, TestTarget}; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub enum InvocationStrategy { @@ -240,7 +241,7 @@ enum FlycheckStatus { Finished, } -const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; +pub const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; impl FlycheckActor { fn new( diff --git a/src/tools/rust-analyzer/crates/flycheck/src/project_json.rs b/src/tools/rust-analyzer/crates/flycheck/src/project_json.rs new file mode 100644 index 00000000000..b6e4495bc6d --- /dev/null +++ b/src/tools/rust-analyzer/crates/flycheck/src/project_json.rs @@ -0,0 +1,152 @@ +//! A `cargo-metadata`-equivalent for non-Cargo build systems. +use std::{io, process::Command}; + +use crossbeam_channel::Sender; +use paths::{AbsPathBuf, Utf8Path, Utf8PathBuf}; +use project_model::ProjectJsonData; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::command::{CommandHandle, ParseFromLine}; + +pub const ARG_PLACEHOLDER: &str = "{arg}"; + +/// A command wrapper for getting a `rust-project.json`. +/// +/// This is analogous to `cargo-metadata`, but for non-Cargo build systems. +pub struct Discover { + command: Vec<String>, + sender: Sender<DiscoverProjectMessage>, +} + +#[derive(PartialEq, Clone, Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum DiscoverArgument { + Path(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), + Buildfile(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), +} + +fn serialize_abs_pathbuf<S>(path: &AbsPathBuf, se: S) -> Result<S::Ok, S::Error> +where + S: serde::Serializer, +{ + let path: &Utf8Path = path.as_ref(); + se.serialize_str(path.as_str()) +} + +impl Discover { + /// Create a new [Discover]. + pub fn new(sender: Sender<DiscoverProjectMessage>, command: Vec<String>) -> Self { + Self { sender, command } + } + + /// Spawn the command inside [Discover] and report progress, if any. + pub fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result<DiscoverHandle> { + let command = &self.command[0]; + let args = &self.command[1..]; + + let args: Vec<String> = args + .iter() + .map(|arg| { + if arg == ARG_PLACEHOLDER { + serde_json::to_string(&discover_arg).expect("Unable to serialize args") + } else { + arg.to_owned() + } + }) + .collect(); + + let mut cmd = Command::new(command); + cmd.args(args); + + Ok(DiscoverHandle { _handle: CommandHandle::spawn(cmd, self.sender.clone())? }) + } +} + +/// A handle to a spawned [Discover]. +#[derive(Debug)] +pub struct DiscoverHandle { + _handle: CommandHandle<DiscoverProjectMessage>, +} + +/// An enum containing either progress messages, an error, +/// or the materialized `rust-project`. 
+#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "kind")] +#[serde(rename_all = "snake_case")] +enum DiscoverProjectData { + Finished { buildfile: Utf8PathBuf, project: ProjectJsonData }, + Error { error: String, source: Option<String> }, + Progress { message: String }, +} + +#[derive(Debug, PartialEq, Clone)] +pub enum DiscoverProjectMessage { + Finished { project: ProjectJsonData, buildfile: AbsPathBuf }, + Error { error: String, source: Option<String> }, + Progress { message: String }, +} + +impl DiscoverProjectMessage { + fn new(data: DiscoverProjectData) -> Self { + match data { + DiscoverProjectData::Finished { project, buildfile, .. } => { + let buildfile = buildfile.try_into().expect("Unable to make path absolute"); + DiscoverProjectMessage::Finished { project, buildfile } + } + DiscoverProjectData::Error { error, source } => { + DiscoverProjectMessage::Error { error, source } + } + DiscoverProjectData::Progress { message } => { + DiscoverProjectMessage::Progress { message } + } + } + } +} + +impl ParseFromLine for DiscoverProjectMessage { + fn from_line(line: &str, _error: &mut String) -> Option<Self> { + // can the line even be deserialized as JSON? + let Ok(data) = serde_json::from_str::<Value>(line) else { + let err = DiscoverProjectData::Error { error: line.to_owned(), source: None }; + return Some(DiscoverProjectMessage::new(err)); + }; + + let Ok(data) = serde_json::from_value::<DiscoverProjectData>(data) else { + return None; + }; + + let msg = DiscoverProjectMessage::new(data); + Some(msg) + } + + fn from_eof() -> Option<Self> { + None + } +} + +#[test] +fn test_deserialization() { + let message = r#" + {"kind": "progress", "message":"querying build system","input":{"files":["src/main.rs"]}} + "#; + let message: DiscoverProjectData = + serde_json::from_str(message).expect("Unable to deserialize message"); + assert!(matches!(message, DiscoverProjectData::Progress { .. })); + + let message = r#" + {"kind": "error", "error":"failed to deserialize command output","source":"command"} + "#; + + let message: DiscoverProjectData = + serde_json::from_str(message).expect("Unable to deserialize message"); + assert!(matches!(message, DiscoverProjectData::Error { .. })); + + let message = r#" + {"kind": "finished", "project": {"sysroot": "foo", "crates": [], "runnables": []}, "buildfile":"rust-analyzer/BUILD"} + "#; + + let message: DiscoverProjectData = + serde_json::from_str(message).expect("Unable to deserialize message"); + assert!(matches!(message, DiscoverProjectData::Finished { .. 
})); +} diff --git a/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs b/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs index c136dd13664..74ebca34103 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs +++ b/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs @@ -59,19 +59,38 @@ pub struct CargoTestHandle { } // Example of a cargo test command: -// cargo test --workspace --no-fail-fast -- module::func -Z unstable-options --format=json +// cargo test --workspace --no-fail-fast -- -Z unstable-options --format=json +// or +// cargo test --package my-package --no-fail-fast -- module::func -Z unstable-options --format=json + +#[derive(Debug)] +pub enum TestTarget { + Workspace, + Package(String), +} impl CargoTestHandle { pub fn new( path: Option<&str>, options: CargoOptions, root: &AbsPath, + test_target: TestTarget, sender: Sender<CargoTestMessage>, ) -> std::io::Result<Self> { let mut cmd = Command::new(Tool::Cargo.path()); cmd.env("RUSTC_BOOTSTRAP", "1"); cmd.arg("test"); - cmd.arg("--workspace"); + + match &test_target { + TestTarget::Package(package) => { + cmd.arg("--package"); + cmd.arg(package); + } + TestTarget::Workspace => { + cmd.arg("--workspace"); + } + }; + // --no-fail-fast is needed to ensure that all requested tests will run cmd.arg("--no-fail-fast"); cmd.arg("--manifest-path"); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index 184dab8367c..ba88495e14d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -1,6 +1,6 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. -use std::{borrow::Cow, hash::Hash, ops, slice::Iter as SliceIter}; +use std::{borrow::Cow, hash::Hash, ops, slice}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; @@ -9,17 +9,18 @@ use hir_expand::{ attrs::{collect_attrs, Attr, AttrId, RawAttrs}, HirFileId, InFile, }; +use intern::{sym, Symbol}; use la_arena::{ArenaMap, Idx, RawIdx}; use mbe::DelimiterKind; use syntax::{ ast::{self, HasAttrs}, - AstPtr, SmolStr, + AstPtr, }; use triomphe::Arc; use crate::{ db::DefDatabase, - item_tree::{AttrOwner, Fields, ItemTreeNode}, + item_tree::{AttrOwner, FieldParent, ItemTreeNode}, lang_item::LangItem, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource}, @@ -75,40 +76,36 @@ impl Attrs { let mut res = ArenaMap::default(); let crate_graph = db.crate_graph(); - let (fields, item_tree, krate) = match v { + let item_tree; + let (parent, fields, krate) = match v { VariantId::EnumVariantId(it) => { let loc = it.lookup(db); let krate = loc.parent.lookup(db).container.krate; - let item_tree = loc.id.item_tree(db); + item_tree = loc.id.item_tree(db); let variant = &item_tree[loc.id.value]; - (variant.fields.clone(), item_tree, krate) + (FieldParent::Variant(loc.id.value), &variant.fields, krate) } VariantId::StructId(it) => { let loc = it.lookup(db); let krate = loc.container.krate; - let item_tree = loc.id.item_tree(db); + item_tree = loc.id.item_tree(db); let struct_ = &item_tree[loc.id.value]; - (struct_.fields.clone(), item_tree, krate) + (FieldParent::Struct(loc.id.value), &struct_.fields, krate) } VariantId::UnionId(it) => { let loc = it.lookup(db); let krate = loc.container.krate; - let item_tree = loc.id.item_tree(db); + item_tree = loc.id.item_tree(db); let union_ = &item_tree[loc.id.value]; - (union_.fields.clone(), item_tree, krate) + (FieldParent::Union(loc.id.value), 
&union_.fields, krate) } }; - let fields = match fields { - Fields::Record(fields) | Fields::Tuple(fields) => fields, - Fields::Unit => return Arc::new(res), - }; - let cfg_options = &crate_graph[krate].cfg_options; let mut idx = 0; - for field in fields { - let attrs = item_tree.attrs(db, krate, field.into()); + for (id, _field) in fields.iter().enumerate() { + let attrs = item_tree.attrs(db, krate, AttrOwner::make_field_indexed(parent, id)); if attrs.is_cfg_enabled(cfg_options) { res.insert(Idx::from_raw(RawIdx::from(idx)), attrs); idx += 1; @@ -120,12 +117,12 @@ impl Attrs { } impl Attrs { - pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> { + pub fn by_key<'attrs>(&'attrs self, key: &'attrs Symbol) -> AttrQuery<'_> { AttrQuery { attrs: self, key } } pub fn cfg(&self) -> Option<CfgExpr> { - let mut cfgs = self.by_key("cfg").tt_values().map(CfgExpr::parse); + let mut cfgs = self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse); let first = cfgs.next()?; match cfgs.next() { Some(second) => { @@ -137,7 +134,7 @@ impl Attrs { } pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ { - self.by_key("cfg").tt_values().map(CfgExpr::parse) + self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse) } pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool { @@ -147,50 +144,50 @@ impl Attrs { } } - pub fn lang(&self) -> Option<&str> { - self.by_key("lang").string_value() + pub fn lang(&self) -> Option<&Symbol> { + self.by_key(&sym::lang).string_value() } pub fn lang_item(&self) -> Option<LangItem> { - self.by_key("lang").string_value().and_then(LangItem::from_str) + self.by_key(&sym::lang).string_value().and_then(LangItem::from_symbol) } pub fn has_doc_hidden(&self) -> bool { - self.by_key("doc").tt_values().any(|tt| { + self.by_key(&sym::doc).tt_values().any(|tt| { tt.delimiter.kind == DelimiterKind::Parenthesis && - matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden") + matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden) }) } pub fn has_doc_notable_trait(&self) -> bool { - self.by_key("doc").tt_values().any(|tt| { + self.by_key(&sym::doc).tt_values().any(|tt| { tt.delimiter.kind == DelimiterKind::Parenthesis && - matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "notable_trait") + matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait) }) } pub fn doc_exprs(&self) -> impl Iterator<Item = DocExpr> + '_ { - self.by_key("doc").tt_values().map(DocExpr::parse) + self.by_key(&sym::doc).tt_values().map(DocExpr::parse) } - pub fn doc_aliases(&self) -> impl Iterator<Item = SmolStr> + '_ { + pub fn doc_aliases(&self) -> impl Iterator<Item = Symbol> + '_ { self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec()) } - pub fn export_name(&self) -> Option<&str> { - self.by_key("export_name").string_value() + pub fn export_name(&self) -> Option<&Symbol> { + self.by_key(&sym::export_name).string_value() } pub fn is_proc_macro(&self) -> bool { - self.by_key("proc_macro").exists() + self.by_key(&sym::proc_macro).exists() } pub fn is_proc_macro_attribute(&self) -> bool { - self.by_key("proc_macro_attribute").exists() + self.by_key(&sym::proc_macro_attribute).exists() } pub fn is_proc_macro_derive(&self) -> bool { - self.by_key("proc_macro_derive").exists() + self.by_key(&sym::proc_macro_derive).exists() } pub fn is_test(&self) -> bool { @@ -199,33 +196,37 @@ impl Attrs { .segments() .iter() 
.rev() - .zip(["core", "prelude", "v1", "test"].iter().rev()) - .all(|it| it.0.as_str() == Some(it.1)) + .zip( + [sym::core.clone(), sym::prelude.clone(), sym::v1.clone(), sym::test.clone()] + .iter() + .rev(), + ) + .all(|it| it.0 == it.1) }) } pub fn is_ignore(&self) -> bool { - self.by_key("ignore").exists() + self.by_key(&sym::ignore).exists() } pub fn is_bench(&self) -> bool { - self.by_key("bench").exists() + self.by_key(&sym::bench).exists() } pub fn is_unstable(&self) -> bool { - self.by_key("unstable").exists() + self.by_key(&sym::unstable).exists() } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum DocAtom { /// eg. `#[doc(hidden)]` - Flag(SmolStr), + Flag(Symbol), /// eg. `#[doc(alias = "it")]` /// /// Note that a key can have multiple values that are all considered "active" at the same time. /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`. - KeyValue { key: SmolStr, value: SmolStr }, + KeyValue { key: Symbol, value: Symbol }, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -234,7 +235,7 @@ pub enum DocExpr { /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]` Atom(DocAtom), /// eg. `#[doc(alias("x", "y"))]` - Alias(Vec<SmolStr>), + Alias(Vec<Symbol>), } impl From<DocAtom> for DocExpr { @@ -248,9 +249,9 @@ impl DocExpr { next_doc_expr(&mut tt.token_trees.iter()).unwrap_or(DocExpr::Invalid) } - pub fn aliases(&self) -> &[SmolStr] { + pub fn aliases(&self) -> &[Symbol] { match self { - DocExpr::Atom(DocAtom::KeyValue { key, value }) if key == "alias" => { + DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => { std::slice::from_ref(value) } DocExpr::Alias(aliases) => aliases, @@ -259,10 +260,10 @@ impl DocExpr { } } -fn next_doc_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<DocExpr> { +fn next_doc_expr<S>(it: &mut slice::Iter<'_, tt::TokenTree<S>>) -> Option<DocExpr> { let name = match it.next() { None => return None, - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(), Some(_) => return Some(DocExpr::Invalid), }; @@ -270,13 +271,14 @@ fn next_doc_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<DocExpr> let ret = match it.as_slice().first() { Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => { match it.as_slice().get(1) { - Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => { + Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + kind: tt::LitKind::Str, + .. + }))) => { it.next(); it.next(); - // FIXME: escape? raw string? 
- let value = - SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"')); - DocAtom::KeyValue { key: name, value }.into() + DocAtom::KeyValue { key: name, value: text.clone() }.into() } _ => return Some(DocExpr::Invalid), } @@ -284,8 +286,8 @@ fn next_doc_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<DocExpr> Some(tt::TokenTree::Subtree(subtree)) => { it.next(); let subs = parse_comma_sep(subtree); - match name.as_str() { - "alias" => DocExpr::Alias(subs), + match &name { + s if *s == sym::alias => DocExpr::Alias(subs), _ => DocExpr::Invalid, } } @@ -301,15 +303,16 @@ fn next_doc_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<DocExpr> Some(ret) } -fn parse_comma_sep<S>(subtree: &tt::Subtree<S>) -> Vec<SmolStr> { +fn parse_comma_sep<S>(subtree: &tt::Subtree<S>) -> Vec<Symbol> { subtree .token_trees .iter() .filter_map(|tt| match tt { - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { - // FIXME: escape? raw string? - Some(SmolStr::new(lit.text.trim_start_matches('"').trim_end_matches('"'))) - } + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + kind: tt::LitKind::Str, + symbol, + .. + })) => Some(symbol.clone()), _ => None, }) .collect() @@ -556,7 +559,7 @@ impl AttrSourceMap { #[derive(Debug, Clone, Copy)] pub struct AttrQuery<'attr> { attrs: &'attr Attrs, - key: &'static str, + key: &'attr Symbol, } impl<'attr> AttrQuery<'attr> { @@ -564,10 +567,14 @@ impl<'attr> AttrQuery<'attr> { self.attrs().filter_map(|attr| attr.token_tree_value()) } - pub fn string_value(self) -> Option<&'attr str> { + pub fn string_value(self) -> Option<&'attr Symbol> { self.attrs().find_map(|attr| attr.string_value()) } + pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> { + self.attrs().find_map(|attr| attr.string_value_with_span()) + } + pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> { self.attrs().find_map(|attr| attr.string_value_unescape()) } @@ -578,9 +585,7 @@ impl<'attr> AttrQuery<'attr> { pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone { let key = self.key; - self.attrs - .iter() - .filter(move |attr| attr.path.as_ident().map_or(false, |s| s.to_smol_str() == key)) + self.attrs.iter().filter(move |attr| attr.path.as_ident().map_or(false, |s| *s == *key)) } /// Find string value for a specific key inside token tree @@ -589,14 +594,14 @@ impl<'attr> AttrQuery<'attr> { /// #[doc(html_root_url = "url")] /// ^^^^^^^^^^^^^ key /// ``` - pub fn find_string_value_in_tt(self, key: &'attr str) -> Option<&SmolStr> { + pub fn find_string_value_in_tt(self, key: &'attr Symbol) -> Option<&str> { self.tt_values().find_map(|tt| { let name = tt.token_trees.iter() - .skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, ..} )) if text == key)) + .skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == *key)) .nth(2); match name { - Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ ref text, ..}))) => Some(text), + Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()), _ => None } }) @@ -647,11 +652,13 @@ mod tests { //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. 
+ use intern::Symbol; + use span::EditionedFileId; use triomphe::Arc; - use base_db::FileId; use hir_expand::span_map::{RealSpanMap, SpanMap}; use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode}; + use span::FileId; use syntax::{ast, AstNode, TextRange}; use crate::attr::{DocAtom, DocExpr}; @@ -659,7 +666,9 @@ mod tests { fn assert_parse_result(input: &str, expected: DocExpr) { let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); + let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute( + EditionedFileId::current_edition(FileId::from_raw(0)), + ))); let tt = syntax_node_to_token_tree( tt.syntax(), map.as_ref(), @@ -672,24 +681,29 @@ mod tests { #[test] fn test_doc_expr_parser() { - assert_parse_result("#![doc(hidden)]", DocAtom::Flag("hidden".into()).into()); + assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into()); assert_parse_result( r#"#![doc(alias = "foo")]"#, - DocAtom::KeyValue { key: "alias".into(), value: "foo".into() }.into(), + DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(), ); - assert_parse_result(r#"#![doc(alias("foo"))]"#, DocExpr::Alias(["foo".into()].into())); + assert_parse_result( + r#"#![doc(alias("foo"))]"#, + DocExpr::Alias([Symbol::intern("foo")].into()), + ); assert_parse_result( r#"#![doc(alias("foo", "bar", "baz"))]"#, - DocExpr::Alias(["foo".into(), "bar".into(), "baz".into()].into()), + DocExpr::Alias( + [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(), + ), ); assert_parse_result( r#" #[doc(alias("Bar", "Qux"))] struct Foo;"#, - DocExpr::Alias(["Bar".into(), "Qux".into()].into()), + DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()), ); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index ca4a3f5217c..d3c134f3266 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -6,13 +6,14 @@ pub mod scope; #[cfg(test)] mod tests; -use std::ops::Index; +use std::ops::{Deref, Index}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; -use hir_expand::{name::Name, InFile}; -use la_arena::{Arena, ArenaMap}; +use hir_expand::{name::Name, ExpandError, InFile}; +use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use rustc_hash::FxHashMap; +use smallvec::SmallVec; use span::MacroFileId; use syntax::{ast, AstPtr, SyntaxNodePtr}; use triomphe::Arc; @@ -23,6 +24,7 @@ use crate::{ hir::{ dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId, RecordFieldPat, }, + item_tree::AttrOwner, nameres::DefMap, path::{ModPath, Path}, src::HasSource, @@ -91,6 +93,7 @@ pub struct BodySourceMap { label_map_back: ArenaMap<LabelId, LabelSource>, self_param: Option<InFile<AstPtr<ast::SelfParam>>>, + binding_definitions: FxHashMap<BindingId, SmallVec<[PatId; 4]>>, /// We don't create explicit nodes for record fields (`S { record_field: 92 }`). /// Instead, we use id of expression (`92`) to identify the field. 
@@ -112,8 +115,7 @@ pub struct SyntheticSyntax; #[derive(Debug, Eq, PartialEq)] pub enum BodyDiagnostic { InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, - MacroError { node: InFile<AstPtr<ast::MacroCall>>, message: String }, - UnresolvedProcMacro { node: InFile<AstPtr<ast::MacroCall>>, krate: CrateId }, + MacroError { node: InFile<AstPtr<ast::MacroCall>>, err: ExpandError }, UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath }, UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, UndeclaredLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, @@ -134,16 +136,23 @@ impl Body { let data = db.function_data(f); let f = f.lookup(db); let src = f.source(db); - params = src.value.param_list().map(|param_list| { + params = src.value.param_list().map(move |param_list| { let item_tree = f.id.item_tree(db); let func = &item_tree[f.id.value]; let krate = f.container.module(db).krate; let crate_graph = db.crate_graph(); ( param_list, - func.params.clone().map(move |param| { + (0..func.params.len()).map(move |idx| { item_tree - .attrs(db, krate, param.into()) + .attrs( + db, + krate, + AttrOwner::Param( + f.id.value, + Idx::from_raw(RawIdx::from(idx as u32)), + ), + ) .is_cfg_enabled(&crate_graph[krate].cfg_options) }), ) @@ -377,6 +386,10 @@ impl BodySourceMap { self.label_map_back[label] } + pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] { + self.binding_definitions.get(&binding).map_or(&[], Deref::deref) + } + pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> { let src = node.map(AstPtr::new); self.label_map.get(&src).cloned() @@ -428,6 +441,7 @@ impl BodySourceMap { expansions, format_args_template_map, diagnostics, + binding_definitions, } = self; format_args_template_map.shrink_to_fit(); expr_map.shrink_to_fit(); @@ -440,5 +454,6 @@ impl BodySourceMap { pat_field_map_back.shrink_to_fit(); expansions.shrink_to_fit(); diagnostics.shrink_to_fit(); + binding_definitions.shrink_to_fit(); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index be7068c807a..9e30aff8fe9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -4,13 +4,13 @@ use std::mem; use base_db::CrateId; +use either::Either; use hir_expand::{ - name::{name, AsName, Name}, - ExpandError, InFile, + name::{AsName, Name}, + InFile, }; -use intern::Interned; +use intern::{sym, Interned, Symbol}; use rustc_hash::FxHashMap; -use smallvec::SmallVec; use span::AstIdMap; use stdx::never; use syntax::{ @@ -187,8 +187,10 @@ impl ExprCollector<'_> { { let is_mutable = self_param.mut_token().is_some() && self_param.amp_token().is_none(); - let binding_id: la_arena::Idx<Binding> = - self.alloc_binding(name![self], BindingAnnotation::new(is_mutable, false)); + let binding_id: la_arena::Idx<Binding> = self.alloc_binding( + Name::new_symbol_root(sym::self_.clone()), + BindingAnnotation::new(is_mutable, false), + ); self.body.self_param = Some(binding_id); self.source_map.self_param = Some(self.expander.in_file(AstPtr::new(&self_param))); } @@ -299,7 +301,10 @@ impl ExprCollector<'_> { result_expr_id }) } - None => self.collect_block(e), + // FIXME + Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => { + self.collect_block(e) + } }, ast::Expr::LoopExpr(e) => { let label = e.label().map(|label| self.collect_label(label)); @@ -987,20 +992,11 @@ impl 
ExprCollector<'_> { } }; if record_diagnostics { - match &res.err { - Some(ExpandError::UnresolvedProcMacro(krate)) => { - self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro { - node: InFile::new(outer_file, syntax_ptr), - krate: *krate, - }); - } - Some(err) => { - self.source_map.diagnostics.push(BodyDiagnostic::MacroError { - node: InFile::new(outer_file, syntax_ptr), - message: err.to_string(), - }); - } - None => {} + if let Some(err) = res.err { + self.source_map.diagnostics.push(BodyDiagnostic::MacroError { + node: InFile::new(outer_file, syntax_ptr), + err, + }); } } @@ -1431,15 +1427,14 @@ impl ExprCollector<'_> { args: AstChildren<ast::Pat>, has_leading_comma: bool, binding_list: &mut BindingList, - ) -> (Box<[PatId]>, Option<usize>) { + ) -> (Box<[PatId]>, Option<u32>) { + let args: Vec<_> = args.map(|p| self.collect_pat_possibly_rest(p, binding_list)).collect(); // Find the location of the `..`, if there is one. Note that we do not // consider the possibility of there being multiple `..` here. - let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_))); + let ellipsis = args.iter().position(|p| p.is_right()).map(|it| it as u32); + // We want to skip the `..` pattern here, since we account for it above. - let mut args: Vec<_> = args - .filter(|p| !matches!(p, ast::Pat::RestPat(_))) - .map(|p| self.collect_pat(p, binding_list)) - .collect(); + let mut args: Vec<_> = args.into_iter().filter_map(Either::left).collect(); // if there is a leading comma, the user is most likely to type out a leading pattern // so we insert a missing pattern at the beginning for IDE features if has_leading_comma { @@ -1449,6 +1444,41 @@ impl ExprCollector<'_> { (args.into_boxed_slice(), ellipsis) } + // `collect_pat` rejects `ast::Pat::RestPat`, but it should be handled in some cases that + // it is the macro expansion result of an arg sub-pattern in a slice or tuple pattern. 
+ fn collect_pat_possibly_rest( + &mut self, + pat: ast::Pat, + binding_list: &mut BindingList, + ) -> Either<PatId, ()> { + match &pat { + ast::Pat::RestPat(_) => Either::Right(()), + ast::Pat::MacroPat(mac) => match mac.macro_call() { + Some(call) => { + let macro_ptr = AstPtr::new(&call); + let src = self.expander.in_file(AstPtr::new(&pat)); + let pat = + self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| { + if let Some(expanded_pat) = expanded_pat { + this.collect_pat_possibly_rest(expanded_pat, binding_list) + } else { + Either::Left(this.missing_pat()) + } + }); + if let Some(pat) = pat.left() { + self.source_map.pat_map.insert(src, pat); + } + pat + } + None => { + let ptr = AstPtr::new(&pat); + Either::Left(self.alloc_pat(Pat::Missing, ptr)) + } + }, + _ => Either::Left(self.collect_pat(pat, binding_list)), + } + } + // endregion: patterns /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when @@ -1473,7 +1503,7 @@ impl ExprCollector<'_> { } fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) { - self.body.bindings[binding_id].definitions.push(pat_id); + self.source_map.binding_definitions.entry(binding_id).or_default().push(pat_id); } // region: labels @@ -1588,18 +1618,22 @@ impl ExprCollector<'_> { }); let mut mappings = vec![]; let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) { - Some((s, is_direct_literal)) => format_args::parse( - &s, - fmt_snippet, - args, - is_direct_literal, - |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), - |name, span| { - if let Some(span) = span { - mappings.push((span, name)) - } - }, - ), + Some((s, is_direct_literal)) => { + let call_ctx = self.expander.syntax_context(); + format_args::parse( + &s, + fmt_snippet, + args, + is_direct_literal, + |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), + |name, span| { + if let Some(span) = span { + mappings.push((span, name)) + } + }, + call_ctx, + ) + } None => FormatArgs { template: Default::default(), arguments: args.finish(), @@ -1617,30 +1651,29 @@ impl ExprCollector<'_> { } } - let lit_pieces = - fmt.template - .iter() - .enumerate() - .filter_map(|(i, piece)| { - match piece { - FormatArgsPiece::Literal(s) => Some( - self.alloc_expr_desugared(Expr::Literal(Literal::String(s.clone()))), - ), - &FormatArgsPiece::Placeholder(_) => { - // Inject empty string before placeholders when not already preceded by a literal piece. - if i == 0 - || matches!(fmt.template[i - 1], FormatArgsPiece::Placeholder(_)) - { - Some(self.alloc_expr_desugared(Expr::Literal(Literal::String( - "".into(), - )))) - } else { - None - } + let lit_pieces = fmt + .template + .iter() + .enumerate() + .filter_map(|(i, piece)| { + match piece { + FormatArgsPiece::Literal(s) => { + Some(self.alloc_expr_desugared(Expr::Literal(Literal::String(s.clone())))) + } + &FormatArgsPiece::Placeholder(_) => { + // Inject empty string before placeholders when not already preceded by a literal piece. 
+ if i == 0 || matches!(fmt.template[i - 1], FormatArgsPiece::Placeholder(_)) + { + Some(self.alloc_expr_desugared(Expr::Literal(Literal::String( + Symbol::empty(), + )))) + } else { + None } } - }) - .collect(); + } + }) + .collect(); let lit_pieces = self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: lit_pieces, is_assignee_expr: false, @@ -1723,14 +1756,18 @@ impl ExprCollector<'_> { // unsafe { ::core::fmt::UnsafeArg::new() } // ) - let Some(new_v1_formatted) = - LangItem::FormatArguments.ty_rel_path(self.db, self.krate, name![new_v1_formatted]) - else { + let Some(new_v1_formatted) = LangItem::FormatArguments.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::new_v1_formatted.clone()), + ) else { return self.missing_expr(); }; - let Some(unsafe_arg_new) = - LangItem::FormatUnsafeArg.ty_rel_path(self.db, self.krate, name![new]) - else { + let Some(unsafe_arg_new) = LangItem::FormatUnsafeArg.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::new.clone()), + ) else { return self.missing_expr(); }; let new_v1_formatted = self.alloc_expr_desugared(Expr::Path(new_v1_formatted)); @@ -1812,10 +1849,10 @@ impl ExprCollector<'_> { self.db, self.krate, match alignment { - Some(FormatAlignment::Left) => name![Left], - Some(FormatAlignment::Right) => name![Right], - Some(FormatAlignment::Center) => name![Center], - None => name![Unknown], + Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left.clone()), + Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right.clone()), + Some(FormatAlignment::Center) => Name::new_symbol_root(sym::Center.clone()), + None => Name::new_symbol_root(sym::Unknown.clone()), }, ); match align { @@ -1838,8 +1875,11 @@ impl ExprCollector<'_> { let width = self.make_count(width, argmap); let format_placeholder_new = { - let format_placeholder_new = - LangItem::FormatPlaceholder.ty_rel_path(self.db, self.krate, name![new]); + let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::new.clone()), + ); match format_placeholder_new { Some(path) => self.alloc_expr_desugared(Expr::Path(path)), None => self.missing_expr(), @@ -1883,11 +1923,14 @@ impl ExprCollector<'_> { *n as u128, Some(BuiltinUint::Usize), ))); - let count_is = - match LangItem::FormatCount.ty_rel_path(self.db, self.krate, name![Is]) { - Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)), - None => self.missing_expr(), - }; + let count_is = match LangItem::FormatCount.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::Is.clone()), + ) { + Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)), + None => self.missing_expr(), + }; self.alloc_expr_desugared(Expr::Call { callee: count_is, args: Box::new([args]), @@ -1905,7 +1948,7 @@ impl ExprCollector<'_> { let count_param = match LangItem::FormatCount.ty_rel_path( self.db, self.krate, - name![Param], + Name::new_symbol_root(sym::Param.clone()), ) { Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)), None => self.missing_expr(), @@ -1921,7 +1964,11 @@ impl ExprCollector<'_> { self.missing_expr() } } - None => match LangItem::FormatCount.ty_rel_path(self.db, self.krate, name![Implied]) { + None => match LangItem::FormatCount.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::Implied.clone()), + ) { Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)), None => self.missing_expr(), }, @@ -1942,18 +1989,18 @@ impl ExprCollector<'_> { let 
new_fn = match LangItem::FormatArgument.ty_rel_path( self.db, self.krate, - match ty { - Format(Display) => name![new_display], - Format(Debug) => name![new_debug], - Format(LowerExp) => name![new_lower_exp], - Format(UpperExp) => name![new_upper_exp], - Format(Octal) => name![new_octal], - Format(Pointer) => name![new_pointer], - Format(Binary) => name![new_binary], - Format(LowerHex) => name![new_lower_hex], - Format(UpperHex) => name![new_upper_hex], - Usize => name![from_usize], - }, + Name::new_symbol_root(match ty { + Format(Display) => sym::new_display.clone(), + Format(Debug) => sym::new_debug.clone(), + Format(LowerExp) => sym::new_lower_exp.clone(), + Format(UpperExp) => sym::new_upper_exp.clone(), + Format(Octal) => sym::new_octal.clone(), + Format(Pointer) => sym::new_pointer.clone(), + Format(Binary) => sym::new_binary.clone(), + Format(LowerHex) => sym::new_lower_hex.clone(), + Format(UpperHex) => sym::new_upper_hex.clone(), + Usize => sym::from_usize.clone(), + }), ) { Some(new_fn) => self.alloc_expr_desugared(Expr::Path(new_fn)), None => self.missing_expr(), @@ -2002,12 +2049,7 @@ impl ExprCollector<'_> { } fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId { - let binding = self.body.bindings.alloc(Binding { - name, - mode, - definitions: SmallVec::new(), - problems: None, - }); + let binding = self.body.bindings.alloc(Binding { name, mode, problems: None }); if let Some(owner) = self.current_binding_owner { self.body.binding_owners.insert(binding, owner); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs index c48d16d0530..edaee60937d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs @@ -517,7 +517,7 @@ impl Printer<'_> { if i != 0 { w!(self, ", "); } - if *ellipsis == Some(i) { + if *ellipsis == Some(i as u32) { w!(self, ".., "); } self.print_pat(*pat); @@ -595,7 +595,7 @@ impl Printer<'_> { if i != 0 { w!(self, ", "); } - if *ellipsis == Some(i) { + if *ellipsis == Some(i as u32) { w!(self, ", .."); } self.print_pat(*arg); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs index fd685235e17..bf201ca8347 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs @@ -288,8 +288,9 @@ fn compute_expr_scopes( #[cfg(test)] mod tests { - use base_db::{FileId, SourceDatabase}; + use base_db::SourceDatabase; use hir_expand::{name::AsName, InFile}; + use span::FileId; use syntax::{algo::find_node_at_offset, ast, AstNode}; use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; @@ -325,7 +326,7 @@ mod tests { let file_syntax = db.parse(file_id).syntax_node(); let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap(); - let function = find_function(&db, file_id); + let function = find_function(&db, file_id.file_id()); let scopes = db.expr_scopes(function.into()); let (_body, source_map) = db.body_with_source_map(function.into()); @@ -338,7 +339,7 @@ mod tests { let actual = scopes .scope_chain(scope) .flat_map(|scope| scopes.entries(scope)) - .map(|it| it.name().to_smol_str()) + .map(|it| it.name().as_str()) .collect::<Vec<_>>() .join("\n"); let expected = expected.join("\n"); @@ -480,10 +481,10 @@ fn foo() { .expect("failed to find a name at the target offset"); let name_ref: ast::NameRef = 
find_node_at_offset(file.syntax(), offset).unwrap(); - let function = find_function(&db, file_id); + let function = find_function(&db, file_id.file_id()); let scopes = db.expr_scopes(function.into()); - let (body, source_map) = db.body_with_source_map(function.into()); + let (_, source_map) = db.body_with_source_map(function.into()); let expr_scope = { let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); @@ -494,7 +495,7 @@ fn foo() { let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap(); let pat_src = source_map - .pat_syntax(*body.bindings[resolved.binding()].definitions.first().unwrap()) + .pat_syntax(*source_map.binding_definitions[&resolved.binding()].first().unwrap()) .unwrap(); let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax()); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs index f9e55559dab..14b9af84e6f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs @@ -5,7 +5,8 @@ use std::fmt; -use hir_expand::name::{name, AsName, Name}; +use hir_expand::name::{AsName, Name}; +use intern::{sym, Symbol}; /// Different signed int types. #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum BuiltinInt { @@ -48,63 +49,67 @@ pub enum BuiltinType { impl BuiltinType { #[rustfmt::skip] - pub const ALL: &'static [(Name, BuiltinType)] = &[ - (name![char], BuiltinType::Char), - (name![bool], BuiltinType::Bool), - (name![str], BuiltinType::Str), - - (name![isize], BuiltinType::Int(BuiltinInt::Isize)), - (name![i8], BuiltinType::Int(BuiltinInt::I8)), - (name![i16], BuiltinType::Int(BuiltinInt::I16)), - (name![i32], BuiltinType::Int(BuiltinInt::I32)), - (name![i64], BuiltinType::Int(BuiltinInt::I64)), - (name![i128], BuiltinType::Int(BuiltinInt::I128)), - - (name![usize], BuiltinType::Uint(BuiltinUint::Usize)), - (name![u8], BuiltinType::Uint(BuiltinUint::U8)), - (name![u16], BuiltinType::Uint(BuiltinUint::U16)), - (name![u32], BuiltinType::Uint(BuiltinUint::U32)), - (name![u64], BuiltinType::Uint(BuiltinUint::U64)), - (name![u128], BuiltinType::Uint(BuiltinUint::U128)), - - (name![f16], BuiltinType::Float(BuiltinFloat::F16)), - (name![f32], BuiltinType::Float(BuiltinFloat::F32)), - (name![f64], BuiltinType::Float(BuiltinFloat::F64)), - (name![f128], BuiltinType::Float(BuiltinFloat::F128)), - ]; + pub fn all_builtin_types() -> [(Name, BuiltinType); 19] { + [ + (Name::new_symbol_root(sym::char.clone()), BuiltinType::Char), + (Name::new_symbol_root(sym::bool.clone()), BuiltinType::Bool), + (Name::new_symbol_root(sym::str.clone()), BuiltinType::Str), + + (Name::new_symbol_root(sym::isize.clone()), BuiltinType::Int(BuiltinInt::Isize)), + (Name::new_symbol_root(sym::i8.clone()), BuiltinType::Int(BuiltinInt::I8)), + (Name::new_symbol_root(sym::i16.clone()), BuiltinType::Int(BuiltinInt::I16)), + (Name::new_symbol_root(sym::i32.clone()), BuiltinType::Int(BuiltinInt::I32)), + (Name::new_symbol_root(sym::i64.clone()), BuiltinType::Int(BuiltinInt::I64)), + (Name::new_symbol_root(sym::i128.clone()), BuiltinType::Int(BuiltinInt::I128)), + + (Name::new_symbol_root(sym::usize.clone()), BuiltinType::Uint(BuiltinUint::Usize)), + (Name::new_symbol_root(sym::u8.clone()), BuiltinType::Uint(BuiltinUint::U8)), + (Name::new_symbol_root(sym::u16.clone()), BuiltinType::Uint(BuiltinUint::U16)), + (Name::new_symbol_root(sym::u32.clone()), 
BuiltinType::Uint(BuiltinUint::U32)), + (Name::new_symbol_root(sym::u64.clone()), BuiltinType::Uint(BuiltinUint::U64)), + (Name::new_symbol_root(sym::u128.clone()), BuiltinType::Uint(BuiltinUint::U128)), + + (Name::new_symbol_root(sym::f16.clone()), BuiltinType::Float(BuiltinFloat::F16)), + (Name::new_symbol_root(sym::f32.clone()), BuiltinType::Float(BuiltinFloat::F32)), + (Name::new_symbol_root(sym::f64.clone()), BuiltinType::Float(BuiltinFloat::F64)), + (Name::new_symbol_root(sym::f128.clone()), BuiltinType::Float(BuiltinFloat::F128)), + ] + } pub fn by_name(name: &Name) -> Option<Self> { - Self::ALL.iter().find_map(|(n, ty)| if n == name { Some(*ty) } else { None }) + Self::all_builtin_types() + .iter() + .find_map(|(n, ty)| if n == name { Some(*ty) } else { None }) } } impl AsName for BuiltinType { fn as_name(&self) -> Name { match self { - BuiltinType::Char => name![char], - BuiltinType::Bool => name![bool], - BuiltinType::Str => name![str], + BuiltinType::Char => Name::new_symbol_root(sym::char.clone()), + BuiltinType::Bool => Name::new_symbol_root(sym::bool.clone()), + BuiltinType::Str => Name::new_symbol_root(sym::str.clone()), BuiltinType::Int(it) => match it { - BuiltinInt::Isize => name![isize], - BuiltinInt::I8 => name![i8], - BuiltinInt::I16 => name![i16], - BuiltinInt::I32 => name![i32], - BuiltinInt::I64 => name![i64], - BuiltinInt::I128 => name![i128], + BuiltinInt::Isize => Name::new_symbol_root(sym::isize.clone()), + BuiltinInt::I8 => Name::new_symbol_root(sym::i8.clone()), + BuiltinInt::I16 => Name::new_symbol_root(sym::i16.clone()), + BuiltinInt::I32 => Name::new_symbol_root(sym::i32.clone()), + BuiltinInt::I64 => Name::new_symbol_root(sym::i64.clone()), + BuiltinInt::I128 => Name::new_symbol_root(sym::i128.clone()), }, BuiltinType::Uint(it) => match it { - BuiltinUint::Usize => name![usize], - BuiltinUint::U8 => name![u8], - BuiltinUint::U16 => name![u16], - BuiltinUint::U32 => name![u32], - BuiltinUint::U64 => name![u64], - BuiltinUint::U128 => name![u128], + BuiltinUint::Usize => Name::new_symbol_root(sym::usize.clone()), + BuiltinUint::U8 => Name::new_symbol_root(sym::u8.clone()), + BuiltinUint::U16 => Name::new_symbol_root(sym::u16.clone()), + BuiltinUint::U32 => Name::new_symbol_root(sym::u32.clone()), + BuiltinUint::U64 => Name::new_symbol_root(sym::u64.clone()), + BuiltinUint::U128 => Name::new_symbol_root(sym::u128.clone()), }, BuiltinType::Float(it) => match it { - BuiltinFloat::F16 => name![f16], - BuiltinFloat::F32 => name![f32], - BuiltinFloat::F64 => name![f64], - BuiltinFloat::F128 => name![f128], + BuiltinFloat::F16 => Name::new_symbol_root(sym::f16.clone()), + BuiltinFloat::F32 => Name::new_symbol_root(sym::f32.clone()), + BuiltinFloat::F64 => Name::new_symbol_root(sym::f64.clone()), + BuiltinFloat::F128 => Name::new_symbol_root(sym::f128.clone()), }, } } @@ -138,6 +143,18 @@ impl BuiltinInt { }; Some(res) } + pub fn from_suffix_sym(suffix: &Symbol) -> Option<BuiltinInt> { + let res = match suffix { + s if *s == sym::isize => Self::Isize, + s if *s == sym::i8 => Self::I8, + s if *s == sym::i16 => Self::I16, + s if *s == sym::i32 => Self::I32, + s if *s == sym::i64 => Self::I64, + s if *s == sym::i128 => Self::I128, + _ => return None, + }; + Some(res) + } } #[rustfmt::skip] @@ -155,6 +172,19 @@ impl BuiltinUint { }; Some(res) } + pub fn from_suffix_sym(suffix: &Symbol) -> Option<BuiltinUint> { + let res = match suffix { + s if *s == sym::usize => Self::Usize, + s if *s == sym::u8 => Self::U8, + s if *s == sym::u16 => Self::U16, + s if *s == sym::u32 => 
Self::U32, + s if *s == sym::u64 => Self::U64, + s if *s == sym::u128 => Self::U128, + + _ => return None, + }; + Some(res) + } } #[rustfmt::skip] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index 55043fdc4b0..c3c2e51fd03 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -6,7 +6,8 @@ use base_db::CrateId; use hir_expand::{ name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind, }; -use intern::Interned; +use intern::{sym, Interned, Symbol}; +use la_arena::{Idx, RawIdx}; use smallvec::SmallVec; use syntax::{ast, Parse}; use triomphe::Arc; @@ -38,8 +39,8 @@ pub struct FunctionData { pub ret_type: Interned<TypeRef>, pub attrs: Attrs, pub visibility: RawVisibility, - pub abi: Option<Interned<str>>, - pub legacy_const_generics_indices: Box<[u32]>, + pub abi: Option<Symbol>, + pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>, pub rustc_allow_incoherent_impl: bool, flags: FnFlags, } @@ -58,52 +59,52 @@ impl FunctionData { let crate_graph = db.crate_graph(); let cfg_options = &crate_graph[krate].cfg_options; - let enabled_params = func - .params - .clone() - .filter(|&param| item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options)); - - // If last cfg-enabled param is a `...` param, it's a varargs function. - let is_varargs = enabled_params - .clone() - .next_back() - .map_or(false, |param| item_tree[param].type_ref.is_none()); + let attr_owner = |idx| { + item_tree::AttrOwner::Param(loc.id.value, Idx::from_raw(RawIdx::from(idx as u32))) + }; let mut flags = func.flags; - if is_varargs { - flags |= FnFlags::IS_VARARGS; - } if flags.contains(FnFlags::HAS_SELF_PARAM) { // If there's a self param in the syntax, but it is cfg'd out, remove the flag.
- let is_cfgd_out = match func.params.clone().next() { - Some(param) => { - !item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options) - } - None => { - stdx::never!("fn HAS_SELF_PARAM but no parameters allocated"); - true - } - }; + let is_cfgd_out = + !item_tree.attrs(db, krate, attr_owner(0usize)).is_cfg_enabled(cfg_options); if is_cfgd_out { cov_mark::hit!(cfgd_out_self_param); flags.remove(FnFlags::HAS_SELF_PARAM); } } + if flags.contains(FnFlags::IS_VARARGS) { + if let Some((_, param)) = func.params.iter().enumerate().rev().find(|&(idx, _)| { + item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options) + }) { + if param.type_ref.is_some() { + flags.remove(FnFlags::IS_VARARGS); + } + } else { + flags.remove(FnFlags::IS_VARARGS); + } + } let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); let legacy_const_generics_indices = attrs - .by_key("rustc_legacy_const_generics") + .by_key(&sym::rustc_legacy_const_generics) .tt_values() .next() .map(parse_rustc_legacy_const_generics) - .unwrap_or_default(); - let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists(); + .filter(|it| !it.is_empty()) + .map(Box::new); + let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); Arc::new(FunctionData { name: func.name.clone(), - params: enabled_params - .clone() - .filter_map(|id| item_tree[id].type_ref.clone()) + params: func + .params + .iter() + .enumerate() + .filter(|&(idx, _)| { + item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options) + }) + .filter_map(|(_, param)| param.type_ref.clone()) .collect(), ret_type: func.ret_type.clone(), attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()), @@ -150,7 +151,7 @@ fn parse_rustc_legacy_const_generics(tt: &crate::tt::Subtree) -> Box<[u32]> { let mut indices = Vec::new(); for args in tt.token_trees.chunks(2) { match &args[0] { - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.text.parse() { + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() { Ok(index) => indices.push(index), Err(_) => break, }, @@ -200,8 +201,8 @@ impl TypeAliasData { ModItem::from(loc.id.value).into(), ); let rustc_has_incoherent_inherent_impls = - attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); - let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists(); + attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists(); + let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); Arc::new(TypeAliasData { name: typ.name.clone(), @@ -223,6 +224,7 @@ pub struct TraitData { pub is_unsafe: bool, pub rustc_has_incoherent_inherent_impls: bool, pub skip_array_during_method_dispatch: bool, + pub skip_boxed_slice_during_method_dispatch: bool, pub fundamental: bool, pub visibility: RawVisibility, /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. 
`hir_ty` will ignore @@ -250,11 +252,20 @@ impl TraitData { let is_unsafe = tr_def.is_unsafe; let visibility = item_tree[tr_def.visibility].clone(); let attrs = item_tree.attrs(db, module_id.krate(), ModItem::from(tree_id.value).into()); - let skip_array_during_method_dispatch = - attrs.by_key("rustc_skip_array_during_method_dispatch").exists(); + let mut skip_array_during_method_dispatch = + attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists(); + let mut skip_boxed_slice_during_method_dispatch = false; + for tt in attrs.by_key(&sym::rustc_skip_during_method_dispatch).tt_values() { + for tt in tt.token_trees.iter() { + if let crate::tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt { + skip_array_during_method_dispatch |= ident.sym == sym::array; + skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice; + } + } + } let rustc_has_incoherent_inherent_impls = - attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); - let fundamental = attrs.by_key("fundamental").exists(); + attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists(); + let fundamental = attrs.by_key(&sym::fundamental).exists(); let mut collector = AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr)); collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items); @@ -269,6 +280,7 @@ impl TraitData { is_unsafe, visibility, skip_array_during_method_dispatch, + skip_boxed_slice_during_method_dispatch, rustc_has_incoherent_inherent_impls, fundamental, }), @@ -393,7 +405,7 @@ impl Macro2Data { let helpers = item_tree .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) - .by_key("rustc_builtin_macro") + .by_key(&sym::rustc_builtin_macro) .tt_values() .next() .and_then(|attr| parse_macro_name_and_helper_attrs(&attr.token_trees)) @@ -423,7 +435,7 @@ impl MacroRulesData { let macro_export = item_tree .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) - .by_key("macro_export") + .by_key(&sym::macro_export) .exists(); Arc::new(MacroRulesData { name: makro.name.clone(), macro_export }) @@ -485,7 +497,7 @@ impl ExternCrateDeclData { let name = extern_crate.name.clone(); let krate = loc.container.krate(); - let crate_id = if name == hir_expand::name![self] { + let crate_id = if name == sym::self_.clone() { Some(krate) } else { db.crate_def_map(krate) @@ -526,7 +538,7 @@ impl ConstData { let rustc_allow_incoherent_impl = item_tree .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into()) - .by_key("rustc_allow_incoherent_impl") + .by_key(&sym::rustc_allow_incoherent_impl) .exists(); Arc::new(ConstData { @@ -618,7 +630,8 @@ impl<'a> AssocItemCollector<'a> { if !attrs.is_cfg_enabled(self.expander.cfg_options()) { self.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id.local_id, - InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).erase()), + tree_id, + ModItem::from(item).into(), attrs.cfg().unwrap(), self.expander.cfg_options().clone(), )); @@ -644,22 +657,18 @@ impl<'a> AssocItemCollector<'a> { // crate failed), skip expansion like we would if it was // disabled. This is analogous to the handling in // `DefCollector::collect_macros`. 
- if exp.is_dummy() { - self.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + if let Some(err) = exp.as_expand_error(self.module_id.krate) { + self.diagnostics.push(DefDiagnostic::macro_error( self.module_id.local_id, - loc.kind, - loc.def.krate, + ast_id, + (*attr.path).clone(), + err, )); - - continue 'attrs; - } - if exp.is_disabled() { continue 'attrs; } } self.macro_calls.push((ast_id, call_id)); - let res = self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id); self.collect_macro_items(res); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs index 0fe73418e51..a70710e565c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs @@ -5,24 +5,20 @@ use bitflags::bitflags; use cfg::CfgOptions; use either::Either; -use hir_expand::{ - name::{AsName, Name}, - HirFileId, InFile, -}; -use intern::Interned; +use hir_expand::name::Name; +use intern::{sym, Interned}; use la_arena::Arena; use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; -use syntax::ast::{self, HasName, HasVisibility}; use triomphe::Arc; use crate::{ builtin_type::{BuiltinInt, BuiltinUint}, db::DefDatabase, - item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, + item_tree::{ + AttrOwner, Field, FieldParent, FieldsShape, ItemTree, ModItem, RawVisibilityId, TreeId, + }, lang_item::LangItem, - lower::LowerCtx, nameres::diagnostics::{DefDiagnostic, DefDiagnostics}, - trace::Trace, tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}, type_ref::TypeRef, visibility::RawVisibility, @@ -95,7 +91,7 @@ fn repr_from_value( item_tree: &ItemTree, of: AttrOwner, ) -> Option<ReprOptions> { - item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt) + item_tree.attrs(db, krate, of).by_key(&sym::repr).tt_values().find_map(parse_repr_tt) } fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> { @@ -112,12 +108,12 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> { let mut tts = tt.token_trees.iter().peekable(); while let Some(tt) = tts.next() { if let TokenTree::Leaf(Leaf::Ident(ident)) = tt { - flags.insert(match &*ident.text { - "packed" => { + flags.insert(match &ident.sym { + s if *s == sym::packed => { let pack = if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { - lit.text.parse().unwrap_or_default() + lit.symbol.as_str().parse().unwrap_or_default() } else { 0 } @@ -129,11 +125,11 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> { Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack }); ReprFlags::empty() } - "align" => { + s if *s == sym::align => { if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { - if let Ok(align) = lit.text.parse() { + if let Ok(align) = lit.symbol.as_str().parse() { let align = Align::from_bytes(align).ok(); max_align = max_align.max(align); } @@ -141,13 +137,13 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> { } ReprFlags::empty() } - "C" => ReprFlags::IS_C, - "transparent" => ReprFlags::IS_TRANSPARENT, - "simd" => ReprFlags::IS_SIMD, + s if *s == sym::C => ReprFlags::IS_C, + s if *s == sym::transparent => ReprFlags::IS_TRANSPARENT, + s if *s == sym::simd => ReprFlags::IS_SIMD, repr => { - if let Some(builtin) = BuiltinInt::from_suffix(repr) + if let 
Some(builtin) = BuiltinInt::from_suffix_sym(repr) .map(Either::Left) - .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right)) + .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right)) { int = Some(match builtin { Either::Left(bi) => match bi { @@ -194,10 +190,10 @@ impl StructData { let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); let mut flags = StructFlags::NO_FLAGS; - if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() { + if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL; } - if attrs.by_key("fundamental").exists() { + if attrs.by_key(&sym::fundamental).exists() { flags |= StructFlags::IS_FUNDAMENTAL; } if let Some(lang) = attrs.lang_item() { @@ -211,20 +207,25 @@ impl StructData { } let strukt = &item_tree[loc.id.value]; - let (variant_data, diagnostics) = lower_fields( + let (data, diagnostics) = lower_fields( db, krate, - loc.id.file_id(), loc.container.local_id, + loc.id.tree_id(), &item_tree, &db.crate_graph()[krate].cfg_options, + FieldParent::Struct(loc.id.value), &strukt.fields, None, ); ( Arc::new(StructData { name: strukt.name.clone(), - variant_data: Arc::new(variant_data), + variant_data: Arc::new(match strukt.shape { + FieldsShape::Record => VariantData::Record(data), + FieldsShape::Tuple => VariantData::Tuple(data), + FieldsShape::Unit => VariantData::Unit, + }), repr, visibility: item_tree[strukt.visibility].clone(), flags, @@ -248,28 +249,29 @@ impl StructData { let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); let mut flags = StructFlags::NO_FLAGS; - if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() { + if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL; } - if attrs.by_key("fundamental").exists() { + if attrs.by_key(&sym::fundamental).exists() { flags |= StructFlags::IS_FUNDAMENTAL; } let union = &item_tree[loc.id.value]; - let (variant_data, diagnostics) = lower_fields( + let (data, diagnostics) = lower_fields( db, krate, - loc.id.file_id(), loc.container.local_id, + loc.id.tree_id(), &item_tree, &db.crate_graph()[krate].cfg_options, + FieldParent::Union(loc.id.value), &union.fields, None, ); ( Arc::new(StructData { name: union.name.clone(), - variant_data: Arc::new(variant_data), + variant_data: Arc::new(VariantData::Record(data)), repr, visibility: item_tree[union.visibility].clone(), flags, @@ -287,7 +289,7 @@ impl EnumData { let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let rustc_has_incoherent_inherent_impls = item_tree .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) - .by_key("rustc_has_incoherent_inherent_impls") + .by_key(&sym::rustc_has_incoherent_inherent_impls) .exists(); let enum_ = &item_tree[loc.id.value]; @@ -336,13 +338,14 @@ impl EnumVariantData { let item_tree = loc.id.item_tree(db); let variant = &item_tree[loc.id.value]; - let (var_data, diagnostics) = lower_fields( + let (data, diagnostics) = lower_fields( db, krate, - loc.id.file_id(), container.local_id, + loc.id.tree_id(), &item_tree, &db.crate_graph()[krate].cfg_options, + FieldParent::Variant(loc.id.value), &variant.fields, Some(item_tree[loc.parent.lookup(db).id.value].visibility), ); @@ -350,7 +353,11 @@ impl EnumVariantData { ( Arc::new(EnumVariantData { name: variant.name.clone(), - 
variant_data: Arc::new(var_data), + variant_data: Arc::new(match variant.shape { + FieldsShape::Record => VariantData::Record(data), + FieldsShape::Tuple => VariantData::Tuple(data), + FieldsShape::Unit => VariantData::Unit, + }), }), DefDiagnostics::new(diagnostics), ) @@ -396,123 +403,35 @@ pub enum StructKind { Unit, } -pub(crate) fn lower_struct( - db: &dyn DefDatabase, - trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>, - ast: &InFile<ast::StructKind>, - krate: CrateId, - item_tree: &ItemTree, - fields: &Fields, -) -> StructKind { - let ctx = LowerCtx::new(db, ast.file_id); - - match (&ast.value, fields) { - (ast::StructKind::Tuple(fl), Fields::Tuple(fields)) => { - let cfg_options = &db.crate_graph()[krate].cfg_options; - for ((i, fd), item_tree_id) in fl.fields().enumerate().zip(fields.clone()) { - if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) { - continue; - } - - trace.alloc( - || Either::Left(fd.clone()), - || FieldData { - name: Name::new_tuple_field(i), - type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())), - visibility: RawVisibility::from_ast(db, fd.visibility(), &mut |range| { - ctx.span_map().span_for_range(range).ctx - }), - }, - ); - } - StructKind::Tuple - } - (ast::StructKind::Record(fl), Fields::Record(fields)) => { - let cfg_options = &db.crate_graph()[krate].cfg_options; - for (fd, item_tree_id) in fl.fields().zip(fields.clone()) { - if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) { - continue; - } - - trace.alloc( - || Either::Right(fd.clone()), - || FieldData { - name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing), - type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())), - visibility: RawVisibility::from_ast(db, fd.visibility(), &mut |range| { - ctx.span_map().span_for_range(range).ctx - }), - }, - ); - } - StructKind::Record - } - _ => StructKind::Unit, - } -} - fn lower_fields( db: &dyn DefDatabase, krate: CrateId, - current_file_id: HirFileId, container: LocalModuleId, + tree_id: TreeId, item_tree: &ItemTree, cfg_options: &CfgOptions, - fields: &Fields, + parent: FieldParent, + fields: &[Field], override_visibility: Option<RawVisibilityId>, -) -> (VariantData, Vec<DefDiagnostic>) { +) -> (Arena<FieldData>, Vec<DefDiagnostic>) { let mut diagnostics = Vec::new(); - match fields { - Fields::Record(flds) => { - let mut arena = Arena::new(); - for field_id in flds.clone() { - let attrs = item_tree.attrs(db, krate, field_id.into()); - let field = &item_tree[field_id]; - if attrs.is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, field, override_visibility)); - } else { - diagnostics.push(DefDiagnostic::unconfigured_code( - container, - InFile::new( - current_file_id, - match field.ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }, - ), - attrs.cfg().unwrap(), - cfg_options.clone(), - )) - } - } - (VariantData::Record(arena), diagnostics) - } - Fields::Tuple(flds) => { - let mut arena = Arena::new(); - for field_id in flds.clone() { - let attrs = item_tree.attrs(db, krate, field_id.into()); - let field = &item_tree[field_id]; - if attrs.is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, field, override_visibility)); - } else { - diagnostics.push(DefDiagnostic::unconfigured_code( - container, - InFile::new( - current_file_id, - match field.ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }, - ), - attrs.cfg().unwrap(), - 
cfg_options.clone(), - )) - } - } - (VariantData::Tuple(arena), diagnostics) + let mut arena = Arena::new(); + for (idx, field) in fields.iter().enumerate() { + let attr_owner = AttrOwner::make_field_indexed(parent, idx); + let attrs = item_tree.attrs(db, krate, attr_owner); + if attrs.is_cfg_enabled(cfg_options) { + arena.alloc(lower_field(item_tree, field, override_visibility)); + } else { + diagnostics.push(DefDiagnostic::unconfigured_code( + container, + tree_id, + attr_owner, + attrs.cfg().unwrap(), + cfg_options.clone(), + )) } - Fields::Unit => (VariantData::Unit, diagnostics), } + (arena, diagnostics) } fn lower_field( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 0eb9e7d30b2..56feb0163e1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -1,10 +1,10 @@ //! Defines database & queries for name resolution. -use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast}; +use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use either::Either; use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId}; -use intern::Interned; +use intern::{sym, Interned}; use la_arena::ArenaMap; -use span::MacroCallId; +use span::{EditionedFileId, MacroCallId}; use syntax::{ast, AstPtr}; use triomphe::Arc; @@ -177,6 +177,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba // endregion:data #[salsa::invoke(Body::body_with_source_map_query)] + #[salsa::lru] fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>); #[salsa::invoke(Body::body_query)] @@ -239,11 +240,14 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba fn crate_supports_no_std(&self, crate_id: CrateId) -> bool; - fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>; + fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>; } // return: macro call id and include file id -fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> { +fn include_macro_invoc( + db: &dyn DefDatabase, + krate: CrateId, +) -> Vec<(MacroCallId, EditionedFileId)> { db.crate_def_map(krate) .modules .values() @@ -257,13 +261,13 @@ fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId } fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { - let file = db.crate_graph()[crate_id].root_file_id; + let file = db.crate_graph()[crate_id].root_file_id(); let item_tree = db.file_item_tree(file.into()); let attrs = item_tree.raw_attrs(AttrOwner::TopLevel); for attr in &**attrs { - match attr.path().as_ident().and_then(|id| id.as_text()) { - Some(ident) if ident == "no_std" => return true, - Some(ident) if ident == "cfg_attr" => {} + match attr.path().as_ident() { + Some(ident) if *ident == sym::no_std.clone() => return true, + Some(ident) if *ident == sym::cfg_attr.clone() => {} _ => continue, } @@ -278,7 +282,7 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { tt.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',')); for output in segments.skip(1) { match output { - [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std" => { + [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => { return true } _ => {} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs 
b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs index dbf8e6b225c..6d8b4445f75 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs @@ -6,10 +6,11 @@ use base_db::CrateId; use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ - attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId, - InFile, MacroCallId, + attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind, + ExpandResult, HirFileId, InFile, Lookup, MacroCallId, }; use limit::Limit; +use span::SyntaxContextId; use syntax::{ast, Parse}; use triomphe::Arc; @@ -52,6 +53,11 @@ impl Expander { self.module.krate } + pub fn syntax_context(&self) -> SyntaxContextId { + // FIXME: + SyntaxContextId::ROOT + } + pub fn enter_expand<T: ast::AstNode>( &mut self, db: &dyn DefDatabase, @@ -154,26 +160,30 @@ impl Expander { // so don't return overflow error here to avoid diagnostics duplication. cov_mark::hit!(overflow_but_not_me); return ExpandResult::ok(None); - } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { - self.recursion_depth = u32::MAX; - cov_mark::hit!(your_stack_belongs_to_me); - return ExpandResult::only_err(ExpandError::RecursionOverflow); } let ExpandResult { value, err } = op(self); let Some(call_id) = value else { return ExpandResult { value: None, err }; }; + if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { + self.recursion_depth = u32::MAX; + cov_mark::hit!(your_stack_belongs_to_me); + return ExpandResult::only_err(ExpandError::new( + db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2, + ExpandErrorKind::RecursionOverflow, + )); + } let macro_file = call_id.as_macro_file(); let res = db.parse_macro_expansion(macro_file); let err = err.or(res.err); ExpandResult { - value: match err { + value: match &err { // If proc-macro is disabled or unresolved, we want to expand to a missing expression // instead of an empty tree which might end up in an empty block. - Some(ExpandError::UnresolvedProcMacro(_)) => None, + Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)) => None, _ => (|| { let parse = res.value.0.cast::<T>()?; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index 9a3c0495414..91594aecd04 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -2,10 +2,12 @@ use std::{cell::Cell, cmp::Ordering, iter}; +use base_db::{CrateId, CrateOrigin, LangCrateOrigin}; use hir_expand::{ - name::{known, AsName, Name}, + name::{AsName, Name}, Lookup, }; +use intern::sym; use rustc_hash::FxHashSet; use crate::{ @@ -36,7 +38,8 @@ pub fn find_path( // within block modules, forcing a `self` or `crate` prefix will not allow using inner items, so // default to plain paths. 
- if item.module(db).is_some_and(ModuleId::is_within_block) { + let item_module = item.module(db)?; + if item_module.is_within_block() { prefix_kind = PrefixKind::Plain; } cfg.prefer_no_std = cfg.prefer_no_std || db.crate_supports_no_std(from.krate()); @@ -53,23 +56,17 @@ pub fn find_path( }, item, MAX_PATH_LEN, + db.crate_graph()[item_module.krate()].origin.is_lang(), ) } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] enum Stability { Unstable, Stable, } use Stability::*; -fn zip_stability(a: Stability, b: Stability) -> Stability { - match (a, b) { - (Stable, Stable) => Stable, - _ => Unstable, - } -} - const MAX_PATH_LEN: usize = 15; const FIND_PATH_FUEL: usize = 10000; @@ -107,12 +104,18 @@ struct FindPathCtx<'db> { } /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId -fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option<ModPath> { +fn find_path_inner( + ctx: &FindPathCtx<'_>, + item: ItemInNs, + max_len: usize, + is_std_item: bool, +) -> Option<ModPath> { // - if the item is a module, jump straight to module search - if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { - let mut visited_modules = FxHashSet::default(); - return find_path_for_module(ctx, &mut visited_modules, module_id, max_len) - .map(|(item, _)| item); + if !is_std_item { + if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { + return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) + .map(|choice| choice.path); + } } let may_be_in_scope = match ctx.prefix { @@ -130,14 +133,17 @@ fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Opt // - if the item is in the prelude, return the name from there if let Some(value) = find_in_prelude(ctx.db, ctx.from_def_map, item, ctx.from) { - return Some(value); + return Some(value.path); } if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { // - if the item is an enum variant, refer to it via the enum - if let Some(mut path) = - find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len) - { + if let Some(mut path) = find_path_inner( + ctx, + ItemInNs::Types(variant.lookup(ctx.db).parent.into()), + max_len, + is_std_item, + ) { path.push_segment(ctx.db.enum_variant_data(variant).name.clone()); return Some(path); } @@ -146,22 +152,42 @@ fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Opt // variant somewhere } - let mut visited_modules = FxHashSet::default(); + if is_std_item { + // The item we are searching for comes from the sysroot libraries, so skip prefer looking in + // the sysroot libraries directly. + // We do need to fallback as the item in question could be re-exported by another crate + // while not being a transitive dependency of the current crate. 
+ if let Some(choice) = find_in_sysroot(ctx, &mut FxHashSet::default(), item, max_len) { + return Some(choice.path); + } + } - calculate_best_path(ctx, &mut visited_modules, item, max_len).map(|(item, _)| item) + let mut best_choice = None; + calculate_best_path(ctx, &mut FxHashSet::default(), item, max_len, &mut best_choice); + best_choice.map(|choice| choice.path) } #[tracing::instrument(skip_all)] fn find_path_for_module( ctx: &FindPathCtx<'_>, - visited_modules: &mut FxHashSet<ModuleId>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, module_id: ModuleId, + maybe_extern: bool, max_len: usize, -) -> Option<(ModPath, Stability)> { +) -> Option<Choice> { + if max_len == 0 { + // recursive base case, we can't find a path of length 0 + return None; + } if let Some(crate_root) = module_id.as_crate_root() { - if crate_root == ctx.from.derive_crate_root() { + if !maybe_extern || crate_root == ctx.from.derive_crate_root() { // - if the item is the crate root, return `crate` - return Some((ModPath::from_segments(PathKind::Crate, None), Stable)); + return Some(Choice { + path: ModPath::from_segments(PathKind::Crate, None), + path_text_len: 5, + stability: Stable, + prefer_due_to_prelude: false, + }); } // - otherwise if the item is the crate root of a dependency crate, return the name from the extern prelude @@ -188,7 +214,7 @@ fn find_path_for_module( } else { PathKind::Plain }; - return Some((ModPath::from_segments(kind, iter::once(name.clone())), Stable)); + return Some(Choice::new(ctx.cfg.prefer_prelude, kind, name.clone(), Stable)); } } @@ -206,27 +232,39 @@ fn find_path_for_module( ); if let Some(scope_name) = scope_name { // - if the item is already in scope, return the name under which it is - return Some(( - ModPath::from_segments(ctx.prefix.path_kind(), iter::once(scope_name)), + return Some(Choice::new( + ctx.cfg.prefer_prelude, + ctx.prefix.path_kind(), + scope_name, Stable, )); } } // - if the module can be referenced as self, super or crate, do that - if let Some(mod_path) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) { - if ctx.prefix != PrefixKind::ByCrate || mod_path.kind == PathKind::Crate { - return Some((mod_path, Stable)); + if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) { + if ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate { + return Some(Choice { + path: ModPath::from_segments(kind, None), + path_text_len: path_kind_len(kind), + stability: Stable, + prefer_due_to_prelude: false, + }); } } // - if the module is in the prelude, return it by that path - if let Some(mod_path) = - find_in_prelude(ctx.db, ctx.from_def_map, ItemInNs::Types(module_id.into()), ctx.from) - { - return Some((mod_path, Stable)); + let item = ItemInNs::Types(module_id.into()); + if let Some(choice) = find_in_prelude(ctx.db, ctx.from_def_map, item, ctx.from) { + return Some(choice); + } + let mut best_choice = None; + if maybe_extern { + calculate_best_path(ctx, visited_modules, item, max_len, &mut best_choice); + } else { + calculate_best_path_local(ctx, visited_modules, item, max_len, &mut best_choice); } - calculate_best_path(ctx, visited_modules, ItemInNs::Types(module_id.into()), max_len) + best_choice } fn find_in_scope( @@ -251,7 +289,7 @@ fn find_in_prelude( local_def_map: &DefMap, item: ItemInNs, from: ModuleId, -) -> Option<ModPath> { +) -> Option<Choice> { let (prelude_module, _) = local_def_map.prelude()?; let prelude_def_map = prelude_module.def_map(db); let prelude_scope = 
&prelude_def_map[prelude_module.local_id].scope; @@ -273,7 +311,7 @@ fn find_in_prelude( }); if found_and_same_def.unwrap_or(true) { - Some(ModPath::from_segments(PathKind::Plain, iter::once(name.clone()))) + Some(Choice::new(false, PathKind::Plain, name.clone(), Stable)) } else { None } @@ -283,7 +321,7 @@ fn is_kw_kind_relative_to_from( def_map: &DefMap, item: ModuleId, from: ModuleId, -) -> Option<ModPath> { +) -> Option<PathKind> { if item.krate != from.krate || item.is_within_block() || from.is_within_block() { return None; } @@ -291,14 +329,11 @@ fn is_kw_kind_relative_to_from( let from = from.local_id; if item == from { // - if the item is the module we're in, use `self` - Some(ModPath::from_segments(PathKind::SELF, None)) + Some(PathKind::SELF) } else if let Some(parent_id) = def_map[from].parent { if item == parent_id { // - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly) - Some(ModPath::from_segments( - if parent_id == DefMap::ROOT { PathKind::Crate } else { PathKind::Super(1) }, - None, - )) + Some(if parent_id == DefMap::ROOT { PathKind::Crate } else { PathKind::Super(1) }) } else { None } @@ -310,15 +345,11 @@ fn is_kw_kind_relative_to_from( #[tracing::instrument(skip_all)] fn calculate_best_path( ctx: &FindPathCtx<'_>, - visited_modules: &mut FxHashSet<ModuleId>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, item: ItemInNs, max_len: usize, -) -> Option<(ModPath, Stability)> { - if max_len <= 1 { - // recursive base case, we can't find a path prefix of length 0, one segment is occupied by - // the item's name itself. - return None; - } + best_choice: &mut Option<Choice>, +) { let fuel = ctx.fuel.get(); if fuel == 0 { // we ran out of fuel, so we stop searching here @@ -327,144 +358,208 @@ fn calculate_best_path( item.krate(ctx.db), ctx.from.krate() ); - return None; + return; } ctx.fuel.set(fuel - 1); - let mut best_path = None; - let mut best_path_len = max_len; - let mut process = |mut path: (ModPath, Stability), name, best_path_len: &mut _| { - path.0.push_segment(name); - let new_path = match best_path.take() { - Some(best_path) => select_best_path(best_path, path, ctx.cfg), - None => path, - }; - if new_path.1 == Stable { - *best_path_len = new_path.0.len(); - } - match &mut best_path { - Some((old_path, old_stability)) => { - *old_path = new_path.0; - *old_stability = zip_stability(*old_stability, new_path.1); - } - None => best_path = Some(new_path), - } - }; - let db = ctx.db; - if item.krate(db) == Some(ctx.from.krate) { + if item.krate(ctx.db) == Some(ctx.from.krate) { // Item was defined in the same crate that wants to import it. It cannot be found in any // dependency in this case. - // FIXME: cache the `find_local_import_locations` output? - find_local_import_locations(db, item, ctx.from, ctx.from_def_map, |name, module_id| { - if !visited_modules.insert(module_id) { - return; - } - // we are looking for paths of length up to best_path_len, any longer will make it be - // less optimal. The -1 is due to us pushing name onto it afterwards. - if let Some(path) = - find_path_for_module(ctx, visited_modules, module_id, best_path_len - 1) - { - process(path, name.clone(), &mut best_path_len); - } - }) + calculate_best_path_local(ctx, visited_modules, item, max_len, best_choice) } else { // Item was defined in some upstream crate. This means that it must be exported from one, // too (unless we can't name it at all). 
It could *also* be (re)exported by the same crate // that wants to import it here, but we always prefer to use the external path here. - for dep in &db.crate_graph()[ctx.from.krate].dependencies { - let import_map = db.import_map(dep.crate_id); - let Some(import_info_for) = import_map.import_info_for(item) else { continue }; - for info in import_info_for { - if info.is_doc_hidden { - // the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate - continue; - } - - // Determine best path for containing module and append last segment from `info`. - // FIXME: we should guide this to look up the path locally, or from the same crate again? - let path = - find_path_for_module(ctx, visited_modules, info.container, best_path_len - 1); - let Some((path, path_stability)) = path else { - continue; - }; - cov_mark::hit!(partially_imported); - let path = ( - path, - zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }), - ); + ctx.db.crate_graph()[ctx.from.krate].dependencies.iter().for_each(|dep| { + find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id) + }); + } +} - process(path, info.name.clone(), &mut best_path_len); +fn find_in_sysroot( + ctx: &FindPathCtx<'_>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + item: ItemInNs, + max_len: usize, +) -> Option<Choice> { + let crate_graph = ctx.db.crate_graph(); + let dependencies = &crate_graph[ctx.from.krate].dependencies; + let mut best_choice = None; + let mut search = |lang, best_choice: &mut _| { + if let Some(dep) = dependencies.iter().filter(|it| it.is_sysroot()).find(|dep| { + match crate_graph[dep.crate_id].origin { + CrateOrigin::Lang(l) => l == lang, + _ => false, } + }) { + find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id); + } + }; + if ctx.cfg.prefer_no_std { + search(LangCrateOrigin::Core, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; + } + search(LangCrateOrigin::Std, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; + } + } else { + search(LangCrateOrigin::Std, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; + } + search(LangCrateOrigin::Core, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; } } - best_path + let mut best_choice = None; + dependencies.iter().filter(|it| it.is_sysroot()).for_each(|dep| { + find_in_dep(ctx, visited_modules, item, max_len, &mut best_choice, dep.crate_id); + }); + best_choice } -/// Select the best (most relevant) path between two paths. -/// This accounts for stability, path length whether, std should be chosen over alloc/core paths as -/// well as ignoring prelude like paths or not. 
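// Illustrative sketch (not the upstream code): the `select_best_path` helper removed just below
// is superseded by `Choice::try_select` later in this file, which ranks candidate paths by
// stability first, then by whether a preferred `prelude` segment is present, then by segment
// count, and finally by rendered text length. A simplified standalone ranking; the field names
// are hypothetical, ordered so the derived lexicographic `Ord` encodes the same priorities:
use std::cmp::Reverse;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct PathRank {
    stable: bool,                   // stable paths beat unstable ones (false < true)
    prefers_prelude: bool,          // paths through `prelude` win when that preference is set
    fewer_segments: Reverse<usize>, // fewer path segments is better
    shorter_text: Reverse<usize>,   // shorter rendered text breaks the remaining ties
}

fn keep_better(current: PathRank, candidate: PathRank) -> PathRank {
    if candidate > current { candidate } else { current }
}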
-fn select_best_path( - old_path @ (_, old_stability): (ModPath, Stability), - new_path @ (_, new_stability): (ModPath, Stability), - cfg: ImportPathConfig, -) -> (ModPath, Stability) { - match (old_stability, new_stability) { - (Stable, Unstable) => return old_path, - (Unstable, Stable) => return new_path, - _ => {} - } - const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; - - let choose = |new: (ModPath, _), old: (ModPath, _)| { - let (new_path, _) = &new; - let (old_path, _) = &old; - let new_has_prelude = new_path.segments().iter().any(|seg| seg == &known::prelude); - let old_has_prelude = old_path.segments().iter().any(|seg| seg == &known::prelude); - match (new_has_prelude, old_has_prelude, cfg.prefer_prelude) { - (true, false, true) | (false, true, false) => new, - (true, false, false) | (false, true, true) => old, - // no prelude difference in the paths, so pick the shorter one - (true, true, _) | (false, false, _) => { - let new_path_is_shorter = new_path - .len() - .cmp(&old_path.len()) - .then_with(|| new_path.textual_len().cmp(&old_path.textual_len())) - .is_lt(); - if new_path_is_shorter { - new - } else { - old - } +fn find_in_dep( + ctx: &FindPathCtx<'_>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + item: ItemInNs, + max_len: usize, + best_choice: &mut Option<Choice>, + dep: CrateId, +) { + let import_map = ctx.db.import_map(dep); + let Some(import_info_for) = import_map.import_info_for(item) else { + return; + }; + for info in import_info_for { + if info.is_doc_hidden { + // the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate + continue; + } + + // Determine best path for containing module and append last segment from `info`. + // FIXME: we should guide this to look up the path locally, or from the same crate again? + let choice = find_path_for_module( + ctx, + visited_modules, + info.container, + true, + best_choice.as_ref().map_or(max_len, |it| it.path.len()) - 1, + ); + let Some(mut choice) = choice else { + continue; + }; + cov_mark::hit!(partially_imported); + if info.is_unstable { + choice.stability = Unstable; + } + + Choice::try_select(best_choice, choice, ctx.cfg.prefer_prelude, info.name.clone()); + } +} + +fn calculate_best_path_local( + ctx: &FindPathCtx<'_>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + item: ItemInNs, + max_len: usize, + best_choice: &mut Option<Choice>, +) { + // FIXME: cache the `find_local_import_locations` output? + find_local_import_locations( + ctx.db, + item, + ctx.from, + ctx.from_def_map, + visited_modules, + |visited_modules, name, module_id| { + // we are looking for paths of length up to best_path_len, any longer will make it be + // less optimal. The -1 is due to us pushing name onto it afterwards. 
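// Illustrative, not part of the change: the `-1` mentioned in the comment above shrinks the
// search budget for the containing module's prefix because the item's own name is pushed onto
// the path afterwards; the call just below computes it as
// `best_choice.as_ref().map_or(max_len, |it| it.path.len()) - 1`. A standalone sketch of that
// budget computation (hypothetical helper name; `saturating_sub` only guards the zero edge case):
fn prefix_budget(max_len: usize, best_path_len_so_far: Option<usize>) -> usize {
    best_path_len_so_far.unwrap_or(max_len).saturating_sub(1)
}
// e.g. prefix_budget(15, None) == 14 and prefix_budget(15, Some(3)) == 2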
+ if let Some(choice) = find_path_for_module( + ctx, + visited_modules, + module_id, + false, + best_choice.as_ref().map_or(max_len, |it| it.path.len()) - 1, + ) { + Choice::try_select(best_choice, choice, ctx.cfg.prefer_prelude, name.clone()); } + }, + ); +} + +struct Choice { + path: ModPath, + /// The length in characters of the path + path_text_len: usize, + /// The stability of the path + stability: Stability, + /// Whether this path contains a prelude segment and preference for it has been signaled + prefer_due_to_prelude: bool, +} + +impl Choice { + fn new(prefer_prelude: bool, kind: PathKind, name: Name, stability: Stability) -> Self { + Self { + path_text_len: path_kind_len(kind) + name.as_str().len(), + stability, + prefer_due_to_prelude: prefer_prelude && name == sym::prelude, + path: ModPath::from_segments(kind, iter::once(name)), } - }; + } - match (old_path.0.segments().first(), new_path.0.segments().first()) { - (Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => { - let rank = match cfg.prefer_no_std { - false => |name: &Name| match name { - name if name == &known::core => 0, - name if name == &known::alloc => 1, - name if name == &known::std => 2, - _ => unreachable!(), - }, - true => |name: &Name| match name { - name if name == &known::core => 2, - name if name == &known::alloc => 1, - name if name == &known::std => 0, - _ => unreachable!(), - }, - }; - let nrank = rank(new); - let orank = rank(old); - match nrank.cmp(&orank) { - Ordering::Less => old_path, - Ordering::Equal => choose(new_path, old_path), - Ordering::Greater => new_path, + fn push(mut self, prefer_prelude: bool, name: Name) -> Self { + self.path_text_len += name.as_str().len(); + self.prefer_due_to_prelude |= prefer_prelude && name == sym::prelude; + self.path.push_segment(name); + self + } + + fn try_select( + current: &mut Option<Choice>, + mut other: Choice, + prefer_prelude: bool, + name: Name, + ) { + let Some(current) = current else { + *current = Some(other.push(prefer_prelude, name)); + return; + }; + match other + .stability + .cmp(¤t.stability) + .then_with(|| other.prefer_due_to_prelude.cmp(¤t.prefer_due_to_prelude)) + .then_with(|| (current.path.len()).cmp(&(other.path.len() + 1))) + { + Ordering::Less => return, + Ordering::Equal => { + other.path_text_len += name.as_str().len(); + if let Ordering::Less | Ordering::Equal = + current.path_text_len.cmp(&other.path_text_len) + { + return; + } + } + Ordering::Greater => { + other.path_text_len += name.as_str().len(); } } - _ => choose(new_path, old_path), + other.path.push_segment(name); + *current = other; + } +} + +fn path_kind_len(kind: PathKind) -> usize { + match kind { + PathKind::Plain => 0, + PathKind::Super(0) => 4, + PathKind::Super(s) => s as usize * 5, + PathKind::Crate => 5, + PathKind::Abs => 2, + PathKind::DollarCrate(_) => 0, } } @@ -474,7 +569,8 @@ fn find_local_import_locations( item: ItemInNs, from: ModuleId, def_map: &DefMap, - mut cb: impl FnMut(&Name, ModuleId), + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + mut cb: impl FnMut(&mut FxHashSet<(ItemInNs, ModuleId)>, &Name, ModuleId), ) { let _p = tracing::info_span!("find_local_import_locations").entered(); @@ -487,32 +583,29 @@ fn find_local_import_locations( let mut worklist = def_map[from.local_id] .children .values() - .map(|child| def_map.module_id(*child)) - // FIXME: do we need to traverse out of block expressions here? 
+ .map(|&child| def_map.module_id(child)) .chain(iter::successors(from.containing_module(db), |m| m.containing_module(db))) + .zip(iter::repeat(false)) .collect::<Vec<_>>(); - let mut seen: FxHashSet<_> = FxHashSet::default(); let def_map = def_map.crate_root().def_map(db); - - while let Some(module) = worklist.pop() { - if !seen.insert(module) { - continue; // already processed this module + let mut block_def_map; + let mut cursor = 0; + + while let Some(&mut (module, ref mut processed)) = worklist.get_mut(cursor) { + cursor += 1; + if !visited_modules.insert((item, module)) { + // already processed this module + continue; } - let ext_def_map; - let data = if module.krate == from.krate { - if module.block.is_some() { - // Re-query the block's DefMap - ext_def_map = module.def_map(db); - &ext_def_map[module.local_id] - } else { - // Reuse the root DefMap - &def_map[module.local_id] - } + *processed = true; + let data = if module.block.is_some() { + // Re-query the block's DefMap + block_def_map = module.def_map(db); + &block_def_map[module.local_id] } else { - // The crate might reexport a module defined in another crate. - ext_def_map = module.def_map(db); - &ext_def_map[module.local_id] + // Reuse the root DefMap + &def_map[module.local_id] }; if let Some((name, vis, declared)) = data.scope.name_of(item) { @@ -535,18 +628,30 @@ fn find_local_import_locations( // the item and we're a submodule of it, so can we. // Also this keeps the cached data smaller. if declared || is_pub_or_explicit { - cb(name, module); + cb(visited_modules, name, module); } } } // Descend into all modules visible from `from`. for (module, vis) in data.scope.modules_in_scope() { + if module.krate != from.krate { + // We don't need to look at modules from other crates as our item has to be in the + // current crate + continue; + } + if visited_modules.contains(&(item, module)) { + continue; + } + if vis.is_visible_from(db, from) { - worklist.push(module); + worklist.push((module, false)); } } } + worklist.into_iter().filter(|&(_, processed)| processed).for_each(|(module, _)| { + visited_modules.remove(&(item, module)); + }); } #[cfg(test)] @@ -1514,8 +1619,6 @@ fn main() { #[test] fn from_inside_module() { - // This worked correctly, but the test suite logic was broken. 
- cov_mark::check!(submodule_in_testdb); check_found_path( r#" mod baz { @@ -1541,6 +1644,35 @@ mod bar { } #[test] + fn from_inside_module2() { + check_found_path( + r#" +mod qux { + pub mod baz { + pub struct Foo {} + } + + mod bar { + fn bar() { + $0; + } + } +} + + "#, + "crate::qux::baz::Foo", + expect![[r#" + Plain (imports ✔): super::baz::Foo + Plain (imports ✖): super::baz::Foo + ByCrate(imports ✔): crate::qux::baz::Foo + ByCrate(imports ✖): crate::qux::baz::Foo + BySelf (imports ✔): super::baz::Foo + BySelf (imports ✖): super::baz::Foo + "#]], + ) + } + + #[test] fn from_inside_module_with_inner_items() { check_found_path( r#" @@ -1915,4 +2047,34 @@ pub fn c() {} "#]], ); } + + #[test] + fn prefer_long_std_over_short_extern() { + check_found_path( + r#" +//- /lib.rs crate:main deps:futures_lite,std,core +$0 +//- /futures_lite.rs crate:futures_lite deps:std,core +pub use crate::future::Future; +pub mod future { + pub use core::future::Future; +} +//- /std.rs crate:std deps:core +pub use core::future; +//- /core.rs crate:core +pub mod future { + pub trait Future {} +} +"#, + "core::future::Future", + expect![[r#" + Plain (imports ✔): std::future::Future + Plain (imports ✖): std::future::Future + ByCrate(imports ✔): std::future::Future + ByCrate(imports ✖): std::future::Future + BySelf (imports ✔): std::future::Future + BySelf (imports ✖): std::future::Future + "#]], + ); + } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs index d306f9be657..86fd092603f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs @@ -18,10 +18,9 @@ pub mod type_ref; use std::fmt; use hir_expand::name::Name; -use intern::Interned; +use intern::{Interned, Symbol}; use la_arena::{Idx, RawIdx}; use rustc_apfloat::ieee::{Half as f16, Quad as f128}; -use smallvec::SmallVec; use syntax::ast; use crate::{ @@ -60,41 +59,41 @@ pub type LabelId = Idx<Label>; // We leave float values as a string to avoid double rounding. // For PartialEq, string comparison should work, as ordering is not important // https://github.com/rust-lang/rust-analyzer/issues/12380#issuecomment-1137284360 -#[derive(Default, Debug, Clone, Eq, PartialEq)] -pub struct FloatTypeWrapper(Box<str>); +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct FloatTypeWrapper(Symbol); // FIXME(#17451): Use builtin types once stabilised. 
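// Illustrative sketch (not part of the change itself): `FloatTypeWrapper` above now stores the
// interned literal text as a `Symbol` instead of a `Box<str>`; the value is still kept as text
// and parsed on demand, which avoids double rounding, while interning makes clones and equality
// checks cheap. A minimal standalone equivalent using a plain `String` in place of the symbol:
struct FloatLit(String);

impl FloatLit {
    fn to_f32(&self) -> f32 {
        self.0.parse().unwrap_or_default()
    }
    fn to_f64(&self) -> f64 {
        self.0.parse().unwrap_or_default()
    }
}
// e.g. FloatLit("1.25".to_owned()).to_f64() == 1.25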
impl FloatTypeWrapper { - pub fn new(value: String) -> Self { - Self(value.into()) + pub fn new(sym: Symbol) -> Self { + Self(sym) } pub fn to_f128(&self) -> f128 { - self.0.parse().unwrap_or_default() + self.0.as_str().parse().unwrap_or_default() } pub fn to_f64(&self) -> f64 { - self.0.parse().unwrap_or_default() + self.0.as_str().parse().unwrap_or_default() } pub fn to_f32(&self) -> f32 { - self.0.parse().unwrap_or_default() + self.0.as_str().parse().unwrap_or_default() } pub fn to_f16(&self) -> f16 { - self.0.parse().unwrap_or_default() + self.0.as_str().parse().unwrap_or_default() } } impl fmt::Display for FloatTypeWrapper { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.0) + f.write_str(self.0.as_str()) } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum Literal { - String(Box<str>), + String(Symbol), ByteString(Box<[u8]>), CString(Box<[u8]>), Char(char), @@ -130,7 +129,10 @@ impl From<ast::LiteralKind> for Literal { match ast_lit_kind { LiteralKind::IntNumber(lit) => { if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) { - Literal::Float(FloatTypeWrapper::new(lit.value_string()), builtin) + Literal::Float( + FloatTypeWrapper::new(Symbol::intern(&lit.value_string())), + builtin, + ) } else if let builtin @ Some(_) = lit.suffix().and_then(BuiltinUint::from_suffix) { Literal::Uint(lit.value().unwrap_or(0), builtin) } else { @@ -140,14 +142,14 @@ impl From<ast::LiteralKind> for Literal { } LiteralKind::FloatNumber(lit) => { let ty = lit.suffix().and_then(BuiltinFloat::from_suffix); - Literal::Float(FloatTypeWrapper::new(lit.value_string()), ty) + Literal::Float(FloatTypeWrapper::new(Symbol::intern(&lit.value_string())), ty) } LiteralKind::ByteString(bs) => { let text = bs.value().map_or_else(|_| Default::default(), Box::from); Literal::ByteString(text) } LiteralKind::String(s) => { - let text = s.value().map_or_else(|_| Default::default(), Box::from); + let text = s.value().map_or_else(|_| Symbol::empty(), |it| Symbol::intern(&it)); Literal::String(text) } LiteralKind::CString(s) => { @@ -522,7 +524,6 @@ pub enum BindingProblems { pub struct Binding { pub name: Name, pub mode: BindingAnnotation, - pub definitions: SmallVec<[PatId; 1]>, pub problems: Option<BindingProblems>, } @@ -537,7 +538,7 @@ pub struct RecordFieldPat { pub enum Pat { Missing, Wild, - Tuple { args: Box<[PatId]>, ellipsis: Option<usize> }, + Tuple { args: Box<[PatId]>, ellipsis: Option<u32> }, Or(Box<[PatId]>), Record { path: Option<Box<Path>>, args: Box<[RecordFieldPat]>, ellipsis: bool }, Range { start: Option<Box<LiteralOrConst>>, end: Option<Box<LiteralOrConst>> }, @@ -545,7 +546,7 @@ pub enum Pat { Path(Box<Path>), Lit(ExprId), Bind { id: BindingId, subpat: Option<PatId> }, - TupleStruct { path: Option<Box<Path>>, args: Box<[PatId]>, ellipsis: Option<usize> }, + TupleStruct { path: Option<Box<Path>>, args: Box<[PatId]>, ellipsis: Option<u32> }, Ref { pat: PatId, mutability: Mutability }, Box { inner: PatId }, ConstBlock(ExprId), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs index de0fa982c86..390e7da677f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs @@ -1,12 +1,13 @@ //! Parses `format_args` input. 
-use std::mem; use hir_expand::name::Name; +use intern::Symbol; use rustc_parse_format as parse; +use span::SyntaxContextId; use stdx::TupleExt; use syntax::{ ast::{self, IsString}, - SmolStr, TextRange, TextSize, + TextRange, TextSize, }; use crate::hir::ExprId; @@ -28,7 +29,7 @@ pub struct FormatArguments { #[derive(Debug, Clone, PartialEq, Eq)] pub enum FormatArgsPiece { - Literal(Box<str>), + Literal(Symbol), Placeholder(FormatPlaceholder), } @@ -174,6 +175,7 @@ pub(crate) fn parse( is_direct_literal: bool, mut synth: impl FnMut(Name) -> ExprId, mut record_usage: impl FnMut(Name, Option<TextRange>), + call_ctx: SyntaxContextId, ) -> FormatArgs { let Ok(text) = s.value() else { return FormatArgs { @@ -248,7 +250,7 @@ pub(crate) fn parse( } } ArgRef::Name(name, span) => { - let name = Name::new_text_dont_use(SmolStr::new(name)); + let name = Name::new(name, tt::IdentIsRaw::No, call_ctx); if let Some((index, _)) = args.by_name(&name) { record_usage(name, span); // Name found in `args`, so we resolve it to its index. @@ -289,9 +291,8 @@ pub(crate) fn parse( parse::Piece::NextArgument(arg) => { let parse::Argument { position, position_span, format } = *arg; if !unfinished_literal.is_empty() { - template.push(FormatArgsPiece::Literal( - mem::take(&mut unfinished_literal).into_boxed_str(), - )); + template.push(FormatArgsPiece::Literal(Symbol::intern(&unfinished_literal))); + unfinished_literal.clear(); } let span = parser.arg_places.get(placeholder_index).and_then(|&s| to_span(s)); @@ -411,7 +412,7 @@ pub(crate) fn parse( } if !unfinished_literal.is_empty() { - template.push(FormatArgsPiece::Literal(unfinished_literal.into_boxed_str())); + template.push(FormatArgsPiece::Literal(Symbol::intern(&unfinished_literal))); } if !invalid_refs.is_empty() { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index 7272ed98ceb..8f618b2d303 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -9,7 +9,7 @@ use hir_expand::{ name::{AsName, Name}, AstId, }; -use intern::Interned; +use intern::{sym, Interned, Symbol}; use syntax::ast::{self, HasGenericArgs, HasName, IsString}; use crate::{ @@ -121,10 +121,10 @@ pub enum TypeRef { Slice(Box<TypeRef>), /// A fn pointer. Last element of the vector is the return type. Fn( - Vec<(Option<Name>, TypeRef)>, - bool, /*varargs*/ - bool, /*is_unsafe*/ - Option<Interned<str>>, /* abi */ + Box<[(Option<Name>, TypeRef)]>, + bool, /*varargs*/ + bool, /*is_unsafe*/ + Option<Symbol>, /* abi */ ), ImplTrait(Vec<Interned<TypeBound>>), DynTrait(Vec<Interned<TypeBound>>), @@ -228,19 +228,19 @@ impl TypeRef { }) .collect() } else { - Vec::new() + Vec::with_capacity(1) }; - fn lower_abi(abi: ast::Abi) -> Interned<str> { + fn lower_abi(abi: ast::Abi) -> Symbol { match abi.abi_string() { - Some(tok) => Interned::new_str(tok.text_without_quotes()), + Some(tok) => Symbol::intern(tok.text_without_quotes()), // `extern` default to be `extern "C"`. - _ => Interned::new_str("C"), + _ => sym::C.clone(), } } let abi = inner.abi().map(lower_abi); params.push((None, ret_ty)); - TypeRef::Fn(params, is_varargs, inner.unsafe_token().is_some(), abi) + TypeRef::Fn(params.into(), is_varargs, inner.unsafe_token().is_some(), abi) } // for types are close enough for our purposes to the inner type for now... 
ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()), @@ -379,6 +379,7 @@ impl TypeBound { None => TypeBound::Error, } } + ast::TypeBoundKind::Use(_) => TypeBound::Error, ast::TypeBoundKind::Lifetime(lifetime) => { TypeBound::Lifetime(LifetimeRef::new(&lifetime)) } @@ -396,7 +397,7 @@ impl TypeBound { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ConstRef { - Scalar(LiteralConstRef), + Scalar(Box<LiteralConstRef>), Path(Name), Complex(AstId<ast::ConstArg>), } @@ -408,7 +409,7 @@ impl ConstRef { return Self::from_expr(expr, Some(lower_ctx.ast_id(&arg))); } } - Self::Scalar(LiteralConstRef::Unknown) + Self::Scalar(Box::new(LiteralConstRef::Unknown)) } pub(crate) fn from_const_param( @@ -452,10 +453,10 @@ impl ConstRef { ast::Expr::PathExpr(p) if is_path_ident(&p) => { match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) { Some(it) => Self::Path(it.as_name()), - None => Self::Scalar(LiteralConstRef::Unknown), + None => Self::Scalar(Box::new(LiteralConstRef::Unknown)), } } - ast::Expr::Literal(literal) => Self::Scalar(match literal.kind() { + ast::Expr::Literal(literal) => Self::Scalar(Box::new(match literal.kind() { ast::LiteralKind::IntNumber(num) => { num.value().map(LiteralConstRef::UInt).unwrap_or(LiteralConstRef::Unknown) } @@ -464,12 +465,12 @@ impl ConstRef { } ast::LiteralKind::Bool(f) => LiteralConstRef::Bool(f), _ => LiteralConstRef::Unknown, - }), + })), _ => { if let Some(ast_id) = ast_id { Self::Complex(ast_id) } else { - Self::Scalar(LiteralConstRef::Unknown) + Self::Scalar(Box::new(LiteralConstRef::Unknown)) } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index 2b2db21a9f4..8cc022e4c60 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -9,6 +9,7 @@ use itertools::Itertools; use rustc_hash::FxHashSet; use smallvec::SmallVec; use stdx::{format_to, TupleExt}; +use syntax::ToSmolStr; use triomphe::Arc; use crate::{ @@ -81,9 +82,9 @@ impl ImportMap { .iter() // We've only collected items, whose name cannot be tuple field so unwrapping is fine. 
.flat_map(|(&item, (info, _))| { - info.iter() - .enumerate() - .map(move |(idx, info)| (item, info.name.to_smol_str(), idx as u32)) + info.iter().enumerate().map(move |(idx, info)| { + (item, info.name.display(db.upcast()).to_smolstr(), idx as u32) + }) }) .collect(); importables.sort_by(|(_, l_info, _), (_, r_info, _)| { @@ -412,7 +413,7 @@ pub fn search_dependencies( for map in &import_maps { op = op.add(map.fst.search(&automaton)); } - search_maps(&import_maps, op.union(), query) + search_maps(db, &import_maps, op.union(), query) } SearchMode::Fuzzy => { let automaton = fst::automaton::Subsequence::new(&query.lowercased); @@ -420,7 +421,7 @@ pub fn search_dependencies( for map in &import_maps { op = op.add(map.fst.search(&automaton)); } - search_maps(&import_maps, op.union(), query) + search_maps(db, &import_maps, op.union(), query) } SearchMode::Prefix => { let automaton = fst::automaton::Str::new(&query.lowercased).starts_with(); @@ -428,12 +429,13 @@ pub fn search_dependencies( for map in &import_maps { op = op.add(map.fst.search(&automaton)); } - search_maps(&import_maps, op.union(), query) + search_maps(db, &import_maps, op.union(), query) } } } fn search_maps( + db: &dyn DefDatabase, import_maps: &[Arc<ImportMap>], mut stream: fst::map::Union<'_>, query: &Query, @@ -459,7 +461,7 @@ fn search_maps( query.search_mode.check( &query.query, query.case_sensitive, - &info.name.to_smol_str(), + &info.name.display(db.upcast()).to_smolstr(), ) }); res.extend(iter.map(TupleExt::head)); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index d86c0667a0b..df6b1f55c1d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -1,10 +1,9 @@ //! Describes items defined or visible (ie, imported) in a certain scope. //! This is shared between modules and blocks. -use std::collections::hash_map::Entry; - use base_db::CrateId; use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCallId}; +use indexmap::map::Entry; use itertools::Itertools; use la_arena::Idx; use once_cell::sync::Lazy; @@ -17,8 +16,8 @@ use crate::{ db::DefDatabase, per_ns::PerNs, visibility::{Visibility, VisibilityExplicitness}, - AdtId, BuiltinType, ConstId, ExternCrateId, HasModule, ImplId, LocalModuleId, Lookup, MacroId, - ModuleDefId, ModuleId, TraitId, UseId, + AdtId, BuiltinType, ConstId, ExternCrateId, FxIndexMap, HasModule, ImplId, LocalModuleId, + Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, }; #[derive(Debug, Default)] @@ -62,14 +61,26 @@ pub struct ImportId { pub idx: Idx<ast::UseTree>, } +impl PerNsGlobImports { + pub(crate) fn contains_type(&self, module_id: LocalModuleId, name: Name) -> bool { + self.types.contains(&(module_id, name)) + } + pub(crate) fn contains_value(&self, module_id: LocalModuleId, name: Name) -> bool { + self.values.contains(&(module_id, name)) + } + pub(crate) fn contains_macro(&self, module_id: LocalModuleId, name: Name) -> bool { + self.macros.contains(&(module_id, name)) + } +} + #[derive(Debug, Default, PartialEq, Eq)] pub struct ItemScope { /// Defs visible in this scope. This includes `declarations`, but also /// imports. The imports belong to this module and can be resolved by using them on /// the `use_imports_*` fields. 
- types: FxHashMap<Name, (ModuleDefId, Visibility, Option<ImportOrExternCrate>)>, - values: FxHashMap<Name, (ModuleDefId, Visibility, Option<ImportId>)>, - macros: FxHashMap<Name, (MacroId, Visibility, Option<ImportId>)>, + types: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportOrExternCrate>)>, + values: FxIndexMap<Name, (ModuleDefId, Visibility, Option<ImportId>)>, + macros: FxIndexMap<Name, (MacroId, Visibility, Option<ImportId>)>, unresolved: FxHashSet<Name>, /// The defs declared in this scope. Each def has a single scope where it is @@ -118,8 +129,8 @@ struct DeriveMacroInvocation { derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>, } -pub(crate) static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| { - BuiltinType::ALL +pub(crate) static BUILTIN_SCOPE: Lazy<FxIndexMap<Name, PerNs>> = Lazy::new(|| { + BuiltinType::all_builtin_types() .iter() .map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public, None))) .collect() @@ -511,38 +522,48 @@ impl ItemScope { entry.insert(fld); changed = true; } - Entry::Occupied(mut entry) if !matches!(import, Some(ImportType::Glob(..))) => { - if glob_imports.types.remove(&lookup) { - let import = match import { - Some(ImportType::ExternCrate(extern_crate)) => { - Some(ImportOrExternCrate::ExternCrate(extern_crate)) - } - Some(ImportType::Import(import)) => { - Some(ImportOrExternCrate::Import(import)) - } - None | Some(ImportType::Glob(_)) => None, - }; - let prev = std::mem::replace(&mut fld.2, import); - if let Some(import) = import { - self.use_imports_types.insert( - import, - match prev { - Some(ImportOrExternCrate::Import(import)) => { - ImportOrDef::Import(import) + Entry::Occupied(mut entry) => { + match import { + Some(ImportType::Glob(..)) => { + // Multiple globs may import the same item and they may + // override visibility from previously resolved globs. This is + // currently handled by `DefCollector`, because we need to + // compute the max visibility for items and we need `DefMap` + // for that. + } + _ => { + if glob_imports.types.remove(&lookup) { + let import = match import { + Some(ImportType::ExternCrate(extern_crate)) => { + Some(ImportOrExternCrate::ExternCrate(extern_crate)) } - Some(ImportOrExternCrate::ExternCrate(import)) => { - ImportOrDef::ExternCrate(import) + Some(ImportType::Import(import)) => { + Some(ImportOrExternCrate::Import(import)) } - None => ImportOrDef::Def(fld.0), - }, - ); + None | Some(ImportType::Glob(_)) => None, + }; + let prev = std::mem::replace(&mut fld.2, import); + if let Some(import) = import { + self.use_imports_types.insert( + import, + match prev { + Some(ImportOrExternCrate::Import(import)) => { + ImportOrDef::Import(import) + } + Some(ImportOrExternCrate::ExternCrate(import)) => { + ImportOrDef::ExternCrate(import) + } + None => ImportOrDef::Def(fld.0), + }, + ); + } + cov_mark::hit!(import_shadowed); + entry.insert(fld); + changed = true; + } } - cov_mark::hit!(import_shadowed); - entry.insert(fld); - changed = true; } } - _ => {} } } @@ -757,6 +778,27 @@ impl ItemScope { } } +// These methods are a temporary measure only meant to be used by `DefCollector::push_res_and_update_glob_vis()`. 
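// Illustrative, not part of the change: the `types`/`values`/`macros` maps of `ItemScope`
// earlier in this hunk switch from `FxHashMap` to `FxIndexMap`, whose defining property is that
// iteration follows insertion order, so walking a scope yields a deterministic order. A minimal
// demonstration with the plain `indexmap` crate (names here are hypothetical):
use indexmap::IndexMap;

fn insertion_order_demo() {
    let mut scope: IndexMap<&str, u32> = IndexMap::new();
    scope.insert("zeta", 1);
    scope.insert("alpha", 2);
    // Unlike a HashMap, the keys come back in the order they were inserted.
    assert_eq!(scope.keys().copied().collect::<Vec<_>>(), vec!["zeta", "alpha"]);
}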
+impl ItemScope { + pub(crate) fn update_visibility_types(&mut self, name: &Name, vis: Visibility) { + let res = + self.types.get_mut(name).expect("tried to update visibility of non-existent type"); + res.1 = vis; + } + + pub(crate) fn update_visibility_values(&mut self, name: &Name, vis: Visibility) { + let res = + self.values.get_mut(name).expect("tried to update visibility of non-existent value"); + res.1 = vis; + } + + pub(crate) fn update_visibility_macros(&mut self, name: &Name, vis: Visibility) { + let res = + self.macros.get_mut(name).expect("tried to update visibility of non-existent macro"); + res.1 = vis; + } +} + impl PerNs { pub(crate) fn from_def( def: ModuleDefId, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index 7650dfe9f37..28eebb286ed 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -46,8 +46,8 @@ use ast::{AstNode, StructKind}; use base_db::CrateId; use either::Either; use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile}; -use intern::Interned; -use la_arena::{Arena, Idx, IdxRange, RawIdx}; +use intern::{Interned, Symbol}; +use la_arena::{Arena, Idx, RawIdx}; use once_cell::sync::OnceCell; use rustc_hash::FxHashMap; use smallvec::SmallVec; @@ -218,9 +218,7 @@ impl ItemTree { extern_crates, extern_blocks, functions, - params, structs, - fields, unions, enums, variants, @@ -241,9 +239,7 @@ impl ItemTree { extern_crates.shrink_to_fit(); extern_blocks.shrink_to_fit(); functions.shrink_to_fit(); - params.shrink_to_fit(); structs.shrink_to_fit(); - fields.shrink_to_fit(); unions.shrink_to_fit(); enums.shrink_to_fit(); variants.shrink_to_fit(); @@ -295,9 +291,7 @@ struct ItemTreeData { extern_crates: Arena<ExternCrate>, extern_blocks: Arena<ExternBlock>, functions: Arena<Function>, - params: Arena<Param>, structs: Arena<Struct>, - fields: Arena<Field>, unions: Arena<Union>, enums: Arena<Enum>, variants: Arena<Variant>, @@ -315,7 +309,7 @@ struct ItemTreeData { vis: ItemVisibilities, } -#[derive(Debug, Eq, PartialEq, Hash)] +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub enum AttrOwner { /// Attributes on an item. ModItem(ModItem), @@ -323,12 +317,28 @@ pub enum AttrOwner { TopLevel, Variant(FileItemTreeId<Variant>), - Field(Idx<Field>), - Param(Idx<Param>), + Field(FieldParent, ItemTreeFieldId), + Param(FileItemTreeId<Function>, ItemTreeParamId), TypeOrConstParamData(GenericModItem, LocalTypeOrConstParamId), LifetimeParamData(GenericModItem, LocalLifetimeParamId), } +impl AttrOwner { + pub fn make_field_indexed(parent: FieldParent, idx: usize) -> Self { + AttrOwner::Field(parent, ItemTreeFieldId::from_raw(RawIdx::from_u32(idx as u32))) + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] +pub enum FieldParent { + Struct(FileItemTreeId<Struct>), + Union(FileItemTreeId<Union>), + Variant(FileItemTreeId<Variant>), +} + +pub type ItemTreeParamId = Idx<Param>; +pub type ItemTreeFieldId = Idx<Field>; + macro_rules! from_attrs { ( $( $var:ident($t:ty) ),+ $(,)? ) => { $( @@ -341,12 +351,7 @@ macro_rules! from_attrs { }; } -from_attrs!( - ModItem(ModItem), - Variant(FileItemTreeId<Variant>), - Field(Idx<Field>), - Param(Idx<Param>), -); +from_attrs!(ModItem(ModItem), Variant(FileItemTreeId<Variant>)); /// Trait implemented by all nodes in the item tree. 
pub trait ItemTreeNode: Clone { @@ -365,7 +370,7 @@ pub trait GenericsItemTreeNode: ItemTreeNode { pub struct FileItemTreeId<N>(Idx<N>); impl<N> FileItemTreeId<N> { - pub fn range_iter(range: Range<Self>) -> impl Iterator<Item = Self> { + pub fn range_iter(range: Range<Self>) -> impl Iterator<Item = Self> + Clone { (range.start.index().into_raw().into_u32()..range.end.index().into_raw().into_u32()) .map(RawIdx::from_u32) .map(Idx::from_raw) @@ -417,18 +422,18 @@ impl TreeId { Self { file, block } } - pub(crate) fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> { + pub fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> { match self.block { Some(block) => db.block_item_tree(block), None => db.file_item_tree(self.file), } } - pub(crate) fn file_id(self) -> HirFileId { + pub fn file_id(self) -> HirFileId { self.file } - pub(crate) fn is_block(self) -> bool { + pub fn is_block(self) -> bool { self.block.is_some() } } @@ -505,6 +510,27 @@ macro_rules! mod_items { )+ } + impl ModItem { + pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> { + match self { + $(ModItem::$typ(it) => tree[it.index()].ast_id().upcast()),+ + } + } + } + + impl GenericModItem { + pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::AnyHasGenericParams> { + match self { + $( + $( + #[cfg_attr(ignore_fragment, $generic_params)] + GenericModItem::$typ(it) => tree[it.index()].ast_id().upcast(), + )? + )+ + } + } + } + impl From<GenericModItem> for ModItem { fn from(id: GenericModItem) -> ModItem { match id { @@ -596,22 +622,6 @@ mod_items! { Macro2 in macro_defs -> ast::MacroDef, } -macro_rules! impl_index { - ( $($fld:ident: $t:ty),+ $(,)? ) => { - $( - impl Index<Idx<$t>> for ItemTree { - type Output = $t; - - fn index(&self, index: Idx<$t>) -> &Self::Output { - &self.data().$fld[index] - } - } - )+ - }; -} - -impl_index!(fields: Field, variants: Variant, params: Param); - impl Index<RawVisibilityId> for ItemTree { type Output = RawVisibility; fn index(&self, index: RawVisibilityId) -> &Self::Output { @@ -712,7 +722,7 @@ pub struct ExternCrate { #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExternBlock { - pub abi: Option<Interned<str>>, + pub abi: Option<Symbol>, pub ast_id: FileAstId<ast::ExternBlock>, pub children: Box<[ModItem]>, } @@ -722,8 +732,8 @@ pub struct Function { pub name: Name, pub visibility: RawVisibilityId, pub explicit_generic_params: Interned<GenericParams>, - pub abi: Option<Interned<str>>, - pub params: IdxRange<Param>, + pub abi: Option<Symbol>, + pub params: Box<[Param]>, pub ret_type: Interned<TypeRef>, pub ast_id: FileAstId<ast::Fn>, pub(crate) flags: FnFlags, @@ -731,15 +741,7 @@ pub struct Function { #[derive(Debug, Clone, PartialEq, Eq)] pub struct Param { - /// This is [`None`] for varargs pub type_ref: Option<Interned<TypeRef>>, - pub ast_id: ParamAstId, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ParamAstId { - Param(FileAstId<ast::Param>), - SelfParam(FileAstId<ast::SelfParam>), } bitflags::bitflags! 
{ @@ -760,7 +762,8 @@ pub struct Struct { pub name: Name, pub visibility: RawVisibilityId, pub generic_params: Interned<GenericParams>, - pub fields: Fields, + pub fields: Box<[Field]>, + pub shape: FieldsShape, pub ast_id: FileAstId<ast::Struct>, } @@ -769,7 +772,7 @@ pub struct Union { pub name: Name, pub visibility: RawVisibilityId, pub generic_params: Interned<GenericParams>, - pub fields: Fields, + pub fields: Box<[Field]>, pub ast_id: FileAstId<ast::Union>, } @@ -782,6 +785,29 @@ pub struct Enum { pub ast_id: FileAstId<ast::Enum>, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Variant { + pub name: Name, + pub fields: Box<[Field]>, + pub shape: FieldsShape, + pub ast_id: FileAstId<ast::Variant>, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum FieldsShape { + Record, + Tuple, + Unit, +} + +/// A single field of an enum variant or struct +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Field { + pub name: Name, + pub type_ref: Interned<TypeRef>, + pub visibility: RawVisibilityId, +} + #[derive(Debug, Clone, Eq, PartialEq)] pub struct Const { /// `None` for `const _: () = ();` @@ -1039,28 +1065,6 @@ impl ModItem { &ModItem::Function(func) => Some(AssocItem::Function(func)), } } - - pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> { - match self { - ModItem::Use(it) => tree[it.index()].ast_id().upcast(), - ModItem::ExternCrate(it) => tree[it.index()].ast_id().upcast(), - ModItem::ExternBlock(it) => tree[it.index()].ast_id().upcast(), - ModItem::Function(it) => tree[it.index()].ast_id().upcast(), - ModItem::Struct(it) => tree[it.index()].ast_id().upcast(), - ModItem::Union(it) => tree[it.index()].ast_id().upcast(), - ModItem::Enum(it) => tree[it.index()].ast_id().upcast(), - ModItem::Const(it) => tree[it.index()].ast_id().upcast(), - ModItem::Static(it) => tree[it.index()].ast_id().upcast(), - ModItem::Trait(it) => tree[it.index()].ast_id().upcast(), - ModItem::TraitAlias(it) => tree[it.index()].ast_id().upcast(), - ModItem::Impl(it) => tree[it.index()].ast_id().upcast(), - ModItem::TypeAlias(it) => tree[it.index()].ast_id().upcast(), - ModItem::Mod(it) => tree[it.index()].ast_id().upcast(), - ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(), - ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(), - ModItem::Macro2(it) => tree[it.index()].ast_id().upcast(), - } - } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] @@ -1099,32 +1103,3 @@ impl AssocItem { } } } - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Variant { - pub name: Name, - pub fields: Fields, - pub ast_id: FileAstId<ast::Variant>, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum Fields { - Record(IdxRange<Field>), - Tuple(IdxRange<Field>), - Unit, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum FieldAstId { - Record(FileAstId<ast::RecordField>), - Tuple(FileAstId<ast::TupleField>), -} - -/// A single field of an enum variant or struct -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Field { - pub name: Name, - pub type_ref: Interned<TypeRef>, - pub visibility: RawVisibilityId, - pub ast_id: FieldAstId, -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 199b8daa37e..7aac383ab47 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -2,7 +2,8 @@ use std::collections::hash_map::Entry; -use hir_expand::{mod_path::path, name, name::AsName, span_map::SpanMapRef, 
HirFileId}; +use hir_expand::{mod_path::path, name::AsName, span_map::SpanMapRef, HirFileId}; +use intern::{sym, Symbol}; use la_arena::Arena; use rustc_hash::FxHashMap; use span::{AstIdMap, SyntaxContextId}; @@ -16,12 +17,12 @@ use crate::{ db::DefDatabase, generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance}, item_tree::{ - AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldAstId, - Fields, FileItemTreeId, FnFlags, Function, GenericArgs, GenericModItem, Idx, IdxRange, - Impl, ImportAlias, Interned, ItemTree, ItemTreeData, ItemTreeNode, Macro2, MacroCall, - MacroRules, Mod, ModItem, ModKind, ModPath, Mutability, Name, Param, ParamAstId, Path, - Range, RawAttrs, RawIdx, RawVisibilityId, Static, Struct, StructKind, Trait, TraitAlias, - TypeAlias, Union, Use, UseTree, UseTreeKind, Variant, + AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldParent, + FieldsShape, FileItemTreeId, FnFlags, Function, GenericArgs, GenericModItem, Idx, Impl, + ImportAlias, Interned, ItemTree, ItemTreeData, Macro2, MacroCall, MacroRules, Mod, ModItem, + ModKind, ModPath, Mutability, Name, Param, Path, Range, RawAttrs, RawIdx, RawVisibilityId, + Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, + Variant, }, path::AssociatedTypeBinding, type_ref::{LifetimeRef, TraitBoundModifier, TraitRef, TypeBound, TypeRef}, @@ -29,7 +30,7 @@ use crate::{ LocalLifetimeParamId, LocalTypeOrConstParamId, }; -fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> { +fn id<N>(index: Idx<N>) -> FileItemTreeId<N> { FileItemTreeId(index) } @@ -192,82 +193,98 @@ impl<'a> Ctx<'a> { let visibility = self.lower_visibility(strukt); let name = strukt.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(strukt); - let fields = self.lower_fields(&strukt.kind()); + let (fields, kind, attrs) = self.lower_fields(&strukt.kind()); let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt); - let res = Struct { name, visibility, generic_params, fields, ast_id }; + let res = Struct { name, visibility, generic_params, fields, shape: kind, ast_id }; let id = id(self.data().structs.alloc(res)); + for (idx, attr) in attrs { + self.add_attrs( + AttrOwner::Field( + FieldParent::Struct(id), + Idx::from_raw(RawIdx::from_u32(idx as u32)), + ), + attr, + ); + } self.write_generic_params_attributes(id.into()); Some(id) } - fn lower_fields(&mut self, strukt_kind: &ast::StructKind) -> Fields { + fn lower_fields( + &mut self, + strukt_kind: &ast::StructKind, + ) -> (Box<[Field]>, FieldsShape, Vec<(usize, RawAttrs)>) { match strukt_kind { ast::StructKind::Record(it) => { - let range = self.lower_record_fields(it); - Fields::Record(range) + let mut fields = vec![]; + let mut attrs = vec![]; + + for (i, field) in it.fields().enumerate() { + let data = self.lower_record_field(&field); + fields.push(data); + let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map()); + if !attr.is_empty() { + attrs.push((i, attr)) + } + } + (fields.into(), FieldsShape::Record, attrs) } ast::StructKind::Tuple(it) => { - let range = self.lower_tuple_fields(it); - Fields::Tuple(range) - } - ast::StructKind::Unit => Fields::Unit, - } - } - - fn lower_record_fields(&mut self, fields: &ast::RecordFieldList) -> IdxRange<Field> { - let start = self.next_field_idx(); - for field in fields.fields() { - if let Some(data) = self.lower_record_field(&field) { - let idx = self.data().fields.alloc(data); - self.add_attrs( - 
idx.into(), - RawAttrs::new(self.db.upcast(), &field, self.span_map()), - ); + let mut fields = vec![]; + let mut attrs = vec![]; + + for (i, field) in it.fields().enumerate() { + let data = self.lower_tuple_field(i, &field); + fields.push(data); + let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map()); + if !attr.is_empty() { + attrs.push((i, attr)) + } + } + (fields.into(), FieldsShape::Tuple, attrs) } + ast::StructKind::Unit => (Box::default(), FieldsShape::Unit, Vec::default()), } - let end = self.next_field_idx(); - IdxRange::new(start..end) } - fn lower_record_field(&mut self, field: &ast::RecordField) -> Option<Field> { - let name = field.name()?.as_name(); + fn lower_record_field(&mut self, field: &ast::RecordField) -> Field { + let name = match field.name() { + Some(name) => name.as_name(), + None => Name::missing(), + }; let visibility = self.lower_visibility(field); let type_ref = self.lower_type_ref_opt(field.ty()); - let ast_id = FieldAstId::Record(self.source_ast_id_map.ast_id(field)); - let res = Field { name, type_ref, visibility, ast_id }; - Some(res) - } - fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldList) -> IdxRange<Field> { - let start = self.next_field_idx(); - for (i, field) in fields.fields().enumerate() { - let data = self.lower_tuple_field(i, &field); - let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map())); - } - let end = self.next_field_idx(); - IdxRange::new(start..end) + Field { name, type_ref, visibility } } fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field { let name = Name::new_tuple_field(idx); let visibility = self.lower_visibility(field); let type_ref = self.lower_type_ref_opt(field.ty()); - let ast_id = FieldAstId::Tuple(self.source_ast_id_map.ast_id(field)); - Field { name, type_ref, visibility, ast_id } + Field { name, type_ref, visibility } } fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> { let visibility = self.lower_visibility(union); let name = union.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(union); - let fields = match union.record_field_list() { + let (fields, _, attrs) = match union.record_field_list() { Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), - None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())), + None => (Box::default(), FieldsShape::Record, Vec::default()), }; let generic_params = self.lower_generic_params(HasImplicitSelf::No, union); let res = Union { name, visibility, generic_params, fields, ast_id }; let id = id(self.data().unions.alloc(res)); + for (idx, attr) in attrs { + self.add_attrs( + AttrOwner::Field( + FieldParent::Union(id), + Idx::from_raw(RawIdx::from_u32(idx as u32)), + ), + attr, + ); + } self.write_generic_params_attributes(id.into()); Some(id) } @@ -292,24 +309,35 @@ impl<'a> Ctx<'a> { fn lower_variants(&mut self, variants: &ast::VariantList) -> Range<FileItemTreeId<Variant>> { let start = self.next_variant_idx(); for variant in variants.variants() { - if let Some(data) = self.lower_variant(&variant) { - let idx = self.data().variants.alloc(data); - self.add_attrs( - id(idx).into(), - RawAttrs::new(self.db.upcast(), &variant, self.span_map()), - ); - } + let idx = self.lower_variant(&variant); + self.add_attrs( + id(idx).into(), + RawAttrs::new(self.db.upcast(), &variant, self.span_map()), + ); } let end = self.next_variant_idx(); 
FileItemTreeId(start)..FileItemTreeId(end) } - fn lower_variant(&mut self, variant: &ast::Variant) -> Option<Variant> { - let name = variant.name()?.as_name(); - let fields = self.lower_fields(&variant.kind()); + fn lower_variant(&mut self, variant: &ast::Variant) -> Idx<Variant> { + let name = match variant.name() { + Some(name) => name.as_name(), + None => Name::missing(), + }; + let (fields, kind, attrs) = self.lower_fields(&variant.kind()); let ast_id = self.source_ast_id_map.ast_id(variant); - let res = Variant { name, fields, ast_id }; - Some(res) + let res = Variant { name, fields, shape: kind, ast_id }; + let id = self.data().variants.alloc(res); + for (idx, attr) in attrs { + self.add_attrs( + AttrOwner::Field( + FieldParent::Variant(FileItemTreeId(id)), + Idx::from_raw(RawIdx::from_u32(idx as u32)), + ), + attr, + ); + } + id } fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> { @@ -317,13 +345,25 @@ impl<'a> Ctx<'a> { let name = func.name()?.as_name(); let mut has_self_param = false; - let start_param = self.next_param_idx(); + let mut has_var_args = false; + let mut params = vec![]; + let mut attrs = vec![]; + let mut push_attr = |idx, attr: RawAttrs| { + if !attr.is_empty() { + attrs.push((idx, attr)) + } + }; if let Some(param_list) = func.param_list() { if let Some(self_param) = param_list.self_param() { + push_attr( + params.len(), + RawAttrs::new(self.db.upcast(), &self_param, self.span_map()), + ); let self_type = match self_param.ty() { Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref), None => { - let self_type = TypeRef::Path(name![Self].into()); + let self_type = + TypeRef::Path(Name::new_symbol_root(sym::Self_.clone()).into()); match self_param.kind() { ast::SelfParamKind::Owned => self_type, ast::SelfParamKind::Ref => TypeRef::Reference( @@ -340,40 +380,25 @@ impl<'a> Ctx<'a> { } }; let type_ref = Interned::new(self_type); - let ast_id = self.source_ast_id_map.ast_id(&self_param); - let idx = self.data().params.alloc(Param { - type_ref: Some(type_ref), - ast_id: ParamAstId::SelfParam(ast_id), - }); - self.add_attrs( - idx.into(), - RawAttrs::new(self.db.upcast(), &self_param, self.span_map()), - ); + params.push(Param { type_ref: Some(type_ref) }); has_self_param = true; } for param in param_list.params() { - let ast_id = self.source_ast_id_map.ast_id(¶m); - let idx = match param.dotdotdot_token() { - Some(_) => self - .data() - .params - .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }), + push_attr(params.len(), RawAttrs::new(self.db.upcast(), ¶m, self.span_map())); + let param = match param.dotdotdot_token() { + Some(_) => { + has_var_args = true; + Param { type_ref: None } + } None => { let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); let ty = Interned::new(type_ref); - self.data() - .params - .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) }) + Param { type_ref: Some(ty) } } }; - self.add_attrs( - idx.into(), - RawAttrs::new(self.db.upcast(), ¶m, self.span_map()), - ); + params.push(param); } } - let end_param = self.next_param_idx(); - let params = IdxRange::new(start_param..end_param); let ret_type = match func.ret_type() { Some(rt) => match rt.ty() { @@ -415,19 +440,25 @@ impl<'a> Ctx<'a> { if func.unsafe_token().is_some() { flags |= FnFlags::HAS_UNSAFE_KW; } + if has_var_args { + flags |= FnFlags::IS_VARARGS; + } let res = Function { name, visibility, explicit_generic_params: self.lower_generic_params(HasImplicitSelf::No, func), abi, - params, + params: 
params.into_boxed_slice(), ret_type: Interned::new(ret_type), ast_id, flags, }; let id = id(self.data().functions.alloc(res)); + for (idx, attr) in attrs { + self.add_attrs(AttrOwner::Param(id, Idx::from_raw(RawIdx::from_u32(idx as u32))), attr); + } self.write_generic_params_attributes(id.into()); Some(id) } @@ -669,7 +700,7 @@ impl<'a> Ctx<'a> { // Traits and trait aliases get the Self type as an implicit first type parameter. generics.type_or_consts.alloc( TypeParamData { - name: Some(name![Self]), + name: Some(Name::new_symbol_root(sym::Self_.clone())), default: None, provenance: TypeParamProvenance::TraitSelf, } @@ -680,7 +711,7 @@ impl<'a> Ctx<'a> { generics.fill_bounds( &self.body_ctx, bounds, - Either::Left(TypeRef::Path(name![Self].into())), + Either::Left(TypeRef::Path(Name::new_symbol_root(sym::Self_.clone()).into())), ); } @@ -723,21 +754,11 @@ impl<'a> Ctx<'a> { } } - fn next_field_idx(&self) -> Idx<Field> { - Idx::from_raw(RawIdx::from( - self.tree.data.as_ref().map_or(0, |data| data.fields.len() as u32), - )) - } fn next_variant_idx(&self) -> Idx<Variant> { Idx::from_raw(RawIdx::from( self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32), )) } - fn next_param_idx(&self) -> Idx<Param> { - Idx::from_raw(RawIdx::from( - self.tree.data.as_ref().map_or(0, |data| data.params.len() as u32), - )) - } } fn desugar_future_path(orig: TypeRef) -> Path { @@ -745,7 +766,7 @@ fn desugar_future_path(orig: TypeRef) -> Path { let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments().len() - 1).collect(); let binding = AssociatedTypeBinding { - name: name![Output], + name: Name::new_symbol_root(sym::Output.clone()), args: None, type_ref: Some(orig), bounds: Box::default(), @@ -764,11 +785,11 @@ enum HasImplicitSelf { No, } -fn lower_abi(abi: ast::Abi) -> Interned<str> { +fn lower_abi(abi: ast::Abi) -> Symbol { match abi.abi_string() { - Some(tok) => Interned::new_str(tok.text_without_quotes()), + Some(tok) => Symbol::intern(tok.text_without_quotes()), // `extern` default to be `extern "C"`. 
- _ => Interned::new_str("C"), + _ => sym::C.clone(), } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 6283ae23b52..740759e6e39 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -2,16 +2,17 @@ use std::fmt::{self, Write}; +use la_arena::{Idx, RawIdx}; use span::ErasedFileAstId; use crate::{ generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, item_tree::{ - AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldAstId, Fields, - FileItemTreeId, FnFlags, Function, GenericModItem, GenericParams, Impl, Interned, ItemTree, - Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, Param, ParamAstId, Path, RawAttrs, - RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, TypeRef, Union, - Use, UseTree, UseTreeKind, Variant, + AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldParent, + FieldsShape, FileItemTreeId, FnFlags, Function, GenericModItem, GenericParams, Impl, + Interned, ItemTree, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, Param, Path, + RawAttrs, RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, + TypeRef, Union, Use, UseTree, UseTreeKind, Variant, }, pretty::{print_path, print_type_bounds, print_type_ref}, visibility::RawVisibility, @@ -118,19 +119,17 @@ impl Printer<'_> { }; } - fn print_fields(&mut self, fields: &Fields) { - match fields { - Fields::Record(fields) => { + fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) { + match kind { + FieldsShape::Record => { self.whitespace(); w!(self, "{{"); self.indented(|this| { - for field in fields.clone() { - let Field { visibility, name, type_ref, ast_id } = &this.tree[field]; - this.print_ast_id(match ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }); - this.print_attrs_of(field, "\n"); + for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() { + this.print_attrs_of( + AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))), + "\n", + ); this.print_visibility(*visibility); w!(this, "{}: ", name.display(self.db.upcast())); this.print_type_ref(type_ref); @@ -139,16 +138,14 @@ impl Printer<'_> { }); w!(self, "}}"); } - Fields::Tuple(fields) => { + FieldsShape::Tuple => { w!(self, "("); self.indented(|this| { - for field in fields.clone() { - let Field { visibility, name, type_ref, ast_id } = &this.tree[field]; - this.print_ast_id(match ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }); - this.print_attrs_of(field, "\n"); + for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() { + this.print_attrs_of( + AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))), + "\n", + ); this.print_visibility(*visibility); w!(this, "{}: ", name.display(self.db.upcast())); this.print_type_ref(type_ref); @@ -157,24 +154,30 @@ impl Printer<'_> { }); w!(self, ")"); } - Fields::Unit => {} + FieldsShape::Unit => {} } } - fn print_fields_and_where_clause(&mut self, fields: &Fields, params: &GenericParams) { - match fields { - Fields::Record(_) => { + fn print_fields_and_where_clause( + &mut self, + parent: FieldParent, + kind: FieldsShape, + fields: &[Field], + params: &GenericParams, + ) { + match kind { + FieldsShape::Record => { if self.print_where_clause(params) 
{ wln!(self); } - self.print_fields(fields); + self.print_fields(parent, kind, fields); } - Fields::Unit => { + FieldsShape::Unit => { self.print_where_clause(params); - self.print_fields(fields); + self.print_fields(parent, kind, fields); } - Fields::Tuple(_) => { - self.print_fields(fields); + FieldsShape::Tuple => { + self.print_fields(parent, kind, fields); self.print_where_clause(params); } } @@ -280,25 +283,20 @@ impl Printer<'_> { w!(self, "("); if !params.is_empty() { self.indented(|this| { - for param in params.clone() { - this.print_attrs_of(param, "\n"); - let Param { type_ref, ast_id } = &this.tree[param]; - this.print_ast_id(match ast_id { - ParamAstId::Param(it) => it.erase(), - ParamAstId::SelfParam(it) => it.erase(), - }); - match type_ref { - Some(ty) => { - if flags.contains(FnFlags::HAS_SELF_PARAM) { - w!(this, "self: "); - } - this.print_type_ref(ty); - wln!(this, ","); - } - None => { - wln!(this, "..."); - } - }; + for (idx, Param { type_ref }) in params.iter().enumerate() { + this.print_attrs_of( + AttrOwner::Param(it, Idx::from_raw(RawIdx::from(idx as u32))), + "\n", + ); + if idx == 0 && flags.contains(FnFlags::HAS_SELF_PARAM) { + w!(this, "self: "); + } + if let Some(type_ref) = type_ref { + this.print_type_ref(type_ref); + } else { + wln!(this, "..."); + } + wln!(this, ","); } }); } @@ -312,13 +310,19 @@ impl Printer<'_> { } } ModItem::Struct(it) => { - let Struct { visibility, name, fields, generic_params, ast_id } = &self.tree[it]; + let Struct { visibility, name, fields, shape: kind, generic_params, ast_id } = + &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); w!(self, "struct {}", name.display(self.db.upcast())); self.print_generic_params(generic_params, it.into()); - self.print_fields_and_where_clause(fields, generic_params); - if matches!(fields, Fields::Record(_)) { + self.print_fields_and_where_clause( + FieldParent::Struct(it), + *kind, + fields, + generic_params, + ); + if matches!(kind, FieldsShape::Record) { wln!(self); } else { wln!(self, ";"); @@ -330,12 +334,13 @@ impl Printer<'_> { self.print_visibility(*visibility); w!(self, "union {}", name.display(self.db.upcast())); self.print_generic_params(generic_params, it.into()); - self.print_fields_and_where_clause(fields, generic_params); - if matches!(fields, Fields::Record(_)) { - wln!(self); - } else { - wln!(self, ";"); - } + self.print_fields_and_where_clause( + FieldParent::Union(it), + FieldsShape::Record, + fields, + generic_params, + ); + wln!(self); } ModItem::Enum(it) => { let Enum { name, visibility, variants, generic_params, ast_id } = &self.tree[it]; @@ -346,11 +351,11 @@ impl Printer<'_> { self.print_where_clause_and_opening_brace(generic_params); self.indented(|this| { for variant in FileItemTreeId::range_iter(variants.clone()) { - let Variant { name, fields, ast_id } = &this.tree[variant]; + let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant]; this.print_ast_id(ast_id.erase()); this.print_attrs_of(variant, "\n"); w!(this, "{}", name.display(self.db.upcast())); - this.print_fields(fields); + this.print_fields(FieldParent::Variant(variant), *kind, fields); wln!(this, ","); } }); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs index 79bab11998b..c6930401a6f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs @@ -129,40 +129,34 @@ enum E { 
#[derive(Debug)] // AstId: 2 pub(self) struct Struct { - // AstId: 6 #[doc = " fld docs"] pub(self) fld: (), } // AstId: 3 pub(self) struct Tuple( - // AstId: 7 #[attr] pub(self) 0: u8, ); // AstId: 4 pub(self) union Ize { - // AstId: 8 pub(self) a: (), - // AstId: 9 pub(self) b: (), } // AstId: 5 pub(self) enum E { - // AstId: 10 + // AstId: 6 #[doc = " comment on Unit"] Unit, - // AstId: 11 + // AstId: 7 #[doc = " comment on Tuple"] Tuple( - // AstId: 13 pub(self) 0: u8, ), - // AstId: 12 + // AstId: 8 Struct { - // AstId: 14 #[doc = " comment on a: u8"] pub(self) a: u8, }, @@ -201,9 +195,7 @@ trait Tr: SuperTrait + 'lifetime { // AstId: 3 pub(self) fn f( #[attr] - // AstId: 5 u8, - // AstId: 6 (), ) -> () { ... } @@ -213,12 +205,11 @@ trait Tr: SuperTrait + 'lifetime { Self: SuperTrait, Self: 'lifetime { - // AstId: 8 + // AstId: 6 pub(self) type Assoc: AssocBound = Default; - // AstId: 9 + // AstId: 7 pub(self) fn method( - // AstId: 10 self: &Self, ) -> (); } @@ -300,17 +291,11 @@ struct S { expect![[r#" // AstId: 1 pub(self) struct S { - // AstId: 2 pub(self) a: self::Ty, - // AstId: 3 pub(self) b: super::SuperTy, - // AstId: 4 pub(self) c: super::super::SuperSuperTy, - // AstId: 5 pub(self) d: ::abs::Path, - // AstId: 6 pub(self) e: crate::Crate, - // AstId: 7 pub(self) f: plain::path::Ty, } "#]], @@ -331,13 +316,9 @@ struct S { expect![[r#" // AstId: 1 pub(self) struct S { - // AstId: 2 pub(self) a: Mixed::<'a, T, Item = (), OtherItem = u8>, - // AstId: 3 pub(self) b: Qualified::<Self=Fully>::Syntax, - // AstId: 4 pub(self) c: <TypeAnchored>::Path::<'a>, - // AstId: 5 pub(self) d: dyn for<'a> Trait::<'a>, } "#]], @@ -371,15 +352,12 @@ trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {} T: 'a, T: 'b { - // AstId: 8 pub(self) field: &'a &'b T, } // AstId: 2 pub(self) struct Tuple<T, U>( - // AstId: 9 pub(self) 0: T, - // AstId: 10 pub(self) 1: U, ) where @@ -393,9 +371,8 @@ trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {} T: 'a, T: 'b { - // AstId: 12 + // AstId: 9 pub(self) fn f<G>( - // AstId: 13 impl Copy, ) -> impl Copy where diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 6d7836d5ae8..a09fd658aeb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -3,8 +3,8 @@ //! This attribute to tell the compiler about semi built-in std library //! features, such as Fn family of traits. use hir_expand::name::Name; +use intern::{sym, Symbol}; use rustc_hash::FxHashMap; -use syntax::SmolStr; use triomphe::Arc; use crate::{ @@ -191,8 +191,7 @@ impl LangItems { } pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> { - let attrs = db.attrs(item); - attrs.by_key("lang").string_value().and_then(LangItem::from_str) + db.attrs(item).lang_item() } pub(crate) fn notable_traits_in_deps( @@ -253,17 +252,16 @@ macro_rules! 
language_item_table { } impl LangItem { - pub fn name(self) -> SmolStr { + pub fn name(self) -> &'static str { match self { - $( LangItem::$variant => SmolStr::new(stringify!($name)), )* + $( LangItem::$variant => stringify!($name), )* } } /// Opposite of [`LangItem::name`] - #[allow(clippy::should_implement_trait)] - pub fn from_str(name: &str) -> Option<Self> { - match name { - $( stringify!($name) => Some(LangItem::$variant), )* + pub fn from_symbol(sym: &Symbol) -> Option<Self> { + match sym { + $(sym if *sym == $module::$name => Some(LangItem::$variant), )* _ => None, } } @@ -274,7 +272,7 @@ macro_rules! language_item_table { impl LangItem { /// Opposite of [`LangItem::name`] pub fn from_name(name: &hir_expand::name::Name) -> Option<Self> { - Self::from_str(name.as_str()?) + Self::from_symbol(name.symbol()) } pub fn path(&self, db: &dyn DefDatabase, start_crate: CrateId) -> Option<Path> { @@ -360,7 +358,7 @@ language_item_table! { DerefTarget, sym::deref_target, deref_target, Target::AssocTy, GenericRequirement::None; Receiver, sym::receiver, receiver_trait, Target::Trait, GenericRequirement::None; - Fn, kw::fn, fn_trait, Target::Trait, GenericRequirement::Exact(1); + Fn, sym::fn_, fn_trait, Target::Trait, GenericRequirement::Exact(1); FnMut, sym::fn_mut, fn_mut_trait, Target::Trait, GenericRequirement::Exact(1); FnOnce, sym::fn_once, fn_once_trait, Target::Trait, GenericRequirement::Exact(1); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index fc026a14d44..66412b26a00 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -46,7 +46,6 @@ pub mod body; pub mod resolver; pub mod nameres; -mod trace; pub mod child_by_source; pub mod src; @@ -76,9 +75,7 @@ use base_db::{ CrateId, }; use hir_expand::{ - builtin_attr_macro::BuiltinAttrExpander, - builtin_derive_macro::BuiltinDeriveExpander, - builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, + builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, eager::expand_eager_macro_input, impl_intern_lookup, @@ -862,7 +859,7 @@ impl GeneralConstId { .const_data(const_id) .name .as_ref() - .and_then(|it| it.as_str()) + .map(|it| it.as_str()) .unwrap_or("_") .to_owned(), GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"), @@ -1437,7 +1434,10 @@ impl AsMacroCall for InFile<&ast::MacroCall> { }); let Some((call_site, path)) = path else { - return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); + return Ok(ExpandResult::only_err(ExpandError::other( + span_map.span_for_range(self.value.syntax().text_range()), + "malformed macro invocation", + ))); }; macro_call_as_call_id_with_eager( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 9596100b60e..b6dbba12cd6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -154,7 +154,7 @@ fn main() { file!(); } #[rustc_builtin_macro] macro_rules! file {() => {}} -fn main() { ""; } +fn main() { "file"; } "##]], ); } @@ -439,7 +439,7 @@ macro_rules! 
include_bytes { ($file:expr,) => {{ /* compiler built-in */ }}; } -fn main() { include_bytes("foo"); } +fn main() { include_bytes("foo");include_bytes(r"foo"); } "#, expect![[r##" #[rustc_builtin_macro] @@ -448,7 +448,7 @@ macro_rules! include_bytes { ($file:expr,) => {{ /* compiler built-in */ }}; } -fn main() { include_bytes("foo"); } +fn main() { include_bytes("foo");include_bytes(r"foo"); } "##]], ); } @@ -460,13 +460,13 @@ fn test_concat_expand() { #[rustc_builtin_macro] macro_rules! concat {} -fn main() { concat!("fo", "o", 0, r#"bar"#, "\n", false, '"', '\0'); } +fn main() { concat!("fo", "o", 0, r#""bar""#, "\n", false, '"', '\0'); } "##, expect![[r##" #[rustc_builtin_macro] macro_rules! concat {} -fn main() { "foo0bar\nfalse\"\u{0}"; } +fn main() { "foo0\"bar\"\nfalse\"\u{0}"; } "##]], ); } @@ -478,13 +478,13 @@ fn test_concat_bytes_expand() { #[rustc_builtin_macro] macro_rules! concat_bytes {} -fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); } +fn main() { concat_bytes!(b'A', b"BC\"", [68, b'E', 70], br#"G""#,b'\0'); } "##, expect![[r#" #[rustc_builtin_macro] macro_rules! concat_bytes {} -fn main() { [b'A', 66, 67, 68, b'E', 70]; } +fn main() { b"ABC\"DEFG\"\x00"; } "#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index 4058159cefe..64b37d2d065 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1921,3 +1921,59 @@ fn f() { "#]], ); } + +#[test] +fn test_edition_handling_out() { + check( + r#" +//- /main.rs crate:main deps:old edition:2021 +macro_rules! r#try { + ($it:expr) => { + $it? + }; +} +fn f() { + old::invoke_bare_try!(0); +} +//- /old.rs crate:old edition:2015 +#[macro_export] +macro_rules! invoke_bare_try { + ($it:expr) => { + try!($it) + }; +} + "#, + expect![[r#" +macro_rules! r#try { + ($it:expr) => { + $it? + }; +} +fn f() { + try!(0); +} +"#]], + ); +} + +#[test] +fn test_edition_handling_in() { + check( + r#" +//- /main.rs crate:main deps:old edition:2021 +fn f() { + old::parse_try_old!(try!{}); +} +//- /old.rs crate:old edition:2015 +#[macro_export] +macro_rules! parse_try_old { + ($it:expr) => {}; +} + "#, + expect![[r#" +fn f() { + ; +} +"#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 4aad53c3bd7..485f72e92ce 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -1058,7 +1058,7 @@ macro_rules! concat {} macro_rules! line {} fn main() { - "event 0u32"; + "event 0"; } "##]], @@ -1084,7 +1084,7 @@ fn main() { macro_rules! 
concat_bytes {} fn main() { - let x = /* error: unexpected token in input */[]; + let x = /* error: unexpected token */b""; } "#]], diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index dc964b3c9a8..d34f0afc3ef 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -24,6 +24,7 @@ use hir_expand::{ span_map::SpanMapRef, InFile, MacroFileId, MacroFileIdExt, }; +use intern::Symbol; use span::Span; use stdx::{format_to, format_to_acc}; use syntax::{ @@ -55,7 +56,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream "# .into(), ProcMacro { - name: "identity_when_valid".into(), + name: Symbol::intern("identity_when_valid"), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), disabled: false, @@ -121,7 +122,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let mut expn_text = String::new(); if let Some(err) = exp.err { - format_to!(expn_text, "/* error: {} */", err); + format_to!(expn_text, "/* error: {} */", err.render_to_string(&db).0); } let (parse, token_map) = exp.value; if expect_errors { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 8e7ef48112f..8825e463363 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -59,14 +59,15 @@ mod tests; use std::ops::Deref; -use base_db::{CrateId, FileId}; +use base_db::CrateId; use hir_expand::{ name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, }; +use intern::Symbol; use itertools::Itertools; use la_arena::Arena; use rustc_hash::{FxHashMap, FxHashSet}; -use span::{Edition, FileAstId, ROOT_ERASED_FILE_AST_ID}; +use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID}; use stdx::format_to; use syntax::{ast, SmolStr}; use triomphe::Arc; @@ -144,15 +145,13 @@ struct DefMapCrateData { /// Side table for resolving derive helpers. exported_derives: FxHashMap<MacroDefId, Box<[Name]>>, fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>, - /// The error that occurred when failing to load the proc-macro dll. - proc_macro_loading_error: Option<Box<str>>, /// Custom attributes registered with `#![register_attr]`. - registered_attrs: Vec<SmolStr>, + registered_attrs: Vec<Symbol>, /// Custom tool modules registered with `#![register_tool]`. - registered_tools: Vec<SmolStr>, + registered_tools: Vec<Symbol>, /// Unstable features of Rust enabled with `#![feature(A, B)]`. 
- unstable_features: FxHashSet<SmolStr>, + unstable_features: FxHashSet<Symbol>, /// #[rustc_coherence_is_core] rustc_coherence_is_core: bool, no_core: bool, @@ -168,9 +167,8 @@ impl DefMapCrateData { extern_prelude: FxIndexMap::default(), exported_derives: FxHashMap::default(), fn_proc_macro_mapping: FxHashMap::default(), - proc_macro_loading_error: None, registered_attrs: Vec::new(), - registered_tools: PREDEFINED_TOOLS.into(), + registered_tools: PREDEFINED_TOOLS.iter().map(|it| Symbol::intern(it)).collect(), unstable_features: FxHashSet::default(), rustc_coherence_is_core: false, no_core: false, @@ -188,7 +186,6 @@ impl DefMapCrateData { registered_attrs, registered_tools, unstable_features, - proc_macro_loading_error: _, rustc_coherence_is_core: _, no_core: _, no_std: _, @@ -243,14 +240,14 @@ impl std::ops::Index<LocalModuleId> for DefMap { #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum ModuleOrigin { CrateRoot { - definition: FileId, + definition: EditionedFileId, }, /// Note that non-inline modules, by definition, live inside non-macro file. File { is_mod_rs: bool, declaration: FileAstId<ast::Module>, declaration_tree_id: ItemTreeId<Mod>, - definition: FileId, + definition: EditionedFileId, }, Inline { definition_tree_id: ItemTreeId<Mod>, @@ -276,7 +273,7 @@ impl ModuleOrigin { } } - pub fn file_id(&self) -> Option<FileId> { + pub fn file_id(&self) -> Option<EditionedFileId> { match self { ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { Some(*definition) @@ -323,7 +320,7 @@ pub struct ModuleData { /// /// [`None`] for block modules because they are always its `DefMap`'s root. pub parent: Option<LocalModuleId>, - pub children: FxHashMap<Name, LocalModuleId>, + pub children: FxIndexMap<Name, LocalModuleId>, pub scope: ItemScope, } @@ -338,7 +335,7 @@ impl DefMap { let _p = tracing::info_span!("crate_def_map_query", ?name).entered(); let module_data = ModuleData::new( - ModuleOrigin::CrateRoot { definition: krate.root_file_id }, + ModuleOrigin::CrateRoot { definition: krate.root_file_id() }, Visibility::Public, ); @@ -349,7 +346,7 @@ impl DefMap { None, ); let def_map = - collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id.into(), None)); + collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id().into(), None)); Arc::new(def_map) } @@ -432,7 +429,9 @@ impl DefMap { pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ { self.modules .iter() - .filter(move |(_id, data)| data.origin.file_id() == Some(file_id)) + .filter(move |(_id, data)| { + data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) + }) .map(|(id, _data)| id) } @@ -447,15 +446,15 @@ impl DefMap { self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref) } - pub fn registered_tools(&self) -> &[SmolStr] { + pub fn registered_tools(&self) -> &[Symbol] { &self.data.registered_tools } - pub fn registered_attrs(&self) -> &[SmolStr] { + pub fn registered_attrs(&self) -> &[Symbol] { &self.data.registered_attrs } - pub fn is_unstable_feature_enabled(&self, feature: &str) -> bool { + pub fn is_unstable_feature_enabled(&self, feature: &Symbol) -> bool { self.data.unstable_features.contains(feature) } @@ -471,10 +470,6 @@ impl DefMap { self.data.fn_proc_macro_mapping.get(&id).copied() } - pub fn proc_macro_loading_error(&self) -> Option<&str> { - self.data.proc_macro_loading_error.as_deref() - } - pub fn krate(&self) -> CrateId { self.krate } @@ -593,10 +588,8 @@ impl DefMap { 
self.data.extern_prelude.iter().map(|(name, &def)| (name, def)) } - pub(crate) fn macro_use_prelude( - &self, - ) -> impl Iterator<Item = (&Name, (MacroId, Option<ExternCrateId>))> + '_ { - self.macro_use_prelude.iter().map(|(name, &def)| (name, def)) + pub(crate) fn macro_use_prelude(&self) -> &FxHashMap<Name, (MacroId, Option<ExternCrateId>)> { + &self.macro_use_prelude } pub(crate) fn resolve_path( @@ -668,7 +661,7 @@ impl ModuleData { origin, visibility, parent: None, - children: FxHashMap::default(), + children: Default::default(), scope: ItemScope::default(), } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs index f842027d642..747860fd8e1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs @@ -7,7 +7,7 @@ use hir_expand::{ MacroCallId, MacroCallKind, MacroDefId, }; use span::SyntaxContextId; -use syntax::{ast, SmolStr}; +use syntax::ast; use triomphe::Arc; use crate::{ @@ -79,20 +79,20 @@ impl DefMap { let segments = path.segments(); if let Some(name) = segments.first() { - let name = name.to_smol_str(); - let pred = |n: &_| *n == name; + let name = name.symbol(); + let pred = |n: &_| *n == *name; - let is_tool = self.data.registered_tools.iter().map(SmolStr::as_str).any(pred); + let is_tool = self.data.registered_tools.iter().any(pred); // FIXME: tool modules can be shadowed by actual modules if is_tool { return true; } if segments.len() == 1 { - if find_builtin_attr_idx(&name).is_some() { + if find_builtin_attr_idx(name).is_some() { return true; } - if self.data.registered_attrs.iter().map(SmolStr::as_str).any(pred) { + if self.data.registered_attrs.iter().any(pred) { return true; } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index b5045efb621..483bffc4b29 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -5,24 +5,23 @@ use std::{cmp::Ordering, iter, mem, ops::Not}; -use base_db::{CrateId, CrateOrigin, Dependency, FileId, LangCrateOrigin}; +use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ attrs::{Attr, AttrId}, - builtin_attr_macro::find_builtin_attr, - builtin_derive_macro::find_builtin_derive, - builtin_fn_macro::find_builtin_macro, - name::{name, AsName, Name}, + builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro}, + name::{AsName, Name}, proc_macro::CustomProcMacroExpander, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, + MacroFileIdExt, }; -use intern::Interned; +use intern::{sym, Interned}; use itertools::{izip, Itertools}; use la_arena::Idx; use limit::Limit; use rustc_hash::{FxHashMap, FxHashSet}; -use span::{Edition, ErasedFileAstId, FileAstId, SyntaxContextId}; +use span::{Edition, EditionedFileId, FileAstId, SyntaxContextId}; use syntax::ast; use triomphe::Arc; @@ -31,8 +30,8 @@ use crate::{ db::DefDatabase, item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports}, item_tree::{ - self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, - Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, + self, AttrOwner, ExternCrate, FieldsShape, FileItemTreeId, ImportKind, 
ItemTree, + ItemTreeId, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, }, macro_call_as_call_id, macro_call_as_call_id_with_eager, nameres::{ @@ -75,31 +74,11 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI } let proc_macros = if krate.is_proc_macro { - match db.proc_macros().get(&def_map.krate) { - Some(Ok(proc_macros)) => Ok(proc_macros - .iter() - .enumerate() - .map(|(idx, it)| { - let name = Name::new_text_dont_use(it.name.clone()); - ( - name, - if !db.expand_proc_attr_macros() { - CustomProcMacroExpander::dummy() - } else if it.disabled { - CustomProcMacroExpander::disabled() - } else { - CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new( - idx as u32, - )) - }, - ) - }) - .collect()), - Some(Err(e)) => Err(e.clone().into_boxed_str()), - None => Err("No proc-macros present for crate".to_owned().into_boxed_str()), - } + db.proc_macros() + .for_crate(def_map.krate, db.syntax_context(tree_id.file_id())) + .unwrap_or_default() } else { - Ok(vec![]) + Default::default() }; let mut collector = DefCollector { @@ -248,10 +227,10 @@ struct DefCollector<'a> { mod_dirs: FxHashMap<LocalModuleId, ModDir>, cfg_options: &'a CfgOptions, /// List of procedural macros defined by this crate. This is read from the dynamic library - /// built by the build system, and is the list of proc. macros we can actually expand. It is - /// empty when proc. macro support is disabled (in which case we still do name resolution for - /// them). - proc_macros: Result<Vec<(Name, CustomProcMacroExpander)>, Box<str>>, + /// built by the build system, and is the list of proc-macros we can actually expand. It is + /// empty when proc-macro support is disabled (in which case we still do name resolution for + /// them). The bool signals whether the proc-macro has been explicitly disabled for name-resolution. + proc_macros: Box<[(Name, CustomProcMacroExpander, bool)]>, is_proc_macro: bool, from_glob_import: PerNsGlobImports, /// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute. @@ -269,15 +248,11 @@ impl DefCollector<'_> { let _p = tracing::info_span!("seed_with_top_level").entered(); let crate_graph = self.db.crate_graph(); - let file_id = crate_graph[self.def_map.krate].root_file_id; + let file_id = crate_graph[self.def_map.krate].root_file_id(); let item_tree = self.db.file_item_tree(file_id.into()); let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); - if let Err(e) = &self.proc_macros { - crate_data.proc_macro_loading_error = Some(e.clone()); - } - let mut process = true; // Process other crate-level attributes. 
@@ -291,43 +266,43 @@ impl DefCollector<'_> {
             let Some(attr_name) = attr.path.as_ident() else { continue };
             match () {
-                () if *attr_name == hir_expand::name![recursion_limit] => {
+                () if *attr_name == sym::recursion_limit.clone() => {
                     if let Some(limit) = attr.string_value() {
-                        if let Ok(limit) = limit.parse() {
+                        if let Ok(limit) = limit.as_str().parse() {
                             crate_data.recursion_limit = Some(limit);
                         }
                     }
                 }
-                () if *attr_name == hir_expand::name![crate_type] => {
-                    if let Some("proc-macro") = attr.string_value() {
+                () if *attr_name == sym::crate_type.clone() => {
+                    if attr.string_value() == Some(&sym::proc_dash_macro) {
                         self.is_proc_macro = true;
                     }
                 }
-                () if *attr_name == hir_expand::name![no_core] => crate_data.no_core = true,
-                () if *attr_name == hir_expand::name![no_std] => crate_data.no_std = true,
-                () if attr_name.as_text().as_deref() == Some("rustc_coherence_is_core") => {
+                () if *attr_name == sym::no_core.clone() => crate_data.no_core = true,
+                () if *attr_name == sym::no_std.clone() => crate_data.no_std = true,
+                () if *attr_name == sym::rustc_coherence_is_core.clone() => {
                     crate_data.rustc_coherence_is_core = true;
                 }
-                () if *attr_name == hir_expand::name![feature] => {
+                () if *attr_name == sym::feature.clone() => {
                     let features = attr
                         .parse_path_comma_token_tree(self.db.upcast())
                         .into_iter()
                         .flatten()
                         .filter_map(|(feat, _)| match feat.segments() {
-                            [name] => Some(name.to_smol_str()),
+                            [name] => Some(name.symbol().clone()),
                             _ => None,
                         });
                     crate_data.unstable_features.extend(features);
                 }
-                () if *attr_name == hir_expand::name![register_attr] => {
+                () if *attr_name == sym::register_attr.clone() => {
                     if let Some(ident) = attr.single_ident_value() {
-                        crate_data.registered_attrs.push(ident.text.clone());
+                        crate_data.registered_attrs.push(ident.sym.clone());
                         cov_mark::hit!(register_attr);
                     }
                 }
-                () if *attr_name == hir_expand::name![register_tool] => {
+                () if *attr_name == sym::register_tool.clone() => {
                     if let Some(ident) = attr.single_ident_value() {
-                        crate_data.registered_tools.push(ident.text.clone());
+                        crate_data.registered_tools.push(ident.sym.clone());
                         cov_mark::hit!(register_tool);
                     }
                 }
@@ -535,27 +510,30 @@ impl DefCollector<'_> {
         }
         let krate = if self.def_map.data.no_std {
-            name![core]
-        } else if self.def_map.extern_prelude().any(|(name, _)| *name == name![std]) {
-            name![std]
+            Name::new_symbol_root(sym::core.clone())
+        } else if self.def_map.extern_prelude().any(|(name, _)| *name == sym::std.clone()) {
+            Name::new_symbol_root(sym::std.clone())
         } else {
             // If `std` does not exist for some reason, fall back to core. This mostly helps
             // keep r-a's own tests minimal.
- name![core] + Name::new_symbol_root(sym::core.clone()) }; let edition = match self.def_map.data.edition { - Edition::Edition2015 => name![rust_2015], - Edition::Edition2018 => name![rust_2018], - Edition::Edition2021 => name![rust_2021], - Edition::Edition2024 => name![rust_2024], + Edition::Edition2015 => Name::new_symbol_root(sym::rust_2015.clone()), + Edition::Edition2018 => Name::new_symbol_root(sym::rust_2018.clone()), + Edition::Edition2021 => Name::new_symbol_root(sym::rust_2021.clone()), + Edition::Edition2024 => Name::new_symbol_root(sym::rust_2024.clone()), }; let path_kind = match self.def_map.data.edition { Edition::Edition2015 => PathKind::Plain, _ => PathKind::Abs, }; - let path = ModPath::from_segments(path_kind, [krate, name![prelude], edition]); + let path = ModPath::from_segments( + path_kind, + [krate, Name::new_symbol_root(sym::prelude.clone()), edition], + ); let (per_ns, _) = self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None); @@ -601,11 +579,17 @@ impl DefCollector<'_> { fn_id: FunctionId, ) { let kind = def.kind.to_basedb_kind(); - let (expander, kind) = - match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { - Ok(Some(&(_, expander))) => (expander, kind), - _ => (CustomProcMacroExpander::dummy(), kind), - }; + let (expander, kind) = match self.proc_macros.iter().find(|(n, _, _)| n == &def.name) { + Some(_) + if kind == hir_expand::proc_macro::ProcMacroKind::Attr + && !self.db.expand_proc_attr_macros() => + { + (CustomProcMacroExpander::disabled_proc_attr(), kind) + } + Some(&(_, _, true)) => (CustomProcMacroExpander::disabled(), kind), + Some(&(_, expander, false)) => (expander, kind), + None => (CustomProcMacroExpander::missing_expander(), kind), + }; let proc_macro_id = ProcMacroLoc { container: self.def_map.crate_root(), @@ -838,7 +822,7 @@ impl DefCollector<'_> { } fn resolve_extern_crate(&self, name: &Name) -> Option<CrateRootModuleId> { - if *name == name![self] { + if *name == sym::self_.clone() { cov_mark::hit!(extern_crate_self_as); Some(self.def_map.crate_root()) } else { @@ -1018,7 +1002,7 @@ impl DefCollector<'_> { fn update_recursive( &mut self, - // The module for which `resolutions` have been resolve + // The module for which `resolutions` have been resolved. module_id: LocalModuleId, resolutions: &[(Option<Name>, PerNs)], // All resolutions are imported with this visibility; the visibilities in @@ -1036,10 +1020,9 @@ impl DefCollector<'_> { for (name, res) in resolutions { match name { Some(name) => { - let scope = &mut self.def_map.modules[module_id].scope; - changed |= scope.push_res_with_import( - &mut self.from_glob_import, - (module_id, name.clone()), + changed |= self.push_res_and_update_glob_vis( + module_id, + name, res.with_visibility(vis), import, ); @@ -1105,6 +1088,84 @@ impl DefCollector<'_> { } } + fn push_res_and_update_glob_vis( + &mut self, + module_id: LocalModuleId, + name: &Name, + mut defs: PerNs, + def_import_type: Option<ImportType>, + ) -> bool { + let mut changed = false; + + if let Some(ImportType::Glob(_)) = def_import_type { + let prev_defs = self.def_map[module_id].scope.get(name); + + // Multiple globs may import the same item and they may override visibility from + // previously resolved globs. Handle overrides here and leave the rest to + // `ItemScope::push_res_with_import()`. 
+ if let Some((def, def_vis, _)) = defs.types { + if let Some((prev_def, prev_vis, _)) = prev_defs.types { + if def == prev_def + && self.from_glob_import.contains_type(module_id, name.clone()) + && def_vis != prev_vis + && def_vis.max(prev_vis, &self.def_map) == Some(def_vis) + { + changed = true; + // This import is being handled here, don't pass it down to + // `ItemScope::push_res_with_import()`. + defs.types = None; + self.def_map.modules[module_id] + .scope + .update_visibility_types(name, def_vis); + } + } + } + + if let Some((def, def_vis, _)) = defs.values { + if let Some((prev_def, prev_vis, _)) = prev_defs.values { + if def == prev_def + && self.from_glob_import.contains_value(module_id, name.clone()) + && def_vis != prev_vis + && def_vis.max(prev_vis, &self.def_map) == Some(def_vis) + { + changed = true; + // See comment above. + defs.values = None; + self.def_map.modules[module_id] + .scope + .update_visibility_values(name, def_vis); + } + } + } + + if let Some((def, def_vis, _)) = defs.macros { + if let Some((prev_def, prev_vis, _)) = prev_defs.macros { + if def == prev_def + && self.from_glob_import.contains_macro(module_id, name.clone()) + && def_vis != prev_vis + && def_vis.max(prev_vis, &self.def_map) == Some(def_vis) + { + changed = true; + // See comment above. + defs.macros = None; + self.def_map.modules[module_id] + .scope + .update_visibility_macros(name, def_vis); + } + } + } + } + + changed |= self.def_map.modules[module_id].scope.push_res_with_import( + &mut self.from_glob_import, + (module_id, name.clone()), + defs, + def_import_type, + ); + + changed + } + fn resolve_macros(&mut self) -> ReachedFixedPoint { let mut macros = mem::take(&mut self.unresolved_macros); let mut resolved = Vec::new(); @@ -1331,25 +1392,23 @@ impl DefCollector<'_> { return recollect_without(self); } - let call_id = call_id(); if let MacroDefKind::ProcMacro(_, exp, _) = def.kind { // If there's no expander for the proc macro (e.g. // because proc macros are disabled, or building the // proc macro crate failed), report this and skip // expansion like we would if it was disabled - if exp.is_dummy() { - self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + if let Some(err) = exp.as_expand_error(def.krate) { + self.def_map.diagnostics.push(DefDiagnostic::macro_error( directive.module_id, - self.db.lookup_intern_macro_call(call_id).kind, - def.krate, + ast_id, + (**path).clone(), + err, )); return recollect_without(self); } - if exp.is_disabled() { - return recollect_without(self); - } } + let call_id = call_id(); self.def_map.modules[directive.module_id] .scope .add_attr_macro_invoc(ast_id, call_id); @@ -1388,10 +1447,14 @@ impl DefCollector<'_> { } let file_id = macro_call_id.as_file(); - // Then, fetch and process the item tree. This will reuse the expansion result from above. 
let item_tree = self.db.file_item_tree(file_id); - let mod_dir = self.mod_dirs[&module_id].clone(); + let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db.upcast()) { + ModDir::root() + } else { + self.mod_dirs[&module_id].clone() + }; + ModCollector { def_collector: &mut *self, macro_depth: depth, @@ -1568,10 +1631,7 @@ impl ModCollector<'_, '_> { let attrs = self.item_tree.attrs(db, krate, item.into()); if let Some(cfg) = attrs.cfg() { if !self.is_cfg_enabled(&cfg) { - self.emit_unconfigured_diagnostic( - InFile::new(self.file_id(), item.ast_id(self.item_tree).erase()), - &cfg, - ); + self.emit_unconfigured_diagnostic(self.tree_id, item.into(), &cfg); return; } } @@ -1593,7 +1653,7 @@ impl ModCollector<'_, '_> { id: ItemTreeId::new(self.tree_id, item_tree_id), } .intern(db); - let is_prelude = attrs.by_key("prelude_import").exists(); + let is_prelude = attrs.by_key(&sym::prelude_import).exists(); Import::from_use( self.item_tree, ItemTreeId::new(self.tree_id, item_tree_id), @@ -1618,7 +1678,7 @@ impl ModCollector<'_, '_> { self.process_macro_use_extern_crate( item_tree_id, id, - attrs.by_key("macro_use").attrs(), + attrs.by_key(&sym::macro_use).attrs(), ); } @@ -1687,7 +1747,7 @@ impl ModCollector<'_, '_> { .into(), &it.name, vis, - !matches!(it.fields, Fields::Record(_)), + !matches!(it.shape, FieldsShape::Record), ); } ModItem::Union(id) => { @@ -1725,10 +1785,8 @@ impl ModCollector<'_, '_> { match is_enabled { Err(cfg) => { self.emit_unconfigured_diagnostic( - InFile::new( - self.file_id(), - self.item_tree[variant.index()].ast_id.erase(), - ), + self.tree_id, + variant.into(), &cfg, ); None @@ -1891,8 +1949,8 @@ impl ModCollector<'_, '_> { } fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) { - let path_attr = attrs.by_key("path").string_value_unescape(); - let is_macro_use = attrs.by_key("macro_use").exists(); + let path_attr = attrs.by_key(&sym::path).string_value_unescape(); + let is_macro_use = attrs.by_key(&sym::macro_use).exists(); let module = &self.item_tree[module_id]; match &module.kind { // inline module, just recurse @@ -1944,7 +2002,8 @@ impl ModCollector<'_, '_> { match is_enabled { Err(cfg) => { self.emit_unconfigured_diagnostic( - ast_id.map(|it| it.erase()), + self.tree_id, + AttrOwner::TopLevel, &cfg, ); } @@ -1968,7 +2027,7 @@ impl ModCollector<'_, '_> { let is_macro_use = is_macro_use || item_tree .top_level_attrs(db, krate) - .by_key("macro_use") + .by_key(&sym::macro_use) .exists(); if is_macro_use { self.import_all_legacy_macros(module_id); @@ -1997,7 +2056,7 @@ impl ModCollector<'_, '_> { &mut self, name: Name, declaration: FileAstId<ast::Module>, - definition: Option<(FileId, bool)>, + definition: Option<(EditionedFileId, bool)>, visibility: &crate::visibility::RawVisibility, mod_tree_id: FileItemTreeId<Mod>, ) -> LocalModuleId { @@ -2118,14 +2177,12 @@ impl ModCollector<'_, '_> { let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into()); let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast()); - let export_attr = attrs.by_key("macro_export"); + let export_attr = attrs.by_key(&sym::macro_export); let is_export = export_attr.exists(); let local_inner = if is_export { export_attr.tt_values().flat_map(|it| it.token_trees.iter()).any(|it| match it { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - ident.text.contains("local_inner_macros") - } + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.sym == sym::local_inner_macros, _ => false, }) } else { @@ -2133,17 +2190,17 @@ 
impl ModCollector<'_, '_> { }; // Case 1: builtin macros - let expander = if attrs.by_key("rustc_builtin_macro").exists() { + let expander = if attrs.by_key(&sym::rustc_builtin_macro).exists() { // `#[rustc_builtin_macro = "builtin_name"]` overrides the `macro_rules!` name. let name; - let name = match attrs.by_key("rustc_builtin_macro").string_value() { - Some(it) => { - name = Name::new_text_dont_use(it.into()); + let name = match attrs.by_key(&sym::rustc_builtin_macro).string_value_with_span() { + Some((it, span)) => { + name = Name::new_symbol(it.clone(), span.ctx); &name } None => { let explicit_name = - attrs.by_key("rustc_builtin_macro").tt_values().next().and_then(|tt| { + attrs.by_key(&sym::rustc_builtin_macro).tt_values().next().and_then(|tt| { match tt.token_trees.first() { Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name), _ => None, @@ -2173,7 +2230,7 @@ impl ModCollector<'_, '_> { // Case 2: normal `macro_rules!` macro MacroExpander::Declarative }; - let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists(); + let allow_internal_unsafe = attrs.by_key(&sym::allow_internal_unsafe).exists(); let mut flags = MacroRulesLocFlags::empty(); flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner); @@ -2203,14 +2260,14 @@ impl ModCollector<'_, '_> { // Case 1: builtin macros let mut helpers_opt = None; let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into()); - let expander = if attrs.by_key("rustc_builtin_macro").exists() { + let expander = if attrs.by_key(&sym::rustc_builtin_macro).exists() { if let Some(expander) = find_builtin_macro(&mac.name) { match expander { Either::Left(it) => MacroExpander::BuiltIn(it), Either::Right(it) => MacroExpander::BuiltInEager(it), } } else if let Some(expander) = find_builtin_derive(&mac.name) { - if let Some(attr) = attrs.by_key("rustc_builtin_macro").tt_values().next() { + if let Some(attr) = attrs.by_key(&sym::rustc_builtin_macro).tt_values().next() { // NOTE: The item *may* have both `#[rustc_builtin_macro]` and `#[proc_macro_derive]`, // in which case rustc ignores the helper attributes from the latter, but it // "doesn't make sense in practice" (see rust-lang/rust#87027). 
@@ -2243,7 +2300,7 @@ impl ModCollector<'_, '_> { // Case 2: normal `macro` MacroExpander::Declarative }; - let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists(); + let allow_internal_unsafe = attrs.by_key(&sym::allow_internal_unsafe).exists(); let macro_id = Macro2Loc { container: module, @@ -2392,10 +2449,11 @@ impl ModCollector<'_, '_> { self.def_collector.cfg_options.check(cfg) != Some(false) } - fn emit_unconfigured_diagnostic(&mut self, ast_id: InFile<ErasedFileAstId>, cfg: &CfgExpr) { + fn emit_unconfigured_diagnostic(&mut self, tree_id: TreeId, item: AttrOwner, cfg: &CfgExpr) { self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id, - ast_id, + tree_id, + item, cfg.clone(), self.def_collector.cfg_options.clone(), )); @@ -2426,7 +2484,7 @@ mod tests { unresolved_macros: Vec::new(), mod_dirs: FxHashMap::default(), cfg_options: &CfgOptions::default(), - proc_macros: Ok(vec![]), + proc_macros: Default::default(), from_glob_import: Default::default(), skip_attrs: Default::default(), is_proc_macro: false, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs index 4ab53d20b57..bc1617c55b0 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs @@ -2,14 +2,13 @@ use std::ops::Not; -use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; -use hir_expand::{attrs::AttrId, ErasedAstId, MacroCallKind}; +use hir_expand::{attrs::AttrId, ExpandErrorKind, MacroCallKind}; use la_arena::Idx; use syntax::ast; use crate::{ - item_tree::{self, ItemTreeId}, + item_tree::{self, AttrOwner, ItemTreeId, TreeId}, nameres::LocalModuleId, path::ModPath, AstId, @@ -17,47 +16,16 @@ use crate::{ #[derive(Debug, PartialEq, Eq)] pub enum DefDiagnosticKind { - UnresolvedModule { - ast: AstId<ast::Module>, - candidates: Box<[String]>, - }, - UnresolvedExternCrate { - ast: AstId<ast::ExternCrate>, - }, - UnresolvedImport { - id: ItemTreeId<item_tree::Use>, - index: Idx<ast::UseTree>, - }, - UnconfiguredCode { - ast: ErasedAstId, - cfg: CfgExpr, - opts: CfgOptions, - }, - /// A proc-macro that is lacking an expander, this might be due to build scripts not yet having - /// run or proc-macro expansion being disabled. 
-    UnresolvedProcMacro {
-        ast: MacroCallKind,
-        krate: CrateId,
-    },
-    UnresolvedMacroCall {
-        ast: MacroCallKind,
-        path: ModPath,
-    },
-    UnimplementedBuiltinMacro {
-        ast: AstId<ast::Macro>,
-    },
-    InvalidDeriveTarget {
-        ast: AstId<ast::Item>,
-        id: usize,
-    },
-    MalformedDerive {
-        ast: AstId<ast::Adt>,
-        id: usize,
-    },
-    MacroDefError {
-        ast: AstId<ast::Macro>,
-        message: String,
-    },
+    UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
+    UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
+    UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
+    UnconfiguredCode { tree: TreeId, item: AttrOwner, cfg: CfgExpr, opts: CfgOptions },
+    UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
+    UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
+    InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
+    MalformedDerive { ast: AstId<ast::Adt>, id: usize },
+    MacroDefError { ast: AstId<ast::Macro>, message: String },
+    MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
 }
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -114,21 +82,26 @@ impl DefDiagnostic {
         Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
     }
-    pub fn unconfigured_code(
+    pub fn macro_error(
         container: LocalModuleId,
-        ast: ErasedAstId,
-        cfg: CfgExpr,
-        opts: CfgOptions,
+        ast: AstId<ast::Item>,
+        path: ModPath,
+        err: ExpandErrorKind,
     ) -> Self {
-        Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } }
+        Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, path, err } }
     }
-    pub fn unresolved_proc_macro(
+    pub fn unconfigured_code(
         container: LocalModuleId,
-        ast: MacroCallKind,
-        krate: CrateId,
+        tree: TreeId,
+        item: AttrOwner,
+        cfg: CfgExpr,
+        opts: CfgOptions,
     ) -> Self {
-        Self { in_module: container, kind: DefDiagnosticKind::UnresolvedProcMacro { ast, krate } }
+        Self {
+            in_module: container,
+            kind: DefDiagnosticKind::UnconfiguredCode { tree, item, cfg, opts },
+        }
     }
     // FIXME: Whats the difference between this and unresolved_proc_macro
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 696fb6a961c..ab4ffbb2c1e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,8 +1,10 @@
 //! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec; -use base_db::{AnchoredPath, FileId}; -use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt}; +use base_db::AnchoredPath; +use hir_expand::{name::Name, HirFileIdExt}; use limit::Limit; +use span::EditionedFileId; +use syntax::ToSmolStr as _; use crate::{db::DefDatabase, HirFileId}; @@ -33,7 +35,7 @@ impl ModDir { let path = match attr_path { None => { let mut path = self.dir_path.clone(); - path.push(&name.unescaped().to_smol_str()); + path.push(&name.unescaped().display_no_db().to_smolstr()); path } Some(attr_path) => { @@ -63,7 +65,7 @@ impl ModDir { file_id: HirFileId, name: &Name, attr_path: Option<&str>, - ) -> Result<(FileId, bool, ModDir), Box<[String]>> { + ) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> { let name = name.unescaped(); let mut candidate_files = ArrayVec::<_, 2>::new(); @@ -71,10 +73,6 @@ impl ModDir { Some(attr_path) => { candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) } - None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => { - candidate_files.push(format!("{}.rs", name.display(db.upcast()))); - candidate_files.push(format!("{}/mod.rs", name.display(db.upcast()))); - } None => { candidate_files.push(format!( "{}{}.rs", @@ -91,7 +89,7 @@ impl ModDir { let orig_file_id = file_id.original_file_respecting_includes(db.upcast()); for candidate in candidate_files.iter() { - let path = AnchoredPath { anchor: orig_file_id, path: candidate.as_str() }; + let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() }; if let Some(file_id) = db.resolve_path(path) { let is_mod_rs = candidate.ends_with("/mod.rs"); @@ -102,7 +100,12 @@ impl ModDir { DirPath::new(format!("{}/", name.display(db.upcast()))) }; if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) { - return Ok((file_id, is_mod_rs, mod_dir)); + return Ok(( + // FIXME: Edition, is this rightr? + EditionedFileId::new(file_id, orig_file_id.edition()), + is_mod_rs, + mod_dir, + )); } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs index e797d19223e..75cab137f78 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs @@ -17,7 +17,7 @@ use triomphe::Arc; use crate::{ db::DefDatabase, item_scope::{ImportOrExternCrate, BUILTIN_SCOPE}, - item_tree::Fields, + item_tree::FieldsShape, nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs}, path::{ModPath, PathKind}, per_ns::PerNs, @@ -381,11 +381,11 @@ impl DefMap { .iter() .find_map(|&variant| { let variant_data = &tree[variant.lookup(db).id.value]; - (variant_data.name == *segment).then(|| match variant_data.fields { - Fields::Record(_) => { + (variant_data.name == *segment).then(|| match variant_data.shape { + FieldsShape::Record => { PerNs::types(variant.into(), Visibility::Public, None) } - Fields::Tuple(_) | Fields::Unit => PerNs::both( + FieldsShape::Tuple | FieldsShape::Unit => PerNs::both( variant.into(), variant.into(), Visibility::Public, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs index 5052708dc93..fd0b52bc7d7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs @@ -1,6 +1,7 @@ //! 
Nameres-specific procedural macro data and helpers. use hir_expand::name::{AsName, Name}; +use intern::sym; use crate::attr::Attrs; use crate::tt::{Leaf, TokenTree}; @@ -35,8 +36,8 @@ impl Attrs { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang }) } else if self.is_proc_macro_attribute() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr }) - } else if self.by_key("proc_macro_derive").exists() { - let derive = self.by_key("proc_macro_derive").tt_values().next()?; + } else if self.by_key(&sym::proc_macro_derive).exists() { + let derive = self.by_key(&sym::proc_macro_derive).tt_values().next()?; let def = parse_macro_name_and_helper_attrs(&derive.token_trees) .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::Derive { helpers } }); @@ -67,7 +68,7 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &[TokenTree]) -> Option<(Nam TokenTree::Leaf(Leaf::Punct(comma)), TokenTree::Leaf(Leaf::Ident(attributes)), TokenTree::Subtree(helpers) - ] if comma.char == ',' && attributes.text == "attributes" => + ] if comma.char == ',' && attributes.sym == sym::attributes => { let helpers = helpers .token_trees diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs index 1ca74b5da6b..a2696055ca1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs @@ -367,3 +367,48 @@ use event::Event; "#]], ); } + +#[test] +fn glob_may_override_visibility() { + check( + r#" +mod reexport { + use crate::defs::*; + mod inner { + pub use crate::defs::{Trait, function, makro}; + } + pub use inner::*; +} +mod defs { + pub trait Trait {} + pub fn function() {} + pub macro makro($t:item) { $t } +} +use reexport::*; +"#, + expect![[r#" + crate + Trait: t + defs: t + function: v + makro: m + reexport: t + + crate::defs + Trait: t + function: v + makro: m + + crate::reexport + Trait: t + function: v + inner: t + makro: m + + crate::reexport::inner + Trait: ti + function: vi + makro: mi + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index be41634eb57..e82af318501 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -16,7 +16,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: }); assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") } - db.set_file_text(pos.file_id, ra_fixture_change); + db.set_file_text(pos.file_id.file_id(), ra_fixture_change); { let events = db.log_executed(|| { @@ -266,7 +266,7 @@ fn quux() { 92 } m!(Y); m!(Z); "#; - db.set_file_text(pos.file_id, new_text); + db.set_file_text(pos.file_id.file_id(), new_text); { let events = db.log_executed(|| { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs index d278b75e815..390c934f6da 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs @@ -1310,6 +1310,116 @@ pub mod ip_address { } #[test] +fn include_with_submod_file() { + check( + r#" +//- minicore: include +//- /lib.rs +include!("out_dir/includes.rs"); + +//- /out_dir/includes.rs +pub mod 
company_name { + pub mod network { + pub mod v1; + } +} +//- /out_dir/company_name/network/v1.rs +pub struct IpAddress { + pub ip_type: &'static str, +} +/// Nested message and enum types in `IpAddress`. +pub mod ip_address { + pub enum IpType { + IpV4(u32), + } +} + +"#, + expect![[r#" + crate + company_name: t + + crate::company_name + network: t + + crate::company_name::network + v1: t + + crate::company_name::network::v1 + IpAddress: t + ip_address: t + + crate::company_name::network::v1::ip_address + IpType: t + "#]], + ); +} + +#[test] +fn include_many_mods() { + check( + r#" +//- /lib.rs +#[rustc_builtin_macro] +macro_rules! include { () => {} } + +mod nested { + include!("out_dir/includes.rs"); + + mod different_company { + include!("out_dir/different_company/mod.rs"); + } + + mod util; +} + +//- /nested/util.rs +pub struct Helper {} +//- /out_dir/includes.rs +pub mod company_name { + pub mod network { + pub mod v1; + } +} +//- /out_dir/company_name/network/v1.rs +pub struct IpAddress {} +//- /out_dir/different_company/mod.rs +pub mod network; +//- /out_dir/different_company/network.rs +pub struct Url {} + +"#, + expect![[r#" + crate + nested: t + + crate::nested + company_name: t + different_company: t + util: t + + crate::nested::company_name + network: t + + crate::nested::company_name::network + v1: t + + crate::nested::company_name::network::v1 + IpAddress: t + + crate::nested::different_company + network: t + + crate::nested::different_company::network + Url: t + + crate::nested::util + Helper: t + "#]], + ); +} + +#[test] fn macro_use_imports_all_macro_types() { let db = TestDB::with_files( r#" diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs index ff5d39cf53d..f90bc954a9b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs @@ -13,7 +13,7 @@ use crate::{ }; use hir_expand::name::Name; use intern::Interned; -use syntax::ast; +use syntax::{ast, ToSmolStr}; pub use hir_expand::mod_path::{path, ModPath, PathKind}; @@ -29,7 +29,7 @@ impl Display for ImportAlias { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ImportAlias::Underscore => f.write_str("_"), - ImportAlias::Alias(name) => f.write_str(&name.to_smol_str()), + ImportAlias::Alias(name) => f.write_str(&name.display_no_db().to_smolstr()), } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs index cee9e055459..7c39773aa68 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs @@ -6,9 +6,9 @@ use crate::{lower::LowerCtx, type_ref::ConstRef}; use hir_expand::{ mod_path::resolve_crate_root, - name::{name, AsName}, + name::{AsName, Name}, }; -use intern::Interned; +use intern::{sym, Interned}; use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds}; use crate::{ @@ -60,7 +60,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path segments.push(name); } ast::PathSegmentKind::SelfTypeKw => { - segments.push(name![Self]); + segments.push(Name::new_symbol_root(sym::Self_.clone())); } ast::PathSegmentKind::Type { type_ref, trait_ref } => { assert!(path.qualifier().is_none()); // this can only occur at the first segment @@ -268,7 +268,7 @@ fn lower_generic_args_from_fn_path( let bindings = if let Some(ret_type) = ret_type { let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty()); 
Box::new([AssociatedTypeBinding { - name: name![Output], + name: Name::new_symbol_root(sym::Output.clone()), args: None, type_ref: Some(type_ref), bounds: Box::default(), @@ -277,7 +277,7 @@ fn lower_generic_args_from_fn_path( // -> () let type_ref = TypeRef::Tuple(Vec::new()); Box::new([AssociatedTypeBinding { - name: name![Output], + name: Name::new_symbol_root(sym::Output.clone()), args: None, type_ref: Some(type_ref), bounds: Box::default(), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs index d08e063976a..3ee88b536fc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs @@ -200,7 +200,7 @@ pub(crate) fn print_type_ref( } if let Some(abi) = abi { buf.write_str("extern ")?; - buf.write_str(abi)?; + buf.write_str(abi.as_str())?; buf.write_char(' ')?; } write!(buf, "fn(")?; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index e5c1f93bbde..f0f2210ec2c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -2,11 +2,9 @@ use std::{fmt, iter, mem}; use base_db::CrateId; -use hir_expand::{ - name::{name, Name}, - MacroDefId, -}; -use intern::Interned; +use hir_expand::{name::Name, MacroDefId}; +use intern::{sym, Interned}; +use itertools::Itertools as _; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; use triomphe::Arc; @@ -197,12 +195,12 @@ impl Resolver { } } &Scope::ImplDefScope(impl_) => { - if first_name == &name![Self] { + if *first_name == sym::Self_.clone() { return Some((TypeNs::SelfType(impl_), remaining_idx(), None)); } } &Scope::AdtScope(adt) => { - if first_name == &name![Self] { + if *first_name == sym::Self_.clone() { return Some((TypeNs::AdtSelfType(adt), remaining_idx(), None)); } } @@ -294,7 +292,7 @@ impl Resolver { } }; let n_segments = path.segments().len(); - let tmp = name![self]; + let tmp = Name::new_symbol_root(sym::self_.clone()); let first_name = if path.is_self() { &tmp } else { path.segments().first()? 
}; let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); if skip_to_mod { @@ -325,7 +323,7 @@ impl Resolver { } } &Scope::ImplDefScope(impl_) => { - if first_name == &name![Self] { + if *first_name == sym::Self_.clone() { return Some(ResolveValueResult::ValueNs( ValueNs::ImplSelf(impl_), None, @@ -352,7 +350,7 @@ impl Resolver { } } &Scope::ImplDefScope(impl_) => { - if first_name == &name![Self] { + if *first_name == sym::Self_.clone() { return Some(ResolveValueResult::Partial( TypeNs::SelfType(impl_), 1, @@ -361,7 +359,7 @@ impl Resolver { } } Scope::AdtScope(adt) => { - if first_name == &name![Self] { + if *first_name == sym::Self_.clone() { let ty = TypeNs::AdtSelfType(*adt); return Some(ResolveValueResult::Partial(ty, 1, None)); } @@ -425,7 +423,7 @@ impl Resolver { } pub fn resolve_lifetime(&self, lifetime: &LifetimeRef) -> Option<LifetimeNs> { - if lifetime.name == name::known::STATIC_LIFETIME { + if lifetime.name == sym::tick_static.clone() { return Some(LifetimeNs::Static); } @@ -500,9 +498,11 @@ impl Resolver { res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))); }) }); - def_map.macro_use_prelude().for_each(|(name, (def, _extern_crate))| { - res.add(name, ScopeDef::ModuleDef(def.into())); - }); + def_map.macro_use_prelude().iter().sorted_by_key(|&(k, _)| k.clone()).for_each( + |(name, &(def, _extern_crate))| { + res.add(name, ScopeDef::ModuleDef(def.into())); + }, + ); def_map.extern_prelude().for_each(|(name, (def, _extern_crate))| { res.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def.into()))); }); @@ -781,10 +781,10 @@ impl Scope { } } Scope::ImplDefScope(i) => { - acc.add(&name![Self], ScopeDef::ImplSelfType(*i)); + acc.add(&Name::new_symbol_root(sym::Self_.clone()), ScopeDef::ImplSelfType(*i)); } Scope::AdtScope(i) => { - acc.add(&name![Self], ScopeDef::AdtSelfType(*i)); + acc.add(&Name::new_symbol_root(sym::Self_.clone()), ScopeDef::AdtSelfType(*i)); } Scope::ExprScope(scope) => { if let Some((label, name)) = scope.expr_scopes.label(scope.scope_id) { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs index a0d2079e0d4..c7ebfeecf51 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs @@ -6,9 +6,10 @@ use la_arena::ArenaMap; use syntax::{ast, AstNode, AstPtr}; use crate::{ - data::adt::lower_struct, db::DefDatabase, item_tree::ItemTreeNode, trace::Trace, GenericDefId, - ItemTreeLoc, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, UseId, - VariantId, + db::DefDatabase, + item_tree::{AttrOwner, FieldParent, ItemTreeNode}, + GenericDefId, ItemTreeLoc, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, + UseId, VariantId, }; pub trait HasSource { @@ -124,13 +125,13 @@ impl HasChildSource<LocalFieldId> for VariantId { fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> { let item_tree; - let (src, fields, container) = match *self { + let (src, parent, container) = match *self { VariantId::EnumVariantId(it) => { let lookup = it.lookup(db); item_tree = lookup.id.item_tree(db); ( lookup.source(db).map(|it| it.kind()), - &item_tree[lookup.id.value].fields, + FieldParent::Variant(lookup.id.value), lookup.parent.lookup(db).container, ) } @@ -139,7 +140,7 @@ impl HasChildSource<LocalFieldId> for VariantId { item_tree = lookup.id.item_tree(db); ( lookup.source(db).map(|it| it.kind()), - &item_tree[lookup.id.value].fields, + 
FieldParent::Struct(lookup.id.value), lookup.container, ) } @@ -148,13 +149,54 @@ impl HasChildSource<LocalFieldId> for VariantId { item_tree = lookup.id.item_tree(db); ( lookup.source(db).map(|it| it.kind()), - &item_tree[lookup.id.value].fields, + FieldParent::Union(lookup.id.value), lookup.container, ) } }; - let mut trace = Trace::new_for_map(); - lower_struct(db, &mut trace, &src, container.krate, &item_tree, fields); - src.with_value(trace.into_map()) + + let mut map = ArenaMap::new(); + match &src.value { + ast::StructKind::Tuple(fl) => { + let cfg_options = &db.crate_graph()[container.krate].cfg_options; + let mut idx = 0; + for (i, fd) in fl.fields().enumerate() { + let attrs = item_tree.attrs( + db, + container.krate, + AttrOwner::make_field_indexed(parent, i), + ); + if !attrs.is_cfg_enabled(cfg_options) { + continue; + } + map.insert( + LocalFieldId::from_raw(la_arena::RawIdx::from(idx)), + Either::Left(fd.clone()), + ); + idx += 1; + } + } + ast::StructKind::Record(fl) => { + let cfg_options = &db.crate_graph()[container.krate].cfg_options; + let mut idx = 0; + for (i, fd) in fl.fields().enumerate() { + let attrs = item_tree.attrs( + db, + container.krate, + AttrOwner::make_field_indexed(parent, i), + ); + if !attrs.is_cfg_enabled(cfg_options) { + continue; + } + map.insert( + LocalFieldId::from_raw(la_arena::RawIdx::from(idx)), + Either::Right(fd.clone()), + ); + idx += 1; + } + } + _ => (), + } + InFile::new(src.file_id, map) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index 9edb03c7cab..f44472eae5b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -4,10 +4,10 @@ use std::{fmt, panic, sync::Mutex}; use base_db::{ salsa::{self, Durability}, - AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, SourceDatabase, - Upcast, + AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, }; -use hir_expand::{db::ExpandDatabase, InFile}; +use hir_expand::{db::ExpandDatabase, files::FilePosition, InFile}; +use span::{EditionedFileId, FileId}; use syntax::{algo, ast, AstNode}; use triomphe::Arc; @@ -85,7 +85,7 @@ impl TestDB { for &krate in self.relevant_crates(file_id).iter() { let crate_def_map = self.crate_def_map(krate); for (local_id, data) in crate_def_map.modules() { - if data.origin.file_id() == Some(file_id) { + if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) { return crate_def_map.module_id(local_id); } } @@ -94,7 +94,7 @@ impl TestDB { } pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId { - let file_module = self.module_for_file(position.file_id); + let file_module = self.module_for_file(position.file_id.file_id()); let mut def_map = file_module.def_map(self); let module = self.mod_at_position(&def_map, position); @@ -122,7 +122,7 @@ impl TestDB { let mut res = DefMap::ROOT; for (module, data) in def_map.modules() { let src = data.definition_source(self); - if src.file_id != position.file_id.into() { + if src.file_id != position.file_id { continue; } @@ -148,7 +148,6 @@ impl TestDB { }; if size != Some(new_size) { - cov_mark::hit!(submodule_in_testdb); size = Some(new_size); res = module; } @@ -163,7 +162,7 @@ impl TestDB { let mut fn_def = None; for (_, module) in def_map.modules() { let file_id = module.definition_source(self).file_id; - if file_id != position.file_id.into() { + if file_id != position.file_id { continue; 
} for decl in module.scope.declarations() { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/trace.rs b/src/tools/rust-analyzer/crates/hir-def/src/trace.rs deleted file mode 100644 index da50ee8dc7a..00000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/trace.rs +++ /dev/null @@ -1,55 +0,0 @@ -//! Trace is a pretty niche data structure which is used when lowering a CST -//! into HIR. -//! -//! Lowering process calculates two bits of information: -//! * the lowered syntax itself -//! * a mapping between lowered syntax and original syntax -//! -//! Due to the way salsa works, the mapping is usually hot lava, as it contains -//! absolute offsets. The `Trace` structure (inspired, at least in name, by -//! Kotlin's `BindingTrace`) allows use the same code to compute both -//! projections. -use la_arena::{Arena, ArenaMap, Idx, RawIdx}; - -// FIXME: This isn't really used anymore, at least not in a way where it does anything useful. -// Check if we should get rid of this or make proper use of it instead. -pub(crate) struct Trace<T, V> { - arena: Option<Arena<T>>, - map: Option<ArenaMap<Idx<T>, V>>, - len: u32, -} - -impl<T, V> Trace<T, V> { - #[allow(dead_code)] - pub(crate) fn new_for_arena() -> Trace<T, V> { - Trace { arena: Some(Arena::default()), map: None, len: 0 } - } - - pub(crate) fn new_for_map() -> Trace<T, V> { - Trace { arena: None, map: Some(ArenaMap::default()), len: 0 } - } - - pub(crate) fn alloc(&mut self, value: impl FnOnce() -> V, data: impl FnOnce() -> T) -> Idx<T> { - let id = if let Some(arena) = &mut self.arena { - arena.alloc(data()) - } else { - let id = Idx::<T>::from_raw(RawIdx::from(self.len)); - self.len += 1; - id - }; - - if let Some(map) = &mut self.map { - map.insert(id, value()); - } - id - } - - #[allow(dead_code)] - pub(crate) fn into_arena(mut self) -> Arena<T> { - self.arena.take().unwrap() - } - - pub(crate) fn into_map(mut self) -> ArenaMap<Idx<T>, V> { - self.map.take().unwrap() - } -} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index db0feb055e1..777e4154186 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -4,19 +4,24 @@ use std::{borrow::Cow, fmt, ops}; use base_db::CrateId; use cfg::CfgExpr; use either::Either; -use intern::Interned; -use mbe::{syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode, Punct}; +use intern::{sym, Interned, Symbol}; + +use mbe::{ + desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode, + Punct, +}; use smallvec::{smallvec, SmallVec}; use span::{Span, SyntaxContextId}; use syntax::unescape; -use syntax::{ast, format_smolstr, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; +use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode}; use triomphe::ThinArc; +use crate::name::Name; use crate::{ db::ExpandDatabase, mod_path::ModPath, span_map::SpanMapRef, - tt::{self, Subtree}, + tt::{self, token_to_literal, Subtree}, InFile, }; @@ -52,13 +57,20 @@ impl RawAttrs { } Either::Right(comment) => comment.doc_comment().map(|doc| { let span = span_map.span_for_range(comment.syntax().text_range()); + let (text, kind) = + desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro); Attr { id, input: Some(Box::new(AttrInput::Literal(tt::Literal { - text: SmolStr::new(format_smolstr!("\"{}\"", Self::escape_chars(doc))), + symbol: text, span, + kind, + suffix: None, }))), - path: 
Interned::new(ModPath::from(crate::name!(doc))), + path: Interned::new(ModPath::from(Name::new_symbol( + sym::doc.clone(), + span.ctx, + ))), ctxt: span.ctx, } }), @@ -74,10 +86,6 @@ impl RawAttrs { RawAttrs { entries } } - fn escape_chars(s: &str) -> String { - s.replace('\\', r#"\\"#).replace('"', r#"\""#) - } - pub fn from_attrs_owner( db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>, @@ -115,7 +123,7 @@ impl RawAttrs { pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs { let has_cfg_attrs = self .iter() - .any(|attr| attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr])); + .any(|attr| attr.path.as_ident().map_or(false, |name| *name == sym::cfg_attr.clone())); if !has_cfg_attrs { return self; } @@ -125,7 +133,7 @@ impl RawAttrs { self.iter() .flat_map(|attr| -> SmallVec<[_; 1]> { let is_cfg_attr = - attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]); + attr.path.as_ident().map_or(false, |name| *name == sym::cfg_attr.clone()); if !is_cfg_attr { return smallvec![attr.clone()]; } @@ -234,10 +242,8 @@ impl Attr { })?); let span = span_map.span_for_range(range); let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { - Some(Box::new(AttrInput::Literal(tt::Literal { - text: lit.token().text().into(), - span, - }))) + let token = lit.token(); + Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span)))) } else if let Some(tt) = ast.token_tree() { let tree = syntax_node_to_token_tree( tt.syntax(), @@ -254,8 +260,8 @@ impl Attr { fn from_tt(db: &dyn ExpandDatabase, mut tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> { if matches!(tt, - [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, .. })), ..] - if text == "unsafe" + [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..] + if *sym == sym::unsafe_ ) { match tt.get(1) { Some(tt::TokenTree::Subtree(subtree)) => tt = &subtree.token_trees, @@ -304,26 +310,38 @@ impl Attr { impl Attr { /// #[path = "string"] - pub fn string_value(&self) -> Option<&str> { + pub fn string_value(&self) -> Option<&Symbol> { match self.input.as_deref()? { - AttrInput::Literal(it) => match it.text.strip_prefix('r') { - Some(it) => it.trim_matches('#'), - None => it.text.as_str(), - } - .strip_prefix('"')? - .strip_suffix('"'), + AttrInput::Literal(tt::Literal { + symbol: text, + kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), + .. + }) => Some(text), + _ => None, + } + } + + /// #[path = "string"] + pub fn string_value_with_span(&self) -> Option<(&Symbol, span::Span)> { + match self.input.as_deref()? { + AttrInput::Literal(tt::Literal { + symbol: text, + kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), + span, + suffix: _, + }) => Some((text, *span)), _ => None, } } pub fn string_value_unescape(&self) -> Option<Cow<'_, str>> { match self.input.as_deref()? { - AttrInput::Literal(it) => match it.text.strip_prefix('r') { - Some(it) => { - it.trim_matches('#').strip_prefix('"')?.strip_suffix('"').map(Cow::Borrowed) - } - None => it.text.strip_prefix('"')?.strip_suffix('"').and_then(unescape), - }, + AttrInput::Literal(tt::Literal { + symbol: text, kind: tt::LitKind::StrRaw(_), .. + }) => Some(Cow::Borrowed(text.as_str())), + AttrInput::Literal(tt::Literal { symbol: text, kind: tt::LitKind::Str, .. }) => { + unescape(text.as_str()) + } _ => None, } } @@ -369,7 +387,7 @@ impl Attr { } pub fn cfg(&self) -> Option<CfgExpr> { - if *self.path.as_ident()? == crate::name![cfg] { + if *self.path.as_ident()? 
== sym::cfg.clone() { self.token_tree_value().map(CfgExpr::parse) } else { None diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs new file mode 100644 index 00000000000..252430e4e95 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs @@ -0,0 +1,15 @@ +//! Builtin macros and attributes +#[macro_use] +mod quote; + +mod attr_macro; +mod derive_macro; +mod fn_macro; + +pub use self::{ + attr_macro::{find_builtin_attr, pseudo_derive_attr_expansion, BuiltinAttrExpander}, + derive_macro::{find_builtin_derive, BuiltinDeriveExpander}, + fn_macro::{ + find_builtin_macro, include_input_to_file_id, BuiltinFnLikeExpander, EagerExpander, + }, +}; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs index 2e115f47932..b9afc666f75 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs @@ -1,4 +1,5 @@ //! Builtin attributes. +use intern::sym; use span::{MacroCallId, Span}; use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind}; @@ -19,7 +20,7 @@ macro_rules! register_builtin { fn find_by_name(name: &name::Name) -> Option<Self> { match name { - $( id if id == &name::name![$name] => Some(BuiltinAttrExpander::$variant), )* + $( id if id == &sym::$name => Some(BuiltinAttrExpander::$variant), )* _ => None, } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs index c7cdc5e9220..f560d3bfd1d 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs @@ -1,5 +1,6 @@ //! Builtin derives. +use intern::sym; use itertools::izip; use mbe::DocCommentDesugarMode; use rustc_hash::FxHashSet; @@ -8,18 +9,17 @@ use stdx::never; use tracing::debug; use crate::{ + builtin::quote::{dollar_crate, quote}, + db::ExpandDatabase, hygiene::span_with_def_site_ctxt, - name::{AsName, Name}, - quote::dollar_crate, + name::{self, AsName, Name}, span_map::ExpansionSpanMap, - tt, + tt, ExpandError, ExpandResult, }; use syntax::ast::{ self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds, }; -use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult}; - macro_rules! register_builtin { ( $($trait:ident => $expand:ident),* ) => { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -36,7 +36,7 @@ macro_rules! 
register_builtin { fn find_by_name(name: &name::Name) -> Option<Self> { match name { - $( id if id == &name::name![$trait] => Some(BuiltinDeriveExpander::$trait), )* + $( id if id == &sym::$trait => Some(BuiltinDeriveExpander::$trait), )* _ => None, } } @@ -81,7 +81,7 @@ enum VariantShape { } fn tuple_field_iterator(span: Span, n: usize) -> impl Iterator<Item = tt::Ident> { - (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span)) + (0..n).map(move |it| tt::Ident::new(&format!("f{it}"), span)) } impl VariantShape { @@ -128,13 +128,17 @@ impl VariantShape { } } - fn from(tm: &ExpansionSpanMap, value: Option<FieldList>) -> Result<Self, ExpandError> { + fn from( + call_site: Span, + tm: &ExpansionSpanMap, + value: Option<FieldList>, + ) -> Result<Self, ExpandError> { let r = match value { None => VariantShape::Unit, Some(FieldList::RecordFieldList(it)) => VariantShape::Struct( it.fields() .map(|it| it.name()) - .map(|it| name_to_token(tm, it)) + .map(|it| name_to_token(call_site, tm, it)) .collect::<Result<_, _>>()?, ), Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()), @@ -208,19 +212,20 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr let (parsed, tm) = &mbe::token_tree_to_syntax_node( tt, mbe::TopEntryPoint::MacroItems, - parser::Edition::CURRENT, + parser::Edition::CURRENT_FIXME, ); let macro_items = ast::MacroItems::cast(parsed.syntax_node()) - .ok_or_else(|| ExpandError::other("invalid item definition"))?; - let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?; + .ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?; + let item = + macro_items.items().next().ok_or_else(|| ExpandError::other(call_site, "no item found"))?; let adt = &ast::Adt::cast(item.syntax().clone()) - .ok_or_else(|| ExpandError::other("expected struct, enum or union"))?; + .ok_or_else(|| ExpandError::other(call_site, "expected struct, enum or union"))?; let (name, generic_param_list, where_clause, shape) = match adt { ast::Adt::Struct(it) => ( it.name(), it.generic_param_list(), it.where_clause(), - AdtShape::Struct(VariantShape::from(tm, it.field_list())?), + AdtShape::Struct(VariantShape::from(call_site, tm, it.field_list())?), ), ast::Adt::Enum(it) => { let default_variant = it @@ -240,8 +245,8 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr .flat_map(|it| it.variants()) .map(|it| { Ok(( - name_to_token(tm, it.name())?, - VariantShape::from(tm, it.field_list())?, + name_to_token(call_site, tm, it.name())?, + VariantShape::from(call_site, tm, it.field_list())?, )) }) .collect::<Result<_, ExpandError>>()?, @@ -356,20 +361,22 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr ) }) .collect(); - let name_token = name_to_token(tm, name)?; + let name_token = name_to_token(call_site, tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, where_clause, associated_types }) } fn name_to_token( + call_site: Span, token_map: &ExpansionSpanMap, name: Option<ast::Name>, ) -> Result<tt::Ident, ExpandError> { let name = name.ok_or_else(|| { debug!("parsed item has no name"); - ExpandError::other("missing name") + ExpandError::other(call_site, "missing name") })?; let span = token_map.span_at(name.syntax().text_range().start()); - let name_token = tt::Ident { span, text: name.text().into() }; + + let name_token = tt::Ident::new(name.text().as_ref(), span); Ok(name_token) } @@ -691,14 +698,14 @@ fn partial_eq_expand(span: 
Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> } [first, rest @ ..] => { let rest = rest.iter().map(|it| { - let t1 = tt::Ident::new(format!("{}_self", it.text), it.span); - let t2 = tt::Ident::new(format!("{}_other", it.text), it.span); + let t1 = tt::Ident::new(&format!("{}_self", it.sym), it.span); + let t2 = tt::Ident::new(&format!("{}_other", it.sym), it.span); let and_and = and_and(span); quote!(span =>#and_and #t1 .eq( #t2 )) }); let first = { - let t1 = tt::Ident::new(format!("{}_self", first.text), first.span); - let t2 = tt::Ident::new(format!("{}_other", first.text), first.span); + let t1 = tt::Ident::new(&format!("{}_self", first.sym), first.span); + let t2 = tt::Ident::new(&format!("{}_other", first.sym), first.span); quote!(span =>#t1 .eq( #t2 )) }; quote!(span =>#first ##rest) @@ -728,7 +735,7 @@ fn self_and_other_patterns( let self_patterns = adt.shape.as_pattern_map( name, |it| { - let t = tt::Ident::new(format!("{}_self", it.text), it.span); + let t = tt::Ident::new(&format!("{}_self", it.sym), it.span); quote!(span =>#t) }, span, @@ -736,7 +743,7 @@ fn self_and_other_patterns( let other_patterns = adt.shape.as_pattern_map( name, |it| { - let t = tt::Ident::new(format!("{}_other", it.text), it.span); + let t = tt::Ident::new(&format!("{}_other", it.sym), it.span); quote!(span =>#t) }, span, @@ -774,8 +781,8 @@ fn ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> { |(pat1, pat2, fields)| { let mut body = quote!(span =>#krate::cmp::Ordering::Equal); for f in fields.into_iter().rev() { - let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); - let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); + let t1 = tt::Ident::new(&format!("{}_self", f.sym), f.span); + let t2 = tt::Ident::new(&format!("{}_other", f.sym), f.span); body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span); } let fat_arrow = fat_arrow(span); @@ -836,8 +843,8 @@ fn partial_ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> let mut body = quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); for f in fields.into_iter().rev() { - let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); - let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); + let t1 = tt::Ident::new(&format!("{}_self", f.sym), f.span); + let t2 = tt::Ident::new(&format!("{}_other", f.sym), f.span); body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span); } let fat_arrow = fat_arrow(span); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 02fd431e4e7..7903ac075be 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -1,21 +1,24 @@ //! 
Builtin macro -use base_db::{AnchoredPath, FileId}; +use base_db::AnchoredPath; use cfg::CfgExpr; use either::Either; -use itertools::Itertools; -use mbe::{parse_exprs_with_sep, parse_to_token_tree}; -use span::{Edition, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; -use syntax::ast::{self, AstToken}; +use intern::{sym, Symbol}; +use mbe::{parse_exprs_with_sep, parse_to_token_tree, DelimiterKind}; +use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; +use stdx::format_to; +use syntax::{ + format_smolstr, + unescape::{unescape_byte, unescape_char, unescape_unicode, Mode}, +}; use crate::{ + builtin::quote::{dollar_crate, quote}, db::ExpandDatabase, hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt}, - name::{self, known}, - quote, - quote::dollar_crate, + name, tt::{self, DelimSpan}, - ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroFileIdExt, + ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId, }; macro_rules! register_builtin { @@ -31,7 +34,7 @@ macro_rules! register_builtin { } impl BuiltinFnLikeExpander { - pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> { + fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> { match *self { $( BuiltinFnLikeExpander::$kind => $expand, )* } @@ -39,7 +42,7 @@ macro_rules! register_builtin { } impl EagerExpander { - pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> { + fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> { match *self { $( EagerExpander::$e_kind => $e_expand, )* } @@ -48,8 +51,8 @@ macro_rules! register_builtin { fn find_by_name(ident: &name::Name) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> { match ident { - $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )* - $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )* + $( id if id == &sym::$name => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )* + $( id if id == &sym::$e_name => Some(Either::Right(EagerExpander::$e_kind)), )* _ => return None, } } @@ -142,7 +145,7 @@ register_builtin! { } fn mk_pound(span: Span) -> tt::Subtree { - crate::quote::IntoTt::to_subtree( + crate::builtin::quote::IntoTt::to_subtree( vec![crate::tt::Leaf::Punct(crate::tt::Punct { char: '#', spacing: crate::tt::Spacing::Alone, @@ -177,8 +180,10 @@ fn line_expand( ExpandResult::ok(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(span), token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { - text: "0u32".into(), + symbol: sym::INTEGER_0.clone(), span, + kind: tt::LitKind::Integer, + suffix: Some(sym::u32.clone()), }))]), }) } @@ -223,7 +228,7 @@ fn assert_expand( span: Span, ) -> ExpandResult<tt::Subtree> { let call_site_span = span_with_call_site_ctxt(db, span, id); - let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT); + let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME); let dollar_crate = dollar_crate(span); let expanded = match &*args { [cond, panic_args @ ..] => { @@ -262,7 +267,7 @@ fn file_expand( ) -> ExpandResult<tt::Subtree> { // FIXME: RA purposefully lacks knowledge of absolute file names // so just return "". - let file_name = ""; + let file_name = "file"; let expanded = quote! 
{span => #file_name @@ -272,34 +277,36 @@ fn file_expand( } fn format_args_expand( - db: &dyn ExpandDatabase, - id: MacroCallId, + _db: &dyn ExpandDatabase, + _id: MacroCallId, tt: &tt::Subtree, span: Span, ) -> ExpandResult<tt::Subtree> { - format_args_expand_general(db, id, tt, "", span) + let pound = mk_pound(span); + let mut tt = tt.clone(); + tt.delimiter.kind = tt::DelimiterKind::Parenthesis; + ExpandResult::ok(quote! {span => + builtin #pound format_args #tt + }) } fn format_args_nl_expand( - db: &dyn ExpandDatabase, - id: MacroCallId, - tt: &tt::Subtree, - span: Span, -) -> ExpandResult<tt::Subtree> { - format_args_expand_general(db, id, tt, "\\n", span) -} - -fn format_args_expand_general( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, - // FIXME: Make use of this so that mir interpretation works properly - _end_string: &str, span: Span, ) -> ExpandResult<tt::Subtree> { let pound = mk_pound(span); let mut tt = tt.clone(); tt.delimiter.kind = tt::DelimiterKind::Parenthesis; + if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + kind: tt::LitKind::Str, + .. + }))) = tt.token_trees.first_mut() + { + *text = Symbol::intern(&format_smolstr!("{}\\n", text.as_str())); + } ExpandResult::ok(quote! {span => builtin #pound format_args #tt }) @@ -367,8 +374,11 @@ fn panic_expand( let dollar_crate = dollar_crate(span); let call_site_span = span_with_call_site_ctxt(db, span, id); - let mac = - if use_panic_2021(db, call_site_span) { known::panic_2021 } else { known::panic_2015 }; + let mac = if use_panic_2021(db, call_site_span) { + sym::panic_2021.clone() + } else { + sym::panic_2015.clone() + }; // Expand to a macro call `$crate::panic::panic_{edition}` let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!); @@ -397,9 +407,9 @@ fn unreachable_expand( let call_site_span = span_with_call_site_ctxt(db, span, id); let mac = if use_panic_2021(db, call_site_span) { - known::unreachable_2021 + sym::unreachable_2021.clone() } else { - known::unreachable_2015 + sym::unreachable_2015.clone() }; // Expand to a macro call `$crate::panic::panic_{edition}` @@ -432,7 +442,7 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool { // FIXME: Record allow_internal_unstable in the macro def (not been done yet because it // would consume quite a bit extra memory for all call locs...) 
// if let Some(features) = expn.def.allow_internal_unstable { - // if features.iter().any(|&f| f == sym::edition_panic) { + // if features.iter().any(|&f| f == sym::edition_panic.clone()) { // span = expn.call_site; // continue; // } @@ -441,27 +451,6 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool { } } -fn unquote_str(lit: &tt::Literal) -> Option<(String, Span)> { - let span = lit.span; - let lit = ast::make::tokens::literal(&lit.to_string()); - let token = ast::String::cast(lit)?; - token.value().ok().map(|it| (it.into_owned(), span)) -} - -fn unquote_char(lit: &tt::Literal) -> Option<(char, Span)> { - let span = lit.span; - let lit = ast::make::tokens::literal(&lit.to_string()); - let token = ast::Char::cast(lit)?; - token.value().ok().zip(Some(span)) -} - -fn unquote_byte_string(lit: &tt::Literal) -> Option<(Vec<u8>, Span)> { - let span = lit.span; - let lit = ast::make::tokens::literal(&lit.to_string()); - let token = ast::ByteString::cast(lit)?; - token.value().ok().map(|it| (it.into_owned(), span)) -} - fn compile_error_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, @@ -469,11 +458,13 @@ fn compile_error_expand( span: Span, ) -> ExpandResult<tt::Subtree> { let err = match &*tt.token_trees { - [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { - Some((unquoted, _)) => ExpandError::other(unquoted.into_boxed_str()), - None => ExpandError::other("`compile_error!` argument must be a string"), - }, - _ => ExpandError::other("`compile_error!` argument must be a string"), + [tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span: _, + kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), + suffix: _, + }))] => ExpandError::other(span, Box::from(unescape_str(text).as_str())), + _ => ExpandError::other(span, "`compile_error!` argument must be a string"), }; ExpandResult { value: quote! {span =>}, err: Some(err) } @@ -483,7 +474,7 @@ fn concat_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, - _: Span, + call_site: Span, ) -> ExpandResult<tt::Subtree> { let mut err = None; let mut text = String::new(); @@ -504,32 +495,49 @@ fn concat_expand( } } } - match t { tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => { // concat works with string and char literals, so remove any quotes. // It also works with integer, float and boolean literals, so just use the rest // as-is. 
- if let Some((c, span)) = unquote_char(it) { - text.push(c); - record_span(span); - } else { - let (component, span) = - unquote_str(it).unwrap_or_else(|| (it.text.to_string(), it.span)); - text.push_str(&component); - record_span(span); + match it.kind { + tt::LitKind::Char => { + if let Ok(c) = unescape_char(it.symbol.as_str()) { + text.extend(c.escape_default()); + } + record_span(it.span); + } + tt::LitKind::Integer | tt::LitKind::Float => { + format_to!(text, "{}", it.symbol.as_str()) + } + tt::LitKind::Str => { + text.push_str(it.symbol.as_str()); + record_span(it.span); + } + tt::LitKind::StrRaw(_) => { + format_to!(text, "{}", it.symbol.as_str().escape_debug()); + record_span(it.span); + } + tt::LitKind::Byte + | tt::LitKind::ByteStr + | tt::LitKind::ByteStrRaw(_) + | tt::LitKind::CStr + | tt::LitKind::CStrRaw(_) + | tt::LitKind::Err(_) => { + err = Some(ExpandError::other(it.span, "unexpected literal")) + } } } // handle boolean literals tt::TokenTree::Leaf(tt::Leaf::Ident(id)) - if i % 2 == 0 && (id.text == "true" || id.text == "false") => + if i % 2 == 0 && (id.sym == sym::true_ || id.sym == sym::false_) => { - text.push_str(id.text.as_str()); + text.push_str(id.sym.as_str()); record_span(id.span); } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), _ => { - err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + err.get_or_insert(ExpandError::other(call_site, "unexpected token")); } } } @@ -543,7 +551,7 @@ fn concat_bytes_expand( tt: &tt::Subtree, call_site: Span, ) -> ExpandResult<tt::Subtree> { - let mut bytes = Vec::new(); + let mut bytes = String::new(); let mut err = None; let mut span: Option<Span> = None; let mut record_span = |s: Span| match &mut span { @@ -553,84 +561,97 @@ fn concat_bytes_expand( }; for (i, t) in tt.token_trees.iter().enumerate() { match t { - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { - let token = ast::make::tokens::literal(&lit.to_string()); - record_span(lit.span); - match token.kind() { - syntax::SyntaxKind::BYTE => bytes.push(token.text().to_owned()), - syntax::SyntaxKind::BYTE_STRING => { - let components = unquote_byte_string(lit).map_or(vec![], |(it, _)| it); - components.into_iter().for_each(|it| bytes.push(it.to_string())); + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind, + suffix: _, + })) => { + record_span(*span); + match kind { + tt::LitKind::Byte => { + if let Ok(b) = unescape_byte(text.as_str()) { + bytes.extend( + b.escape_ascii().filter_map(|it| char::from_u32(it as u32)), + ); + } + } + tt::LitKind::ByteStr => { + bytes.push_str(text.as_str()); + } + tt::LitKind::ByteStrRaw(_) => { + bytes.extend(text.as_str().escape_debug()); } _ => { - err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + err.get_or_insert(ExpandError::other(*span, "unexpected token")); break; } } } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => { - if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span) { + if let Err(e) = + concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span, call_site) + { err.get_or_insert(e); break; } } _ => { - err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + err.get_or_insert(ExpandError::other(call_site, "unexpected token")); break; } } } - let value = tt::Subtree { - delimiter: tt::Delimiter { - open: call_site, - close: call_site, - kind: tt::DelimiterKind::Bracket, - }, - 
token_trees: { - Itertools::intersperse_with( - bytes.into_iter().map(|it| { - tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { - text: it.into(), - span: span.unwrap_or(call_site), - })) - }), - || { - tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - char: ',', - spacing: tt::Spacing::Alone, - span: call_site, - })) - }, - ) - .collect() + let span = span.unwrap_or(tt.delimiter.open); + ExpandResult { + value: tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(span), + token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: Symbol::intern(&bytes), + span, + kind: tt::LitKind::ByteStr, + suffix: None, + }))] + .into(), }, - }; - ExpandResult { value, err } + err, + } } fn concat_bytes_expand_subtree( tree: &tt::Subtree, - bytes: &mut Vec<String>, + bytes: &mut String, mut record_span: impl FnMut(Span), + err_span: Span, ) -> Result<(), ExpandError> { for (ti, tt) in tree.token_trees.iter().enumerate() { match tt { - tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => { - let lit = ast::make::tokens::literal(&it.to_string()); - match lit.kind() { - syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => { - record_span(it.span); - bytes.push(lit.text().to_owned()) - } - _ => { - return Err(mbe::ExpandError::UnexpectedToken.into()); - } + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind: tt::LitKind::Byte, + suffix: _, + })) => { + if let Ok(b) = unescape_byte(text.as_str()) { + bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32))); + } + record_span(*span); + } + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind: tt::LitKind::Integer, + suffix: _, + })) => { + record_span(*span); + if let Ok(b) = text.as_str().parse::<u8>() { + bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32))); } } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (), _ => { - return Err(mbe::ExpandError::UnexpectedToken.into()); + return Err(ExpandError::other(err_span, "unexpected token")); } } } @@ -648,16 +669,16 @@ fn concat_idents_expand( for (i, t) in tt.token_trees.iter().enumerate() { match t { tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => { - ident.push_str(id.text.as_str()); + ident.push_str(id.sym.as_str()); } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), _ => { - err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + err.get_or_insert(ExpandError::other(span, "unexpected token")); } } } // FIXME merge spans - let ident = tt::Ident { text: ident.into(), span }; + let ident = tt::Ident { sym: Symbol::intern(&ident), span, is_raw: tt::IdentIsRaw::No }; ExpandResult { value: quote!(span =>#ident), err } } @@ -666,28 +687,64 @@ fn relative_file( call_id: MacroCallId, path_str: &str, allow_recursion: bool, -) -> Result<FileId, ExpandError> { - let call_site = call_id.as_macro_file().parent(db).original_file_respecting_includes(db); + err_span: Span, +) -> Result<EditionedFileId, ExpandError> { + let lookup = call_id.lookup(db); + let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(); let path = AnchoredPath { anchor: call_site, path: path_str }; let res = db .resolve_path(path) - .ok_or_else(|| ExpandError::other(format!("failed to load file `{path_str}`")))?; + .ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?; // Prevent include itself if res == call_site && !allow_recursion { - Err(ExpandError::other(format!("recursive 
inclusion of `{path_str}`"))) + Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`"))) } else { - Ok(res) + Ok(EditionedFileId::new(res, db.crate_graph()[lookup.krate].edition)) } } -fn parse_string(tt: &tt::Subtree) -> Result<(String, Span), ExpandError> { +fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> { tt.token_trees .first() + .ok_or(tt.delimiter.open.cover(tt.delimiter.close)) .and_then(|tt| match tt { - tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it), - _ => None, + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind: tt::LitKind::Str, + suffix: _, + })) => Ok((unescape_str(text), *span)), + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind: tt::LitKind::StrRaw(_), + suffix: _, + })) => Ok((text.clone(), *span)), + // FIXME: We wrap expression fragments in parentheses which can break this expectation + // here + // Remove this once we handle none delims correctly + tt::TokenTree::Subtree(tt) if tt.delimiter.kind == DelimiterKind::Parenthesis => { + tt.token_trees.first().and_then(|tt| match tt { + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind: tt::LitKind::Str, + suffix: _, + })) => Some((unescape_str(text), *span)), + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind: tt::LitKind::StrRaw(_), + suffix: _, + })) => Some((text.clone(), *span)), + _ => None, + }) + } + .ok_or(tt.delimiter.open.cover(tt.delimiter.close)), + ::tt::TokenTree::Leaf(l) => Err(*l.span()), + ::tt::TokenTree::Subtree(tt) => Err(tt.delimiter.open.cover(tt.delimiter.close)), }) - .ok_or(mbe::ExpandError::ConversionError.into()) + .map_err(|span| ExpandError::other(span, "expected string literal")) } fn include_expand( @@ -703,14 +760,15 @@ fn include_expand( } }; match parse_to_token_tree( + file_id.edition(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, SyntaxContextId::ROOT, - &db.file_text(file_id), + &db.file_text(file_id.file_id()), ) { Some(it) => ExpandResult::ok(it), None => ExpandResult::new( tt::Subtree::empty(DelimSpan { open: span, close: span }), - ExpandError::other("failed to parse included file"), + ExpandError::other(span, "failed to parse included file"), ), } } @@ -719,8 +777,9 @@ pub fn include_input_to_file_id( db: &dyn ExpandDatabase, arg_id: MacroCallId, arg: &tt::Subtree, -) -> Result<FileId, ExpandError> { - relative_file(db, arg_id, &parse_string(arg)?.0, false) +) -> Result<EditionedFileId, ExpandError> { + let (s, span) = parse_string(arg)?; + relative_file(db, arg_id, s.as_str(), false, span) } fn include_bytes_expand( @@ -733,8 +792,10 @@ fn include_bytes_expand( let res = tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(span), token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { - text: r#"b"""#.into(), + symbol: Symbol::empty(), span, + kind: tt::LitKind::ByteStrRaw(1), + suffix: None, }))]), }; ExpandResult::ok(res) @@ -757,22 +818,22 @@ fn include_str_expand( // it's unusual to `include_str!` a Rust file), but we can return an empty string. // Ideally, we'd be able to offer a precise expansion if the user asks for macro // expansion. 
- let file_id = match relative_file(db, arg_id, &path, true) { + let file_id = match relative_file(db, arg_id, path.as_str(), true, span) { Ok(file_id) => file_id, Err(_) => { return ExpandResult::ok(quote!(span =>"")); } }; - let text = db.file_text(file_id); + let text = db.file_text(file_id.file_id()); let text = &*text; ExpandResult::ok(quote!(span =>#text)) } -fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> { +fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &Symbol) -> Option<String> { let krate = db.lookup_intern_macro_call(arg_id).krate; - db.crate_graph()[krate].env.get(key) + db.crate_graph()[krate].env.get(key.as_str()).map(|it| it.escape_debug().to_string()) } fn env_expand( @@ -792,8 +853,11 @@ fn env_expand( let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| { // The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid // unnecessary diagnostics for eg. `CARGO_PKG_NAME`. - if key == "OUT_DIR" { - err = Some(ExpandError::other(r#"`OUT_DIR` not set, enable "build scripts" to fix"#)); + if key.as_str() == "OUT_DIR" { + err = Some(ExpandError::other( + span, + r#"`OUT_DIR` not set, enable "build scripts" to fix"#, + )); } // If the variable is unset, still return a dummy string to help type inference along. @@ -842,6 +906,21 @@ fn quote_expand( ) -> ExpandResult<tt::Subtree> { ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), - ExpandError::other("quote! is not implemented"), + ExpandError::other(span, "quote! is not implemented"), ) } + +fn unescape_str(s: &Symbol) -> Symbol { + if s.as_str().contains('\\') { + let s = s.as_str(); + let mut buf = String::with_capacity(s.len()); + unescape_unicode(s, Mode::Str, &mut |_, c| { + if let Ok(c) = c { + buf.push(c) + } + }); + Symbol::intern(&buf) + } else { + s.clone() + } +} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs index 8f1e32321e1..5c33f817f9e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs @@ -1,13 +1,14 @@ //! A simplified version of quote-crate like quasi quote macro #![allow(clippy::crate_in_macro_def)] +use intern::{sym, Symbol}; use span::Span; -use syntax::format_smolstr; +use tt::IdentIsRaw; use crate::name::Name; -pub(crate) const fn dollar_crate(span: Span) -> tt::Ident<Span> { - tt::Ident { text: syntax::SmolStr::new_static("$crate"), span } +pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> { + tt::Ident { sym: sym::dollar_crate.clone(), span, is_raw: tt::IdentIsRaw::No } } // A helper macro quote macro @@ -16,22 +17,21 @@ pub(crate) const fn dollar_crate(span: Span) -> tt::Ident<Span> { // 2. #()* pattern repetition not supported now // * But we can do it manually, see `test_quote_derive_copy_hack` #[doc(hidden)] -#[macro_export] -macro_rules! __quote { +macro_rules! 
quote_impl__ { ($span:ident) => { Vec::<$crate::tt::TokenTree>::new() }; ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => { { - let children = $crate::__quote!($span $($tt)*); + let children = $crate::builtin::quote::__quote!($span $($tt)*); $crate::tt::Subtree { delimiter: crate::tt::Delimiter { kind: crate::tt::DelimiterKind::$delim, open: $span, close: $span, }, - token_trees: $crate::quote::IntoTt::to_tokens(children).into_boxed_slice(), + token_trees: $crate::builtin::quote::IntoTt::to_tokens(children).into_boxed_slice(), } } }; @@ -68,9 +68,9 @@ macro_rules! __quote { // hash variable ($span:ident # $first:ident $($tail:tt)* ) => { { - let token = $crate::quote::ToTokenTree::to_token($first, $span); + let token = $crate::builtin::quote::ToTokenTree::to_token($first, $span); let mut tokens = vec![token.into()]; - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); + let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens } @@ -78,64 +78,66 @@ macro_rules! __quote { ($span:ident ## $first:ident $($tail:tt)* ) => { { - let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>(); - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); + let mut tokens = $first.into_iter().map(|it| $crate::builtin::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>(); + let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens } }; // Brace - ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) }; + ($span:ident { $($tt:tt)* } ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Brace $($tt)*) }; // Bracket - ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) }; + ($span:ident [ $($tt:tt)* ] ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Bracket $($tt)*) }; // Parenthesis - ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) }; + ($span:ident ( $($tt:tt)* ) ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Parenthesis $($tt)*) }; // Literal - ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] }; + ($span:ident $tt:literal ) => { vec![$crate::builtin::quote::ToTokenTree::to_token($tt, $span).into()] }; // Ident ($span:ident $tt:ident ) => { vec![ { crate::tt::Leaf::Ident(crate::tt::Ident { - text: stringify!($tt).into(), + sym: intern::Symbol::intern(stringify!($tt)), span: $span, + is_raw: tt::IdentIsRaw::No, }).into() }] }; // Puncts // FIXME: Not all puncts are handled - ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')}; - ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')}; - ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')}; - ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')}; - ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')}; - ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')}; - ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')}; - ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')}; - ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')}; - ($span:ident ! 
) => {$crate::__quote!(@PUNCT($span) '!')}; + ($span:ident -> ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '-', '>')}; + ($span:ident & ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '&')}; + ($span:ident , ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ',')}; + ($span:ident : ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ':')}; + ($span:ident ; ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ';')}; + ($span:ident :: ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ':', ':')}; + ($span:ident . ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '.')}; + ($span:ident < ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '<')}; + ($span:ident > ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '>')}; + ($span:ident ! ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '!')}; ($span:ident $first:tt $($tail:tt)+ ) => { { - let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first )); - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); + let mut tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $first )); + let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens } }; } +pub(super) use quote_impl__ as __quote; /// FIXME: /// It probably should implement in proc-macro -#[macro_export] -macro_rules! quote { +macro_rules! quote_impl { ($span:ident=> $($tt:tt)* ) => { - $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span) + $crate::builtin::quote::IntoTt::to_subtree($crate::builtin::quote::__quote!($span $($tt)*), $span) } } +pub(super) use quote_impl as quote; pub(crate) trait IntoTt { fn to_subtree(self, span: Span) -> crate::tt::Subtree; @@ -175,12 +177,6 @@ impl ToTokenTree for crate::tt::TokenTree { } } -impl ToTokenTree for &crate::tt::TokenTree { - fn to_token(self, _: Span) -> crate::tt::TokenTree { - self.clone() - } -} - impl ToTokenTree for crate::tt::Subtree { fn to_token(self, _: Span) -> crate::tt::TokenTree { self.into() @@ -196,42 +192,57 @@ macro_rules! impl_to_to_tokentrees { leaf.into() } } - - impl ToTokenTree for &$ty { - fn to_token($this, $span: Span) -> crate::tt::TokenTree { - let leaf: crate::tt::Leaf = $im.clone().into(); - leaf.into() - } - } )* } } +impl<T: ToTokenTree + Clone> ToTokenTree for &T { + fn to_token(self, span: Span) -> crate::tt::TokenTree { + self.clone().to_token(span) + } +} + impl_to_to_tokentrees! 
{ - span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} }; - span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} }; - span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} }; - span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} }; + span: u32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } }; + span: usize => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } }; + span: i32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } }; + span: bool => self { crate::tt::Ident{sym: if self { sym::true_.clone() } else { sym::false_.clone() }, span, is_raw: tt::IdentIsRaw::No } }; _span: crate::tt::Leaf => self { self }; _span: crate::tt::Literal => self { self }; _span: crate::tt::Ident => self { self }; _span: crate::tt::Punct => self { self }; - span: &str => self { crate::tt::Literal{text: format_smolstr!("\"{}\"", self.escape_default()), span}}; - span: String => self { crate::tt::Literal{text: format_smolstr!("\"{}\"", self.escape_default()), span}}; - span: Name => self { crate::tt::Ident{text: self.to_smol_str(), span}}; + span: &str => self { crate::tt::Literal{symbol: Symbol::intern(self), span, kind: tt::LitKind::Str, suffix: None }}; + span: String => self { crate::tt::Literal{symbol: Symbol::intern(&self), span, kind: tt::LitKind::Str, suffix: None }}; + span: Name => self { + let (is_raw, s) = IdentIsRaw::split_from_symbol(self.as_str()); + crate::tt::Ident{sym: Symbol::intern(s), span, is_raw } + }; + span: Symbol => self { + let (is_raw, s) = IdentIsRaw::split_from_symbol(self.as_str()); + crate::tt::Ident{sym: Symbol::intern(s), span, is_raw } + }; } #[cfg(test)] mod tests { use crate::tt; - use base_db::FileId; + use ::tt::IdentIsRaw; use expect_test::expect; + use intern::Symbol; use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use syntax::{TextRange, TextSize}; + use super::quote; + const DUMMY: tt::Span = tt::Span { range: TextRange::empty(TextSize::new(0)), - anchor: SpanAnchor { file_id: FileId::from_raw(0xe4e4e), ast_id: ROOT_ERASED_FILE_AST_ID }, + anchor: SpanAnchor { + file_id: span::EditionedFileId::new( + span::FileId::from_raw(0xe4e4e), + span::Edition::CURRENT, + ), + ast_id: ROOT_ERASED_FILE_AST_ID, + }, ctx: SyntaxContextId::ROOT, }; @@ -257,7 +268,8 @@ mod tests { } fn mk_ident(name: &str) -> crate::tt::Ident { - crate::tt::Ident { text: name.into(), span: DUMMY } + let (is_raw, s) = IdentIsRaw::split_from_symbol(name); + crate::tt::Ident { sym: Symbol::intern(s), span: DUMMY, is_raw } } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs index 55ae19068f9..147cf912da1 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs @@ -3,13 +3,13 @@ use std::iter::Peekable; use base_db::CrateId; use cfg::{CfgAtom, CfgExpr}; +use intern::{sym, Symbol}; use rustc_hash::FxHashSet; use syntax::{ ast::{self, Attr, HasAttrs, Meta, VariantList}, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T, }; use tracing::{debug, warn}; -use tt::SmolStr; use crate::{db::ExpandDatabase, proc_macro::ProcMacroKind, MacroCallLoc, MacroDefKind}; @@ -263,13 +263,13 @@ where let name = match iter.next() { None => 
return None, Some(NodeOrToken::Token(element)) => match element.kind() { - syntax::T![ident] => SmolStr::new(element.text()), + syntax::T![ident] => Symbol::intern(element.text()), _ => return Some(CfgExpr::Invalid), }, Some(_) => return Some(CfgExpr::Invalid), }; - let result = match name.as_str() { - "all" | "any" | "not" => { + let result = match &name { + s if [&sym::all, &sym::any, &sym::not].contains(&s) => { let mut preds = Vec::new(); let Some(NodeOrToken::Node(tree)) = iter.next() else { return Some(CfgExpr::Invalid); @@ -286,10 +286,12 @@ where preds.push(pred); } } - let group = match name.as_str() { - "all" => CfgExpr::All(preds), - "any" => CfgExpr::Any(preds), - "not" => CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))), + let group = match &name { + s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()), + s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()), + s if *s == sym::not => { + CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))) + } _ => unreachable!(), }; Some(group) @@ -302,8 +304,10 @@ where if (value_token.kind() == syntax::SyntaxKind::STRING) => { let value = value_token.text(); - let value = SmolStr::new(value.trim_matches('"')); - Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name, value })) + Some(CfgExpr::Atom(CfgAtom::KeyValue { + key: name, + value: Symbol::intern(value.trim_matches('"')), + })) } _ => None, } @@ -339,7 +343,7 @@ mod tests { assert_eq!(node.syntax().text_range().start(), 0.into()); let cfg = parse_from_attr_meta(node.meta().unwrap()).unwrap(); - let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); + let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); expect.assert_eq(&actual); } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs index 08491db3726..1a3dd0e7ddb 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs @@ -25,8 +25,7 @@ impl ChangeWithProcMacros { pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) { self.source_change.apply(db); - if let Some(mut proc_macros) = self.proc_macros { - proc_macros.shrink_to_fit(); + if let Some(proc_macros) = self.proc_macros { db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } if let Some(target_data_layouts) = self.target_data_layouts { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index ad25a1168c4..01a35660a90 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -1,18 +1,17 @@ //! Defines database & queries for macro expansion. 
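The cfg_process.rs changes above switch cfg-attribute parsing from SmolStr comparisons to interned Symbols and box the predicate lists. A minimal sketch of the value shape this now produces, not part of the diff and using only the constructors visible in that hunk:

use cfg::{CfgAtom, CfgExpr};
use intern::Symbol;

// Roughly what a `cfg(all(target_os = "linux", feature = "serde"))` attribute
// parses into under the new Symbol-based representation.
fn example_cfg() -> CfgExpr {
    CfgExpr::All(
        vec![
            CfgExpr::Atom(CfgAtom::KeyValue {
                key: Symbol::intern("target_os"),
                value: Symbol::intern("linux"),
            }),
            CfgExpr::Atom(CfgAtom::KeyValue {
                key: Symbol::intern("feature"),
                value: Symbol::intern("serde"),
            }),
        ]
        .into_boxed_slice(),
    )
}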
-use base_db::{salsa, CrateId, FileId, SourceDatabase}; +use base_db::{salsa, CrateId, SourceDatabase}; use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex}; use rustc_hash::FxHashSet; -use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId}; +use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId}; use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T}; use triomphe::Arc; use crate::{ attrs::{collect_attrs, AttrId}, - builtin_attr_macro::pseudo_derive_attr_expansion, - builtin_fn_macro::EagerExpander, + builtin::pseudo_derive_attr_expansion, cfg_process, declarative::DeclarativeMacroExpander, fixup::{self, SyntaxFixupUndoInfo}, @@ -20,9 +19,9 @@ use crate::{ proc_macro::ProcMacros, span_map::{RealSpanMap, SpanMap, SpanMapRef}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, - CustomProcMacroExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, - HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, - MacroFileId, + CustomProcMacroExpander, EagerCallInfo, EagerExpander, ExpandError, ExpandResult, ExpandTo, + ExpansionSpanMap, HirFileId, HirFileIdRepr, Lookup, MacroCallId, MacroCallKind, MacroCallLoc, + MacroDefId, MacroDefKind, MacroFileId, }; /// This is just to ensure the types of smart_macro_arg and macro_arg are the same type MacroArgResult = (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span); @@ -62,10 +61,8 @@ pub trait ExpandDatabase: SourceDatabase { /// file or a macro expansion. #[salsa::transparent] fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode; - #[salsa::transparent] - fn parse_or_expand_with_err(&self, file_id: HirFileId) -> ExpandResult<Parse<SyntaxNode>>; /// Implementation for the macro case. - // This query is LRU cached + #[salsa::lru] fn parse_macro_expansion( &self, macro_file: MacroFileId, @@ -78,7 +75,7 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::invoke(crate::span_map::expansion_span_map)] fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>; #[salsa::invoke(crate::span_map::real_span_map)] - fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>; + fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>; /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the /// reason why we use salsa at all. @@ -99,6 +96,7 @@ pub trait ExpandDatabase: SourceDatabase { /// Lowers syntactic macro call to a token tree representation. That's a firewall /// query, only typing in the macro call itself changes the returned /// subtree. + #[deprecated = "calling this is incorrect, call `macro_arg_considering_derives` instead"] fn macro_arg(&self, id: MacroCallId) -> MacroArgResult; #[salsa::transparent] fn macro_arg_considering_derives( @@ -133,6 +131,19 @@ pub trait ExpandDatabase: SourceDatabase { &self, macro_call: MacroCallId, ) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>>; + #[salsa::transparent] + fn syntax_context(&self, file: HirFileId) -> SyntaxContextId; +} + +fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId) -> SyntaxContextId { + match file.repr() { + HirFileIdRepr::FileId(_) => SyntaxContextId::ROOT, + HirFileIdRepr::MacroFile(m) => { + db.macro_arg_considering_derives(m.macro_call_id, &m.macro_call_id.lookup(db).kind) + .2 + .ctx + } + } } /// This expands the given macro call, but with different arguments. 
This is @@ -248,39 +259,38 @@ pub fn expand_speculative( // Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. - let mut speculative_expansion = - match loc.def.kind { - MacroDefKind::ProcMacro(ast, expander, _) => { - let span = db.proc_macro_span(ast); - tt.delimiter = tt::Delimiter::invisible_spanned(span); - expander.expand( - db, - loc.def.krate, - loc.krate, - &tt, - attr_arg.as_ref(), - span_with_def_site_ctxt(db, span, actual_macro_call), - span_with_call_site_ctxt(db, span, actual_macro_call), - span_with_mixed_site_ctxt(db, span, actual_macro_call), - ) - } - MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => { - pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span) - } - MacroDefKind::Declarative(it) => db - .decl_macro_expander(loc.krate, it) - .expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition), - MacroDefKind::BuiltIn(_, it) => { - it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) - } - MacroDefKind::BuiltInDerive(_, it) => { - it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) - } - MacroDefKind::BuiltInEager(_, it) => { - it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) - } - MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span), - }; + let mut speculative_expansion = match loc.def.kind { + MacroDefKind::ProcMacro(ast, expander, _) => { + let span = db.proc_macro_span(ast); + tt.delimiter = tt::Delimiter::invisible_spanned(span); + expander.expand( + db, + loc.def.krate, + loc.krate, + &tt, + attr_arg.as_ref(), + span_with_def_site_ctxt(db, span, actual_macro_call), + span_with_call_site_ctxt(db, span, actual_macro_call), + span_with_mixed_site_ctxt(db, span, actual_macro_call), + ) + } + MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => { + pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span) + } + MacroDefKind::Declarative(it) => { + db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt, span, loc.def.edition) + } + MacroDefKind::BuiltIn(_, it) => { + it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) + } + MacroDefKind::BuiltInDerive(_, it) => { + it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) + } + MacroDefKind::BuiltInEager(_, it) => { + it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) + } + MacroDefKind::BuiltInAttr(_, it) => it.expand(db, actual_macro_call, &tt, span), + }; let expand_to = loc.expand_to(); @@ -314,18 +324,6 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { } } -fn parse_or_expand_with_err( - db: &dyn ExpandDatabase, - file_id: HirFileId, -) -> ExpandResult<Parse<SyntaxNode>> { - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => ExpandResult::ok(db.parse(file_id).to_syntax()), - HirFileIdRepr::MacroFile(macro_file) => { - db.parse_macro_expansion(macro_file).map(|(it, _)| it) - } - } -} - // FIXME: We should verify that the parsed node is one of the many macro node variants we expect // instead of having it be untyped fn parse_macro_expansion( @@ -334,7 +332,7 @@ fn parse_macro_expansion( ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> { let _p = tracing::info_span!("parse_macro_expansion").entered(); let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let edition = loc.def.edition; + let def_edition = loc.def.edition; let expand_to = loc.expand_to(); let mbe::ValueResult { value: (tt, 
matched_arm), err } = macro_expand(db, macro_file.macro_call_id, loc); @@ -345,7 +343,7 @@ fn parse_macro_expansion( CowArc::Owned(it) => it, }, expand_to, - edition, + def_edition, ); rev_token_map.matched_arm = matched_arm; @@ -384,6 +382,7 @@ pub(crate) fn parse_with_map( /// Other wise return the [macro_arg] for the macro_call_id. /// /// This is not connected to the database so it does not cached the result. However, the inner [macro_arg] query is +#[allow(deprecated)] // we are macro_arg_considering_derives fn macro_arg_considering_derives( db: &dyn ExpandDatabase, id: MacroCallId, @@ -735,11 +734,14 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { if TOKEN_LIMIT.check(count).is_err() { Err(ExpandResult { value: (), - err: Some(ExpandError::other(format!( - "macro invocation exceeds token limit: produced {} tokens, limit is {}", - count, - TOKEN_LIMIT.inner(), - ))), + err: Some(ExpandError::other( + tt.delimiter.open, + format!( + "macro invocation exceeds token limit: produced {} tokens, limit is {}", + count, + TOKEN_LIMIT.inner(), + ), + )), }) } else { Ok(()) diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs index 29408902f16..48851af3fd1 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs @@ -1,7 +1,7 @@ //! Compiled declarative macro expanders (`macro_rules!`` and `macro`) -use std::sync::OnceLock; -use base_db::{CrateId, VersionReq}; +use base_db::CrateId; +use intern::sym; use mbe::DocCommentDesugarMode; use span::{Edition, MacroCallId, Span, SyntaxContextId}; use stdx::TupleExt; @@ -12,7 +12,7 @@ use crate::{ attrs::RawAttrs, db::ExpandDatabase, hygiene::{apply_mark, Transparency}, - tt, AstId, ExpandError, ExpandResult, Lookup, + tt, AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup, }; /// Old-style `macro_rules` or the new macros 2.0 @@ -22,9 +22,6 @@ pub struct DeclarativeMacroExpander { pub transparency: Transparency, } -// FIXME: Remove this once we drop support for 1.76 -static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new(); - impl DeclarativeMacroExpander { pub fn expand( &self, @@ -34,29 +31,16 @@ impl DeclarativeMacroExpander { span: Span, ) -> ExpandResult<(tt::Subtree, Option<u32>)> { let loc = db.lookup_intern_macro_call(call_id); - let toolchain = db.toolchain(loc.def.krate); - let new_meta_vars = toolchain.as_ref().map_or(false, |version| { - REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( - &base_db::Version { - pre: base_db::Prerelease::EMPTY, - build: base_db::BuildMetadata::EMPTY, - major: version.major, - minor: version.minor, - patch: version.patch, - }, - ) - }); match self.mac.err() { Some(_) => ExpandResult::new( (tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), None), - ExpandError::MacroDefinition, + ExpandError::new(span, ExpandErrorKind::MacroDefinition), ), None => self .mac .expand( &tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency), - new_meta_vars, span, loc.def.edition, ) @@ -66,32 +50,18 @@ impl DeclarativeMacroExpander { pub fn expand_unhygienic( &self, - db: &dyn ExpandDatabase, tt: tt::Subtree, - krate: CrateId, call_site: Span, def_site_edition: Edition, ) -> ExpandResult<tt::Subtree> { - let toolchain = db.toolchain(krate); - let new_meta_vars = toolchain.as_ref().map_or(false, |version| { - REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( - 
&base_db::Version { - pre: base_db::Prerelease::EMPTY, - build: base_db::BuildMetadata::EMPTY, - major: version.major, - minor: version.minor, - patch: version.patch, - }, - ) - }); match self.mac.err() { Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::MacroDefinition, + ExpandError::new(call_site, ExpandErrorKind::MacroDefinition), ), None => self .mac - .expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition) + .expand(&tt, |_| (), call_site, def_site_edition) .map(TupleExt::head) .map_err(Into::into), } @@ -111,35 +81,24 @@ impl DeclarativeMacroExpander { match &*attrs .iter() .find(|it| { - it.path.as_ident().and_then(|it| it.as_str()) - == Some("rustc_macro_transparency") + it.path + .as_ident() + .map(|it| *it == sym::rustc_macro_transparency.clone()) + .unwrap_or(false) })? .token_tree_value()? .token_trees { - [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text { - "transparent" => Some(Transparency::Transparent), - "semitransparent" => Some(Transparency::SemiTransparent), - "opaque" => Some(Transparency::Opaque), + [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym { + s if *s == sym::transparent => Some(Transparency::Transparent), + s if *s == sym::semitransparent => Some(Transparency::SemiTransparent), + s if *s == sym::opaque => Some(Transparency::Opaque), _ => None, }, _ => None, } }; - let toolchain = db.toolchain(def_crate); - let new_meta_vars = toolchain.as_ref().map_or(false, |version| { - REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( - &base_db::Version { - pre: base_db::Prerelease::EMPTY, - build: base_db::BuildMetadata::EMPTY, - major: version.major, - minor: version.minor, - patch: version.patch, - }, - ) - }); - - let edition = |ctx: SyntaxContextId| { + let ctx_edition = |ctx: SyntaxContextId| { let crate_graph = db.crate_graph(); if ctx.is_root() { crate_graph[def_crate].edition @@ -162,7 +121,7 @@ impl DeclarativeMacroExpander { DocCommentDesugarMode::Mbe, ); - mbe::DeclarativeMacro::parse_macro_rules(&tt, edition, new_meta_vars) + mbe::DeclarativeMacro::parse_macro_rules(&tt, ctx_edition) } None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected( "expected a token tree".into(), @@ -190,12 +149,7 @@ impl DeclarativeMacroExpander { DocCommentDesugarMode::Mbe, ); - mbe::DeclarativeMacro::parse_macro2( - args.as_ref(), - &body, - edition, - new_meta_vars, - ) + mbe::DeclarativeMacro::parse_macro2(args.as_ref(), &body, ctx_edition) } None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected( "expected a token tree".into(), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 3e0d2dfa6c1..5385b44532b 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -54,6 +54,7 @@ pub fn expand_eager_macro_input( ctxt: call_site, } .intern(db); + #[allow(deprecated)] // builtin eager macros are never derives let (_, _, span) = db.macro_arg(arg_id); let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); @@ -175,14 +176,19 @@ fn eager_macro_recur( Some(path) => match macro_resolver(&path) { Some(def) => def, None => { - error = - Some(ExpandError::other(format!("unresolved macro {}", path.display(db)))); + error = Some(ExpandError::other( + span_map.span_at(call.syntax().text_range().start()), + format!("unresolved macro 
{}", path.display(db)), + )); offset += call.syntax().text_range().len(); continue; } }, None => { - error = Some(ExpandError::other("malformed macro invocation")); + error = Some(ExpandError::other( + span_map.span_at(call.syntax().text_range().start()), + "malformed macro invocation", + )); offset += call.syntax().text_range().len(); continue; } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index fc9fa93268e..20f484f672a 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -3,8 +3,8 @@ use std::borrow::Borrow; use either::Either; use span::{ - AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr, - MacroFileId, SyntaxContextId, + AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId, + SyntaxContextId, }; use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize}; @@ -27,7 +27,36 @@ pub struct InFileWrapper<FileKind, T> { } pub type InFile<T> = InFileWrapper<HirFileId, T>; pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>; -pub type InRealFile<T> = InFileWrapper<FileId, T>; +pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>; + +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct FilePositionWrapper<FileKind> { + pub file_id: FileKind, + pub offset: TextSize, +} +pub type HirFilePosition = FilePositionWrapper<HirFileId>; +pub type MacroFilePosition = FilePositionWrapper<MacroFileId>; +pub type FilePosition = FilePositionWrapper<EditionedFileId>; + +impl From<FilePositionWrapper<EditionedFileId>> for FilePositionWrapper<span::FileId> { + fn from(value: FilePositionWrapper<EditionedFileId>) -> Self { + FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset } + } +} +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct FileRangeWrapper<FileKind> { + pub file_id: FileKind, + pub range: TextRange, +} +pub type HirFileRange = FileRangeWrapper<HirFileId>; +pub type MacroFileRange = FileRangeWrapper<MacroFileId>; +pub type FileRange = FileRangeWrapper<EditionedFileId>; + +impl From<FileRangeWrapper<EditionedFileId>> for FileRangeWrapper<span::FileId> { + fn from(value: FileRangeWrapper<EditionedFileId>) -> Self { + FileRangeWrapper { file_id: value.file_id.into(), range: value.range } + } +} /// `AstId` points to an AST node in any file. /// @@ -128,7 +157,7 @@ trait FileIdToSyntax: Copy { fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode; } -impl FileIdToSyntax for FileId { +impl FileIdToSyntax for EditionedFileId { fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { db.parse(self).syntax_node() } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 9fdf4aa4f7c..71579d2f87f 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -1,6 +1,7 @@ //! To make attribute macros work reliably when typing, we need to take care to //! fix up syntax errors in the code we're passing to them. 
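The files.rs hunk above makes the position and range types generic over the file id kind, with EditionedFileId used for real files. A hedged sketch of converting back to a raw span::FileId range via the From impl shown there; it assumes FileRange and FileRangeWrapper from hir-expand's files module are in scope:

use span::{Edition, EditionedFileId, FileId};
use syntax::TextRange;

// Build an editioned range, then erase the edition again through the `From` impl.
fn erase_edition(range: TextRange) -> FileRangeWrapper<FileId> {
    let file_id = EditionedFileId::new(FileId::from_raw(0), Edition::CURRENT);
    let editioned: FileRange = FileRange { file_id, range };
    editioned.into()
}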
+use intern::sym; use mbe::DocCommentDesugarMode; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; @@ -80,12 +81,13 @@ pub(crate) fn fixup_syntax( original.push(original_tree); let span = span_map.span_for_range(node_range); let replacement = Leaf::Ident(Ident { - text: "__ra_fixup".into(), + sym: sym::__ra_fixup.clone(), span: Span { range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END), anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, ctx: span.ctx, }, + is_raw: tt::IdentIsRaw::No, }); append.insert(node.clone().into(), vec![replacement]); preorder.skip_subtree(); @@ -99,8 +101,9 @@ pub(crate) fn fixup_syntax( // incomplete field access: some_expr.| append.insert(node.clone().into(), vec![ Leaf::Ident(Ident { - text: "__ra_fixup".into(), + sym: sym::__ra_fixup.clone(), span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }), ]); } @@ -136,8 +139,9 @@ pub(crate) fn fixup_syntax( }; append.insert(if_token.into(), vec![ Leaf::Ident(Ident { - text: "__ra_fixup".into(), - span: fake_span(node_range) + sym: sym::__ra_fixup.clone(), + span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }), ]); } @@ -166,8 +170,9 @@ pub(crate) fn fixup_syntax( }; append.insert(while_token.into(), vec![ Leaf::Ident(Ident { - text: "__ra_fixup".into(), - span: fake_span(node_range) + sym: sym::__ra_fixup.clone(), + span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }), ]); } @@ -213,8 +218,9 @@ pub(crate) fn fixup_syntax( }; append.insert(match_token.into(), vec![ Leaf::Ident(Ident { - text: "__ra_fixup".into(), - span: fake_span(node_range) + sym: sym::__ra_fixup.clone(), + span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }), ]); } @@ -242,13 +248,14 @@ pub(crate) fn fixup_syntax( }; let [pat, in_token, iter] = [ - "_", - "in", - "__ra_fixup" - ].map(|text| + sym::underscore.clone(), + sym::in_.clone(), + sym::__ra_fixup.clone(), + ].map(|sym| Leaf::Ident(Ident { - text: text.into(), - span: fake_span(node_range) + sym, + span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }), ); @@ -280,8 +287,9 @@ pub(crate) fn fixup_syntax( if it.name_ref().is_some() && it.expr().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { - text: "__ra_fixup".into(), - span: fake_span(node_range) + sym: sym::__ra_fixup.clone(), + span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }) ]); } @@ -292,8 +300,9 @@ pub(crate) fn fixup_syntax( if it.segment().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { - text: "__ra_fixup".into(), - span: fake_span(node_range) + sym: sym::__ra_fixup.clone(), + span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }) ]); } @@ -325,8 +334,9 @@ pub(crate) fn fixup_syntax( if it.body().is_none() { append.insert(node.into(), vec![ Leaf::Ident(Ident { - text: "__ra_fixup".into(), - span: fake_span(node_range) + sym: sym::__ra_fixup.clone(), + span: fake_span(node_range), + is_raw: tt::IdentIsRaw::No }) ]); } @@ -423,9 +433,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { #[cfg(test)] mod tests { - use base_db::FileId; use expect_test::{expect, Expect}; use mbe::DocCommentDesugarMode; + use span::{Edition, EditionedFileId, FileId}; use syntax::TextRange; use triomphe::Arc; @@ -439,9 +449,9 @@ mod tests { // `TokenTree`s, see the last assertion in `check()`. 
fn check_leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool { match (a, b) { - (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text == b.text, + (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.symbol == b.symbol, (tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char, - (tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.text == b.text, + (tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.sym == b.sym, _ => false, } } @@ -463,7 +473,10 @@ mod tests { #[track_caller] fn check(ra_fixture: &str, mut expect: Expect) { let parsed = syntax::SourceFile::parse(ra_fixture, span::Edition::CURRENT); - let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); + let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(EditionedFileId::new( + FileId::from_raw(0), + Edition::CURRENT, + )))); let fixups = super::fixup_syntax( span_map.as_ref(), &parsed.syntax_node(), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs index 7ead7e93901..ee15b1b5ce9 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs @@ -10,6 +10,7 @@ use std::sync::OnceLock; +use intern::Symbol; use rustc_hash::FxHashMap; pub struct BuiltinAttribute { @@ -26,11 +27,16 @@ pub struct AttributeTemplate { pub name_value_str: Option<&'static str>, } -pub fn find_builtin_attr_idx(name: &str) -> Option<usize> { - static BUILTIN_LOOKUP_TABLE: OnceLock<FxHashMap<&'static str, usize>> = OnceLock::new(); +pub fn find_builtin_attr_idx(name: &Symbol) -> Option<usize> { + static BUILTIN_LOOKUP_TABLE: OnceLock<FxHashMap<Symbol, usize>> = OnceLock::new(); BUILTIN_LOOKUP_TABLE .get_or_init(|| { - INERT_ATTRIBUTES.iter().map(|attr| attr.name).enumerate().map(|(a, b)| (b, a)).collect() + INERT_ATTRIBUTES + .iter() + .map(|attr| attr.name) + .enumerate() + .map(|(a, b)| (Symbol::intern(b), a)) + .collect() }) .get(name) .copied() @@ -553,7 +559,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ ), BuiltinAttribute { - // name: sym::rustc_diagnostic_item, + // name: sym::rustc_diagnostic_item.clone(), name: "rustc_diagnostic_item", // FIXME: This can be `true` once we always use `tcx.is_diagnostic_item`. 
// only_local: false, @@ -562,7 +568,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ // duplicates: ErrorFollowing, // gate: Gated( // Stability::Unstable, - // sym::rustc_attrs, + // sym::rustc_attrs.clone(), // "diagnostic items compiler internal support for linting", // cfg_fn!(rustc_attrs), // ), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index e7c34e51e85..18da77d6caa 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -6,9 +6,7 @@ #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] pub mod attrs; -pub mod builtin_attr_macro; -pub mod builtin_derive_macro; -pub mod builtin_fn_macro; +pub mod builtin; pub mod change; pub mod db; pub mod declarative; @@ -19,21 +17,21 @@ pub mod inert_attr_macro; pub mod mod_path; pub mod name; pub mod proc_macro; -pub mod quote; pub mod span_map; mod cfg_process; mod fixup; + use attrs::collect_attrs; use rustc_hash::FxHashMap; use triomphe::Arc; -use std::{fmt, hash::Hash}; +use std::hash::Hash; -use base_db::{salsa::InternValueTrivial, CrateId, FileId}; +use base_db::{salsa::InternValueTrivial, CrateId}; use either::Either; use span::{ - Edition, ErasedFileAstId, FileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, + Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData, SyntaxContextId, }; use syntax::{ @@ -43,23 +41,24 @@ use syntax::{ use crate::{ attrs::AttrId, - builtin_attr_macro::BuiltinAttrExpander, - builtin_derive_macro::BuiltinDeriveExpander, - builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, + builtin::{ + include_input_to_file_id, BuiltinAttrExpander, BuiltinDeriveExpander, + BuiltinFnLikeExpander, EagerExpander, + }, db::ExpandDatabase, mod_path::ModPath, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, span_map::{ExpansionSpanMap, SpanMap}, }; -pub use crate::files::{AstId, ErasedAstId, InFile, InMacroFile, InRealFile}; +pub use crate::files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile}; pub use mbe::{DeclarativeMacro, ValueResult}; pub use span::{HirFileId, MacroCallId, MacroFileId}; pub mod tt { pub use span::Span; - pub use tt::{DelimiterKind, Spacing}; + pub use tt::{token_to_literal, DelimiterKind, IdentIsRaw, LitKind, Spacing}; pub type Delimiter = ::tt::Delimiter<Span>; pub type DelimSpan = ::tt::DelimSpan<Span>; @@ -125,46 +124,79 @@ impl_intern_lookup!( pub type ExpandResult<T> = ValueResult<T, ExpandError>; #[derive(Debug, PartialEq, Eq, Clone, Hash)] -pub enum ExpandError { - UnresolvedProcMacro(CrateId), - /// The macro expansion is disabled. 
- MacroDisabled, - MacroDefinition, - Mbe(mbe::ExpandError), - RecursionOverflow, - Other(Arc<Box<str>>), - ProcMacroPanic(Arc<Box<str>>), +pub struct ExpandError { + inner: Arc<(ExpandErrorKind, Span)>, } impl ExpandError { - pub fn other(msg: impl Into<Box<str>>) -> Self { - ExpandError::Other(Arc::new(msg.into())) + pub fn new(span: Span, kind: ExpandErrorKind) -> Self { + ExpandError { inner: Arc::new((kind, span)) } + } + pub fn other(span: Span, msg: impl Into<Box<str>>) -> Self { + ExpandError { inner: Arc::new((ExpandErrorKind::Other(msg.into()), span)) } + } + pub fn kind(&self) -> &ExpandErrorKind { + &self.inner.0 + } + pub fn span(&self) -> Span { + self.inner.1 } } -impl From<mbe::ExpandError> for ExpandError { - fn from(mbe: mbe::ExpandError) -> Self { - Self::Mbe(mbe) +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +pub enum ExpandErrorKind { + /// Attribute macro expansion is disabled. + ProcMacroAttrExpansionDisabled, + MissingProcMacroExpander(CrateId), + /// The macro for this call is disabled. + MacroDisabled, + /// The macro definition has errors. + MacroDefinition, + Mbe(mbe::ExpandErrorKind), + RecursionOverflow, + Other(Box<str>), + ProcMacroPanic(Box<str>), +} + +impl ExpandError { + pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) { + self.inner.0.render_to_string(db) } } -impl fmt::Display for ExpandError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl ExpandErrorKind { + pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) { match self { - ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"), - ExpandError::Mbe(it) => it.fmt(f), - ExpandError::RecursionOverflow => f.write_str("overflow expanding the original macro"), - ExpandError::ProcMacroPanic(it) => { - f.write_str("proc-macro panicked: ")?; - f.write_str(it) + ExpandErrorKind::ProcMacroAttrExpansionDisabled => { + ("procedural attribute macro expansion is disabled".to_owned(), false) + } + ExpandErrorKind::MacroDisabled => { + ("proc-macro is explicitly disabled".to_owned(), false) + } + &ExpandErrorKind::MissingProcMacroExpander(def_crate) => { + match db.proc_macros().get_error_for_crate(def_crate) { + Some((e, hard_err)) => (e.to_owned(), hard_err), + None => ("missing expander".to_owned(), true), + } } - ExpandError::Other(it) => f.write_str(it), - ExpandError::MacroDisabled => f.write_str("macro disabled"), - ExpandError::MacroDefinition => f.write_str("macro definition has parse errors"), + ExpandErrorKind::MacroDefinition => { + ("macro definition has parse errors".to_owned(), true) + } + ExpandErrorKind::Mbe(e) => (e.to_string(), true), + ExpandErrorKind::RecursionOverflow => { + ("overflow expanding the original macro".to_owned(), true) + } + ExpandErrorKind::Other(e) => ((**e).to_owned(), true), + ExpandErrorKind::ProcMacroPanic(e) => ((**e).to_owned(), true), } } } +impl From<mbe::ExpandError> for ExpandError { + fn from(mbe: mbe::ExpandError) -> Self { + ExpandError { inner: Arc::new((ExpandErrorKind::Mbe(mbe.inner.1.clone()), mbe.inner.0)) } + } +} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroCallLoc { pub def: MacroDefId, @@ -243,11 +275,11 @@ pub enum MacroCallKind { pub trait HirFileIdExt { /// Returns the original file of this macro call hierarchy. 
- fn original_file(self, db: &dyn ExpandDatabase) -> FileId; + fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId; /// Returns the original file of this macro call hierarchy while going into the included file if /// one of the calls comes from an `include!``. - fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> FileId; + fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId; /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>; @@ -256,7 +288,7 @@ pub trait HirFileIdExt { } impl HirFileIdExt for HirFileId { - fn original_file(self, db: &dyn ExpandDatabase) -> FileId { + fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId { let mut file_id = self; loop { match file_id.repr() { @@ -268,7 +300,7 @@ impl HirFileIdExt for HirFileId { } } - fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> FileId { + fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId { loop { match self.repr() { HirFileIdRepr::FileId(id) => break id, @@ -276,11 +308,9 @@ impl HirFileIdExt for HirFileId { let loc = db.lookup_intern_macro_call(file.macro_call_id); if loc.def.is_include() { if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind { - if let Ok(it) = builtin_fn_macro::include_input_to_file_id( - db, - file.macro_call_id, - &eager.arg, - ) { + if let Ok(it) = + include_input_to_file_id(db, file.macro_call_id, &eager.arg) + { break it; } } @@ -568,12 +598,10 @@ impl MacroCallLoc { &self, db: &dyn ExpandDatabase, macro_call_id: MacroCallId, - ) -> Option<FileId> { + ) -> Option<EditionedFileId> { if self.def.is_include() { if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind { - if let Ok(it) = - builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg) - { + if let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg) { return Some(it); } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 12fdf88a2a8..2c26fe414d9 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -8,10 +8,11 @@ use std::{ use crate::{ db::ExpandDatabase, hygiene::{marks_rev, SyntaxContextExt, Transparency}, - name::{known, AsName, Name}, + name::{AsName, Name}, tt, }; use base_db::CrateId; +use intern::sym; use smallvec::SmallVec; use span::SyntaxContextId; use syntax::{ast, AstNode}; @@ -106,10 +107,7 @@ impl ModPath { PathKind::Abs => 0, PathKind::DollarCrate(_) => "$crate".len(), }; - self.segments() - .iter() - .map(|segment| segment.as_str().map_or(0, str::len)) - .fold(base, core::ops::Add::add) + self.segments().iter().map(|segment| segment.as_str().len()).fold(base, core::ops::Add::add) } pub fn is_ident(&self) -> bool { @@ -123,7 +121,7 @@ impl ModPath { #[allow(non_snake_case)] pub fn is_Self(&self) -> bool { self.kind == PathKind::Plain - && matches!(&*self.segments, [name] if *name == known::SELF_TYPE) + && matches!(&*self.segments, [name] if *name == sym::Self_.clone()) } /// If this path is a single identifier, like `foo`, return its name. 
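The hir-expand lib.rs hunk above reworks ExpandError into a span-carrying struct around ExpandErrorKind. A minimal sketch of constructing and inspecting one, not part of the diff; `some_span` stands in for whatever call-site span is at hand:

use span::Span;

fn report(some_span: Span) -> ExpandError {
    // `other` wraps an ad-hoc message in `ExpandErrorKind::Other`, now tagged with a span.
    let err = ExpandError::other(some_span, "malformed macro invocation");
    assert_eq!(err.span(), some_span);
    assert!(matches!(err.kind(), ExpandErrorKind::Other(_)));
    err
}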
@@ -265,9 +263,10 @@ fn convert_path( res } } - ast::PathSegmentKind::SelfTypeKw => { - ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE)) - } + ast::PathSegmentKind::SelfTypeKw => ModPath::from_segments( + PathKind::Plain, + Some(Name::new_symbol(sym::Self_.clone(), SyntaxContextId::ROOT)), + ), ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()), ast::PathSegmentKind::SelfKw => handle_super_kw(0)?, ast::PathSegmentKind::SuperKw => handle_super_kw(1)?, @@ -317,30 +316,36 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs, _ => return None, }, - tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => { + tt::Leaf::Ident(tt::Ident { sym: text, span, .. }) if *text == sym::dollar_crate => { resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate) } - tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::SELF, - tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => { + tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::self_ => PathKind::SELF, + tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::super_ => { let mut deg = 1; - while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leaves.next() { - if text != "super" { - segments.push(Name::new_text_dont_use(text.clone())); + while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw })) = leaves.next() { + if *text != sym::super_ { + segments.push(Name::new_symbol_maybe_raw(text.clone(), *is_raw, span.ctx)); break; } deg += 1; } PathKind::Super(deg) } - tt::Leaf::Ident(tt::Ident { text, .. }) if text == "crate" => PathKind::Crate, + tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::crate_ => PathKind::Crate, tt::Leaf::Ident(ident) => { - segments.push(Name::new_text_dont_use(ident.text.clone())); + segments.push(Name::new_symbol_maybe_raw( + ident.sym.clone(), + ident.is_raw, + ident.span.ctx, + )); PathKind::Plain } _ => return None, }; segments.extend(leaves.filter_map(|leaf| match leaf { - ::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())), + ::tt::Leaf::Ident(ident) => { + Some(Name::new_symbol_maybe_raw(ident.sym.clone(), ident.is_raw, ident.span.ctx)) + } _ => None, })); Some(ModPath { kind, segments }) @@ -385,6 +390,8 @@ macro_rules! __known_path { (core::ops::RangeInclusive) => {}; (core::future::Future) => {}; (core::future::IntoFuture) => {}; + (core::fmt::Debug) => {}; + (std::fmt::format) => {}; (core::ops::Try) => {}; ($path:path) => { compile_error!("Please register your known path in the path module") @@ -396,7 +403,7 @@ macro_rules! 
__path { ($start:ident $(:: $seg:ident)*) => ({ $crate::__known_path!($start $(:: $seg)*); $crate::mod_path::ModPath::from_segments($crate::mod_path::PathKind::Abs, vec![ - $crate::mod_path::__name![$start], $($crate::mod_path::__name![$seg],)* + $crate::name::Name::new_symbol_root(intern::sym::$start.clone()), $($crate::name::Name::new_symbol_root(intern::sym::$seg.clone()),)* ]) }); } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index fe754bc8249..d012d272d74 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -2,7 +2,9 @@ use std::fmt; -use syntax::{ast, format_smolstr, utils::is_raw_identifier, SmolStr}; +use intern::{sym, Symbol}; +use span::SyntaxContextId; +use syntax::{ast, utils::is_raw_identifier}; /// `Name` is a wrapper around string, which is used in hir for both references /// and declarations. In theory, names should also carry hygiene info, but we are @@ -11,66 +13,93 @@ use syntax::{ast, format_smolstr, utils::is_raw_identifier, SmolStr}; /// Note that `Name` holds and prints escaped name i.e. prefixed with "r#" when it /// is a raw identifier. Use [`unescaped()`][Name::unescaped] when you need the /// name without "r#". -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct Name(Repr); +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct Name { + symbol: Symbol, + ctx: (), + // FIXME: We should probably encode rawness as a property here instead, once we have hygiene + // in here we've got 4 bytes of padding to fill anyways +} -/// Wrapper of `Name` to print the name without "r#" even when it is a raw identifier. -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct UnescapedName<'a>(&'a Name); +impl fmt::Debug for Name { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Name") + .field("symbol", &self.symbol.as_str()) + .field("ctx", &self.ctx) + .finish() + } +} -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -enum Repr { - Text(SmolStr), - TupleField(usize), +impl Ord for Name { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.symbol.as_str().cmp(other.symbol.as_str()) + } } -impl UnescapedName<'_> { - /// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over - /// [`ToString::to_string`] if possible as this conversion is cheaper in the general case. - pub fn to_smol_str(&self) -> SmolStr { - match &self.0 .0 { - Repr::Text(it) => { - if let Some(stripped) = it.strip_prefix("r#") { - SmolStr::new(stripped) - } else { - it.clone() - } - } - Repr::TupleField(it) => SmolStr::new(it.to_string()), - } +impl PartialOrd for Name { + fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { + Some(self.cmp(other)) } +} +impl PartialEq<Symbol> for Name { + fn eq(&self, sym: &Symbol) -> bool { + self.symbol == *sym + } +} + +impl PartialEq<Name> for Symbol { + fn eq(&self, name: &Name) -> bool { + *self == name.symbol + } +} + +/// Wrapper of `Name` to print the name without "r#" even when it is a raw identifier. 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct UnescapedName<'a>(&'a Name); + +impl UnescapedName<'_> { pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ { _ = db; UnescapedDisplay { name: self } } + #[doc(hidden)] + pub fn display_no_db(&self) -> impl fmt::Display + '_ { + UnescapedDisplay { name: self } + } } impl Name { /// Note: this is private to make creating name from random string hard. /// Hopefully, this should allow us to integrate hygiene cleaner in the /// future, and to switch to interned representation of names. - const fn new_text(text: SmolStr) -> Name { - Name(Repr::Text(text)) + fn new_text(text: &str) -> Name { + Name { symbol: Symbol::intern(text), ctx: () } } - // FIXME: See above, unfortunately some places really need this right now - #[doc(hidden)] - pub const fn new_text_dont_use(text: SmolStr) -> Name { - Name(Repr::Text(text)) + pub fn new(text: &str, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Name { + _ = ctx; + Name { + symbol: if raw.yes() { + Symbol::intern(&format!("{}{text}", raw.as_str())) + } else { + Symbol::intern(text) + }, + ctx: (), + } } pub fn new_tuple_field(idx: usize) -> Name { - Name(Repr::TupleField(idx)) + Name { symbol: Symbol::intern(&idx.to_string()), ctx: () } } pub fn new_lifetime(lt: &ast::Lifetime) -> Name { - Self::new_text(lt.text().into()) + Name { symbol: Symbol::intern(lt.text().as_str()), ctx: () } } /// Shortcut to create a name from a string literal. - const fn new_static(text: &'static str) -> Name { - Name::new_text(SmolStr::new_static(text)) + fn new_ref(text: &str) -> Name { + Name { symbol: Symbol::intern(text), ctx: () } } /// Resolve a name from the text of token. @@ -78,14 +107,12 @@ impl Name { match raw_text.strip_prefix("r#") { // When `raw_text` starts with "r#" but the name does not coincide with any // keyword, we never need the prefix so we strip it. - Some(text) if !is_raw_identifier(text) => Name::new_text(SmolStr::new(text)), + Some(text) if !is_raw_identifier(text) => Name::new_ref(text), // Keywords (in the current edition) *can* be used as a name in earlier editions of // Rust, e.g. "try" in Rust 2015. Even in such cases, we keep track of them in their // escaped form. - None if is_raw_identifier(raw_text) => { - Name::new_text(format_smolstr!("r#{}", raw_text)) - } - _ => Name::new_text(raw_text.into()), + None if is_raw_identifier(raw_text) => Name::new_text(&format!("r#{}", raw_text)), + _ => Name::new_text(raw_text), } } @@ -98,8 +125,8 @@ impl Name { /// Ideally, we want a `gensym` semantics for missing names -- each missing /// name is equal only to itself. It's not clear how to implement this in /// salsa though, so we punt on that bit for a moment. - pub const fn missing() -> Name { - Name::new_static("[missing name]") + pub fn missing() -> Name { + Name { symbol: sym::MISSING_NAME.clone(), ctx: () } } /// Returns true if this is a fake name for things missing in the source code. See @@ -115,41 +142,17 @@ impl Name { /// creating desugared locals and labels. The caller is responsible for picking an index /// that is stable across re-executions pub fn generate_new_name(idx: usize) -> Name { - Name::new_text(format_smolstr!("<ra@gennew>{idx}")) + Name::new_text(&format!("<ra@gennew>{idx}")) } /// Returns the tuple index this name represents if it is a tuple field. 
pub fn as_tuple_index(&self) -> Option<usize> { - match self.0 { - Repr::TupleField(idx) => Some(idx), - _ => None, - } + self.symbol.as_str().parse().ok() } /// Returns the text this name represents if it isn't a tuple field. - pub fn as_text(&self) -> Option<SmolStr> { - match &self.0 { - Repr::Text(it) => Some(it.clone()), - _ => None, - } - } - - /// Returns the text this name represents if it isn't a tuple field. - pub fn as_str(&self) -> Option<&str> { - match &self.0 { - Repr::Text(it) => Some(it), - _ => None, - } - } - - /// Returns the textual representation of this name as a [`SmolStr`]. - /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in - /// the general case. - pub fn to_smol_str(&self) -> SmolStr { - match &self.0 { - Repr::Text(it) => it.clone(), - Repr::TupleField(it) => SmolStr::new(it.to_string()), - } + pub fn as_str(&self) -> &str { + self.symbol.as_str() } pub fn unescaped(&self) -> UnescapedName<'_> { @@ -157,16 +160,41 @@ impl Name { } pub fn is_escaped(&self) -> bool { - match &self.0 { - Repr::Text(it) => it.starts_with("r#"), - Repr::TupleField(_) => false, - } + self.symbol.as_str().starts_with("r#") } pub fn display<'a>(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a { _ = db; Display { name: self } } + + // FIXME: Remove this + #[doc(hidden)] + pub fn display_no_db(&self) -> impl fmt::Display + '_ { + Display { name: self } + } + + pub fn symbol(&self) -> &Symbol { + &self.symbol + } + + pub const fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self { + _ = ctx; + Self { symbol, ctx: () } + } + + pub fn new_symbol_maybe_raw(sym: Symbol, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Self { + if raw.no() { + Self { symbol: sym, ctx: () } + } else { + Name::new(sym.as_str(), raw, ctx) + } + } + + // FIXME: This needs to go once we have hygiene + pub const fn new_symbol_root(sym: Symbol) -> Self { + Self { symbol: sym, ctx: () } + } } struct Display<'a> { @@ -175,10 +203,7 @@ struct Display<'a> { impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self.name.0 { - Repr::Text(text) => fmt::Display::fmt(&text, f), - Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), - } + fmt::Display::fmt(self.name.symbol.as_str(), f) } } @@ -188,13 +213,9 @@ struct UnescapedDisplay<'a> { impl fmt::Display for UnescapedDisplay<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self.name.0 .0 { - Repr::Text(text) => { - let text = text.strip_prefix("r#").unwrap_or(text); - fmt::Display::fmt(&text, f) - } - Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), - } + let symbol = &self.name.0.symbol.as_str(); + let text = symbol.strip_prefix("r#").unwrap_or(symbol); + fmt::Display::fmt(&text, f) } } @@ -228,7 +249,7 @@ impl AsName for ast::NameOrNameRef { impl<Span> AsName for tt::Ident<Span> { fn as_name(&self) -> Name { - Name::resolve(&self.text) + Name::resolve(self.sym.as_str()) } } @@ -246,251 +267,6 @@ impl AsName for ast::FieldKind { impl AsName for base_db::Dependency { fn as_name(&self) -> Name { - Name::new_text(SmolStr::new(&*self.name)) + Name::new_text(&self.name) } } - -pub mod known { - macro_rules! known_names { - ($($ident:ident),* $(,)?) 
=> { - $( - #[allow(bad_style)] - pub const $ident: super::Name = - super::Name::new_static(stringify!($ident)); - )* - }; - } - - known_names!( - // Primitives - isize, - i8, - i16, - i32, - i64, - i128, - usize, - u8, - u16, - u32, - u64, - u128, - f16, - f32, - f64, - f128, - bool, - char, - str, - // Special names - macro_rules, - doc, - cfg, - cfg_attr, - register_attr, - register_tool, - // Components of known path (value or mod name) - std, - core, - alloc, - iter, - ops, - fmt, - future, - result, - string, - boxed, - option, - prelude, - rust_2015, - rust_2018, - rust_2021, - rust_2024, - v1, - new_display, - new_debug, - new_lower_exp, - new_upper_exp, - new_octal, - new_pointer, - new_binary, - new_lower_hex, - new_upper_hex, - from_usize, - panic_2015, - panic_2021, - unreachable_2015, - unreachable_2021, - // Components of known path (type name) - Iterator, - IntoIterator, - Item, - IntoIter, - Try, - Ok, - Future, - IntoFuture, - Result, - Option, - Output, - Target, - Box, - RangeFrom, - RangeFull, - RangeInclusive, - RangeToInclusive, - RangeTo, - Range, - String, - Neg, - Not, - None, - Index, - Left, - Right, - Center, - Unknown, - Is, - Param, - Implied, - // Components of known path (function name) - filter_map, - next, - iter_mut, - len, - is_empty, - as_str, - new, - new_v1_formatted, - none, - // Builtin macros - asm, - assert, - column, - compile_error, - concat_idents, - concat_bytes, - concat, - const_format_args, - core_panic, - env, - file, - format, - format_args_nl, - format_args, - global_asm, - include_bytes, - include_str, - include, - line, - llvm_asm, - log_syntax, - module_path, - option_env, - quote, - std_panic, - stringify, - trace_macros, - unreachable, - // Builtin derives - Copy, - Clone, - Default, - Debug, - Hash, - Ord, - PartialOrd, - Eq, - PartialEq, - // Builtin attributes - bench, - cfg_accessible, - cfg_eval, - crate_type, - derive, - derive_const, - global_allocator, - no_core, - no_std, - test, - test_case, - recursion_limit, - feature, - // known methods of lang items - call_once, - call_mut, - call, - eq, - ne, - ge, - gt, - le, - lt, - // known fields of lang items - pieces, - // lang items - add_assign, - add, - bitand_assign, - bitand, - bitor_assign, - bitor, - bitxor_assign, - bitxor, - branch, - deref_mut, - deref, - div_assign, - div, - drop, - fn_mut, - fn_once, - future_trait, - index, - index_mut, - into_future, - mul_assign, - mul, - neg, - not, - owned_box, - partial_ord, - poll, - r#fn, - rem_assign, - rem, - shl_assign, - shl, - shr_assign, - shr, - sub_assign, - sub, - unsafe_cell, - va_list - ); - - // self/Self cannot be used as an identifier - pub const SELF_PARAM: super::Name = super::Name::new_static("self"); - pub const SELF_TYPE: super::Name = super::Name::new_static("Self"); - - pub const STATIC_LIFETIME: super::Name = super::Name::new_static("'static"); - pub const DOLLAR_CRATE: super::Name = super::Name::new_static("$crate"); - - #[macro_export] - macro_rules! 
name { - (self) => { - $crate::name::known::SELF_PARAM - }; - (Self) => { - $crate::name::known::SELF_TYPE - }; - ('static) => { - $crate::name::known::STATIC_LIFETIME - }; - ($ident:ident) => { - $crate::name::known::$ident - }; - } -} - -pub use crate::name; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs index def2578b0e3..26bb3a3edda 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs @@ -4,22 +4,11 @@ use core::fmt; use std::{panic::RefUnwindSafe, sync}; use base_db::{CrateId, Env}; +use intern::Symbol; use rustc_hash::FxHashMap; use span::Span; -use stdx::never; -use syntax::SmolStr; -use triomphe::Arc; -use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(u32); - -impl ProcMacroId { - pub fn new(u32: u32) -> Self { - ProcMacroId(u32) - } -} +use crate::{db::ExpandDatabase, tt, ExpandError, ExpandErrorKind, ExpandResult}; #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ProcMacroKind { @@ -28,7 +17,10 @@ pub enum ProcMacroKind { Attr, } +/// A proc-macro expander implementation. pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { + /// Run the expander with the given input subtree, optional attribute input subtree (for + /// [`ProcMacroKind::Attr`]), environment variables, and span information. fn expand( &self, subtree: &tt::Subtree, @@ -42,57 +34,165 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { #[derive(Debug)] pub enum ProcMacroExpansionError { + /// The proc-macro panicked. Panic(String), - /// Things like "proc macro server was killed by OOM". + /// The server itself errored out. 
System(String), } -pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>; +pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>; +type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>; -pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>; +#[derive(Default, Debug)] +pub struct ProcMacrosBuilder(FxHashMap<CrateId, StoredProcMacroLoadResult>); +impl ProcMacrosBuilder { + pub fn insert(&mut self, proc_macros_crate: CrateId, proc_macro: ProcMacroLoadResult) { + self.0.insert( + proc_macros_crate, + match proc_macro { + Ok(it) => Ok(it.into_boxed_slice()), + Err((e, hard_err)) => Err((e.into_boxed_str(), hard_err)), + }, + ); + } + pub fn build(mut self) -> ProcMacros { + self.0.shrink_to_fit(); + ProcMacros(self.0) + } +} + +#[derive(Default, Debug)] +pub struct ProcMacros(FxHashMap<CrateId, StoredProcMacroLoadResult>); +impl FromIterator<(CrateId, ProcMacroLoadResult)> for ProcMacros { + fn from_iter<T: IntoIterator<Item = (CrateId, ProcMacroLoadResult)>>(iter: T) -> Self { + let mut builder = ProcMacrosBuilder::default(); + for (k, v) in iter { + builder.insert(k, v); + } + builder.build() + } +} + +impl ProcMacros { + fn get(&self, krate: CrateId, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> { + let proc_macros = match self.0.get(&krate) { + Some(Ok(proc_macros)) => proc_macros, + Some(Err(_)) | None => { + return Err(ExpandError::other( + err_span, + "internal error: no proc macros for crate", + )); + } + }; + proc_macros.get(idx as usize).ok_or_else(|| { + ExpandError::other(err_span, + format!( + "internal error: proc-macro index out of bounds: the length is {} but the index is {}", + proc_macros.len(), + idx + ) + ) + } + ) + } + + pub fn get_error_for_crate(&self, krate: CrateId) -> Option<(&str, bool)> { + self.0.get(&krate).and_then(|it| it.as_ref().err()).map(|(e, hard_err)| (&**e, *hard_err)) + } + + /// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate. + pub fn for_crate( + &self, + krate: CrateId, + def_site_ctx: span::SyntaxContextId, + ) -> Option<Box<[(crate::name::Name, CustomProcMacroExpander, bool)]>> { + match self.0.get(&krate) { + Some(Ok(proc_macros)) => Some({ + proc_macros + .iter() + .enumerate() + .map(|(idx, it)| { + let name = crate::name::Name::new_symbol(it.name.clone(), def_site_ctx); + (name, CustomProcMacroExpander::new(idx as u32), it.disabled) + }) + .collect() + }), + _ => None, + } + } +} + +/// A loaded proc-macro. #[derive(Debug, Clone)] pub struct ProcMacro { - pub name: SmolStr, + /// The name of the proc macro. + pub name: Symbol, pub kind: ProcMacroKind, + /// The expander handle for this proc macro. pub expander: sync::Arc<dyn ProcMacroExpander>, + /// Whether this proc-macro is disabled for early name resolution. Notably, the + /// [`Self::expander`] is still usable. pub disabled: bool, } +/// A custom proc-macro expander handle. 
This handle together with its crate resolves to a [`ProcMacro`] #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub struct CustomProcMacroExpander { - proc_macro_id: ProcMacroId, + proc_macro_id: u32, } impl CustomProcMacroExpander { - const DUMMY_ID: u32 = !0; + const MISSING_EXPANDER: u32 = !0; const DISABLED_ID: u32 = !1; + const PROC_MACRO_ATTR_DISABLED: u32 = !2; - pub fn new(proc_macro_id: ProcMacroId) -> Self { - assert_ne!(proc_macro_id.0, Self::DUMMY_ID); - assert_ne!(proc_macro_id.0, Self::DISABLED_ID); + pub fn new(proc_macro_id: u32) -> Self { + assert_ne!(proc_macro_id, Self::MISSING_EXPANDER); + assert_ne!(proc_macro_id, Self::DISABLED_ID); + assert_ne!(proc_macro_id, Self::PROC_MACRO_ATTR_DISABLED); Self { proc_macro_id } } - /// A dummy expander that always errors. This is used for proc-macros that are missing, usually - /// due to them not being built yet. - pub const fn dummy() -> Self { - Self { proc_macro_id: ProcMacroId(Self::DUMMY_ID) } - } - - /// The macro was not yet resolved. - pub const fn is_dummy(&self) -> bool { - self.proc_macro_id.0 == Self::DUMMY_ID + /// An expander that always errors due to the actual proc-macro expander missing. + pub const fn missing_expander() -> Self { + Self { proc_macro_id: Self::MISSING_EXPANDER } } /// A dummy expander that always errors. This expander is used for macros that have been disabled. pub const fn disabled() -> Self { - Self { proc_macro_id: ProcMacroId(Self::DISABLED_ID) } + Self { proc_macro_id: Self::DISABLED_ID } + } + + /// A dummy expander that always errors. This expander is used for attribute macros when + /// proc-macro attribute expansion is disabled. + pub const fn disabled_proc_attr() -> Self { + Self { proc_macro_id: Self::PROC_MACRO_ATTR_DISABLED } + } + + /// The macro-expander is missing or has yet to be build. + pub const fn is_missing(&self) -> bool { + self.proc_macro_id == Self::MISSING_EXPANDER } /// The macro is explicitly disabled and cannot be expanded. pub const fn is_disabled(&self) -> bool { - self.proc_macro_id.0 == Self::DISABLED_ID + self.proc_macro_id == Self::DISABLED_ID + } + + /// The macro is explicitly disabled due to proc-macro attribute expansion being disabled. + pub const fn is_disabled_proc_attr(&self) -> bool { + self.proc_macro_id == Self::PROC_MACRO_ATTR_DISABLED + } + + /// The macro is explicitly disabled due to proc-macro attribute expansion being disabled. 
+ pub fn as_expand_error(&self, def_crate: CrateId) -> Option<ExpandErrorKind> { + match self.proc_macro_id { + Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandErrorKind::ProcMacroAttrExpansionDisabled), + Self::DISABLED_ID => Some(ExpandErrorKind::MacroDisabled), + Self::MISSING_EXPANDER => Some(ExpandErrorKind::MissingProcMacroExpander(def_crate)), + _ => None, + } } pub fn expand( @@ -107,38 +207,27 @@ impl CustomProcMacroExpander { mixed_site: Span, ) -> ExpandResult<tt::Subtree> { match self.proc_macro_id { - ProcMacroId(Self::DUMMY_ID) => ExpandResult::new( + Self::PROC_MACRO_ATTR_DISABLED => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::new(call_site, ExpandErrorKind::ProcMacroAttrExpansionDisabled), + ), + Self::MISSING_EXPANDER => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::UnresolvedProcMacro(def_crate), + ExpandError::new(call_site, ExpandErrorKind::MissingProcMacroExpander(def_crate)), ), - ProcMacroId(Self::DISABLED_ID) => ExpandResult::new( + Self::DISABLED_ID => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::MacroDisabled, + ExpandError::new(call_site, ExpandErrorKind::MacroDisabled), ), - ProcMacroId(id) => { + id => { let proc_macros = db.proc_macros(); - let proc_macros = match proc_macros.get(&def_crate) { - Some(Ok(proc_macros)) => proc_macros, - Some(Err(_)) | None => { - never!("Non-dummy expander even though there are no proc macros"); - return ExpandResult::new( - tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other("Internal error"), - ); - } - }; - let proc_macro = match proc_macros.get(id as usize) { - Some(proc_macro) => proc_macro, - None => { - never!( - "Proc macro index out of bounds: the length is {} but the index is {}", - proc_macros.len(), - id - ); + let proc_macro = match proc_macros.get(def_crate, id, call_site) { + Ok(proc_macro) => proc_macro, + Err(e) => { return ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other("Internal error: proc-macro index out of bounds"), - ); + e, + ) } }; @@ -153,12 +242,18 @@ impl CustomProcMacroExpander { ProcMacroExpansionError::System(text) if proc_macro.kind == ProcMacroKind::Attr => { - ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) } + ExpandResult { + value: tt.clone(), + err: Some(ExpandError::other(call_site, text)), + } } ProcMacroExpansionError::System(text) | ProcMacroExpansionError::Panic(text) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::ProcMacroPanic(Arc::new(text.into_boxed_str())), + ExpandError::new( + call_site, + ExpandErrorKind::ProcMacroPanic(text.into_boxed_str()), + ), ), }, } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs index eae2c8fb632..3be88ee9dae 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs @@ -1,6 +1,6 @@ //! Span maps for real files and macro expansions. 
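The `expand` rewrite above delegates lookup to `ProcMacros::get`, which reports a printable internal error instead of hitting a `never!` assertion, and the `ProcMacrosBuilder`/`ProcMacros` pair earlier in this hunk compacts what gets stored. Below is a minimal standalone sketch of that storage-and-lookup shape; it uses `std::collections::HashMap` and hypothetical stand-in types in place of `FxHashMap`, `CrateId`, and the real `ProcMacro`.

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for the real rust-analyzer types.
type CrateId = u32;

#[derive(Debug)]
struct ProcMacro {
    name: String,
}

// Loaders hand over growable containers; storage keeps compact boxed forms.
type LoadResult = Result<Vec<ProcMacro>, (String, bool)>;
type StoredLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;

#[derive(Default, Debug)]
struct ProcMacrosBuilder(HashMap<CrateId, StoredLoadResult>);

impl ProcMacrosBuilder {
    fn insert(&mut self, krate: CrateId, res: LoadResult) {
        let stored = match res {
            Ok(macros) => Ok(macros.into_boxed_slice()),
            Err((msg, hard_error)) => Err((msg.into_boxed_str(), hard_error)),
        };
        self.0.insert(krate, stored);
    }

    fn build(mut self) -> ProcMacros {
        // Drop excess capacity before the map becomes read-only.
        self.0.shrink_to_fit();
        ProcMacros(self.0)
    }
}

#[derive(Default, Debug)]
struct ProcMacros(HashMap<CrateId, StoredLoadResult>);

impl ProcMacros {
    // Centralized lookup: callers get a printable error instead of a panic
    // when the crate has no proc macros or the index is out of bounds.
    fn get(&self, krate: CrateId, idx: u32) -> Result<&ProcMacro, String> {
        let macros = match self.0.get(&krate) {
            Some(Ok(macros)) => macros,
            Some(Err(_)) | None => {
                return Err("internal error: no proc macros for crate".to_owned())
            }
        };
        macros.get(idx as usize).ok_or_else(|| {
            format!(
                "internal error: proc-macro index out of bounds: the length is {} but the index is {}",
                macros.len(),
                idx
            )
        })
    }
}

fn main() {
    let mut builder = ProcMacrosBuilder::default();
    builder.insert(0, Ok(vec![ProcMacro { name: "derive_debug".into() }]));
    builder.insert(1, Err(("proc-macro server failed to start".into(), true)));
    let macros = builder.build();
    assert!(macros.get(0, 0).is_ok());
    assert!(macros.get(0, 5).is_err());
    assert!(macros.get(1, 0).is_err());
    println!("{:?}", macros.get(0, 0).map(|m| &m.name));
}
```

Boxing the slice and the error string drops the capacity word from each stored allocation, and `shrink_to_fit` before freezing keeps the per-crate map tight; that is the same compaction the builder above performs.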
-use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId}; +use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId}; use stdx::TupleExt; use syntax::{ast, AstNode, TextRange}; use triomphe::Arc; @@ -79,7 +79,7 @@ impl SpanMapRef<'_> { } } -pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> { +pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -> Arc<RealSpanMap> { use syntax::ast::HasModuleItem; let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; let ast_id_map = db.ast_id_map(file_id.into()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index b706cef0b3a..ecfc1ff99e9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -5,7 +5,8 @@ use chalk_ir::cast::Cast; use hir_def::lang_item::LangItem; -use hir_expand::name::name; +use hir_expand::name::Name; +use intern::sym; use limit::Limit; use triomphe::Arc; @@ -151,7 +152,9 @@ pub(crate) fn deref_by_trait( let deref_trait = db.lang_item(table.trait_env.krate, LangItem::Deref).and_then(|l| l.as_trait())?; - let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; + let target = db + .trait_data(deref_trait) + .associated_type_by_name(&Name::new_symbol_root(sym::Target.clone()))?; let projection = { let b = TyBuilder::subst_for_def(db, deref_trait, None); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 3ac8cbaaf8b..d506e00ca12 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -3,6 +3,8 @@ use core::ops; use std::{iter, ops::ControlFlow, sync::Arc}; +use hir_expand::name::Name; +use intern::sym; use tracing::debug; use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds}; @@ -16,7 +18,6 @@ use hir_def::{ AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId, }; -use hir_expand::name::name; use crate::{ db::{HirDatabase, InternedCoroutine}, @@ -288,15 +289,16 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> { chalk_ir::Binders::new(binders, bound) } crate::ImplTraitId::AsyncBlockTypeImplTrait(..) 
=> { - if let Some((future_trait, future_output)) = self - .db - .lang_item(self.krate, LangItem::Future) - .and_then(|item| item.as_trait()) - .and_then(|trait_| { - let alias = - self.db.trait_data(trait_).associated_type_by_name(&name![Output])?; - Some((trait_, alias)) - }) + if let Some((future_trait, future_output)) = + self.db + .lang_item(self.krate, LangItem::Future) + .and_then(|item| item.as_trait()) + .and_then(|trait_| { + let alias = self.db.trait_data(trait_).associated_type_by_name( + &Name::new_symbol_root(sym::Output.clone()), + )?; + Some((trait_, alias)) + }) { // Making up Symbol’s value as variable is void: AsyncBlock<T>: // diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index 095f2eb6c9f..dc3817ce3f4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -1,10 +1,10 @@ -use base_db::FileId; use chalk_ir::Substitution; use hir_def::db::DefDatabase; use rustc_apfloat::{ ieee::{Half as f16, Quad as f128}, Float, }; +use span::EditionedFileId; use test_fixture::WithFixture; use test_utils::skip_slow_tests; @@ -102,8 +102,8 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String { err } -fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> { - let module_id = db.module_for_file(file_id); +fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> { + let module_id = db.module_for_file(file_id.file_id()); let def_map = module_id.def_map(db); let scope = &def_map[module_id.local_id].scope; let const_id = scope diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 734aad49458..9a1f2158bf7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -61,6 +61,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { ) -> Result<Arc<MirBody>, MirLowerError>; #[salsa::invoke(crate::mir::borrowck_query)] + #[salsa::lru] fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>; #[salsa::invoke(crate::consteval::const_eval_query)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 15ecf9aafcf..b0934400608 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -17,17 +17,18 @@ use std::fmt; use hir_def::{ data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId, - EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId, StructId, - TraitId, TypeAliasId, + EnumId, EnumVariantId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId, + StructId, TraitId, TypeAliasId, }; use hir_expand::{ name::{AsName, Name}, HirFileId, MacroFileIdExt, }; +use intern::sym; use stdx::{always, never}; use syntax::{ ast::{self, HasName}, - AstNode, AstPtr, + AstNode, AstPtr, ToSmolStr, }; use crate::db::HirDatabase; @@ -163,8 +164,8 @@ impl<'a> DeclValidator<'a> { let is_allowed = |def_id| { let attrs = self.db.attrs(def_id); // don't bug the user about directly no_mangle annotated stuff, they can't do anything about it - (!recursing && attrs.by_key("no_mangle").exists()) - || attrs.by_key("allow").tt_values().any(|tt| { + (!recursing && 
attrs.by_key(&sym::no_mangle).exists()) + || attrs.by_key(&sym::allow).tt_values().any(|tt| { let allows = tt.to_string(); allows.contains(allow_name) || allows.contains(allow::BAD_STYLE) @@ -247,7 +248,7 @@ impl<'a> DeclValidator<'a> { // Check the module name. let Some(module_name) = module_id.name(self.db.upcast()) else { return }; let Some(module_name_replacement) = - module_name.as_str().and_then(to_lower_snake_case).map(|new_name| Replacement { + to_lower_snake_case(module_name.as_str()).map(|new_name| Replacement { current_name: module_name, suggested_text: new_name, expected_case: CaseType::LowerSnakeCase, @@ -325,7 +326,9 @@ impl<'a> DeclValidator<'a> { let bind_name = &body.bindings[*id].name; let replacement = Replacement { current_name: bind_name.clone(), - suggested_text: to_lower_snake_case(&bind_name.to_smol_str())?, + suggested_text: to_lower_snake_case( + &bind_name.display_no_db().to_smolstr(), + )?, expected_case: CaseType::LowerSnakeCase, }; Some((pat_id, replacement)) @@ -353,17 +356,16 @@ impl<'a> DeclValidator<'a> { continue; }; - let is_param = ast::Param::can_cast(parent.kind()); - // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement, - // because e.g. match arms are patterns as well. - // In other words, we check that it's a named variable binding. - let is_binding = ast::LetStmt::can_cast(parent.kind()) - || (ast::MatchArm::can_cast(parent.kind()) && ident_pat.at_token().is_some()); - if !(is_param || is_binding) { - // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm. + let is_shorthand = ast::RecordPatField::cast(parent.clone()) + .map(|parent| parent.name_ref().is_none()) + .unwrap_or_default(); + if is_shorthand { + // We don't check shorthand field patterns, such as 'field' in `Thing { field }`, + // since the shorthand isn't the declaration. continue; } + let is_param = ast::Param::can_cast(parent.kind()); let ident_type = if is_param { IdentType::Parameter } else { IdentType::Variable }; self.create_incorrect_case_diagnostic_for_ast_node( @@ -406,10 +408,12 @@ impl<'a> DeclValidator<'a> { let mut struct_fields_replacements = fields .iter() .filter_map(|(_, field)| { - to_lower_snake_case(&field.name.to_smol_str()).map(|new_name| Replacement { - current_name: field.name.clone(), - suggested_text: new_name, - expected_case: CaseType::LowerSnakeCase, + to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| { + Replacement { + current_name: field.name.clone(), + suggested_text: new_name, + expected_case: CaseType::LowerSnakeCase, + } }) }) .peekable(); @@ -489,11 +493,16 @@ impl<'a> DeclValidator<'a> { /// Check incorrect names for enum variants. fn validate_enum_variants(&mut self, enum_id: EnumId) { let data = self.db.enum_data(enum_id); + + for (variant_id, _) in data.variants.iter() { + self.validate_enum_variant_fields(*variant_id); + } + let mut enum_variants_replacements = data .variants .iter() .filter_map(|(_, name)| { - to_camel_case(&name.to_smol_str()).map(|new_name| Replacement { + to_camel_case(&name.display_no_db().to_smolstr()).map(|new_name| Replacement { current_name: name.clone(), suggested_text: new_name, expected_case: CaseType::UpperCamelCase, @@ -551,6 +560,77 @@ impl<'a> DeclValidator<'a> { } } + /// Check incorrect names for fields of enum variant. 
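The `decl_check` change above narrows which identifier patterns are skipped: only record-field shorthand, since in `Thing { field }` the identifier repeats the field's name rather than introducing a name chosen at the binding site, while `let`, parameter, and other binding patterns are checked. A plain-Rust illustration of what that distinction means in practice (not part of the patch, shown only to clarify the rule):

```rust
#![allow(nonstandard_style, unused_variables, dead_code)]

struct Thing {
    field_name: i32,
}

fn demo(t: Thing, other: Thing) {
    // Shorthand field pattern: `field_name` repeats the field's name, so the
    // validator does not suggest renaming it here.
    let Thing { field_name } = t;

    // Non-shorthand binding: `BadName` is a new local name chosen at the
    // binding site, so it gets a lower_snake_case suggestion.
    let Thing { field_name: BadName } = other;

    // Match-arm bindings like `Val` are plain identifier patterns and are
    // checked as variables as well; the removed heuristic only looked at
    // `let` statements, parameters, and `@` bindings.
    match Some(1) {
        Some(Val) => {}
        None => {}
    }
}

fn main() {
    demo(Thing { field_name: 1 }, Thing { field_name: 2 });
}
```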
+ fn validate_enum_variant_fields(&mut self, variant_id: EnumVariantId) { + let variant_data = self.db.enum_variant_data(variant_id); + let VariantData::Record(fields) = variant_data.variant_data.as_ref() else { + return; + }; + let mut variant_field_replacements = fields + .iter() + .filter_map(|(_, field)| { + to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| { + Replacement { + current_name: field.name.clone(), + suggested_text: new_name, + expected_case: CaseType::LowerSnakeCase, + } + }) + }) + .peekable(); + + // XXX: only look at sources if we do have incorrect names + if variant_field_replacements.peek().is_none() { + return; + } + + let variant_loc = variant_id.lookup(self.db.upcast()); + let variant_src = variant_loc.source(self.db.upcast()); + + let Some(ast::FieldList::RecordFieldList(variant_fields_list)) = + variant_src.value.field_list() + else { + always!( + variant_field_replacements.peek().is_none(), + "Replacements ({:?}) were generated for an enum variant \ + which had no fields list: {:?}", + variant_field_replacements.collect::<Vec<_>>(), + variant_src + ); + return; + }; + let mut variant_variants_iter = variant_fields_list.fields(); + for field_replacement in variant_field_replacements { + // We assume that parameters in replacement are in the same order as in the + // actual params list, but just some of them (ones that named correctly) are skipped. + let field = loop { + if let Some(field) = variant_variants_iter.next() { + let Some(field_name) = field.name() else { + continue; + }; + if field_name.as_name() == field_replacement.current_name { + break field; + } + } else { + never!( + "Replacement ({:?}) was generated for an enum variant field \ + which was not found: {:?}", + field_replacement, + variant_src + ); + return; + } + }; + + self.create_incorrect_case_diagnostic_for_ast_node( + field_replacement, + variant_src.file_id, + &field, + IdentType::Field, + ); + } + } + fn validate_const(&mut self, const_id: ConstId) { let container = const_id.lookup(self.db.upcast()).container; if self.is_trait_impl_container(container) { @@ -631,9 +711,11 @@ impl<'a> DeclValidator<'a> { CaseType::UpperSnakeCase => to_upper_snake_case, CaseType::UpperCamelCase => to_camel_case, }; - let Some(replacement) = to_expected_case_type(&name.to_smol_str()).map(|new_name| { - Replacement { current_name: name.clone(), suggested_text: new_name, expected_case } - }) else { + let Some(replacement) = + to_expected_case_type(&name.display(self.db.upcast()).to_smolstr()).map(|new_name| { + Replacement { current_name: name.clone(), suggested_text: new_name, expected_case } + }) + else { return; }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index c28ab2e98af..e52fae06d7f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -8,7 +8,7 @@ use either::Either; use hir_def::lang_item::LangItem; use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule}; use hir_def::{ItemContainerId, Lookup}; -use hir_expand::name; +use intern::sym; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::constructor::Constructor; @@ -423,7 +423,9 @@ impl FilterMapNextChecker { ItemContainerId::TraitId(iterator_trait_id) => { let iterator_trait_items = &db.trait_data(iterator_trait_id).items; iterator_trait_items.iter().find_map(|(name, it)| match it { - 
&AssocItemId::FunctionId(id) if *name == name![filter_map] => Some(id), + &AssocItemId::FunctionId(id) if *name == sym::filter_map.clone() => { + Some(id) + } _ => None, }) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index 8dcc14feb27..a0ee7c0748b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -206,7 +206,7 @@ impl<'a> PatCtxt<'a> { &mut self, pats: &[PatId], expected_len: usize, - ellipsis: Option<usize>, + ellipsis: Option<u32>, ) -> Vec<FieldPat> { if pats.len() > expected_len { self.errors.push(PatternError::ExtraFields); @@ -214,7 +214,7 @@ impl<'a> PatCtxt<'a> { } pats.iter() - .enumerate_and_adjust(expected_len, ellipsis) + .enumerate_and_adjust(expected_len, ellipsis.map(|it| it as usize)) .map(|(i, &subpattern)| FieldPat { field: LocalFieldId::from_raw((i as u32).into()), pattern: self.lower_pattern(subpattern), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index bf2ff1a917c..a12e201cf3d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -3,6 +3,7 @@ use std::fmt; use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; +use intern::sym; use once_cell::unsync::Lazy; use rustc_pattern_analysis::{ constructor::{Constructor, ConstructorSet, VariantVisibility}, @@ -74,9 +75,9 @@ pub(crate) struct MatchCheckCtx<'db> { impl<'db> MatchCheckCtx<'db> { pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); - let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); + let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns); let min_exhaustive_patterns = - def_map.is_unstable_feature_enabled("min_exhaustive_patterns"); + def_map.is_unstable_feature_enabled(&sym::min_exhaustive_patterns); Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns } } @@ -85,6 +86,15 @@ impl<'db> MatchCheckCtx<'db> { arms: &[MatchArm<'db>], scrut_ty: Ty, ) -> Result<UsefulnessReport<'db, Self>, ()> { + if scrut_ty.contains_unknown() { + return Err(()); + } + for arm in arms { + if arm.pat.ty().contains_unknown() { + return Err(()); + } + } + // FIXME: Determine place validity correctly. For now, err on the safe side. let place_validity = PlaceValidity::MaybeInvalid; // Measured to take ~100ms on modern hardware. @@ -99,7 +109,7 @@ impl<'db> MatchCheckCtx<'db> { /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`. 
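The `validate_enum_variant_fields` pass added above extends the case check to record fields of enum variants, mirroring the existing struct-field rule. Ordinary Rust showing the kind of code it now produces lower_snake_case suggestions for (illustration only):

```rust
#![allow(nonstandard_style, dead_code)]

enum Shape {
    // `Radius` and `Height` are record fields of enum variants; the new check
    // suggests `radius` and `height`, just like it already did for structs.
    Circle { Radius: f32 },
    Rect { width: f32, Height: f32 },
    // Tuple and unit variants have no named fields, so there is nothing to check.
    Point,
}

fn main() {
    let _ = Shape::Circle { Radius: 1.0 };
}
```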
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool { let is_local = adt.krate(self.db.upcast()) == self.module.krate(); - !is_local && self.db.attrs(adt.into()).by_key("non_exhaustive").exists() + !is_local && self.db.attrs(adt.into()).by_key(&sym::non_exhaustive).exists() } fn variant_id_for_adt( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index a9a5d829f5f..a433ecfd778 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -25,7 +25,7 @@ use hir_def::{ ModuleId, TraitId, }; use hir_expand::name::Name; -use intern::{Internable, Interned}; +use intern::{sym, Internable, Interned}; use itertools::Itertools; use la_arena::ArenaMap; use rustc_apfloat::{ @@ -1171,7 +1171,9 @@ impl HirDisplay for Ty { .lang_item(body.module(db.upcast()).krate(), LangItem::Future) .and_then(LangItemTarget::as_trait); let output = future_trait.and_then(|t| { - db.trait_data(t).associated_type_by_name(&hir_expand::name!(Output)) + db.trait_data(t).associated_type_by_name(&Name::new_symbol_root( + sym::Output.clone(), + )) }); write!(f, "impl ")?; if let Some(t) = future_trait { @@ -1933,7 +1935,7 @@ impl HirDisplay for TypeRef { } if let Some(abi) = abi { f.write_str("extern \"")?; - f.write_str(abi)?; + f.write_str(abi.as_str())?; f.write_str("\" ")?; } write!(f, "fn(")?; @@ -2042,7 +2044,7 @@ impl HirDisplay for Path { .display_name .as_ref() .map(|name| name.canonical_name()) - .unwrap_or("$crate"); + .unwrap_or(&sym::dollar_crate); write!(f, "{name}")? } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 66ee02d74d9..804bc53905a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -46,8 +46,9 @@ use hir_def::{ AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, }; -use hir_expand::name::{name, Name}; +use hir_expand::name::Name; use indexmap::IndexSet; +use intern::sym; use la_arena::{ArenaMap, Entry}; use once_cell::unsync::OnceCell; use rustc_hash::{FxHashMap, FxHashSet}; @@ -811,7 +812,7 @@ impl<'a> InferenceContext<'a> { None => self.err_ty(), }; - param_tys.push(va_list_ty) + param_tys.push(va_list_ty); } let mut param_tys = param_tys.into_iter().chain(iter::repeat(self.table.new_type_var())); if let Some(self_param) = self.body.self_param { @@ -1424,7 +1425,9 @@ impl<'a> InferenceContext<'a> { } fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> { - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + self.db + .trait_data(trait_) + .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone())) } fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index b7c7b665453..034ed2d691b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -15,7 +15,8 @@ use hir_def::{ resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId, }; -use hir_expand::name; +use hir_expand::name::Name; +use intern::sym; use rustc_hash::FxHashMap; use smallvec::SmallVec; use stdx::never; @@ -268,9 +269,7 @@ impl 
CapturedItem { } let variant_data = f.parent.variant_data(db.upcast()); let field = match &*variant_data { - VariantData::Record(fields) => { - fields[f.local_id].name.as_str().unwrap_or("[missing field]").to_owned() - } + VariantData::Record(fields) => fields[f.local_id].name.as_str().to_owned(), VariantData::Tuple(fields) => fields .iter() .position(|it| it.0 == f.local_id) @@ -621,8 +620,10 @@ impl InferenceContext<'_> { if let Some(deref_trait) = self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) { - if let Some(deref_fn) = - self.db.trait_data(deref_trait).method_by_name(&name![deref_mut]) + if let Some(deref_fn) = self + .db + .trait_data(deref_trait) + .method_by_name(&Name::new_symbol_root(sym::deref_mut.clone())) { break 'b deref_fn == f; } @@ -888,7 +889,7 @@ impl InferenceContext<'_> { match &self.body[pat] { Pat::Missing | Pat::Wild => (), Pat::Tuple { args, ellipsis } => { - let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len())); + let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); let field_count = match self.result[pat].kind(Interner) { TyKind::Tuple(_, s) => s.len(Interner), _ => return, @@ -963,7 +964,7 @@ impl InferenceContext<'_> { } VariantId::StructId(s) => { let vd = &*self.db.struct_data(s).variant_data; - let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len())); + let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); let fields = vd.fields().iter(); let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 7a0f7872a64..3d762b174ac 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -12,10 +12,11 @@ use hir_def::{ ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp, }, lang_item::{LangItem, LangItemTarget}, - path::{GenericArgs, Path}, + path::{GenericArg, GenericArgs, Path}, BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, }; -use hir_expand::name::{name, Name}; +use hir_expand::name::Name; +use intern::sym; use stdx::always; use syntax::ast::RangeOp; @@ -646,8 +647,10 @@ impl InferenceContext<'_> { match op { UnaryOp::Deref => { if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) { - if let Some(deref_fn) = - self.db.trait_data(deref_trait).method_by_name(&name![deref]) + if let Some(deref_fn) = self + .db + .trait_data(deref_trait) + .method_by_name(&Name::new_symbol_root(sym::deref.clone())) { // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that // the mutability is not wrong, and will be fixed in `self.infer_mut`). @@ -785,8 +788,10 @@ impl InferenceContext<'_> { // mutability will be fixed up in `InferenceContext::infer_mut`; adj.push(Adjustment::borrow(Mutability::Not, self_ty.clone())); self.write_expr_adj(*base, adj); - if let Some(func) = - self.db.trait_data(index_trait).method_by_name(&name!(index)) + if let Some(func) = self + .db + .trait_data(index_trait) + .method_by_name(&Name::new_symbol_root(sym::index.clone())) { let substs = TyBuilder::subst_for_def(self.db, index_trait, None) .push(self_ty.clone()) @@ -1165,7 +1170,7 @@ impl InferenceContext<'_> { Expr::Tuple { exprs, .. } => { // We don't consider multiple ellipses. 
This is analogous to // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`. - let ellipsis = exprs.iter().position(|e| is_rest_expr(*e)); + let ellipsis = exprs.iter().position(|e| is_rest_expr(*e)).map(|it| it as u32); let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect(); self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs) @@ -1179,7 +1184,7 @@ impl InferenceContext<'_> { // We don't consider multiple ellipses. This is analogous to // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`. - let ellipsis = args.iter().position(|e| is_rest_expr(*e)); + let ellipsis = args.iter().position(|e| is_rest_expr(*e)).map(|it| it as u32); let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect(); self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args) @@ -1846,29 +1851,45 @@ impl InferenceContext<'_> { if let Some(generic_args) = generic_args { // if args are provided, it should be all of them, but we can't rely on that let self_params = type_params + const_params + lifetime_params; - for (arg, kind_id) in - generic_args.args.iter().zip(def_generics.iter_self_id()).take(self_params) - { - let arg = generic_arg_to_chalk( - self.db, - kind_id, - arg, - self, - |this, type_ref| this.make_ty(type_ref), - |this, c, ty| { - const_or_path_to_chalk( - this.db, - &this.resolver, - this.owner.into(), - ty, - c, - ParamLoweringMode::Placeholder, - || this.generics(), - DebruijnIndex::INNERMOST, - ) - }, - |this, lt_ref| this.make_lifetime(lt_ref), - ); + + let mut args = generic_args.args.iter().peekable(); + for kind_id in def_generics.iter_self_id().take(self_params) { + let arg = args.peek(); + let arg = match (kind_id, arg) { + // Lifetimes can be elided. + // Once we have implemented lifetime elision correctly, + // this should be handled in a proper way. 
+ ( + GenericParamId::LifetimeParamId(_), + None | Some(GenericArg::Type(_) | GenericArg::Const(_)), + ) => error_lifetime().cast(Interner), + + // If we run out of `generic_args`, stop pushing substs + (_, None) => break, + + // Normal cases + (_, Some(_)) => generic_arg_to_chalk( + self.db, + kind_id, + args.next().unwrap(), // `peek()` is `Some(_)`, so guaranteed no panic + self, + |this, type_ref| this.make_ty(type_ref), + |this, c, ty| { + const_or_path_to_chalk( + this.db, + &this.resolver, + this.owner.into(), + ty, + c, + ParamLoweringMode::Placeholder, + || this.generics(), + DebruijnIndex::INNERMOST, + ) + }, + |this, lt_ref| this.make_lifetime(lt_ref), + ), + }; + substs.push(arg); } }; @@ -1945,25 +1966,25 @@ impl InferenceContext<'_> { }; let data = self.db.function_data(func); - if data.legacy_const_generics_indices.is_empty() { + let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else { return Default::default(); - } + }; // only use legacy const generics if the param count matches with them - if data.params.len() + data.legacy_const_generics_indices.len() != args.len() { + if data.params.len() + legacy_const_generics_indices.len() != args.len() { if args.len() <= data.params.len() { return Default::default(); } else { // there are more parameters than there should be without legacy // const params; use them - let mut indices = data.legacy_const_generics_indices.clone(); + let mut indices = legacy_const_generics_indices.as_ref().clone(); indices.sort(); return indices; } } // check legacy const parameters - for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() { + for (subst_idx, arg_idx) in legacy_const_generics_indices.iter().copied().enumerate() { let arg = match subst.at(Interner, subst_idx).constant(Interner) { Some(c) => c, None => continue, // not a const parameter? @@ -1976,7 +1997,7 @@ impl InferenceContext<'_> { self.infer_expr(args[arg_idx as usize], &expected); // FIXME: evaluate and unify with the const } - let mut indices = data.legacy_const_generics_indices.clone(); + let mut indices = legacy_const_generics_indices.as_ref().clone(); indices.sort(); indices } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 00e5eac229f..66267e08db6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -1,14 +1,18 @@ //! Finds if an expression is an immutable context or a mutable context, which is used in selecting //! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar. 
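The rewritten substitution loop above (in `infer/expr.rs`) peeks at the next explicit generic argument and decides per parameter kind what to push: a lifetime parameter with no matching argument gets an error lifetime instead of consuming a type or const argument, and the loop stops once the explicit arguments run out. A self-contained sketch of that control flow, using hypothetical `Param`/`Arg` enums in place of the real `GenericParamId`/`GenericArg`:

```rust
#[derive(Debug)]
enum Param {
    Lifetime,
    Type,
}

#[allow(dead_code)]
#[derive(Debug)]
enum Arg {
    Lifetime(&'static str),
    Type(&'static str),
}

fn substitutions(params: &[Param], args: &[Arg]) -> Vec<String> {
    let mut args = args.iter().peekable();
    let mut out = Vec::new();
    for param in params {
        let subst = match (param, args.peek()) {
            // Lifetimes can be elided: do not let a lifetime slot consume a
            // type argument; fill it with an error lifetime instead.
            (Param::Lifetime, None | Some(Arg::Type(_))) => "'<error>".to_owned(),
            // No explicit arguments left at all: stop pushing substitutions.
            (_, None) => break,
            // Normal case: consume the next explicit argument.
            (_, Some(_)) => format!("{:?}", args.next().unwrap()),
        };
        out.push(subst);
    }
    out
}

fn main() {
    // Something like `Foo::<'_, i32>` written as `Foo::<i32>`: the lifetime
    // slot gets the error lifetime and the type argument is still consumed.
    let subst = substitutions(&[Param::Lifetime, Param::Type], &[Arg::Type("i32")]);
    assert_eq!(subst, ["'<error>", "Type(\"i32\")"]);
    println!("{subst:?}");
}
```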
-use chalk_ir::Mutability; +use chalk_ir::{cast::Cast, Mutability}; use hir_def::{ hir::{Array, BinaryOp, BindingAnnotation, Expr, ExprId, PatId, Statement, UnaryOp}, lang_item::LangItem, }; -use hir_expand::name; +use hir_expand::name::Name; +use intern::sym; -use crate::{lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, OverloadedDeref}; +use crate::{ + infer::Expectation, lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, Interner, + OverloadedDeref, TyBuilder, TyKind, +}; use super::InferenceContext; @@ -100,7 +104,7 @@ impl InferenceContext<'_> { Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } - &Expr::Index { base, index, is_assignee_expr: _ } => { + &Expr::Index { base, index, is_assignee_expr } => { if mutability == Mutability::Mut { if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { if let Some(index_trait) = self @@ -108,10 +112,13 @@ impl InferenceContext<'_> { .lang_item(self.table.trait_env.krate, LangItem::IndexMut) .and_then(|l| l.as_trait()) { - if let Some(index_fn) = - self.db.trait_data(index_trait).method_by_name(&name![index_mut]) + if let Some(index_fn) = self + .db + .trait_data(index_trait) + .method_by_name(&Name::new_symbol_root(sym::index_mut.clone())) { *f = index_fn; + let mut base_ty = None; let base_adjustments = self .result .expr_adjustments @@ -119,11 +126,32 @@ impl InferenceContext<'_> { .and_then(|it| it.last_mut()); if let Some(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutability)), - .. + target, }) = base_adjustments { + // For assignee exprs `IndexMut` obiligations are already applied + if !is_assignee_expr { + if let TyKind::Ref(_, _, ty) = target.kind(Interner) { + base_ty = Some(ty.clone()); + } + } *mutability = Mutability::Mut; } + + // Apply `IndexMut` obligation for non-assignee expr + if let Some(base_ty) = base_ty { + let index_ty = + if let Some(ty) = self.result.type_of_expr.get(index) { + ty.clone() + } else { + self.infer_expr(index, &Expectation::none()) + }; + let trait_ref = TyBuilder::trait_ref(self.db, index_trait) + .push(base_ty) + .fill(|_| index_ty.clone().cast(Interner)) + .build(); + self.push_obligation(trait_ref.cast(Interner)); + } } } } @@ -139,8 +167,10 @@ impl InferenceContext<'_> { .lang_item(self.table.trait_env.krate, LangItem::DerefMut) .and_then(|l| l.as_trait()) { - if let Some(deref_fn) = - self.db.trait_data(deref_trait).method_by_name(&name![deref_mut]) + if let Some(deref_fn) = self + .db + .trait_data(deref_trait) + .method_by_name(&Name::new_symbol_root(sym::deref_mut.clone())) { *f = deref_fn; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index dac5a5ea699..f3c6f13a08d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -68,7 +68,7 @@ impl InferenceContext<'_> { expected: &Ty, default_bm: T::BindingMode, id: T, - ellipsis: Option<usize>, + ellipsis: Option<u32>, subs: &[T], ) -> Ty { let (ty, def) = self.resolve_variant(path, true); @@ -98,7 +98,7 @@ impl InferenceContext<'_> { let visibilities = self.db.field_visibilities(def); let (pre, post) = match ellipsis { - Some(idx) => subs.split_at(idx), + Some(idx) => subs.split_at(idx as usize), None => (subs, &[][..]), }; let post_idx_offset = field_types.iter().count().saturating_sub(post.len()); @@ -219,7 +219,7 @@ impl 
InferenceContext<'_> { &mut self, expected: &Ty, default_bm: T::BindingMode, - ellipsis: Option<usize>, + ellipsis: Option<u32>, subs: &[T], ) -> Ty { let expected = self.resolve_ty_shallow(expected); @@ -229,7 +229,9 @@ impl InferenceContext<'_> { }; let ((pre, post), n_uncovered_patterns) = match ellipsis { - Some(idx) => (subs.split_at(idx), expectations.len().saturating_sub(subs.len())), + Some(idx) => { + (subs.split_at(idx as usize), expectations.len().saturating_sub(subs.len())) + } None => ((subs, &[][..]), 0), }; let mut expectations_iter = expectations diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index 490ecfd7fa3..0b44bbec70f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -7,6 +7,7 @@ use hir_def::{ AdtId, AssocItemId, GenericDefId, ItemContainerId, Lookup, }; use hir_expand::name::Name; +use intern::sym; use stdx::never; use crate::{ @@ -227,7 +228,7 @@ impl InferenceContext<'_> { Path::LangItem(..) => ( PathSegment { name: { - _d = hir_expand::name::known::Unknown; + _d = Name::new_symbol_root(sym::Unknown.clone()); &_d }, args_and_bindings: None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index ed4d55d2037..7ee63af1c22 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -9,7 +9,8 @@ use chalk_ir::{ use chalk_solve::infer::ParameterEnaVariableExt; use either::Either; use ena::unify::UnifyKey; -use hir_expand::name; +use hir_expand::name::Name; +use intern::sym; use rustc_hash::FxHashMap; use smallvec::SmallVec; use triomphe::Arc; @@ -781,7 +782,8 @@ impl<'a> InferenceTable<'a> { let krate = self.trait_env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; let trait_data = self.db.trait_data(fn_once_trait); - let output_assoc_type = trait_data.associated_type_by_name(&name![Output])?; + let output_assoc_type = + trait_data.associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))?; let mut arg_tys = Vec::with_capacity(num_args); let arg_ty = TyBuilder::tuple(num_args) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs index f5fb2ffd781..c0a781b17ee 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs @@ -6,6 +6,7 @@ use chalk_ir::{ DebruijnIndex, }; use hir_def::{visibility::Visibility, AdtId, EnumVariantId, HasModule, ModuleId, VariantId}; +use intern::sym; use rustc_hash::FxHashSet; use crate::{ @@ -118,7 +119,7 @@ impl UninhabitedFrom<'_> { subst: &Substitution, ) -> ControlFlow<VisiblyUninhabited> { let is_local = variant.krate(self.db.upcast()) == self.target_mod.krate(); - if !is_local && self.db.attrs(variant.into()).by_key("non_exhaustive").exists() { + if !is_local && self.db.attrs(variant.into()).by_key(&sym::non_exhaustive).exists() { return CONTINUE_OPAQUELY_INHABITED; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs index 85ed46b9632..f704b59d303 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs @@ -2,6 +2,7 @@ use hir_def::{data::adt::StructFlags, lang_item::LangItem, AdtId}; use 
hir_expand::name::Name; +use intern::sym; use crate::db::HirDatabase; @@ -16,48 +17,57 @@ pub fn is_unsafe_cell(db: &dyn HirDatabase, adt: AdtId) -> bool { } pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> { - use hir_expand::name; use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering}; Some(match op { BinaryOp::LogicOp(_) => return None, BinaryOp::ArithOp(aop) => match aop { - ArithOp::Add => (name![add], LangItem::Add), - ArithOp::Mul => (name![mul], LangItem::Mul), - ArithOp::Sub => (name![sub], LangItem::Sub), - ArithOp::Div => (name![div], LangItem::Div), - ArithOp::Rem => (name![rem], LangItem::Rem), - ArithOp::Shl => (name![shl], LangItem::Shl), - ArithOp::Shr => (name![shr], LangItem::Shr), - ArithOp::BitXor => (name![bitxor], LangItem::BitXor), - ArithOp::BitOr => (name![bitor], LangItem::BitOr), - ArithOp::BitAnd => (name![bitand], LangItem::BitAnd), + ArithOp::Add => (Name::new_symbol_root(sym::add.clone()), LangItem::Add), + ArithOp::Mul => (Name::new_symbol_root(sym::mul.clone()), LangItem::Mul), + ArithOp::Sub => (Name::new_symbol_root(sym::sub.clone()), LangItem::Sub), + ArithOp::Div => (Name::new_symbol_root(sym::div.clone()), LangItem::Div), + ArithOp::Rem => (Name::new_symbol_root(sym::rem.clone()), LangItem::Rem), + ArithOp::Shl => (Name::new_symbol_root(sym::shl.clone()), LangItem::Shl), + ArithOp::Shr => (Name::new_symbol_root(sym::shr.clone()), LangItem::Shr), + ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor.clone()), LangItem::BitXor), + ArithOp::BitOr => (Name::new_symbol_root(sym::bitor.clone()), LangItem::BitOr), + ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand.clone()), LangItem::BitAnd), }, BinaryOp::Assignment { op: Some(aop) } => match aop { - ArithOp::Add => (name![add_assign], LangItem::AddAssign), - ArithOp::Mul => (name![mul_assign], LangItem::MulAssign), - ArithOp::Sub => (name![sub_assign], LangItem::SubAssign), - ArithOp::Div => (name![div_assign], LangItem::DivAssign), - ArithOp::Rem => (name![rem_assign], LangItem::RemAssign), - ArithOp::Shl => (name![shl_assign], LangItem::ShlAssign), - ArithOp::Shr => (name![shr_assign], LangItem::ShrAssign), - ArithOp::BitXor => (name![bitxor_assign], LangItem::BitXorAssign), - ArithOp::BitOr => (name![bitor_assign], LangItem::BitOrAssign), - ArithOp::BitAnd => (name![bitand_assign], LangItem::BitAndAssign), + ArithOp::Add => (Name::new_symbol_root(sym::add_assign.clone()), LangItem::AddAssign), + ArithOp::Mul => (Name::new_symbol_root(sym::mul_assign.clone()), LangItem::MulAssign), + ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign.clone()), LangItem::SubAssign), + ArithOp::Div => (Name::new_symbol_root(sym::div_assign.clone()), LangItem::DivAssign), + ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign.clone()), LangItem::RemAssign), + ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign.clone()), LangItem::ShlAssign), + ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign.clone()), LangItem::ShrAssign), + ArithOp::BitXor => { + (Name::new_symbol_root(sym::bitxor_assign.clone()), LangItem::BitXorAssign) + } + ArithOp::BitOr => { + (Name::new_symbol_root(sym::bitor_assign.clone()), LangItem::BitOrAssign) + } + ArithOp::BitAnd => { + (Name::new_symbol_root(sym::bitand_assign.clone()), LangItem::BitAndAssign) + } }, BinaryOp::CmpOp(cop) => match cop { - CmpOp::Eq { negated: false } => (name![eq], LangItem::PartialEq), - CmpOp::Eq { negated: true } => (name![ne], LangItem::PartialEq), + CmpOp::Eq { negated: false } => { + 
(Name::new_symbol_root(sym::eq.clone()), LangItem::PartialEq) + } + CmpOp::Eq { negated: true } => { + (Name::new_symbol_root(sym::ne.clone()), LangItem::PartialEq) + } CmpOp::Ord { ordering: Ordering::Less, strict: false } => { - (name![le], LangItem::PartialOrd) + (Name::new_symbol_root(sym::le.clone()), LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Less, strict: true } => { - (name![lt], LangItem::PartialOrd) + (Name::new_symbol_root(sym::lt.clone()), LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Greater, strict: false } => { - (name![ge], LangItem::PartialOrd) + (Name::new_symbol_root(sym::ge.clone()), LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Greater, strict: true } => { - (name![gt], LangItem::PartialOrd) + (Name::new_symbol_root(sym::gt.clone()), LangItem::PartialOrd) } }, BinaryOp::Assignment { op: None } => return None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index 4cc7dffc24e..3463e690972 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -8,6 +8,7 @@ use hir_def::{ layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout}, AdtId, VariantId, }; +use intern::sym; use rustc_index::IndexVec; use smallvec::SmallVec; use triomphe::Arc; @@ -129,7 +130,10 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, } Bound::Unbounded }; - (get("rustc_layout_scalar_valid_range_start"), get("rustc_layout_scalar_valid_range_end")) + ( + get(&sym::rustc_layout_scalar_valid_range_start), + get(&sym::rustc_layout_scalar_valid_range_end), + ) } pub fn layout_of_adt_recover( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index 35ea13eb119..8cb428a610a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -3,6 +3,7 @@ use either::Either; use hir_def::db::DefDatabase; use project_model::{target_data_layout::RustcDataLayoutConfig, Sysroot}; use rustc_hash::FxHashMap; +use syntax::ToSmolStr; use test_fixture::WithFixture; use triomphe::Arc; @@ -34,20 +35,26 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro let adt_or_type_alias_id = file_ids .into_iter() .find_map(|file_id| { - let module_id = db.module_for_file(file_id); + let module_id = db.module_for_file(file_id.file_id()); let def_map = module_id.def_map(&db); let scope = &def_map[module_id.local_id].scope; let adt_or_type_alias_id = scope.declarations().find_map(|x| match x { hir_def::ModuleDefId::AdtId(x) => { let name = match x { - hir_def::AdtId::StructId(x) => db.struct_data(x).name.to_smol_str(), - hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(), - hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(), + hir_def::AdtId::StructId(x) => { + db.struct_data(x).name.display_no_db().to_smolstr() + } + hir_def::AdtId::UnionId(x) => { + db.union_data(x).name.display_no_db().to_smolstr() + } + hir_def::AdtId::EnumId(x) => { + db.enum_data(x).name.display_no_db().to_smolstr() + } }; (name == "Goal").then_some(Either::Left(x)) } hir_def::ModuleDefId::TypeAliasId(x) => { - let name = db.type_alias_data(x).name.to_smol_str(); + let name = db.type_alias_data(x).name.display_no_db().to_smolstr(); (name == "Goal").then_some(Either::Right(x)) } _ => None, @@ -80,21 +87,26 @@ fn 
eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro ); let (db, file_id) = TestDB::with_single_file(&ra_fixture); - let module_id = db.module_for_file(file_id); + let module_id = db.module_for_file(file_id.file_id()); let def_map = module_id.def_map(&db); let scope = &def_map[module_id.local_id].scope; let function_id = scope .declarations() .find_map(|x| match x { hir_def::ModuleDefId::FunctionId(x) => { - let name = db.function_data(x).name.to_smol_str(); + let name = db.function_data(x).name.display_no_db().to_smolstr(); (name == "main").then_some(x) } _ => None, }) .unwrap(); let hir_body = db.body(function_id.into()); - let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0; + let b = hir_body + .bindings + .iter() + .find(|x| x.1.name.display_no_db().to_smolstr() == "goal") + .unwrap() + .0; let infer = db.infer(function_id.into()); let goal_ty = infer.type_of_binding[b].clone(); db.layout_of_ty(goal_ty, db.trait_environment(function_id.into())) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index bd650869bb3..2f93ce31816 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -61,7 +61,8 @@ use chalk_ir::{ }; use either::Either; use hir_def::{hir::ExprId, type_ref::Rawness, CallableDefId, GeneralConstId, TypeOrConstParamId}; -use hir_expand::name; +use hir_expand::name::Name; +use intern::{sym, Symbol}; use la_arena::{Arena, Idx}; use mir::{MirEvalError, VTableMap}; use rustc_hash::{FxHashMap, FxHashSet}; @@ -422,45 +423,45 @@ impl Hash for FnAbi { } impl FnAbi { - #[allow(clippy::should_implement_trait)] - pub fn from_str(s: &str) -> FnAbi { + #[rustfmt::skip] + pub fn from_symbol(s: &Symbol) -> FnAbi { match s { - "aapcs-unwind" => FnAbi::AapcsUnwind, - "aapcs" => FnAbi::Aapcs, - "avr-interrupt" => FnAbi::AvrInterrupt, - "avr-non-blocking-interrupt" => FnAbi::AvrNonBlockingInterrupt, - "C-cmse-nonsecure-call" => FnAbi::CCmseNonsecureCall, - "C-unwind" => FnAbi::CUnwind, - "C" => FnAbi::C, - "cdecl-unwind" => FnAbi::CDeclUnwind, - "cdecl" => FnAbi::CDecl, - "efiapi" => FnAbi::Efiapi, - "fastcall-unwind" => FnAbi::FastcallUnwind, - "fastcall" => FnAbi::Fastcall, - "msp430-interrupt" => FnAbi::Msp430Interrupt, - "platform-intrinsic" => FnAbi::PlatformIntrinsic, - "ptx-kernel" => FnAbi::PtxKernel, - "riscv-interrupt-m" => FnAbi::RiscvInterruptM, - "riscv-interrupt-s" => FnAbi::RiscvInterruptS, - "rust-call" => FnAbi::RustCall, - "rust-cold" => FnAbi::RustCold, - "rust-intrinsic" => FnAbi::RustIntrinsic, - "Rust" => FnAbi::Rust, - "stdcall-unwind" => FnAbi::StdcallUnwind, - "stdcall" => FnAbi::Stdcall, - "system-unwind" => FnAbi::SystemUnwind, - "system" => FnAbi::System, - "sysv64-unwind" => FnAbi::Sysv64Unwind, - "sysv64" => FnAbi::Sysv64, - "thiscall-unwind" => FnAbi::ThiscallUnwind, - "thiscall" => FnAbi::Thiscall, - "unadjusted" => FnAbi::Unadjusted, - "vectorcall-unwind" => FnAbi::VectorcallUnwind, - "vectorcall" => FnAbi::Vectorcall, - "wasm" => FnAbi::Wasm, - "win64-unwind" => FnAbi::Win64Unwind, - "win64" => FnAbi::Win64, - "x86-interrupt" => FnAbi::X86Interrupt, + s if *s == sym::aapcs_dash_unwind => FnAbi::AapcsUnwind, + s if *s == sym::aapcs => FnAbi::Aapcs, + s if *s == sym::avr_dash_interrupt => FnAbi::AvrInterrupt, + s if *s == sym::avr_dash_non_dash_blocking_dash_interrupt => FnAbi::AvrNonBlockingInterrupt, + s if *s == sym::C_dash_cmse_dash_nonsecure_dash_call => 
FnAbi::CCmseNonsecureCall, + s if *s == sym::C_dash_unwind => FnAbi::CUnwind, + s if *s == sym::C => FnAbi::C, + s if *s == sym::cdecl_dash_unwind => FnAbi::CDeclUnwind, + s if *s == sym::cdecl => FnAbi::CDecl, + s if *s == sym::efiapi => FnAbi::Efiapi, + s if *s == sym::fastcall_dash_unwind => FnAbi::FastcallUnwind, + s if *s == sym::fastcall => FnAbi::Fastcall, + s if *s == sym::msp430_dash_interrupt => FnAbi::Msp430Interrupt, + s if *s == sym::platform_dash_intrinsic => FnAbi::PlatformIntrinsic, + s if *s == sym::ptx_dash_kernel => FnAbi::PtxKernel, + s if *s == sym::riscv_dash_interrupt_dash_m => FnAbi::RiscvInterruptM, + s if *s == sym::riscv_dash_interrupt_dash_s => FnAbi::RiscvInterruptS, + s if *s == sym::rust_dash_call => FnAbi::RustCall, + s if *s == sym::rust_dash_cold => FnAbi::RustCold, + s if *s == sym::rust_dash_intrinsic => FnAbi::RustIntrinsic, + s if *s == sym::Rust => FnAbi::Rust, + s if *s == sym::stdcall_dash_unwind => FnAbi::StdcallUnwind, + s if *s == sym::stdcall => FnAbi::Stdcall, + s if *s == sym::system_dash_unwind => FnAbi::SystemUnwind, + s if *s == sym::system => FnAbi::System, + s if *s == sym::sysv64_dash_unwind => FnAbi::Sysv64Unwind, + s if *s == sym::sysv64 => FnAbi::Sysv64, + s if *s == sym::thiscall_dash_unwind => FnAbi::ThiscallUnwind, + s if *s == sym::thiscall => FnAbi::Thiscall, + s if *s == sym::unadjusted => FnAbi::Unadjusted, + s if *s == sym::vectorcall_dash_unwind => FnAbi::VectorcallUnwind, + s if *s == sym::vectorcall => FnAbi::Vectorcall, + s if *s == sym::wasm => FnAbi::Wasm, + s if *s == sym::win64_dash_unwind => FnAbi::Win64Unwind, + s if *s == sym::win64 => FnAbi::Win64, + s if *s == sym::x86_dash_interrupt => FnAbi::X86Interrupt, _ => FnAbi::Unknown, } } @@ -894,7 +895,9 @@ pub fn callable_sig_from_fn_trait( ) -> Option<(FnTrait, CallableSig)> { let krate = trait_env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?; - let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; + let output_assoc_type = db + .trait_data(fn_once_trait) + .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))?; let mut table = InferenceTable::new(db, trait_env.clone()); let b = TyBuilder::trait_ref(db, fn_once_trait); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index d421e72d364..444628ff521 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -298,7 +298,7 @@ impl<'a> TyLoweringContext<'a> { TyKind::Function(FnPointer { num_binders: 0, // FIXME lower `for<'a> fn()` correctly sig: FnSig { - abi: abi.as_deref().map_or(FnAbi::Rust, FnAbi::from_str), + abi: abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), safety: if is_unsafe { Safety::Unsafe } else { Safety::Safe }, variadic, }, @@ -1858,7 +1858,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { ret, data.is_varargs(), if data.has_unsafe_kw() { Safety::Unsafe } else { Safety::Safe }, - data.abi.as_deref().map_or(FnAbi::Rust, FnAbi::from_str), + data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), ); make_binders(db, &generics, sig) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index fad74c2448c..8ba8071d36e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -5,7 +5,7 @@ use 
std::ops::ControlFlow; use base_db::CrateId; -use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex, WhereClause}; +use chalk_ir::{cast::Cast, UniverseIndex, WithKind}; use hir_def::{ data::{adt::StructFlags, ImplData}, nameres::DefMap, @@ -13,9 +13,9 @@ use hir_def::{ ModuleId, TraitId, }; use hir_expand::name::Name; +use intern::sym; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; -use span::Edition; use stdx::never; use triomphe::Arc; @@ -24,12 +24,14 @@ use crate::{ db::HirDatabase, error_lifetime, from_chalk_trait_id, from_foreign_def_id, infer::{unify::InferenceTable, Adjust, Adjustment, OverloadedDeref, PointerCast}, + lang_items::is_box, primitive::{FloatTy, IntTy, UintTy}, to_chalk_trait_id, utils::all_super_traits, - AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, Goal, Guidance, - InEnvironment, Interner, Scalar, Solution, Substitution, TraitEnvironment, TraitRef, - TraitRefExt, Ty, TyBuilder, TyExt, + AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, GenericArgData, + Goal, Guidance, InEnvironment, Interner, Mutability, Scalar, Solution, Substitution, + TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, TyVariableKind, + VariableKind, WhereClause, }; /// This is used as a key for indexing impls. @@ -200,7 +202,7 @@ impl TraitImpls { // FIXME: Reservation impls should be considered during coherence checks. If we are // (ever) to implement coherence checks, this filtering should be done by the trait // solver. - if db.attrs(impl_id.into()).by_key("rustc_reservation_impl").exists() { + if db.attrs(impl_id.into()).by_key(&sym::rustc_reservation_impl).exists() { continue; } let target_trait = match db.impl_trait(impl_id) { @@ -1081,6 +1083,11 @@ fn iterate_method_candidates_by_receiver( table.run_in_snapshot(|table| { let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true); while let Some((self_ty, _)) = autoderef.next() { + if matches!(self_ty.kind(Interner), TyKind::InferenceVar(_, TyVariableKind::General)) { + // don't try to resolve methods on unknown types + return ControlFlow::Continue(()); + } + iterate_trait_method_candidates( &self_ty, autoderef.table, @@ -1145,17 +1152,30 @@ fn iterate_trait_method_candidates( 'traits: for &t in traits_in_scope { let data = db.trait_data(t); - // Traits annotated with `#[rustc_skip_array_during_method_dispatch]` are skipped during + // Traits annotated with `#[rustc_skip_during_method_dispatch]` are skipped during // method resolution, if the receiver is an array, and we're compiling for editions before // 2021. // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for // arrays. 
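For context on the dispatch flags handled just below: `skip_array_during_method_dispatch` keeps pre-2021 code resolving `array.into_iter()` to the by-reference impl, and the new `skip_boxed_slice_during_method_dispatch` plus the `at_least_2024()` check does the analogous thing for the `IntoIterator` impl on boxed slices in editions before 2024. A plain-Rust illustration of the array case (the comments describe edition-dependent behavior; the snippet as written assumes a 2021-or-later crate edition):

```rust
fn main() {
    let arr = [1, 2, 3];

    // Edition 2021 and later: the `IntoIterator for [T; N]` impl applies to
    // method-call syntax, so the items are owned `i32`s.
    for item in arr.into_iter() {
        let _: i32 = item;
    }

    // In editions before 2021 the same method call is dispatched to
    // `(&arr).into_iter()` instead, yielding `&i32`; that is the behavior the
    // skip-during-method-dispatch flag preserves for old-edition crates.
    // Fully qualified syntax is unaffected and always uses the by-value impl:
    for item in IntoIterator::into_iter(arr) {
        let _: i32 = item;
    }
}
```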
if data.skip_array_during_method_dispatch - && matches!(self_ty.kind(Interner), chalk_ir::TyKind::Array(..)) + && matches!(self_ty.kind(Interner), TyKind::Array(..)) { // FIXME: this should really be using the edition of the method name's span, in case it // comes from a macro - if db.crate_graph()[krate].edition < Edition::Edition2021 { + if !db.crate_graph()[krate].edition.at_least_2021() { + continue; + } + } + if data.skip_boxed_slice_during_method_dispatch + && matches!( + self_ty.kind(Interner), TyKind::Adt(AdtId(def), subst) + if is_box(table.db, *def) + && matches!(subst.at(Interner, 0).assert_ty_ref(Interner).kind(Interner), TyKind::Slice(..)) + ) + { + // FIXME: this should really be using the edition of the method name's span, in case it + // comes from a macro + if !db.crate_graph()[krate].edition.at_least_2024() { continue; } } @@ -1618,15 +1638,11 @@ fn generic_implements_goal( let kinds = binders.iter().cloned().chain(trait_ref.substitution.iter(Interner).skip(1).map(|it| { let vk = match it.data(Interner) { - chalk_ir::GenericArgData::Ty(_) => { - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) - } - chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime, - chalk_ir::GenericArgData::Const(c) => { - chalk_ir::VariableKind::Const(c.data(Interner).ty.clone()) - } + GenericArgData::Ty(_) => VariableKind::Ty(chalk_ir::TyVariableKind::General), + GenericArgData::Lifetime(_) => VariableKind::Lifetime, + GenericArgData::Const(c) => VariableKind::Const(c.data(Interner).ty.clone()), }; - chalk_ir::WithKind::new(vk, UniverseIndex::ROOT) + WithKind::new(vk, UniverseIndex::ROOT) })); let binders = CanonicalVarKinds::from_iter(Interner, kinds); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 2e106877cbc..06a4236e0ac 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -1164,6 +1164,7 @@ impl MirBody { pub enum MirSpan { ExprId(ExprId), PatId(PatId), + BindingId(BindingId), SelfParam, Unknown, } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 2d9c221b732..f8083e89858 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -2,7 +2,7 @@ use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range}; -use base_db::{CrateId, FileId}; +use base_db::CrateId; use chalk_ir::{cast::Cast, Mutability}; use either::Either; use hir_def::{ @@ -14,8 +14,8 @@ use hir_def::{ AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, VariantId, }; -use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile}; -use intern::Interned; +use hir_expand::{mod_path::path, name::Name, HirFileIdExt, InFile}; +use intern::sym; use la_arena::ArenaMap; use rustc_abi::TargetDataLayout; use rustc_apfloat::{ @@ -23,6 +23,7 @@ use rustc_apfloat::{ Float, }; use rustc_hash::{FxHashMap, FxHashSet}; +use span::FileId; use stdx::never; use syntax::{SyntaxNodePtr, TextRange}; use triomphe::Arc; @@ -35,7 +36,7 @@ use crate::{ layout::{Layout, LayoutError, RustcEnumVariantIdx}, mapping::from_chalk, method_resolution::{is_dyn_method, lookup_impl_const}, - name, static_lifetime, + static_lifetime, traits::FnTrait, utils::{detect_variant_from_bytes, ClosureSubst}, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstScalar, FnDefId, Interner, MemoryMap, @@ 
-387,6 +388,16 @@ impl MirEvalError { Ok(s) => s.map(|it| it.syntax_node_ptr()), Err(_) => continue, }, + MirSpan::BindingId(b) => { + match source_map + .patterns_for_binding(*b) + .iter() + .find_map(|p| source_map.pat_syntax(*p).ok()) + { + Some(s) => s.map(|it| it.syntax_node_ptr()), + None => continue, + } + } MirSpan::SelfParam => match source_map.self_param_syntax() { Some(s) => s.map(|it| it.syntax_node_ptr()), None => continue, @@ -395,7 +406,7 @@ impl MirEvalError { }; let file_id = span.file_id.original_file(db.upcast()); let text_range = span.value.text_range(); - writeln!(f, "{}", span_formatter(file_id, text_range))?; + writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?; } } match err { @@ -631,15 +642,21 @@ impl Evaluator<'_> { cached_fn_trait_func: db .lang_item(crate_id, LangItem::Fn) .and_then(|x| x.as_trait()) - .and_then(|x| db.trait_data(x).method_by_name(&name![call])), + .and_then(|x| { + db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call.clone())) + }), cached_fn_mut_trait_func: db .lang_item(crate_id, LangItem::FnMut) .and_then(|x| x.as_trait()) - .and_then(|x| db.trait_data(x).method_by_name(&name![call_mut])), + .and_then(|x| { + db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call_mut.clone())) + }), cached_fn_once_trait_func: db .lang_item(crate_id, LangItem::FnOnce) .and_then(|x| x.as_trait()) - .and_then(|x| db.trait_data(x).method_by_name(&name![call_once])), + .and_then(|x| { + db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call_once.clone())) + }), }) } @@ -2633,10 +2650,7 @@ impl Evaluator<'_> { let static_data = self.db.static_data(st); let result = if !static_data.is_extern { let konst = self.db.const_eval_static(st).map_err(|e| { - MirEvalError::ConstEvalError( - static_data.name.as_str().unwrap_or("_").to_owned(), - Box::new(e), - ) + MirEvalError::ConstEvalError(static_data.name.as_str().to_owned(), Box::new(e)) })?; self.allocate_const_in_heap(locals, &konst)? } else { @@ -2693,7 +2707,7 @@ impl Evaluator<'_> { ) -> Result<()> { let Some(drop_fn) = (|| { let drop_trait = self.db.lang_item(self.crate_id, LangItem::Drop)?.as_trait()?; - self.db.trait_data(drop_trait).method_by_name(&name![drop]) + self.db.trait_data(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop.clone())) })() else { // in some tests we don't have drop trait in minicore, and // we can ignore drop in them. 
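The `Name::new_symbol_root(sym::call.clone())` and `sym::drop` lookups above replace string-based `name![...]` construction with pre-interned symbols. A minimal standalone sketch of the interning idea, using a toy global interner (this is not the real `intern` crate API):

// Standalone sketch: well-known names are interned once and compared as cheap ids.
use std::collections::HashMap;
use std::sync::{Mutex, OnceLock};

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Symbol(u32);

fn interner() -> &'static Mutex<(HashMap<String, Symbol>, Vec<String>)> {
    static INTERNER: OnceLock<Mutex<(HashMap<String, Symbol>, Vec<String>)>> = OnceLock::new();
    INTERNER.get_or_init(|| Mutex::new((HashMap::new(), Vec::new())))
}

fn intern(text: &str) -> Symbol {
    let mut tables = interner().lock().unwrap();
    if let Some(&sym) = tables.0.get(text) {
        return sym;
    }
    let sym = Symbol(tables.1.len() as u32);
    tables.1.push(text.to_owned());
    tables.0.insert(text.to_owned(), sym);
    sym
}

fn lookup(sym: Symbol) -> String {
    interner().lock().unwrap().1[sym.0 as usize].clone()
}

fn main() {
    let call = intern("call");
    let call_mut = intern("call_mut");
    // Comparing symbols is an integer compare; no string contents are touched.
    assert_eq!(call, intern("call"));
    assert_ne!(call, call_mut);
    assert_eq!(lookup(call), "call");
}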
@@ -2797,14 +2811,13 @@ pub fn render_const_using_debug_impl( let resolver = owner.resolver(db.upcast()); let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully( db.upcast(), - &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments( - hir_expand::mod_path::PathKind::Abs, - [name![core], name![fmt], name![Debug]], - )), + &hir_def::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]), ) else { not_supported!("core::fmt::Debug not found"); }; - let Some(debug_fmt_fn) = db.trait_data(debug_trait).method_by_name(&name![fmt]) else { + let Some(debug_fmt_fn) = + db.trait_data(debug_trait).method_by_name(&Name::new_symbol_root(sym::fmt.clone())) + else { not_supported!("core::fmt::Debug::fmt not found"); }; // a1 = &[""] @@ -2829,10 +2842,7 @@ pub fn render_const_using_debug_impl( evaluator.write_memory(a3.offset(5 * evaluator.ptr_size()), &[1])?; let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully( db.upcast(), - &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments( - hir_expand::mod_path::PathKind::Abs, - [name![std], name![fmt], name![format]], - )), + &hir_def::path::Path::from_known_path_with_no_generic(path![std::fmt::format]), ) else { not_supported!("std::fmt::format not found"); }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index ce22e9d2c2c..bd43a62341d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -8,14 +8,16 @@ use hir_def::{ builtin_type::{BuiltinInt, BuiltinUint}, resolver::HasResolver, }; +use hir_expand::name::Name; +use intern::sym; use crate::{ error_lifetime, mir::eval::{ - name, pad16, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, - HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned, - ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, - Result, Substitution, Ty, TyBuilder, TyExt, + pad16, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, HirDisplay, + InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, + LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution, + Ty, TyBuilder, TyExt, }, }; @@ -52,19 +54,19 @@ impl Evaluator<'_> { let function_data = self.db.function_data(def); let is_intrinsic = match &function_data.abi { - Some(abi) => *abi == Interned::new_str("rust-intrinsic"), + Some(abi) => *abi == sym::rust_dash_intrinsic, None => match def.lookup(self.db.upcast()).container { hir_def::ItemContainerId::ExternBlockId(block) => { let id = block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value].abi.as_deref() - == Some("rust-intrinsic") + id.item_tree(self.db.upcast())[id.value].abi.as_ref() + == Some(&sym::rust_dash_intrinsic) } _ => false, }, }; if is_intrinsic { self.exec_intrinsic( - function_data.name.as_text().unwrap_or_default().as_str(), + function_data.name.as_str(), args, generic_args, destination, @@ -74,19 +76,19 @@ impl Evaluator<'_> { return Ok(true); } let is_platform_intrinsic = match &function_data.abi { - Some(abi) => *abi == Interned::new_str("platform-intrinsic"), + Some(abi) => *abi == sym::platform_dash_intrinsic, None => match def.lookup(self.db.upcast()).container { hir_def::ItemContainerId::ExternBlockId(block) => { let id = 
block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value].abi.as_deref() - == Some("platform-intrinsic") + id.item_tree(self.db.upcast())[id.value].abi.as_ref() + == Some(&sym::platform_dash_intrinsic) } _ => false, }, }; if is_platform_intrinsic { self.exec_platform_intrinsic( - function_data.name.as_text().unwrap_or_default().as_str(), + function_data.name.as_str(), args, generic_args, destination, @@ -98,13 +100,13 @@ impl Evaluator<'_> { let is_extern_c = match def.lookup(self.db.upcast()).container { hir_def::ItemContainerId::ExternBlockId(block) => { let id = block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value].abi.as_deref() == Some("C") + id.item_tree(self.db.upcast())[id.value].abi.as_ref() == Some(&sym::C) } _ => false, }; if is_extern_c { self.exec_extern_c( - function_data.name.as_text().unwrap_or_default().as_str(), + function_data.name.as_str(), args, generic_args, destination, @@ -117,7 +119,7 @@ impl Evaluator<'_> { .attrs .iter() .filter_map(|it| it.path().as_ident()) - .filter_map(|it| it.as_str()) + .map(|it| it.as_str()) .find(|it| { [ "rustc_allocator", @@ -312,12 +314,12 @@ impl Evaluator<'_> { use LangItem::*; let attrs = self.db.attrs(def.into()); - if attrs.by_key("rustc_const_panic_str").exists() { + if attrs.by_key(&sym::rustc_const_panic_str).exists() { // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE. return Some(LangItem::BeginPanic); } - let candidate = attrs.by_key("lang").string_value().and_then(LangItem::from_str)?; + let candidate = attrs.lang_item()?; // We want to execute these functions with special logic // `PanicFmt` is not detected here as it's redirected later. if [BeginPanic, SliceLen, DropInPlace].contains(&candidate) { @@ -1274,10 +1276,11 @@ impl Evaluator<'_> { args.push(IntervalAndTy::new(addr, field, self, locals)?); } if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) { - if let Some(def) = target - .as_trait() - .and_then(|it| self.db.trait_data(it).method_by_name(&name![call_once])) - { + if let Some(def) = target.as_trait().and_then(|it| { + self.db + .trait_data(it) + .method_by_name(&Name::new_symbol_root(sym::call_once.clone())) + }) { self.exec_fn_trait( def, &args, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index c3b35cd553d..b21a401fa76 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -1,5 +1,5 @@ -use base_db::FileId; use hir_def::db::DefDatabase; +use span::EditionedFileId; use syntax::{TextRange, TextSize}; use test_fixture::WithFixture; @@ -7,7 +7,7 @@ use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution}; use super::{interpret_mir, MirEvalError}; -fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalError> { +fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> { let module_id = db.module_for_file(file_id); let def_map = module_id.def_map(db); let scope = &def_map[module_id.local_id].scope; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 1a0a1b780a1..057f5533805 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -2,7 +2,7 @@ use std::{fmt::Write, iter, mem}; -use base_db::{salsa::Cycle, FileId}; +use 
base_db::salsa::Cycle; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ body::Body, @@ -21,6 +21,7 @@ use hir_expand::name::Name; use la_arena::ArenaMap; use rustc_apfloat::Float; use rustc_hash::FxHashMap; +use span::FileId; use syntax::TextRange; use triomphe::Arc; @@ -1113,9 +1114,9 @@ impl<'ctx> MirLowerCtx<'ctx> { .iter() .map(|it| { let o = match it.1.name.as_str() { - Some("start") => lp.take(), - Some("end") => rp.take(), - Some("exhausted") => { + "start" => lp.take(), + "end" => rp.take(), + "exhausted" => { Some(Operand::from_bytes(Box::new([0]), TyBuilder::bool())) } _ => None, @@ -1406,6 +1407,7 @@ impl<'ctx> MirLowerCtx<'ctx> { const USIZE_SIZE: usize = mem::size_of::<usize>(); let bytes: Box<[_]> = match l { hir_def::hir::Literal::String(b) => { + let b = b.as_str(); let mut data = [0; { 2 * USIZE_SIZE }]; data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes()); data[USIZE_SIZE..].copy_from_slice(&b.len().to_le_bytes()); @@ -1718,14 +1720,8 @@ impl<'ctx> MirLowerCtx<'ctx> { /// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and /// `Drop` in the appropriated places. fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<()> { - let span = self.body.bindings[b] - .definitions - .first() - .copied() - .map(MirSpan::PatId) - .unwrap_or(MirSpan::Unknown); let l = self.binding_local(b)?; - self.push_storage_live_for_local(l, current, span) + self.push_storage_live_for_local(l, current, MirSpan::BindingId(b)) } fn push_storage_live_for_local( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index 4ad00909e41..424ee1160c8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -4,7 +4,7 @@ use crate::mir::MutBorrowKind; use super::*; use hir_def::FunctionId; -use hir_expand::name; +use intern::sym; macro_rules! not_supported { ($it: expr) => { @@ -189,10 +189,10 @@ impl MirLowerCtx<'_> { if let Some(deref_trait) = self.resolve_lang_item(LangItem::DerefMut)?.as_trait() { - if let Some(deref_fn) = self - .db - .trait_data(deref_trait) - .method_by_name(&name![deref_mut]) + if let Some(deref_fn) = + self.db.trait_data(deref_trait).method_by_name( + &Name::new_symbol_root(sym::deref_mut.clone()), + ) { break 'b deref_fn == f; } @@ -324,12 +324,17 @@ impl MirLowerCtx<'_> { mutability: bool, ) -> Result<Option<(Place, BasicBlockId)>> { let (chalk_mut, trait_lang_item, trait_method_name, borrow_kind) = if !mutability { - (Mutability::Not, LangItem::Deref, name![deref], BorrowKind::Shared) + ( + Mutability::Not, + LangItem::Deref, + Name::new_symbol_root(sym::deref.clone()), + BorrowKind::Shared, + ) } else { ( Mutability::Mut, LangItem::DerefMut, - name![deref_mut], + Name::new_symbol_root(sym::deref_mut.clone()), BorrowKind::Mut { kind: MutBorrowKind::Default }, ) }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 75969067943..34e0f30afb7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -23,7 +23,7 @@ macro_rules! 
not_supported { } pub(super) enum AdtPatternShape<'a> { - Tuple { args: &'a [PatId], ellipsis: Option<usize> }, + Tuple { args: &'a [PatId], ellipsis: Option<u32> }, Record { args: &'a [RecordFieldPat] }, Unit, } @@ -627,12 +627,12 @@ impl MirLowerCtx<'_> { current: BasicBlockId, current_else: Option<BasicBlockId>, args: &[PatId], - ellipsis: Option<usize>, + ellipsis: Option<u32>, fields: impl DoubleEndedIterator<Item = PlaceElem> + Clone, cond_place: &Place, mode: MatchingMode, ) -> Result<(BasicBlockId, Option<BasicBlockId>)> { - let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len())); + let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); let it = al .iter() .zip(fields.clone()) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs index 460aabd7336..108ae198d50 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs @@ -4,11 +4,12 @@ use std::{fmt, panic, sync::Mutex}; use base_db::{ salsa::{self, Durability}, - AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, + AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, }; use hir_def::{db::DefDatabase, ModuleId}; use hir_expand::db::ExpandDatabase; -use nohash_hasher::IntMap; +use rustc_hash::FxHashMap; +use span::{EditionedFileId, FileId}; use syntax::TextRange; use test_utils::extract_annotations; use triomphe::Arc; @@ -86,11 +87,12 @@ impl FileLoader for TestDB { } impl TestDB { - pub(crate) fn module_for_file_opt(&self, file_id: FileId) -> Option<ModuleId> { + pub(crate) fn module_for_file_opt(&self, file_id: impl Into<FileId>) -> Option<ModuleId> { + let file_id = file_id.into(); for &krate in self.relevant_crates(file_id).iter() { let crate_def_map = self.crate_def_map(krate); for (local_id, data) in crate_def_map.modules() { - if data.origin.file_id() == Some(file_id) { + if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) { return Some(crate_def_map.module_id(local_id)); } } @@ -98,11 +100,13 @@ impl TestDB { None } - pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId { - self.module_for_file_opt(file_id).unwrap() + pub(crate) fn module_for_file(&self, file_id: impl Into<FileId>) -> ModuleId { + self.module_for_file_opt(file_id.into()).unwrap() } - pub(crate) fn extract_annotations(&self) -> IntMap<FileId, Vec<(TextRange, String)>> { + pub(crate) fn extract_annotations( + &self, + ) -> FxHashMap<EditionedFileId, Vec<(TextRange, String)>> { let mut files = Vec::new(); let crate_graph = self.crate_graph(); for krate in crate_graph.iter() { @@ -115,7 +119,7 @@ impl TestDB { files .into_iter() .filter_map(|file_id| { - let text = self.file_text(file_id); + let text = self.file_text(file_id.file_id()); let annotations = extract_annotations(&text); if annotations.is_empty() { return None; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 2a46becbfda..e67124d57a2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -12,7 +12,7 @@ mod traits; use std::env; -use base_db::{FileRange, SourceDatabaseExt2 as _}; +use base_db::SourceDatabaseExt2 as _; use expect_test::Expect; use hir_def::{ body::{Body, BodySourceMap, SyntheticSyntax}, @@ -23,7 +23,7 @@ use hir_def::{ src::HasSource, AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, 
ModuleDefId, }; -use hir_expand::{db::ExpandDatabase, InFile}; +use hir_expand::{db::ExpandDatabase, FileRange, InFile}; use once_cell::race::OnceBool; use rustc_hash::FxHashMap; use stdx::format_to; @@ -344,7 +344,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { } else { (node.value.text_range(), node.value.text().to_string().replace('\n', " ")) }; - let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" }; + let macro_prefix = if node.file_id != file_id { "!" } else { "" }; format_to!( buf, "{}{:?} '{}': {}\n", @@ -361,7 +361,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { }); for (src_ptr, mismatch) in &mismatches { let range = src_ptr.value.text_range(); - let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; + let macro_prefix = if src_ptr.file_id != file_id { "!" } else { "" }; format_to!( buf, "{}{:?}: expected {}, got {}\n", @@ -584,7 +584,7 @@ fn salsa_bug() { } "; - db.set_file_text(pos.file_id, new_text); + db.set_file_text(pos.file_id.file_id(), new_text); let module = db.module_for_file(pos.file_id); let crate_def_map = module.def_map(&db); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index 6066ec69c9a..e9c62d34169 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -16,7 +16,7 @@ fn foo() -> i32 { ); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id); + let module = db.module_for_file(pos.file_id.file_id()); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { db.infer(def); @@ -32,11 +32,11 @@ fn foo() -> i32 { 1 }"; - db.set_file_text(pos.file_id, new_text); + db.set_file_text(pos.file_id.file_id(), new_text); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id); + let module = db.module_for_file(pos.file_id.file_id()); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { db.infer(def); @@ -63,7 +63,7 @@ fn baz() -> i32 { ); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id); + let module = db.module_for_file(pos.file_id.file_id()); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { db.infer(def); @@ -84,11 +84,11 @@ fn baz() -> i32 { } "; - db.set_file_text(pos.file_id, new_text); + db.set_file_text(pos.file_id.file_id(), new_text); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id); + let module = db.module_for_file(pos.file_id.file_id()); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { db.infer(def); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs index a0899cb1d63..5454a496ba8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs @@ -703,7 +703,7 @@ fn infer_builtin_macros_file() { } "#, expect![[r#" - !0..2 '""': &'static str + !0..6 '"file"': &'static str 63..87 '{ ...!(); }': () 73..74 'x': &'static str "#]], diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs index 
63a83d403fa..14e2e746531 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs @@ -1641,6 +1641,55 @@ impl<'a, T> IntoIterator for &'a [T] { } #[test] +fn skip_during_method_dispatch() { + check_types( + r#" +//- /main2018.rs crate:main2018 deps:core edition:2018 +use core::IntoIterator; + +fn f() { + let v = [4].into_iter(); + v; + //^ &'? i32 + + let a = [0, 1].into_iter(); + a; + //^ &'? i32 +} + +//- /main2021.rs crate:main2021 deps:core edition:2021 +use core::IntoIterator; + +fn f() { + let v = [4].into_iter(); + v; + //^ i32 + + let a = [0, 1].into_iter(); + a; + //^ &'? i32 +} + +//- /core.rs crate:core +#[rustc_skip_during_method_dispatch(array, boxed_slice)] +pub trait IntoIterator { + type Out; + fn into_iter(self) -> Self::Out; +} + +impl<T> IntoIterator for [T; 1] { + type Out = T; + fn into_iter(self) -> Self::Out { loop {} } +} +impl<'a, T> IntoIterator for &'a [T] { + type Out = &'a T; + fn into_iter(self) -> Self::Out { loop {} } +} + "#, + ); +} + +#[test] fn sized_blanket_impl() { check_infer( r#" @@ -2050,3 +2099,42 @@ fn test() { "#, ); } + +#[test] +fn mismatched_args_due_to_supertraits_with_deref() { + check_no_mismatches( + r#" +//- minicore: deref +use core::ops::Deref; + +trait Trait1 { + type Assoc: Deref<Target = String>; +} + +trait Trait2: Trait1 { +} + +trait Trait3 { + type T1: Trait1; + type T2: Trait2; + fn bar(&self, x: bool, y: bool); +} + +struct Foo; + +impl Foo { + fn bar(&mut self, _: &'static str) {} +} + +impl Deref for Foo { + type Target = u32; + fn deref(&self) -> &Self::Target { &0 } +} + +fn problem_method<T: Trait3>() { + let mut foo = Foo; + foo.bar("hello"); // Rustc ok, RA errors (mismatched args) +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs index 1c1f7055efd..57866acc063 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs @@ -1126,6 +1126,23 @@ fn var_args() { pub struct VaListImpl<'f>; fn my_fn(foo: ...) {} //^^^ VaListImpl<'?> +fn my_fn2(bar: u32, foo: ...) 
{} + //^^^ VaListImpl<'?> +"#, + ); +} + +#[test] +fn var_args_cond() { + check_types( + r#" +#[lang = "va_list"] +pub struct VaListImpl<'f>; +fn my_fn(bar: u32, #[cfg(FALSE)] foo: ..., #[cfg(not(FALSE))] foo: u32) { + foo; + //^^^ u32 + +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index aa7b00b8deb..ac2dfea1010 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2041,3 +2041,84 @@ fn main() { "#, ); } + +#[test] +fn issue_17734() { + check_types( + r#" +fn test() { + let x = S::foo::<'static, &()>(&S); + // ^ Wrap<'?, ()> + let x = S::foo::<&()>(&S); + // ^ Wrap<'?, ()> + let x = S.foo::<'static, &()>(); + // ^ Wrap<'?, ()> + let x = S.foo::<&()>(); + // ^ Wrap<'?, ()> +} + +struct S; + +impl S { + pub fn foo<'a, T: Trait<'a>>(&'a self) -> T::Proj { + loop {} + } +} + +struct Wrap<'a, T>(T); +trait Trait<'a> { + type Proj; +} +impl<'a, T> Trait<'a> for &'a T { + type Proj = Wrap<'a, T>; +} +"#, + ) +} + +#[test] +fn issue_17738() { + check_types( + r#" +//- minicore: index +use core::ops::{Index, IndexMut}; + +struct Foo<K, V>(K, V); + +struct Bar; + +impl Bar { + fn bar(&mut self) {} +} + +impl<K, V> Foo<K, V> { + fn new(_v: V) -> Self { + loop {} + } +} + +impl<K, B, V> Index<B> for Foo<K, V> { + type Output = V; + fn index(&self, _index: B) -> &Self::Output { + loop {} + } +} + +impl<K, V> IndexMut<K> for Foo<K, V> { + fn index_mut(&mut self, _index: K) -> &mut Self::Output { + loop {} + } +} + +fn test() { + let mut t1 = Foo::new(Bar); + // ^^^^^^ Foo<&'? (), Bar> + t1[&()] = Bar; + + let mut t2 = Foo::new(Bar); + // ^^^^^^ Foo<&'? (), Bar> + t2[&()].bar(); +} +"#, + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 4283a94657b..c46382a0ea8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -12,7 +12,8 @@ use hir_def::{ lang_item::{LangItem, LangItemTarget}, BlockId, TraitId, }; -use hir_expand::name::{name, Name}; +use hir_expand::name::Name; +use intern::sym; use stdx::panic_context; use triomphe::Arc; @@ -256,9 +257,9 @@ impl FnTrait { pub fn method_name(self) -> Name { match self { - FnTrait::FnOnce => name!(call_once), - FnTrait::FnMut => name!(call_mut), - FnTrait::Fn => name!(call), + FnTrait::FnOnce => Name::new_symbol_root(sym::call_once.clone()), + FnTrait::FnMut => Name::new_symbol_root(sym::call_mut.clone()), + FnTrait::Fn => Name::new_symbol_root(sym::call.clone()), } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 738e8421463..fbec332885d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -18,6 +18,7 @@ use hir_def::{ TypeOrConstParamId, }; use hir_expand::name::Name; +use intern::sym; use rustc_abi::TargetDataLayout; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; @@ -254,7 +255,7 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { let data = db.function_data(func); if data.has_unsafe_kw() { // Functions that are `#[rustc_deprecated_safe_2024]` are safe to call before 2024. 
- if db.attrs(func.into()).by_key("rustc_deprecated_safe_2024").exists() { + if db.attrs(func.into()).by_key(&sym::rustc_deprecated_safe_2024).exists() { // FIXME: Properly check the caller span and mark it as unsafe after 2024. return false; } @@ -268,11 +269,11 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { let id = block.lookup(db.upcast()).id; let is_intrinsic = - id.item_tree(db.upcast())[id.value].abi.as_deref() == Some("rust-intrinsic"); + id.item_tree(db.upcast())[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic); if is_intrinsic { // Intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute - !data.attrs.by_key("rustc_safe_intrinsic").exists() + !data.attrs.by_key(&sym::rustc_safe_intrinsic).exists() } else { // Extern items are always unsafe true diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index 7b3ff7b0645..02d92620e05 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -12,6 +12,7 @@ use hir_def::{ }; use hir_expand::{mod_path::PathKind, name::Name}; use hir_ty::{db::HirDatabase, method_resolution}; +use span::SyntaxContextId; use crate::{ Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, @@ -327,9 +328,11 @@ fn doc_modpath_from_str(link: &str) -> Option<ModPath> { }; let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() { Ok(idx) => Name::new_tuple_field(idx), - Err(_) => { - Name::new_text_dont_use(segment.split_once('<').map_or(segment, |it| it.0).into()) - } + Err(_) => Name::new( + segment.split_once('<').map_or(segment, |it| it.0), + tt::IdentIsRaw::No, + SyntaxContextId::ROOT, + ), }); Some(ModPath::from_segments(kind, parts)) }; diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 72272934ab7..4bb8c140a1f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -6,7 +6,6 @@ pub use hir_ty::diagnostics::{CaseType, IncorrectCase}; use hir_ty::{db::HirDatabase, diagnostics::BodyValidationDiagnostic, InferenceDiagnostic}; -use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; pub use hir_def::VariantId; @@ -15,7 +14,7 @@ use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange}; use triomphe::Arc; -use crate::{AssocItem, Field, Local, MacroKind, Trait, Type}; +use crate::{AssocItem, Field, Local, Trait, Type}; macro_rules! diagnostics { ($($diag:ident,)*) => { @@ -90,7 +89,6 @@ diagnostics![ UnresolvedMethodCall, UnresolvedModule, UnresolvedIdent, - UnresolvedProcMacro, UnusedMut, UnusedVariable, ]; @@ -151,22 +149,11 @@ pub struct InactiveCode { } #[derive(Debug, Clone, Eq, PartialEq)] -pub struct UnresolvedProcMacro { - pub node: InFile<SyntaxNodePtr>, - /// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange` - /// to use instead. - pub precise_location: Option<TextRange>, - pub macro_name: Option<String>, - pub kind: MacroKind, - /// The crate id of the proc-macro this macro belongs to, or `None` if the proc-macro can't be found. 
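The `is_fn_unsafe_to_call` hunk above combines three signals: the `unsafe` keyword softened by `#[rustc_deprecated_safe_2024]`, extern-block membership, and the `rust-intrinsic` ABI paired with `#[rustc_safe_intrinsic]`. A simplified standalone sketch of that decision, with hypothetical fields standing in for the attribute and item-tree queries:

// Simplified sketch, not the real rust-analyzer logic.
struct FnInfo {
    has_unsafe_kw: bool,
    deprecated_safe_2024: bool,
    in_extern_block: bool,
    has_rust_intrinsic_abi: bool,
    has_safe_intrinsic_attr: bool,
}

fn is_unsafe_to_call(f: &FnInfo) -> bool {
    if f.has_unsafe_kw {
        // `#[rustc_deprecated_safe_2024]` keeps the call safe before edition 2024.
        return !f.deprecated_safe_2024;
    }
    if f.in_extern_block {
        if f.has_rust_intrinsic_abi {
            // Intrinsics are unsafe unless marked `#[rustc_safe_intrinsic]`.
            return !f.has_safe_intrinsic_attr;
        }
        // Other extern items are always unsafe to call.
        return true;
    }
    false
}

fn main() {
    let deprecated_safe = FnInfo {
        has_unsafe_kw: true,
        deprecated_safe_2024: true,
        in_extern_block: false,
        has_rust_intrinsic_abi: false,
        has_safe_intrinsic_attr: false,
    };
    assert!(!is_unsafe_to_call(&deprecated_safe));
}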
- pub krate: CrateId, -} - -#[derive(Debug, Clone, Eq, PartialEq)] pub struct MacroError { pub node: InFile<SyntaxNodePtr>, pub precise_location: Option<TextRange>, pub message: String, + pub error: bool, } #[derive(Debug, Clone, Eq, PartialEq)] diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 72e79af75df..7def828e95f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -82,8 +82,7 @@ impl HirDisplay for Function { f.write_str("unsafe ")?; } if let Some(abi) = &data.abi { - // FIXME: String escape? - write!(f, "extern \"{}\" ", &**abi)?; + write!(f, "extern \"{}\" ", abi.as_str())?; } write!(f, "fn {}", data.name.display(f.db.upcast()))?; @@ -115,7 +114,10 @@ impl HirDisplay for Function { } if data.is_varargs() { - f.write_str(", ...")?; + if !first { + f.write_str(", ")?; + } + f.write_str("...")?; } f.write_char(')')?; @@ -135,9 +137,9 @@ impl HirDisplay for Function { .as_ref() .unwrap() } - _ => panic!("Async fn ret_type should be impl Future"), + _ => &TypeRef::Error, }, - _ => panic!("Async fn ret_type should be impl Future"), + _ => &TypeRef::Error, } }; diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs index 18e27130f37..7d52a28b91e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs @@ -1,6 +1,5 @@ //! Provides set of implementation for hir's objects that allows get back location in file. -use base_db::FileId; use either::Either; use hir_def::{ nameres::{ModuleOrigin, ModuleSource}, @@ -9,6 +8,7 @@ use hir_def::{ }; use hir_expand::{HirFileId, InFile}; use hir_ty::db::InternedClosure; +use span::EditionedFileId; use syntax::ast; use tt::TextRange; @@ -58,7 +58,7 @@ impl Module { } } - pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<FileId> { + pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<EditionedFileId> { let def_map = self.id.def_map(db.upcast()); match def_map[self.id.local_id].origin { ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. 
} => { diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 016f3418517..266ef2a55c5 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -37,14 +37,14 @@ mod display; use std::{mem::discriminant, ops::ControlFlow}; use arrayvec::ArrayVec; -use base_db::{CrateDisplayName, CrateId, CrateOrigin, FileId}; +use base_db::{CrateDisplayName, CrateId, CrateOrigin}; use either::Either; use hir_def::{ body::{BodyDiagnostic, SyntheticSyntax}, data::adt::VariantData, generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance}, hir::{BindingAnnotation, BindingId, ExprOrPatId, LabelId, Pat}, - item_tree::ItemTreeNode, + item_tree::{AttrOwner, FieldParent, ItemTreeFieldId, ItemTreeNode}, lang_item::LangItemTarget, layout::{self, ReprOptions, TargetDataLayout}, nameres::{self, diagnostics::DefDiagnostic}, @@ -58,7 +58,7 @@ use hir_def::{ TypeOrConstParamId, TypeParamId, UnionId, }; use hir_expand::{ - attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, AstId, MacroCallKind, ValueResult, + attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, ValueResult, }; use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, @@ -78,11 +78,11 @@ use hir_ty::{ use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; use rustc_hash::FxHashSet; -use span::{Edition, MacroCallId}; +use span::{Edition, EditionedFileId, FileId, MacroCallId}; use stdx::{impl_from, never}; use syntax::{ - ast::{self, HasAttrs as _, HasName}, - format_smolstr, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T, + ast::{self, HasAttrs as _, HasGenericParams, HasName}, + format_smolstr, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, T, }; use triomphe::Arc; @@ -129,12 +129,16 @@ pub use { hir_expand::{ attrs::{Attr, AttrId}, change::ChangeWithProcMacros, + files::{ + FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition, + HirFileRange, InFile, InFileWrapper, InMacroFile, InRealFile, MacroFilePosition, + MacroFileRange, + }, hygiene::{marks_rev, SyntaxContextExt}, inert_attr_macro::AttributeTemplate, - name::{known, Name}, - proc_macro::ProcMacros, - tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, - MacroFileIdExt, + name::Name, + proc_macro::{ProcMacros, ProcMacrosBuilder}, + tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, }, hir_ty::{ consteval::ConstEvalError, @@ -145,6 +149,7 @@ pub use { }, // FIXME: Properly encapsulate mir hir_ty::{mir, Interner as ChalkTyInterner}, + intern::{sym, Symbol}, }; // These are negative re-exports: pub using these names is forbidden, they @@ -258,7 +263,7 @@ impl Crate { pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> { // Look for #![doc(html_root_url = "...")] let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into())); - let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url"); + let doc_url = attrs.by_key(&sym::doc).find_string_value_in_tt(&sym::html_root_url); doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") } @@ -676,9 +681,9 @@ impl Module { TypeOrConstParamId { parent, local_id }, )) }); - let res = type_params - .chain(lifetime_params) - .any(|p| db.attrs(AttrDefId::GenericParamId(p)).by_key("may_dangle").exists()); + let res = type_params.chain(lifetime_params).any(|p| { + 
db.attrs(AttrDefId::GenericParamId(p)).by_key(&sym::may_dangle).exists() + }); Some(res) })() .unwrap_or(false); @@ -828,19 +833,27 @@ fn macro_call_diagnostics( let ValueResult { value: parse_errors, err } = &*e; if let Some(err) = err { let loc = db.lookup_intern_macro_call(macro_call_id); - let (node, precise_location, macro_name, kind) = precise_macro_call_location(&loc.kind, db); - let diag = match err { - &hir_expand::ExpandError::UnresolvedProcMacro(krate) => { - UnresolvedProcMacro { node, precise_location, macro_name, kind, krate }.into() - } - err => MacroError { node, precise_location, message: err.to_string() }.into(), + let file_id = loc.kind.file_id(); + let node = + InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id())); + let (message, error) = err.render_to_string(db.upcast()); + let precise_location = if err.span().anchor.file_id == file_id { + Some( + err.span().range + + db.ast_id_map(err.span().anchor.file_id.into()) + .get_erased(err.span().anchor.ast_id) + .text_range() + .start(), + ) + } else { + None }; - acc.push(diag); + acc.push(MacroError { node, precise_location, message, error }.into()); } if !parse_errors.is_empty() { let loc = db.lookup_intern_macro_call(macro_call_id); - let (node, precise_location, _, _) = precise_macro_call_location(&loc.kind, db); + let (node, precise_location) = precise_macro_call_location(&loc.kind, db); acc.push( MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() } .into(), @@ -890,6 +903,19 @@ fn emit_def_diagnostic_( acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into()); } + DefDiagnosticKind::MacroError { ast, path, err } => { + let item = ast.to_ptr(db.upcast()); + let (message, error) = err.render_to_string(db.upcast()); + acc.push( + MacroError { + node: InFile::new(ast.file_id, item.syntax_node_ptr()), + precise_location: None, + message: format!("{}: {message}", path.display(db.upcast())), + error, + } + .into(), + ) + } DefDiagnosticKind::UnresolvedImport { id, index } => { let file_id = id.file_id(); let item_tree = id.item_tree(db.upcast()); @@ -901,22 +927,93 @@ fn emit_def_diagnostic_( ); } - DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { - let item = ast.to_ptr(db.upcast()); - acc.push( - InactiveCode { node: ast.with_value(item), cfg: cfg.clone(), opts: opts.clone() } - .into(), - ); - } - DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => { - let (node, precise_location, macro_name, kind) = precise_macro_call_location(ast, db); - acc.push( - UnresolvedProcMacro { node, precise_location, macro_name, kind, krate: *krate } + DefDiagnosticKind::UnconfiguredCode { tree, item, cfg, opts } => { + let item_tree = tree.item_tree(db.upcast()); + let ast_id_map = db.ast_id_map(tree.file_id()); + // FIXME: This parses... We could probably store relative ranges for the children things + // here in the item tree? + (|| { + let process_field_list = + |field_list: Option<_>, idx: ItemTreeFieldId| match field_list? 
{ + ast::FieldList::RecordFieldList(it) => Some(SyntaxNodePtr::new( + it.fields().nth(idx.into_raw().into_u32() as usize)?.syntax(), + )), + ast::FieldList::TupleFieldList(it) => Some(SyntaxNodePtr::new( + it.fields().nth(idx.into_raw().into_u32() as usize)?.syntax(), + )), + }; + let ptr = match *item { + AttrOwner::ModItem(it) => { + ast_id_map.get(it.ast_id(&item_tree)).syntax_node_ptr() + } + AttrOwner::TopLevel => ast_id_map.root(), + AttrOwner::Variant(it) => { + ast_id_map.get(item_tree[it].ast_id).syntax_node_ptr() + } + AttrOwner::Field(FieldParent::Variant(parent), idx) => process_field_list( + ast_id_map + .get(item_tree[parent].ast_id) + .to_node(&db.parse_or_expand(tree.file_id())) + .field_list(), + idx, + )?, + AttrOwner::Field(FieldParent::Struct(parent), idx) => process_field_list( + ast_id_map + .get(item_tree[parent.index()].ast_id) + .to_node(&db.parse_or_expand(tree.file_id())) + .field_list(), + idx, + )?, + AttrOwner::Field(FieldParent::Union(parent), idx) => SyntaxNodePtr::new( + ast_id_map + .get(item_tree[parent.index()].ast_id) + .to_node(&db.parse_or_expand(tree.file_id())) + .record_field_list()? + .fields() + .nth(idx.into_raw().into_u32() as usize)? + .syntax(), + ), + AttrOwner::Param(parent, idx) => SyntaxNodePtr::new( + ast_id_map + .get(item_tree[parent.index()].ast_id) + .to_node(&db.parse_or_expand(tree.file_id())) + .param_list()? + .params() + .nth(idx.into_raw().into_u32() as usize)? + .syntax(), + ), + AttrOwner::TypeOrConstParamData(parent, idx) => SyntaxNodePtr::new( + ast_id_map + .get(parent.ast_id(&item_tree)) + .to_node(&db.parse_or_expand(tree.file_id())) + .generic_param_list()? + .type_or_const_params() + .nth(idx.into_raw().into_u32() as usize)? + .syntax(), + ), + AttrOwner::LifetimeParamData(parent, idx) => SyntaxNodePtr::new( + ast_id_map + .get(parent.ast_id(&item_tree)) + .to_node(&db.parse_or_expand(tree.file_id())) + .generic_param_list()? + .lifetime_params() + .nth(idx.into_raw().into_u32() as usize)? + .syntax(), + ), + }; + acc.push( + InactiveCode { + node: InFile::new(tree.file_id(), ptr), + cfg: cfg.clone(), + opts: opts.clone(), + } .into(), - ); + ); + Some(()) + })(); } DefDiagnosticKind::UnresolvedMacroCall { ast, path } => { - let (node, precise_location, _, _) = precise_macro_call_location(ast, db); + let (node, precise_location) = precise_macro_call_location(ast, db); acc.push( UnresolvedMacroCall { macro_call: node, @@ -985,7 +1082,7 @@ fn emit_def_diagnostic_( fn precise_macro_call_location( ast: &MacroCallKind, db: &dyn HirDatabase, -) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) { +) -> (InFile<SyntaxNodePtr>, Option<TextRange>) { // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics // - e.g. the full attribute for macro errors, but only the name for name resolution match ast { @@ -997,8 +1094,6 @@ fn precise_macro_call_location( .and_then(|it| it.segment()) .and_then(|it| it.name_ref()) .map(|it| it.syntax().text_range()), - node.path().and_then(|it| it.segment()).map(|it| it.to_string()), - MacroKind::ProcMacro, ) } MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { @@ -1027,8 +1122,6 @@ fn precise_macro_call_location( ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), token.as_ref().map(|tok| tok.text_range()), - token.as_ref().map(ToString::to_string), - MacroKind::Derive, ) } MacroCallKind::Attr { ast_id, invoc_attr_index, .. 
} => { @@ -1043,12 +1136,6 @@ fn precise_macro_call_location( ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), Some(attr.syntax().text_range()), - attr.path() - .and_then(|path| path.segment()) - .and_then(|seg| seg.name_ref()) - .as_ref() - .map(ToString::to_string), - MacroKind::Attr, ) } } @@ -1712,20 +1799,28 @@ impl DefWithBody { BodyDiagnostic::InactiveCode { node, cfg, opts } => { InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() } - BodyDiagnostic::MacroError { node, message } => MacroError { - node: (*node).map(|it| it.into()), - precise_location: None, - message: message.to_string(), - } - .into(), - BodyDiagnostic::UnresolvedProcMacro { node, krate } => UnresolvedProcMacro { - node: (*node).map(|it| it.into()), - precise_location: None, - macro_name: None, - kind: MacroKind::ProcMacro, - krate: *krate, + BodyDiagnostic::MacroError { node, err } => { + let (message, error) = err.render_to_string(db.upcast()); + + let precise_location = if err.span().anchor.file_id == node.file_id { + Some( + err.span().range + + db.ast_id_map(err.span().anchor.file_id.into()) + .get_erased(err.span().anchor.ast_id) + .text_range() + .start(), + ) + } else { + None + }; + MacroError { + node: (*node).map(|it| it.into()), + precise_location, + message, + error, + } + .into() } - .into(), BodyDiagnostic::UnresolvedMacroCall { node, path } => UnresolvedMacroCall { macro_call: (*node).map(|ast_ptr| ast_ptr.into()), precise_location: None, @@ -1801,6 +1896,16 @@ impl DefWithBody { Some(s) => s.map(|it| it.into()), None => continue, }, + mir::MirSpan::BindingId(b) => { + match source_map + .patterns_for_binding(b) + .iter() + .find_map(|p| source_map.pat_syntax(*p).ok()) + { + Some(s) => s.map(|it| it.into()), + None => continue, + } + } mir::MirSpan::Unknown => continue, }; acc.push( @@ -1817,8 +1922,8 @@ impl DefWithBody { let Some(&local) = mir_body.binding_locals.get(binding_id) else { continue; }; - if body[binding_id] - .definitions + if source_map + .patterns_for_binding(binding_id) .iter() .any(|&pat| source_map.pat_syntax(pat).is_err()) { @@ -1826,7 +1931,7 @@ impl DefWithBody { continue; } let mut need_mut = &mol[local]; - if body[binding_id].name.as_str() == Some("self") + if body[binding_id].name == sym::self_.clone() && need_mut == &mir::MutabilityReason::Unused { need_mut = &mir::MutabilityReason::Not; @@ -1836,7 +1941,7 @@ impl DefWithBody { match (need_mut, is_mut) { (mir::MutabilityReason::Unused, _) => { - let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_')); + let should_ignore = body[binding_id].name.as_str().starts_with('_'); if !should_ignore { acc.push(UnusedVariable { local }.into()) } @@ -1854,6 +1959,16 @@ impl DefWithBody { Ok(s) => s.map(|it| it.into()), Err(_) => continue, }, + mir::MirSpan::BindingId(b) => { + match source_map + .patterns_for_binding(*b) + .iter() + .find_map(|p| source_map.pat_syntax(*p).ok()) + { + Some(s) => s.map(|it| it.into()), + None => continue, + } + } mir::MirSpan::SelfParam => match source_map.self_param_syntax() { Some(s) => s.map(|it| it.into()), @@ -1866,7 +1981,7 @@ impl DefWithBody { } (mir::MutabilityReason::Not, true) => { if !infer.mutated_bindings_in_closure.contains(&binding_id) { - let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_')); + let should_ignore = body[binding_id].name.as_str().starts_with('_'); if !should_ignore { acc.push(UnusedMut { local }.into()) } @@ -1972,7 +2087,6 @@ impl Function { return 
Type::new_with_resolver_inner(db, &resolver, output_eq.ty).into(); } } - never!("Async fn ret_type should be impl Future"); None } @@ -2087,14 +2201,14 @@ impl Function { /// is this a `fn main` or a function with an `export_name` of `main`? pub fn is_main(self, db: &dyn HirDatabase) -> bool { let data = db.function_data(self.id); - data.attrs.export_name() == Some("main") - || self.module(db).is_crate_root() && data.name.to_smol_str() == "main" + data.attrs.export_name() == Some(&sym::main) + || self.module(db).is_crate_root() && data.name == sym::main } /// Is this a function with an `export_name` of `main`? pub fn exported_main(self, db: &dyn HirDatabase) -> bool { let data = db.function_data(self.id); - data.attrs.export_name() == Some("main") + data.attrs.export_name() == Some(&sym::main) } /// Does this function have the ignore attribute? @@ -2588,7 +2702,7 @@ pub struct StaticLifetime; impl StaticLifetime { pub fn name(self) -> Name { - known::STATIC_LIFETIME + Name::new_symbol_root(sym::tick_static.clone()) } } @@ -3199,7 +3313,7 @@ impl LocalSource { } } - pub fn original_file(&self, db: &dyn HirDatabase) -> FileId { + pub fn original_file(&self, db: &dyn HirDatabase) -> EditionedFileId { self.source.file_id.original_file(db.upcast()) } @@ -3248,7 +3362,7 @@ impl Local { } pub fn is_self(self, db: &dyn HirDatabase) -> bool { - self.name(db) == name![self] + self.name(db) == sym::self_.clone() } pub fn is_mut(self, db: &dyn HirDatabase) -> bool { @@ -3287,8 +3401,8 @@ impl Local { source: source.map(|ast| Either::Right(ast.to_node(&root))), }] } - _ => body[self.binding_id] - .definitions + _ => source_map + .patterns_for_binding(self.binding_id) .iter() .map(|&definition| { let src = source_map.pat_syntax(definition).unwrap(); // Hmm... @@ -3316,8 +3430,8 @@ impl Local { source: source.map(|ast| Either::Right(ast.to_node(&root))), } } - _ => body[self.binding_id] - .definitions + _ => source_map + .patterns_for_binding(self.binding_id) .first() .map(|&definition| { let src = source_map.pat_syntax(definition).unwrap(); // Hmm... @@ -3379,23 +3493,27 @@ impl BuiltinAttr { if let builtin @ Some(_) = Self::builtin(name) { return builtin; } - let idx = - db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)? as u32; + let idx = db + .crate_def_map(krate.id) + .registered_attrs() + .iter() + .position(|it| it.as_str() == name)? as u32; Some(BuiltinAttr { krate: Some(krate.id), idx }) } fn builtin(name: &str) -> Option<Self> { - hir_expand::inert_attr_macro::find_builtin_attr_idx(name) + hir_expand::inert_attr_macro::find_builtin_attr_idx(&Symbol::intern(name)) .map(|idx| BuiltinAttr { krate: None, idx: idx as u32 }) } - pub fn name(&self, db: &dyn HirDatabase) -> SmolStr { - // FIXME: Return a `Name` here + pub fn name(&self, db: &dyn HirDatabase) -> Name { match self.krate { - Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(), - None => { - SmolStr::new(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name) - } + Some(krate) => Name::new_symbol_root( + db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(), + ), + None => Name::new_symbol_root(Symbol::intern( + hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name, + )), } } @@ -3419,13 +3537,15 @@ impl ToolModule { pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> { let krate = krate.id; let idx = - db.crate_def_map(krate).registered_tools().iter().position(|it| it == name)? 
as u32; + db.crate_def_map(krate).registered_tools().iter().position(|it| it.as_str() == name)? + as u32; Some(ToolModule { krate, idx }) } - pub fn name(&self, db: &dyn HirDatabase) -> SmolStr { - // FIXME: Return a `Name` here - db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone() + pub fn name(&self, db: &dyn HirDatabase) -> Name { + Name::new_symbol_root( + db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone(), + ) } } @@ -3694,6 +3814,10 @@ impl Impl { inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect() } + pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec<Impl> { + module.id.def_map(db.upcast())[module.id.local_id].scope.impls().map(Into::into).collect() + } + pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> { let def_crates = match method_resolution::def_crates(db, &ty, env.krate) { Some(def_crates) => def_crates, @@ -4600,7 +4724,7 @@ impl Type { ) -> impl Iterator<Item = SmolStr> + 'a { // iterate the lifetime self.as_adt() - .and_then(|a| a.lifetime(db).map(|lt| lt.name.to_smol_str())) + .and_then(|a| a.lifetime(db).map(|lt| lt.name.display_no_db().to_smolstr())) .into_iter() // add the type and const parameters .chain(self.type_and_const_arguments(db)) diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index f6c88edbff7..29f98972dcd 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -8,7 +8,6 @@ use std::{ ops::{self, ControlFlow, Not}, }; -use base_db::{FileId, FileRange}; use either::Either; use hir_def::{ hir::Expr, @@ -20,16 +19,16 @@ use hir_def::{ }; use hir_expand::{ attrs::collect_attrs, - builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, + builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::InRealFile, name::AsName, - InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, + FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; -use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; +use span::{EditionedFileId, FileId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, @@ -225,12 +224,12 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.resolve_variant(record_lit).map(VariantDef::from) } - pub fn file_to_module_def(&self, file: FileId) -> Option<Module> { - self.imp.file_to_module_defs(file).next() + pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> { + self.imp.file_to_module_defs(file.into()).next() } - pub fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> { - self.imp.file_to_module_defs(file) + pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> { + self.imp.file_to_module_defs(file.into()) } pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> { @@ -300,7 +299,23 @@ impl<'db> SemanticsImpl<'db> { } } - pub fn parse(&self, file_id: FileId) -> ast::SourceFile { + pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile { + let tree = self.db.parse(file_id).tree(); + self.cache(tree.syntax().clone(), file_id.into()); + tree + } + + pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> { + Some(EditionedFileId::new( + file, + 
self.file_to_module_defs(file).next()?.krate().edition(self.db), + )) + } + + pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile { + let file_id = self + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), file_id.into()); tree @@ -757,7 +772,7 @@ impl<'db> SemanticsImpl<'db> { // iterate related crates and find all include! invocations that include_file_id matches for (invoc, _) in self .db - .relevant_crates(file_id) + .relevant_crates(file_id.file_id()) .iter() .flat_map(|krate| self.db.include_macro_invoc(*krate)) .filter(|&(_, include_file_id)| include_file_id == file_id) @@ -1089,6 +1104,7 @@ impl<'db> SemanticsImpl<'db> { node.original_file_range_opt(self.db.upcast()) .filter(|(_, ctx)| ctx.is_root()) .map(TupleExt::head) + .map(Into::into) } /// Attempts to map the node out of macro expanded files. diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 74ed2640f40..1376dddf671 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -85,7 +85,6 @@ //! active crate for a given position, and then provide an API to resolve all //! syntax nodes against this specific crate. -use base_db::FileId; use either::Either; use hir_def::{ child_by_source::ChildBySource, @@ -103,7 +102,7 @@ use hir_expand::{ }; use rustc_hash::FxHashMap; use smallvec::SmallVec; -use span::MacroFileId; +use span::{FileId, MacroFileId}; use stdx::impl_from; use syntax::{ ast::{self, HasName}, @@ -162,7 +161,7 @@ impl SourceToDefCtx<'_, '_> { } None => { let file_id = src.file_id.original_file(self.db.upcast()); - self.file_to_def(file_id).first().copied() + self.file_to_def(file_id.file_id()).first().copied() } }?; @@ -175,7 +174,7 @@ impl SourceToDefCtx<'_, '_> { pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> { let _p = tracing::info_span!("source_file_to_def").entered(); let file_id = src.file_id.original_file(self.db.upcast()); - self.file_to_def(file_id).first().copied() + self.file_to_def(file_id.file_id()).first().copied() } pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> { @@ -412,7 +411,10 @@ impl SourceToDefCtx<'_, '_> { return Some(def); } - let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?; + let def = self + .file_to_def(src.file_id.original_file(self.db.upcast()).file_id()) + .first() + .copied()?; Some(def.into()) } diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 81c57f6caeb..be0116862b9 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -25,11 +25,8 @@ use hir_def::{ }; use hir_expand::{ mod_path::path, + name::{AsName, Name}, HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt, - { - name, - name::{AsName, Name}, - }, }; use hir_ty::{ diagnostics::{ @@ -40,6 +37,7 @@ use hir_ty::{ method_resolution, Adjustment, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, TyLoweringContext, }; +use intern::sym; use itertools::Itertools; use smallvec::SmallVec; use syntax::{ @@ -368,7 +366,7 @@ impl SourceAnalyzer { let items = into_future_trait.items(db); let 
into_future_type = items.into_iter().find_map(|item| match item { AssocItem::TypeAlias(alias) - if alias.name(db) == hir_expand::name![IntoFuture] => + if alias.name(db) == Name::new_symbol_root(sym::IntoFuture.clone()) => { Some(alias) } @@ -397,15 +395,21 @@ impl SourceAnalyzer { // This can be either `Deref::deref` or `DerefMut::deref_mut`. // Since deref kind is inferenced and stored in `InferenceResult.method_resolution`, // use that result to find out which one it is. - let (deref_trait, deref) = - self.lang_trait_fn(db, LangItem::Deref, &name![deref])?; + let (deref_trait, deref) = self.lang_trait_fn( + db, + LangItem::Deref, + &Name::new_symbol_root(sym::deref.clone()), + )?; self.infer .as_ref() .and_then(|infer| { let expr = self.expr_id(db, &prefix_expr.clone().into())?; let (func, _) = infer.method_resolution(expr)?; - let (deref_mut_trait, deref_mut) = - self.lang_trait_fn(db, LangItem::DerefMut, &name![deref_mut])?; + let (deref_mut_trait, deref_mut) = self.lang_trait_fn( + db, + LangItem::DerefMut, + &Name::new_symbol_root(sym::deref_mut.clone()), + )?; if func == deref_mut { Some((deref_mut_trait, deref_mut)) } else { @@ -414,8 +418,12 @@ impl SourceAnalyzer { }) .unwrap_or((deref_trait, deref)) } - ast::UnaryOp::Not => self.lang_trait_fn(db, LangItem::Not, &name![not])?, - ast::UnaryOp::Neg => self.lang_trait_fn(db, LangItem::Neg, &name![neg])?, + ast::UnaryOp::Not => { + self.lang_trait_fn(db, LangItem::Not, &Name::new_symbol_root(sym::not.clone()))? + } + ast::UnaryOp::Neg => { + self.lang_trait_fn(db, LangItem::Neg, &Name::new_symbol_root(sym::neg.clone()))? + } }; let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?; @@ -435,15 +443,19 @@ impl SourceAnalyzer { let base_ty = self.ty_of_expr(db, &index_expr.base()?)?; let index_ty = self.ty_of_expr(db, &index_expr.index()?)?; - let (index_trait, index_fn) = self.lang_trait_fn(db, LangItem::Index, &name![index])?; + let (index_trait, index_fn) = + self.lang_trait_fn(db, LangItem::Index, &Name::new_symbol_root(sym::index.clone()))?; let (op_trait, op_fn) = self .infer .as_ref() .and_then(|infer| { let expr = self.expr_id(db, &index_expr.clone().into())?; let (func, _) = infer.method_resolution(expr)?; - let (index_mut_trait, index_mut_fn) = - self.lang_trait_fn(db, LangItem::IndexMut, &name![index_mut])?; + let (index_mut_trait, index_mut_fn) = self.lang_trait_fn( + db, + LangItem::IndexMut, + &Name::new_symbol_root(sym::index_mut.clone()), + )?; if func == index_mut_fn { Some((index_mut_trait, index_mut_fn)) } else { diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index 02905ca2ce4..b1f5df681f2 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -1,6 +1,5 @@ //! File symbol extraction. 
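The source_analyzer.rs hunks above choose between the `Index`/`IndexMut` (and `Deref`/`DerefMut`) lang-item methods by checking which function inference actually resolved, falling back to the immutable variant. A minimal standalone sketch of that fallback, using hypothetical ids rather than the real `FunctionId`:

// Standalone sketch: prefer `index_mut` only when inference resolved to it.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct FunctionId(u32);

struct LangItems {
    index_fn: FunctionId,
    index_mut_fn: FunctionId,
}

fn resolve_index_op(resolved_by_inference: Option<FunctionId>, lang: &LangItems) -> FunctionId {
    match resolved_by_inference {
        Some(f) if f == lang.index_mut_fn => lang.index_mut_fn,
        _ => lang.index_fn,
    }
}

fn main() {
    let lang = LangItems { index_fn: FunctionId(1), index_mut_fn: FunctionId(2) };
    assert_eq!(resolve_index_op(Some(FunctionId(2)), &lang), lang.index_mut_fn);
    assert_eq!(resolve_index_op(Some(FunctionId(1)), &lang), lang.index_fn);
    assert_eq!(resolve_index_op(None, &lang), lang.index_fn);
}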
-use base_db::FileRange; use hir_def::{ db::DefDatabase, item_scope::ItemInNs, @@ -8,9 +7,9 @@ use hir_def::{ AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, }; -use hir_expand::{HirFileId, InFile}; +use hir_expand::HirFileId; use hir_ty::{db::HirDatabase, display::HirDisplay}; -use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr}; +use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr}; use crate::{Module, ModuleDef, Semantics}; @@ -42,25 +41,6 @@ impl DeclarationLocation { let root = sema.parse_or_expand(self.hir_file_id); self.ptr.to_node(&root) } - - pub fn original_range(&self, db: &dyn HirDatabase) -> FileRange { - if let Some(file_id) = self.hir_file_id.file_id() { - // fast path to prevent parsing - return FileRange { file_id, range: self.ptr.text_range() }; - } - let node = resolve_node(db, self.hir_file_id, &self.ptr); - node.as_ref().original_file_range_rooted(db.upcast()) - } -} - -fn resolve_node( - db: &dyn HirDatabase, - file_id: HirFileId, - ptr: &SyntaxNodePtr, -) -> InFile<SyntaxNode> { - let root = db.parse_or_expand(file_id); - let node = ptr.to_node(&root); - InFile::new(file_id, node) } /// Represents an outstanding module that the symbol collector must collect symbols from. @@ -239,7 +219,7 @@ impl<'a> SymbolCollector<'a> { fn collect_from_trait(&mut self, trait_id: TraitId) { let trait_data = self.db.trait_data(trait_id); - self.with_container_name(trait_data.name.as_text(), |s| { + self.with_container_name(Some(trait_data.name.as_str().into()), |s| { for &(_, assoc_item_id) in &trait_data.items { s.push_assoc_item(assoc_item_id); } @@ -258,10 +238,18 @@ impl<'a> SymbolCollector<'a> { fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> { match body_id { - DefWithBodyId::FunctionId(id) => Some(self.db.function_data(id).name.to_smol_str()), - DefWithBodyId::StaticId(id) => Some(self.db.static_data(id).name.to_smol_str()), - DefWithBodyId::ConstId(id) => Some(self.db.const_data(id).name.as_ref()?.to_smol_str()), - DefWithBodyId::VariantId(id) => Some(self.db.enum_variant_data(id).name.to_smol_str()), + DefWithBodyId::FunctionId(id) => { + Some(self.db.function_data(id).name.display_no_db().to_smolstr()) + } + DefWithBodyId::StaticId(id) => { + Some(self.db.static_data(id).name.display_no_db().to_smolstr()) + } + DefWithBodyId::ConstId(id) => { + Some(self.db.const_data(id).name.as_ref()?.display_no_db().to_smolstr()) + } + DefWithBodyId::VariantId(id) => { + Some(self.db.enum_variant_data(id).name.display_no_db().to_smolstr()) + } DefWithBodyId::InTypeConstId(_) => Some("in type const".into()), } } @@ -293,7 +281,7 @@ impl<'a> SymbolCollector<'a> { if let Some(attrs) = def.attrs(self.db) { for alias in attrs.doc_aliases() { self.symbols.push(FileSymbol { - name: alias, + name: alias.as_str().into(), def, loc: dec_loc.clone(), container_name: self.current_container_name.clone(), @@ -330,7 +318,7 @@ impl<'a> SymbolCollector<'a> { if let Some(attrs) = def.attrs(self.db) { for alias in attrs.doc_aliases() { self.symbols.push(FileSymbol { - name: alias, + name: alias.as_str().into(), def, loc: dec_loc.clone(), container_name: self.current_container_name.clone(), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs index f1de6aba05b..82d8db42589 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs @@ -4,6 +4,7 @@ //! module, and we use to statically check that we only produce snippet //! assists if we are allowed to. +use hir::ImportPathConfig; use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap}; use crate::AssistKind; @@ -20,3 +21,13 @@ pub struct AssistConfig { pub term_search_fuel: u64, pub term_search_borrowck: bool, } + +impl AssistConfig { + pub fn import_path_config(&self) -> ImportPathConfig { + ImportPathConfig { + prefer_no_std: self.prefer_no_std, + prefer_prelude: self.prefer_prelude, + prefer_absolute: self.prefer_absolute, + } + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs index 8c7670e0cb7..1d2d3350f7c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs @@ -1,8 +1,8 @@ //! See [`AssistContext`]. -use hir::Semantics; -use ide_db::base_db::{FileId, FileRange}; -use ide_db::{label::Label, RootDatabase}; +use hir::{FileRange, Semantics}; +use ide_db::EditionedFileId; +use ide_db::{label::Label, FileId, RootDatabase}; use syntax::{ algo::{self, find_node_at_offset, find_node_at_range}, AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, @@ -90,7 +90,7 @@ impl<'a> AssistContext<'a> { self.frange.range.start() } - pub(crate) fn file_id(&self) -> FileId { + pub(crate) fn file_id(&self) -> EditionedFileId { self.frange.file_id } @@ -139,7 +139,7 @@ impl Assists { pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists { Assists { resolve, - file: ctx.frange.file_id, + file: ctx.frange.file_id.file_id(), buf: Vec::new(), allowed: ctx.config.allowed.clone(), } @@ -185,11 +185,11 @@ impl Assists { return None; } - let mut trigger_signature_help = false; + let mut command = None; let source_change = if self.resolve.should_resolve(&id) { let mut builder = SourceChangeBuilder::new(self.file); f(&mut builder); - trigger_signature_help = builder.trigger_signature_help; + command = builder.command.take(); Some(builder.finish()) } else { None @@ -197,7 +197,7 @@ impl Assists { let label = Label::new(label); let group = group.cloned(); - self.buf.push(Assist { id, label, group, target, source_change, trigger_signature_help }); + self.buf.push(Assist { id, label, group, target, source_change, command }); Some(()) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 4eb29a2378a..f4569ca848f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -1,7 +1,7 @@ use std::iter::{self, Peekable}; use either::Either; -use hir::{Adt, Crate, HasAttrs, HasSource, ImportPathConfig, ModuleDef, Semantics}; +use hir::{sym, Adt, Crate, HasAttrs, HasSource, ImportPathConfig, ModuleDef, Semantics}; use ide_db::RootDatabase; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast}; use itertools::Itertools; @@ -71,11 +71,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) .filter(|pat| !matches!(pat, Pat::WildcardPat(_))) .collect(); - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - 
prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); let module = ctx.sema.scope(expr.syntax())?.module(); let (mut missing_pats, is_non_exhaustive, has_hidden_variants): ( @@ -381,7 +377,7 @@ impl ExtendedEnum { fn is_non_exhaustive(self, db: &RootDatabase, krate: Crate) -> bool { match self { ExtendedEnum::Enum(e) => { - e.attrs(db).by_key("non_exhaustive").exists() && e.module(db).krate() != krate + e.attrs(db).by_key(&sym::non_exhaustive).exists() && e.module(db).krate() != krate } _ => false, } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs index 327709b28a3..17efbcbd6c9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs @@ -124,7 +124,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti "Add `::<>`", ident.text_range(), |edit| { - edit.trigger_signature_help(); + edit.trigger_parameter_hints(); let new_arg_list = match turbofish_target { Either::Left(path_segment) => { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs index f17635972b7..db53e49d846 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs @@ -1,6 +1,6 @@ use std::cmp::Reverse; -use hir::{db::HirDatabase, ImportPathConfig, Module}; +use hir::{db::HirDatabase, Module}; use ide_db::{ helpers::mod_path_to_ast, imports::{ @@ -90,11 +90,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; // # pub mod std { pub mod collections { pub struct HashMap { } } } // ``` pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; let mut proposed_imports: Vec<_> = import_assets @@ -108,7 +104,6 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< NodeOrToken::Node(node) => ctx.sema.original_range(node).range, NodeOrToken::Token(token) => token.text_range(), }; - let group_label = group_label(import_assets.import_candidate()); let scope = ImportScope::find_insert_use_container( &match syntax_under_caret { NodeOrToken::Node(it) => it, @@ -121,18 +116,12 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path)); proposed_imports.dedup_by(|a, b| a.import_path == b.import_path); - let current_node = match ctx.covering_element() { - NodeOrToken::Node(node) => Some(node), - NodeOrToken::Token(token) => token.parent(), - }; - - let current_module = - current_node.as_ref().and_then(|node| ctx.sema.scope(node)).map(|scope| scope.module()); - + let current_module = ctx.sema.scope(scope.as_syntax_node()).map(|scope| scope.module()); // prioritize more relevant imports proposed_imports .sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref()))); + let group_label = group_label(import_assets.import_candidate()); for import in proposed_imports { let import_path = 
import.import_path; @@ -226,7 +215,7 @@ fn group_label(import_candidate: &ImportCandidate) -> GroupLabel { /// Determine how relevant a given import is in the current context. Higher scores are more /// relevant. -fn relevance_score( +pub(crate) fn relevance_score( ctx: &AssistContext<'_>, import: &LocatedImport, current_module: Option<&Module>, @@ -288,8 +277,8 @@ fn module_distance_heuristic(db: &dyn HirDatabase, current: &Module, item: &Modu mod tests { use super::*; - use hir::Semantics; - use ide_db::{assists::AssistResolveStrategy, base_db::FileRange, RootDatabase}; + use hir::{FileRange, Semantics}; + use ide_db::{assists::AssistResolveStrategy, RootDatabase}; use test_fixture::WithFixture; use crate::tests::{ @@ -1637,8 +1626,8 @@ mod bar { #[test] fn local_inline_import_has_alias() { - // FIXME - check_assist_not_applicable( + // FIXME wrong import + check_assist( auto_import, r#" struct S<T>(T); @@ -1648,13 +1637,23 @@ mod foo { pub fn bar() -> S$0<()> {} } "#, + r#" +struct S<T>(T); +use S as IoResult; + +mod foo { + use crate::S; + + pub fn bar() -> S<()> {} +} +"#, ); } #[test] fn alias_local() { - // FIXME - check_assist_not_applicable( + // FIXME wrong import + check_assist( auto_import, r#" struct S<T>(T); @@ -1664,6 +1663,16 @@ mod foo { pub fn bar() -> IoResult$0<()> {} } "#, + r#" +struct S<T>(T); +use S as IoResult; + +mod foo { + use crate::S; + + pub fn bar() -> IoResult<()> {} +} +"#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs index 45c1f0ccae3..839ffa2614b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs @@ -43,10 +43,10 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O acc.add( AssistId("bind_unused_param", AssistKind::QuickFix), - &format!("Bind as `let _ = {};`", &ident_pat), + &format!("Bind as `let _ = {ident_pat};`"), param.syntax().text_range(), |builder| { - let line_index = ctx.db().line_index(ctx.file_id()); + let line_index = ctx.db().line_index(ctx.file_id().into()); let indent = func.indent_level(); let text_indent = indent + 1; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs index ab25e0167bf..3a0754d60f8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{ImportPathConfig, ModuleDef}; +use hir::ModuleDef; use ide_db::{ assists::{AssistId, AssistKind}, defs::Definition, @@ -208,7 +208,7 @@ fn replace_usages( delayed_mutations: &mut Vec<(ImportScope, ast::Path)>, ) { for (file_id, references) in usages { - edit.edit_file(file_id); + edit.edit_file(file_id.file_id()); let refs_with_imports = augment_references_with_imports(ctx, references, target_module); @@ -337,11 +337,7 @@ fn augment_references_with_imports( ) -> Vec<FileReferenceWithImport> { let mut visited_modules = FxHashSet::default(); - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); references .into_iter() @@ -470,7 +466,7 @@ fn add_enum_def( .module() .scope(ctx.db(), 
Some(*target_module)) .iter() - .any(|(name, _)| name.as_str() == Some("Bool")) + .any(|(name, _)| name.as_str() == "Bool") { return None; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs index 61b7b412177..77f9c66b354 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -1,4 +1,4 @@ -use hir::{known, AsAssocItem, Semantics}; +use hir::{sym, AsAssocItem, Semantics}; use ide_db::{ famous_defs::FamousDefs, syntax_helpers::node_ext::{ @@ -223,7 +223,7 @@ fn option_variants( let fam = FamousDefs(sema, sema.scope(expr)?.krate()); let option_variants = fam.core_option_Option()?.variants(sema.db); match &*option_variants { - &[variant0, variant1] => Some(if variant0.name(sema.db) == known::None { + &[variant0, variant1] => Some(if variant0.name(sema.db) == sym::None.clone() { (variant0, variant1) } else { (variant1, variant0) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs index 953119fd1ff..c7b1314c861 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs @@ -40,7 +40,7 @@ fn doc_to_comment(acc: &mut Assists, comment: ast::Comment) -> Option<()> { acc.add( AssistId("doc_to_comment", AssistKind::RefactorRewrite), - "Replace comment with doc comment", + "Replace doc comment with comment", target, |edit| { // We need to either replace the first occurrence of /* with /***, or we need to replace @@ -87,7 +87,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem acc.add( AssistId("comment_to_doc", AssistKind::RefactorRewrite), - "Replace doc comment with comment", + "Replace comment with doc comment", target, |edit| { // We need to either replace the first occurrence of /* with /***, or we need to replace diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs index 5349e86cf38..5aa94590e67 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -1,4 +1,3 @@ -use hir::ImportPathConfig; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait}; use syntax::ast::{self, AstNode, HasGenericArgs, HasName}; @@ -44,11 +43,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - return None; } - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); let src_type_path = { let src_type_path = src_type.syntax().descendants().find_map(ast::Path::cast)?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs index db96c8fe40a..e86ff0dbebc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs @@ -1,4 +1,4 @@ -use hir::known; +use hir::{sym, Name}; use ide_db::famous_defs::FamousDefs; use stdx::format_to; use syntax::{ @@ -149,7 +149,11 @@ fn is_ref_and_impls_iter_method( ast::Expr::RefExpr(r) => r, _ => return None, }; - let wanted_method = if ref_expr.mut_token().is_some() { known::iter_mut } else { known::iter }; + let wanted_method = Name::new_symbol_root(if ref_expr.mut_token().is_some() { + sym::iter_mut.clone() + } else { + sym::iter.clone() + }); let expr_behind_ref = ref_expr.expr()?; let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted(); let scope = sema.scope(iterable.syntax())?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs index 43ff1158864..37055979276 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -143,7 +143,7 @@ fn edit_struct_references( let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); for (file_id, refs) in usages { - edit.edit_file(file_id); + edit.edit_file(file_id.file_id()); for r in refs { process_struct_name_reference(ctx, r, edit); } @@ -221,7 +221,7 @@ fn edit_field_references( let def = Definition::Field(field); let usages = def.usages(&ctx.sema).all(); for (file_id, refs) in usages { - edit.edit_file(file_id); + edit.edit_file(file_id.file_id()); for r in refs { if let Some(name_ref) = r.name.as_name_ref() { // Only edit the field reference if it's part of a `.field` access diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index c55ff24ae38..0f0b4442d8a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{ImportPathConfig, ModuleDef}; +use hir::ModuleDef; use ide_db::{ assists::{AssistId, AssistKind}, defs::Definition, @@ -105,7 +105,7 @@ fn replace_usages( target_module: &hir::Module, ) { for (file_id, references) in usages.iter() { - edit.edit_file(*file_id); + edit.edit_file(file_id.file_id()); let refs_with_imports = augment_references_with_imports(edit, ctx, references, struct_name, target_module); @@ -183,11 +183,7 @@ fn augment_references_with_imports( ) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> { let mut visited_modules = FxHashSet::default(); - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); references .iter() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index a77bf403fdb..44f31dcb849 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -188,7 +188,7 @@ fn 
edit_struct_references( }; for (file_id, refs) in usages { - edit.edit_file(file_id); + edit.edit_file(file_id.file_id()); for r in refs { for node in r.name.syntax().ancestors() { if edit_node(edit, node).is_some() { @@ -213,7 +213,7 @@ fn edit_field_references( let def = Definition::Field(field); let usages = def.usages(&ctx.sema).all(); for (file_id, refs) in usages { - edit.edit_file(file_id); + edit.edit_file(file_id.file_id()); for r in refs { if let Some(name_ref) = r.name.as_name_ref() { edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs index 666e1a1496e..095b8f958d0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -1,4 +1,4 @@ -use hir::{HasVisibility, ImportPathConfig}; +use hir::{sym, HasVisibility}; use ide_db::{ assists::{AssistId, AssistKind}, defs::Definition, @@ -7,7 +7,7 @@ use ide_db::{ FxHashMap, FxHashSet, }; use itertools::Itertools; -use syntax::{ast, ted, AstNode, SmolStr, SyntaxNode}; +use syntax::{ast, ted, AstNode, SmolStr, SyntaxNode, ToSmolStr}; use text_edit::TextRange; use crate::{ @@ -87,18 +87,14 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str let ty = ctx.sema.type_of_binding_in_pat(&ident_pat)?; let hir::Adt::Struct(struct_type) = ty.strip_references().as_adt()? else { return None }; - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); let module = ctx.sema.scope(ident_pat.syntax())?.module(); let struct_def = hir::ModuleDef::from(struct_type); let kind = struct_type.kind(ctx.db()); let struct_def_path = module.find_path(ctx.db(), struct_def, cfg)?; - let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key("non_exhaustive").exists(); + let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key(&sym::non_exhaustive).exists(); let is_foreign_crate = struct_def.module(ctx.db()).map_or(false, |m| m.krate() != module.krate()); @@ -169,8 +165,8 @@ fn get_names_in_scope( let mut names = FxHashSet::default(); scope.process_all_names(&mut |name, scope| { - if let (Some(name), hir::ScopeDef::Local(_)) = (name.as_text(), scope) { - names.insert(name); + if let hir::ScopeDef::Local(_) = scope { + names.insert(name.as_str().into()); } }); Some(names) @@ -251,7 +247,7 @@ fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<( .visible_fields .iter() .map(|field| { - let field_name = field.name(ctx.db()).to_smol_str(); + let field_name = field.name(ctx.db()).display_no_db().to_smolstr(); let new_name = new_field_name(field_name.clone(), &data.names_in_scope); (field_name, new_name) }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index 709be517992..9ecfb83ed53 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -224,7 +224,7 @@ fn edit_tuple_usages( // tree mutation in the same file breaks when `builder.edit_file` // is 
called - if let Some((_, refs)) = usages.iter().find(|(file_id, _)| **file_id == ctx.file_id()) { + if let Some((_, refs)) = usages.iter().find(|(file_id, _)| *file_id == ctx.file_id()) { current_file_usages = Some( refs.iter() .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern)) @@ -233,11 +233,11 @@ fn edit_tuple_usages( } for (file_id, refs) in usages.iter() { - if *file_id == ctx.file_id() { + if file_id == ctx.file_id() { continue; } - edit.edit_file(*file_id); + edit.edit_file(file_id.file_id()); let tuple_edits = refs .iter() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 28f645171c8..9180d8dfcbb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -110,7 +110,7 @@ pub(crate) fn extract_expressions_from_format_string( Arg::Expr(s) => { // insert arg // FIXME: use the crate's edition for parsing - let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT).syntax_node(); + let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT_FIXME).syntax_node(); let mut expr_tt = utils::tt_from_syntax(expr); new_tt_bits.append(&mut expr_tt); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 20c37f92337..0a2cb6d5ef8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -3,8 +3,8 @@ use std::{iter, ops::RangeInclusive}; use ast::make; use either::Either; use hir::{ - DescendPreference, HasSource, HirDisplay, ImportPathConfig, InFile, Local, LocalSource, - ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam, + DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, + PathResolution, Semantics, TypeInfo, TypeParam, }; use ide_db::{ defs::{Definition, NameRefClass}, @@ -213,11 +213,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ctx.sema.db, ModuleDef::from(control_flow_enum), ctx.config.insert_use.prefix_kind, - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ); if let Some(mod_path) = mod_path { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs index 42f935651cf..e4cba666af7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs @@ -4,10 +4,9 @@ use either::Either; use hir::{HasSource, HirFileIdExt, ModuleSource}; use ide_db::{ assists::{AssistId, AssistKind}, - base_db::FileId, defs::{Definition, NameClass, NameRefClass}, search::{FileReference, SearchScope}, - FxHashMap, FxHashSet, + FileId, FxHashMap, FxHashSet, }; use itertools::Itertools; use smallvec::SmallVec; @@ -364,7 +363,7 @@ impl Module { None }); - refs_in_files.entry(file_id).or_default().extend(usages); + refs_in_files.entry(file_id.file_id()).or_default().extend(usages); } } @@ -477,8 +476,13 @@ impl Module { } } - 
let (def_in_mod, def_out_sel) = - check_def_in_mod_and_out_sel(def, ctx, curr_parent_module, selection_range, file_id); + let (def_in_mod, def_out_sel) = check_def_in_mod_and_out_sel( + def, + ctx, + curr_parent_module, + selection_range, + file_id.file_id(), + ); // Find use stmt that use def in current file let use_stmt: Option<ast::Use> = usage_res diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index 54323e2928e..a62fdeb6173 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -1,7 +1,7 @@ use std::iter; use either::Either; -use hir::{ImportPathConfig, Module, ModuleDef, Name, Variant}; +use hir::{Module, ModuleDef, Name, Variant}; use ide_db::{ defs::Definition, helpers::mod_path_to_ast, @@ -72,7 +72,7 @@ pub(crate) fn extract_struct_from_enum_variant( def_file_references = Some(references); continue; } - builder.edit_file(file_id); + builder.edit_file(file_id.file_id()); let processed = process_references( ctx, builder, @@ -390,11 +390,7 @@ fn process_references( ctx.sema.db, *enum_module_def, ctx.config.insert_use.prefix_kind, - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ); if let Some(mut mod_path) = mod_path { mod_path.pop_segment(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index 36d31226512..0ef71a38661 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -3,7 +3,7 @@ use syntax::{ ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName}, ted, NodeOrToken, SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR}, - SyntaxNode, + SyntaxNode, T, }; use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists}; @@ -26,8 +26,8 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists}; // ``` pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let node = if ctx.has_empty_selection() { - if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() { - expr_stmt.syntax().clone() + if let Some(t) = ctx.token_at_offset().find(|it| it.kind() == T![;]) { + t.parent().and_then(ast::ExprStmt::cast)?.syntax().clone() } else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() { expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone() } else { @@ -197,6 +197,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op block.indent(indent_to); } } + edit.rename(); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs index 2887e0c3e56..758f50d3f47 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs @@ -1,6 +1,6 @@ use syntax::{ ast::{self, make}, 
- AstNode, + AstNode, ToSmolStr, }; use crate::{AssistContext, AssistId, Assists}; @@ -45,8 +45,9 @@ pub(crate) fn fill_record_pattern_fields(acc: &mut Assists, ctx: &AssistContext< let new_field_list = make::record_pat_field_list(old_field_list.fields(), None).clone_for_update(); for (f, _) in missing_fields.iter() { - let field = - make::record_pat_field_shorthand(make::name_ref(&f.name(ctx.sema.db).to_smol_str())); + let field = make::record_pat_field_shorthand(make::name_ref( + &f.name(ctx.sema.db).display_no_db().to_smolstr(), + )); new_field_list.add_field(field.clone_for_update()); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs index 589591a6777..9950f9c1474 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs @@ -1,10 +1,10 @@ use hir::{ db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, }; -use ide_db::base_db::FileId; +use ide_db::FileId; use syntax::{ ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, - AstNode, TextRange, + AstNode, TextRange, ToSmolStr, }; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -48,7 +48,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) let (_, def) = module .scope(ctx.db(), None) .into_iter() - .find(|(name, _)| name.to_smol_str() == name_ref.text().as_str())?; + .find(|(name, _)| name.display_no_db().to_smolstr() == name_ref.text().as_str())?; let ScopeDef::ModuleDef(def) = def else { return None; }; @@ -128,7 +128,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> ); acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| { - edit.edit_file(target_file); + edit.edit_file(target_file.file_id()); let vis_owner = edit.make_mut(vis_owner); vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); @@ -155,7 +155,11 @@ fn target_data_for_def( let in_file_syntax = source.syntax(); let file_id = in_file_syntax.file_id; let range = in_file_syntax.value.text_range(); - Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db.upcast()))) + Some(( + ast::AnyHasVisibility::new(source.value), + range, + file_id.original_file(db.upcast()).file_id(), + )) } let target_name; @@ -197,7 +201,7 @@ fn target_data_for_def( let in_file_source = m.declaration_source(db)?; let file_id = in_file_source.file_id.original_file(db.upcast()); let range = in_file_source.value.syntax().text_range(); - (ast::AnyHasVisibility::new(in_file_source.value), range, file_id) + (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id()) } // FIXME hir::ModuleDef::Macro(_) => return None, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs index 4d8116a7156..25076dd5255 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs @@ -2,8 +2,9 @@ use crate::assist_context::{AssistContext, Assists}; use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module}; use ide_db::{ assists::{AssistId, AssistKind}, - base_db::{FileId, Upcast}, + base_db::Upcast, defs::{Definition, NameRefClass}, + FileId, }; use syntax::{ 
ast::{self, edit::IndentLevel, NameRef}, @@ -139,9 +140,9 @@ fn target_data_for_generate_constant( .any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n')); let post_string = if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") }; - Some((offset, indent + 1, Some(file_id), post_string)) + Some((offset, indent + 1, Some(file_id.file_id()), post_string)) } - _ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())), + _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id()), "\n".into())), } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 78def51a4a9..5a3457e5b7a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -22,7 +22,7 @@ use syntax::{ WherePred, }, ted::{self, Position}, - AstNode, NodeOrToken, SmolStr, SyntaxKind, + AstNode, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr, }; // Assist: generate_delegate_trait @@ -170,11 +170,11 @@ impl Delegee { for m in it.module(db).path_to_root(db).iter().rev() { if let Some(name) = m.name(db) { - s.push_str(&format!("{}::", name.to_smol_str())); + s.push_str(&format!("{}::", name.display_no_db().to_smolstr())); } } - s.push_str(&it.name(db).to_smol_str()); + s.push_str(&it.name(db).display_no_db().to_smolstr()); s } } @@ -259,7 +259,7 @@ fn generate_impl( strukt_params.clone(), strukt_params.map(|params| params.to_generic_args()), delegee.is_auto(db), - make::ty(&delegee.name(db).to_smol_str()), + make::ty(&delegee.name(db).display_no_db().to_smolstr()), strukt_ty, bound_def.where_clause(), ast_strukt.where_clause(), @@ -349,7 +349,8 @@ fn generate_impl( let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args()); - let path_type = make::ty(&trait_.name(db).to_smol_str()).clone_for_update(); + let path_type = + make::ty(&trait_.name(db).display_no_db().to_smolstr()).clone_for_update(); transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?; // 3) Generate delegate trait impl diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs index cc33439dd59..2ac7057fe79 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs @@ -1,6 +1,6 @@ use std::fmt::Display; -use hir::{ImportPathConfig, ModPath, ModuleDef}; +use hir::{ModPath, ModuleDef}; use ide_db::{famous_defs::FamousDefs, RootDatabase}; use syntax::{ ast::{self, HasName}, @@ -58,15 +58,8 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; - let trait_path = module.find_path( - ctx.db(), - ModuleDef::Trait(trait_), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, - )?; + let trait_path = + module.find_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.import_path_config())?; let field_type = field.ty()?; let field_name = field.name()?; @@ -106,15 +99,8 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() 
let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; - let trait_path = module.find_path( - ctx.db(), - ModuleDef::Trait(trait_), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, - )?; + let trait_path = + module.find_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.import_path_config())?; let field_type = field.ty()?; let target = field.syntax().text_range(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs index 7faf2d5b132..5d584591210 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -121,7 +121,7 @@ fn add_variant_to_accumulator( "Generate variant", target, |builder| { - builder.edit_file(file_id); + builder.edit_file(file_id.file_id()); let node = builder.make_mut(enum_node); let variant = make_variant(ctx, name_ref, parent); if let Some(it) = node.variant_list() { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 41693855bea..b2980d5c630 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -3,13 +3,12 @@ use hir::{ StructKind, Type, TypeInfo, }; use ide_db::{ - base_db::FileId, defs::{Definition, NameRefClass}, famous_defs::FamousDefs, helpers::is_editable_crate, path_transform::PathTransform, source_change::SourceChangeBuilder, - FxHashMap, FxHashSet, RootDatabase, SnippetCap, + FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap, }; use itertools::Itertools; use stdx::to_lower_snake_case; @@ -208,7 +207,8 @@ fn get_adt_source( let file = ctx.sema.parse(range.file_id); let adt_source = ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; - find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]).map(|impl_| (impl_, range.file_id)) + find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]) + .map(|impl_| (impl_, range.file_id.file_id())) } struct FunctionBuilder { @@ -482,7 +482,7 @@ fn get_fn_target( target_module: Option<Module>, call: CallExpr, ) -> Option<(GeneratedFunctionTarget, FileId)> { - let mut file = ctx.file_id(); + let mut file = ctx.file_id().into(); let target = match target_module { Some(target_module) => { let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module); @@ -1168,7 +1168,7 @@ fn next_space_for_fn_in_module( } }; - (file, assist_item) + (file.file_id(), assist_item) } #[derive(Clone, Copy)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs index 4d369e705e8..ad422b25c39 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs @@ -1,4 +1,4 @@ -use hir::{known, HasSource, Name}; +use hir::{sym, HasSource, Name}; use syntax::{ ast::{self, HasName}, AstNode, @@ -54,13 +54,13 @@ pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: 
&AssistContext< } let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?; - let len_fn = get_impl_method(ctx, &impl_, &known::len)?; + let len_fn = get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::len.clone()))?; if !len_fn.ret_type(ctx.sema.db).is_usize() { cov_mark::hit!(len_fn_different_return_type); return None; } - if get_impl_method(ctx, &impl_, &known::is_empty).is_some() { + if get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::is_empty.clone())).is_some() { cov_mark::hit!(is_empty_already_implemented); return None; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 6056c808880..b985b5e66c4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -1,4 +1,3 @@ -use hir::ImportPathConfig; use ide_db::{ imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor, }; @@ -62,11 +61,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let type_path = current_module.find_path( ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?, - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), )?; let expr = use_trivial_constructor( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index 8c9fe23bb0b..5bd204dd573 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -2,16 +2,15 @@ use std::collections::BTreeSet; use ast::make; use either::Either; -use hir::{db::HirDatabase, PathResolution, Semantics, TypeInfo}; +use hir::{db::HirDatabase, sym, FileRange, PathResolution, Semantics, TypeInfo}; use ide_db::{ - base_db::{FileId, FileRange}, defs::Definition, imports::insert_use::remove_path_if_in_use_stmt, path_transform::PathTransform, search::{FileReference, FileReferenceNode, SearchScope}, source_change::SourceChangeBuilder, syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref}, - RootDatabase, + EditionedFileId, RootDatabase, }; use itertools::{izip, Itertools}; use syntax::{ @@ -304,7 +303,7 @@ fn get_fn_params( fn inline( sema: &Semantics<'_, RootDatabase>, - function_def_file_id: FileId, + function_def_file_id: EditionedFileId, function: hir::Function, fn_body: &ast::BlockExpr, params: &[(ast::Pat, Option<ast::Type>, hir::Param)], @@ -430,10 +429,7 @@ fn inline( let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty); - let is_self = param - .name(sema.db) - .and_then(|name| name.as_text()) - .is_some_and(|name| name == "self"); + let is_self = param.name(sema.db).is_some_and(|name| name == sym::self_.clone()); if is_self { let mut this_pat = make::ident_pat(false, false, make::name("this")); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs index 5d8ba43ec84..6a1f7f26c92 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs @@ -1,9 +1,8 @@ use hir::{PathResolution, Semantics}; use ide_db::{ - base_db::FileId, defs::Definition, search::{FileReference, FileReferenceNode, UsageSearchResult}, - RootDatabase, + EditionedFileId, RootDatabase, }; use syntax::{ ast::{self, AstNode, AstToken, HasName}, @@ -150,7 +149,7 @@ fn inline_let( sema: &Semantics<'_, RootDatabase>, let_stmt: ast::LetStmt, range: TextRange, - file_id: FileId, + file_id: EditionedFileId, ) -> Option<InlineData> { let bind_pat = match let_stmt.pat()? { ast::Pat::IdentPat(pat) => pat, @@ -185,7 +184,7 @@ fn inline_usage( sema: &Semantics<'_, RootDatabase>, path_expr: ast::PathExpr, range: TextRange, - file_id: FileId, + file_id: EditionedFileId, ) -> Option<InlineData> { let path = path_expr.path()?; let name = path.as_single_name_ref()?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs index e2f3d9edcd1..f6624d6c872 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -92,7 +92,7 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) }; for (file_id, refs) in usages.into_iter() { - inline_refs_for_file(file_id, refs); + inline_refs_for_file(file_id.file_id(), refs); } if !definition_deleted { builder.edit_file(ctx.file_id()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs index 7f751c93e48..41712308369 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs @@ -164,6 +164,7 @@ impl Merge for ast::UseTree { } } +#[derive(Debug)] enum Edit { Remove(Either<ast::Use, ast::UseTree>), Replace(SyntaxNode, SyntaxNode), @@ -733,4 +734,72 @@ use std::{ r"use std::fmt::{Debug, Display};", ); } + + #[test] + fn test_merge_with_synonymous_imports_1() { + check_assist( + merge_imports, + r" +mod top { + pub(crate) mod a { + pub(crate) struct A; + } + pub(crate) mod b { + pub(crate) struct B; + pub(crate) struct D; + } +} + +use top::a::A; +use $0top::b::{B, B as C}; +", + r" +mod top { + pub(crate) mod a { + pub(crate) struct A; + } + pub(crate) mod b { + pub(crate) struct B; + pub(crate) struct D; + } +} + +use top::{a::A, b::{B, B as C}}; +", + ); + } + + #[test] + fn test_merge_with_synonymous_imports_2() { + check_assist( + merge_imports, + r" +mod top { + pub(crate) mod a { + pub(crate) struct A; + } + pub(crate) mod b { + pub(crate) struct B; + pub(crate) struct D; + } +} + +use top::a::A; +use $0top::b::{B as D, B as C}; +", + r" +mod top { + pub(crate) mod a { + pub(crate) struct A; + } + pub(crate) mod b { + pub(crate) struct B; + pub(crate) struct D; + } +} + +use top::{a::A, b::{B as D, B as C}}; +", + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs index 22d536b5afc..3057745a97b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -1,5 +1,5 @@ -use hir::{AsAssocItem, AssocItemContainer, HasCrate, HasSource}; -use 
ide_db::{assists::AssistId, base_db::FileRange, defs::Definition, search::SearchScope}; +use hir::{AsAssocItem, AssocItemContainer, FileRange, HasCrate, HasSource}; +use ide_db::{assists::AssistId, defs::Definition, search::SearchScope}; use syntax::{ ast::{self, edit::IndentLevel, edit_in_place::Indent, AstNode}, SyntaxKind, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs index a256f60c421..14381085a78 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs @@ -41,7 +41,7 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let target = source_file.syntax().text_range(); let module_name = module.name(ctx.db())?.display(ctx.db()).to_string(); let path = format!("../{module_name}.rs"); - let dst = AnchoredPathBuf { anchor: ctx.file_id(), path }; + let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; acc.add( AssistId("move_from_mod_rs", AssistKind::Refactor), format!("Convert {module_name}/mod.rs to {module_name}.rs"), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs index 9af8411f4cb..e679a68f446 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs @@ -1,7 +1,7 @@ use std::iter; use ast::edit::IndentLevel; -use hir::HasAttrs; +use hir::{sym, HasAttrs}; use ide_db::base_db::AnchoredPathBuf; use itertools::Itertools; use stdx::format_to; @@ -57,7 +57,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> if !parent_module.is_mod_rs(db) && parent_module .attrs(db) - .by_key("path") + .by_key(&sym::path) .string_value_unescape() .is_none() => { @@ -104,7 +104,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> buf, ); - let dst = AnchoredPathBuf { anchor: ctx.file_id(), path }; + let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; builder.create_file(dst, contents); }, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs index a8a124eebb6..c89d54ff039 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs @@ -41,7 +41,7 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let target = source_file.syntax().text_range(); let module_name = module.name(ctx.db())?.display(ctx.db()).to_string(); let path = format!("./{module_name}/mod.rs"); - let dst = AnchoredPathBuf { anchor: ctx.file_id(), path }; + let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; acc.add( AssistId("move_to_mod_rs", AssistKind::Refactor), format!("Convert {module_name}.rs to {module_name}/mod.rs"), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs index 89e24fafc55..b1e98045fcf 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -1,7 +1,4 @@ -use hir::{ - db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ImportPathConfig, ItemInNs, - ModuleDef, -}; +use hir::{db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef}; use ide_db::assists::{AssistId, AssistKind}; use syntax::{ast, AstNode}; @@ -50,11 +47,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> let receiver_path = current_module.find_path( ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?, - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), )?; let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs index 4164a4c1024..d8e7da15d5b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs @@ -1,6 +1,7 @@ +use std::cmp::Reverse; use std::iter; -use hir::{AsAssocItem, ImportPathConfig}; +use hir::AsAssocItem; use ide_db::RootDatabase; use ide_db::{ helpers::mod_path_to_ast, @@ -38,11 +39,7 @@ use crate::{ // ``` pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); let mut proposed_imports: Vec<_> = import_assets.search_for_relative_paths(&ctx.sema, cfg).collect(); @@ -50,12 +47,8 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option return None; } - let range = match &syntax_under_caret { - NodeOrToken::Node(node) => ctx.sema.original_range(node).range, - NodeOrToken::Token(token) => token.text_range(), - }; let candidate = import_assets.import_candidate(); - let qualify_candidate = match syntax_under_caret { + let qualify_candidate = match syntax_under_caret.clone() { NodeOrToken::Node(syntax_under_caret) => match candidate { ImportCandidate::Path(candidate) if candidate.qualifier.is_some() => { cov_mark::hit!(qualify_path_qualifier_start); @@ -89,6 +82,22 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path)); proposed_imports.dedup_by(|a, b| a.import_path == b.import_path); + let range = match &syntax_under_caret { + NodeOrToken::Node(node) => ctx.sema.original_range(node).range, + NodeOrToken::Token(token) => token.text_range(), + }; + let current_module = ctx + .sema + .scope(&match syntax_under_caret { + NodeOrToken::Node(node) => node.clone(), + NodeOrToken::Token(t) => t.parent()?, + }) + .map(|scope| scope.module()); + // prioritize more relevant imports + proposed_imports.sort_by_key(|import| { + Reverse(super::auto_import::relevance_score(ctx, import, current_module.as_ref())) + }); + let group_label = group_label(candidate); for import in proposed_imports { acc.add_group( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs index b653f3b6650..d4fdc072fb5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,8 +1,7 @@ use std::collections::hash_map::Entry; -use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; +use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; use ide_db::{ - base_db::FileRange, defs::Definition, search::{FileReference, ReferenceCategory, SearchScope}, FxHashMap, RootDatabase, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs index 0772b168d49..376243c2681 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs @@ -1,4 +1,4 @@ -use ide_db::{base_db::FileId, defs::Definition, search::FileReference}; +use ide_db::{defs::Definition, search::FileReference, EditionedFileId}; use syntax::{ algo::find_node_at_range, ast::{self, HasArgList}, @@ -90,7 +90,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> fn process_usages( ctx: &AssistContext<'_>, builder: &mut SourceChangeBuilder, - file_id: FileId, + file_id: EditionedFileId, references: Vec<FileReference>, arg_to_remove: usize, is_self_present: bool, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 5582256a170..5ff4af19fbf 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1,4 +1,4 @@ -use hir::{ImportPathConfig, InFile, MacroFileIdExt, ModuleDef}; +use hir::{InFile, MacroFileIdExt, ModuleDef}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use itertools::Itertools; use syntax::{ @@ -83,15 +83,7 @@ pub(crate) fn replace_derive_with_manual_impl( }) .flat_map(|trait_| { current_module - .find_path( - ctx.sema.db, - hir::ModuleDef::Trait(trait_), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, - ) + .find_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.import_path_config()) .as_ref() .map(mod_path_to_ast) .zip(Some(trait_)) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs index 37ea5123a71..12d025f0759 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs @@ -47,7 +47,7 @@ pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_ None, None, |func| { - let valid = func.name(ctx.sema.db).as_str() == Some(&*method_name_lazy) + let valid = func.name(ctx.sema.db).as_str() == &*method_name_lazy && func.num_params(ctx.sema.db) == n_params && { let params = func.params_without_self(ctx.sema.db); @@ -133,7 +133,7 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, 
ctx: &AssistContext<' None, None, |func| { - let valid = func.name(ctx.sema.db).as_str() == Some(method_name_eager) + let valid = func.name(ctx.sema.db).as_str() == method_name_eager && func.num_params(ctx.sema.db) == n_params; valid.then_some(func) }, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs index e61ce481727..8a6c2937d90 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs @@ -1,9 +1,8 @@ -use hir::Semantics; +use hir::{FileRange, Semantics}; use ide_db::{ - base_db::{FileId, FileRange}, defs::Definition, search::{SearchScope, UsageSearchResult}, - RootDatabase, + EditionedFileId, RootDatabase, }; use syntax::{ ast::{ @@ -157,7 +156,7 @@ fn find_usages( sema: &Semantics<'_, RootDatabase>, fn_: &ast::Fn, type_param_def: Definition, - file_id: FileId, + file_id: EditionedFileId, ) -> UsageSearchResult { let file_range = FileRange { file_id, range: fn_.syntax().text_range() }; type_param_def.usages(sema).in_scope(&SearchScope::file_range(file_range)).all() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index b4e1a49aab5..d0aa835e79a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, ImportPathConfig}; +use hir::AsAssocItem; use ide_db::{ helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, @@ -67,11 +67,7 @@ pub(crate) fn replace_qualified_name_with_use( ctx.sema.db, module, ctx.config.insert_use.prefix_kind, - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ) }) .flatten(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs index 7a911799757..4913cfdea94 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs @@ -1,8 +1,5 @@ //! 
Term search assist -use hir::{ - term_search::{TermSearchConfig, TermSearchCtx}, - ImportPathConfig, -}; +use hir::term_search::{TermSearchConfig, TermSearchCtx}; use ide_db::{ assists::{AssistId, AssistKind, GroupLabel}, famous_defs::FamousDefs, @@ -54,16 +51,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let paths = paths .into_iter() .filter_map(|path| { - path.gen_source_code( - &scope, - &mut formatter, - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, - ) - .ok() + path.gen_source_code(&scope, &mut formatter, ctx.config.import_path_config()).ok() }) .unique(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs index 31d18a60138..98975a324dc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs @@ -1,4 +1,4 @@ -use hir::{ImportPathConfig, ModuleDef}; +use hir::ModuleDef; use ide_db::{ assists::{AssistId, AssistKind}, famous_defs::FamousDefs, @@ -139,11 +139,7 @@ pub(crate) fn desugar_async_into_impl_future( let trait_path = module.find_path( ctx.db(), ModuleDef::Trait(future_trait), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), )?; let trait_path = trait_path.display(ctx.db()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs index b2e8c4cf9fd..a83b27867b4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -1,9 +1,9 @@ use ide_db::{ assists::{AssistId, AssistKind}, - base_db::FileId, defs::Definition, search::{FileReference, FileReferenceNode}, syntax_helpers::node_ext::full_path_of_name_ref, + EditionedFileId, }; use syntax::{ ast::{self, NameRef}, @@ -95,7 +95,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O fn find_all_references( ctx: &AssistContext<'_>, def: &Definition, -) -> impl Iterator<Item = (FileId, FileReference)> { +) -> impl Iterator<Item = (EditionedFileId, FileReference)> { def.usages(&ctx.sema).all().into_iter().flat_map(|(file_id, references)| { references.into_iter().map(move |reference| (file_id, reference)) }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index 685d230dc6f..a9399ba6b7f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -65,7 +65,7 @@ mod tests; pub mod utils; use hir::Semantics; -use ide_db::{base_db::FileRange, RootDatabase}; +use ide_db::{EditionedFileId, RootDatabase}; use syntax::TextRange; pub(crate) use crate::assist_context::{AssistContext, Assists}; @@ -83,10 +83,13 @@ pub fn assists( db: &RootDatabase, config: &AssistConfig, resolve: AssistResolveStrategy, - range: FileRange, + range: ide_db::FileRange, ) -> Vec<Assist> { let sema = Semantics::new(db); - let ctx = AssistContext::new(sema, config, range); + let file_id = sema + .attach_first_edition(range.file_id) + 
.unwrap_or_else(|| EditionedFileId::current_edition(range.file_id)); + let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range }); let mut acc = Assists::new(&ctx, resolve); handlers::all().iter().for_each(|handler| { handler(&mut acc, &ctx); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index 2dcfda334b8..e42be636d71 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -1,12 +1,12 @@ mod generated; use expect_test::expect; -use hir::Semantics; +use hir::{FileRange, Semantics}; use ide_db::{ - base_db::{FileId, FileRange, SourceDatabaseExt}, + base_db::SourceDatabaseExt, imports::insert_use::{ImportGranularity, InsertUseConfig}, source_change::FileSystemEdit, - RootDatabase, SnippetCap, + EditionedFileId, RootDatabase, SnippetCap, }; use stdx::{format_to, trim_indent}; use syntax::TextRange; @@ -72,7 +72,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { term_search_borrowck: true, }; -pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { +pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) { RootDatabase::with_single_file(text) } @@ -165,17 +165,17 @@ pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) { fn check_doc_test(assist_id: &str, before: &str, after: &str) { let after = trim_indent(after); let (db, file_id, selection) = RootDatabase::with_range_or_offset(before); - let before = db.file_text(file_id).to_string(); + let before = db.file_text(file_id.file_id()).to_string(); let frange = FileRange { file_id, range: selection.into() }; - let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange) + let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange.into()) .into_iter() .find(|assist| assist.id.0 == assist_id) .unwrap_or_else(|| { panic!( "\n\nAssist is not applicable: {}\nAvailable assists: {}", assist_id, - assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange) + assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into()) .into_iter() .map(|assist| assist.id.0) .collect::<Vec<_>>() @@ -190,7 +190,7 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) { .expect("Assist did not contain any source changes"); let mut actual = before; if let Some((source_file_edit, snippet_edit)) = - source_change.get_source_and_snippet_edit(file_id) + source_change.get_source_and_snippet_edit(file_id.file_id()) { source_file_edit.apply(&mut actual); if let Some(snippet_edit) = snippet_edit { @@ -224,7 +224,7 @@ fn check_with_config( ) { let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); db.enable_proc_attr_macros(); - let text_without_caret = db.file_text(file_with_caret_id).to_string(); + let text_without_caret = db.file_text(file_with_caret_id.into()).to_string(); let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; @@ -331,7 +331,7 @@ fn assist_order_field_struct() { let (before_cursor_pos, before) = extract_offset(before); let (db, file_id) = with_single_file(&before); let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; - let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange); + let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into()); let mut assists = assists.iter(); 
assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)"); @@ -357,7 +357,7 @@ pub fn test_some_range(a: int) -> bool { "#, ); - let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange); + let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into()); let expected = labels(&assists); expect![[r#" @@ -386,7 +386,7 @@ pub fn test_some_range(a: int) -> bool { let mut cfg = TEST_CONFIG; cfg.allowed = Some(vec![AssistKind::Refactor]); - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange); + let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); let expected = labels(&assists); expect![[r#" @@ -401,7 +401,7 @@ pub fn test_some_range(a: int) -> bool { { let mut cfg = TEST_CONFIG; cfg.allowed = Some(vec![AssistKind::RefactorExtract]); - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange); + let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); let expected = labels(&assists); expect![[r#" @@ -414,7 +414,7 @@ pub fn test_some_range(a: int) -> bool { { let mut cfg = TEST_CONFIG; cfg.allowed = Some(vec![AssistKind::QuickFix]); - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange); + let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); let expected = labels(&assists); expect![[r#""#]].assert_eq(&expected); @@ -439,7 +439,7 @@ pub fn test_some_range(a: int) -> bool { cfg.allowed = Some(vec![AssistKind::RefactorExtract]); { - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange); + let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); assert_eq!(2, assists.len()); let mut assists = assists.into_iter(); @@ -454,7 +454,7 @@ pub fn test_some_range(a: int) -> bool { group: None, target: 59..60, source_change: None, - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&extract_into_variable_assist); @@ -470,7 +470,7 @@ pub fn test_some_range(a: int) -> bool { group: None, target: 59..60, source_change: None, - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&extract_into_function_assist); @@ -484,7 +484,7 @@ pub fn test_some_range(a: int) -> bool { assist_id: "SOMETHING_MISMATCHING".to_owned(), assist_kind: AssistKind::RefactorExtract, }), - frange, + frange.into(), ); assert_eq!(2, assists.len()); let mut assists = assists.into_iter(); @@ -500,7 +500,7 @@ pub fn test_some_range(a: int) -> bool { group: None, target: 59..60, source_change: None, - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&extract_into_variable_assist); @@ -516,7 +516,7 @@ pub fn test_some_range(a: int) -> bool { group: None, target: 59..60, source_change: None, - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&extract_into_function_assist); @@ -530,7 +530,7 @@ pub fn test_some_range(a: int) -> bool { assist_id: "extract_variable".to_owned(), assist_kind: AssistKind::RefactorExtract, }), - frange, + frange.into(), ); assert_eq!(2, assists.len()); let mut assists = assists.into_iter(); @@ -587,7 +587,9 @@ pub fn test_some_range(a: int) -> bool { is_snippet: true, }, ), - trigger_signature_help: false, + command: Some( + Rename, + ), } "#]] .assert_debug_eq(&extract_into_variable_assist); @@ -603,14 +605,14 @@ pub fn test_some_range(a: int) -> bool { group: None, target: 59..60, source_change: None, - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&extract_into_function_assist); 
} { - let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange); + let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange.into()); assert_eq!(2, assists.len()); let mut assists = assists.into_iter(); @@ -666,7 +668,9 @@ pub fn test_some_range(a: int) -> bool { is_snippet: true, }, ), - trigger_signature_help: false, + command: Some( + Rename, + ), } "#]] .assert_debug_eq(&extract_into_variable_assist); @@ -715,7 +719,7 @@ pub fn test_some_range(a: int) -> bool { is_snippet: true, }, ), - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&extract_into_function_assist); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index ba6ef1921ac..c67693ea2bb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -88,8 +88,8 @@ pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool { let path = attr.path(); (|| { Some( - path.segments().first()?.as_text()?.starts_with("test") - || path.segments().last()?.as_text()?.ends_with("test"), + path.segments().first()?.as_str().starts_with("test") + || path.segments().last()?.as_str().ends_with("test"), ) })() .unwrap_or_default() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs index f2a097afc86..fc43d243b36 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs @@ -184,7 +184,7 @@ fn normalize(name: &str) -> Option<String> { fn is_valid_name(name: &str) -> bool { matches!( - ide_db::syntax_helpers::LexedStr::single_token(name), + ide_db::syntax_helpers::LexedStr::single_token(syntax::Edition::CURRENT_FIXME, name), Some((syntax::SyntaxKind::IDENT, _error)) ) } @@ -319,7 +319,7 @@ fn from_field_name(expr: &ast::Expr) -> Option<String> { #[cfg(test)] mod tests { - use ide_db::base_db::FileRange; + use hir::FileRange; use test_fixture::WithFixture; use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 995a4443edf..414b096ad47 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -24,9 +24,9 @@ pub(crate) mod vis; use std::iter; -use hir::{known, HasAttrs, ImportPathConfig, ScopeDef, Variant}; +use hir::{sym, HasAttrs, ImportPathConfig, Name, ScopeDef, Variant}; use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind}; -use syntax::{ast, SmolStr}; +use syntax::{ast, SmolStr, ToSmolStr}; use crate::{ context::{ @@ -541,13 +541,21 @@ impl Completions { } pub(crate) fn add_lifetime(&mut self, ctx: &CompletionContext<'_>, name: hir::Name) { - CompletionItem::new(SymbolKind::LifetimeParam, ctx.source_range(), name.to_smol_str()) - .add_to(self, ctx.db) + CompletionItem::new( + SymbolKind::LifetimeParam, + ctx.source_range(), + name.display_no_db().to_smolstr(), + ) + .add_to(self, ctx.db) } pub(crate) fn add_label(&mut self, ctx: &CompletionContext<'_>, name: hir::Name) { - CompletionItem::new(SymbolKind::Label, ctx.source_range(), name.to_smol_str()) - .add_to(self, ctx.db) + CompletionItem::new( + SymbolKind::Label, + ctx.source_range(), + name.display_no_db().to_smolstr(), + ) + .add_to(self, ctx.db) } pub(crate) fn 
add_variant_pat( @@ -618,7 +626,8 @@ fn enum_variants_with_paths( let mut process_variant = |variant: Variant| { let self_path = hir::ModPath::from_segments( hir::PathKind::Plain, - iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))), + iter::once(Name::new_symbol_root(sym::Self_.clone())) + .chain(iter::once(variant.name(ctx.db))), ); cb(acc, ctx, variant, self_path); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs index 87a286778e6..6e7d50ede06 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs @@ -39,6 +39,7 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { "target_vendor" => KNOWN_VENDOR.iter().copied().for_each(add_completion), "target_endian" => ["little", "big"].into_iter().for_each(add_completion), name => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| { + let s = s.as_str(); let insert_text = format!(r#""{s}""#); let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); item.insert_text(insert_text); @@ -47,6 +48,7 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { }), }, None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().unique().for_each(|s| { + let s = s.as_str(); let item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); acc.add(item.build(ctx.db)); }), diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs index 90dac1902a4..0127a428248 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs @@ -2,7 +2,7 @@ use hir::ScopeDef; use ide_db::{documentation::HasDocs, SymbolKind}; use itertools::Itertools; -use syntax::SmolStr; +use syntax::{SmolStr, ToSmolStr}; use crate::{ context::{CompletionContext, ExistingDerives, PathCompletionCtx, Qualified}, @@ -62,7 +62,7 @@ pub(crate) fn complete_derive_path( _ => return acc.add_macro(ctx, path_ctx, mac, name), }; - let name_ = name.to_smol_str(); + let name_ = name.display_no_db().to_smolstr(); let find = DEFAULT_DERIVE_DEPENDENCIES .iter() .find(|derive_completion| derive_completion.label == name_); @@ -75,7 +75,7 @@ pub(crate) fn complete_derive_path( !existing_derives .iter() .map(|it| it.name(ctx.db)) - .any(|it| it.to_smol_str() == dependency) + .any(|it| it.display_no_db().to_smolstr() == dependency) }, )); let lookup = components.join(", "); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs index f45f9cba258..7e3a62405a7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs @@ -18,7 +18,7 @@ pub(super) fn complete_macro_use( for mod_def in krate.root_module().declarations(ctx.db) { if let ModuleDef::Macro(mac) = mod_def { let mac_name = mac.name(ctx.db); - let Some(mac_name) = mac_name.as_str() else { continue }; + let mac_name = mac_name.as_str(); let existing_import = existing_imports .iter() diff 
--git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index a4f092cc498..a07daf4c4e4 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -1,5 +1,6 @@ //! Completes references after dot (fields and method calls). +use hir::{sym, Name}; use ide_db::FxHashSet; use syntax::SmolStr; @@ -90,12 +91,14 @@ pub(crate) fn complete_undotted_self( in_breakable: expr_ctx.in_breakable, }, }, - Some(hir::known::SELF_PARAM), + Some(Name::new_symbol_root(sym::self_.clone())), field, &ty, ) }, - |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty), + |acc, field, ty| { + acc.add_tuple_field(ctx, Some(Name::new_symbol_root(sym::self_.clone())), field, &ty) + }, true, false, ); @@ -112,7 +115,7 @@ pub(crate) fn complete_undotted_self( }, }, func, - Some(hir::known::SELF_PARAM), + Some(Name::new_symbol_root(sym::self_.clone())), None, ) }); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs index 01f9368aa4e..71ff6b5aea3 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs @@ -1,6 +1,6 @@ //! Completion of names from the current scope in expression position. -use hir::{ImportPathConfig, ScopeDef}; +use hir::{sym, ImportPathConfig, Name, ScopeDef}; use syntax::ast; use crate::{ @@ -190,7 +190,7 @@ pub(crate) fn complete_expr_path( path_ctx, strukt, None, - Some(hir::known::SELF_TYPE), + Some(Name::new_symbol_root(sym::Self_.clone())), ); } } @@ -210,7 +210,12 @@ pub(crate) fn complete_expr_path( acc.add_union_literal(ctx, un, path, None); if complete_self { - acc.add_union_literal(ctx, un, None, Some(hir::known::SELF_TYPE)); + acc.add_union_literal( + ctx, + un, + None, + Some(Name::new_symbol_root(sym::Self_.clone())), + ); } } hir::Adt::Enum(e) => { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs index b67d82c20d8..2427f4e49f2 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs @@ -2,6 +2,7 @@ use hir::Name; use ide_db::{documentation::HasDocs, SymbolKind}; +use syntax::ToSmolStr; use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; @@ -18,7 +19,7 @@ pub(crate) fn complete_extern_crate(acc: &mut Completions, ctx: &CompletionConte let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Module), ctx.source_range(), - name.to_smol_str(), + name.display_no_db().to_smolstr(), ); item.set_documentation(module.docs(ctx.db)); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index 3a8b9c0cb97..e803072fa8f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -5,7 +5,7 @@ use ide_db::imports::{ insert_use::ImportScope, }; use itertools::Itertools; -use syntax::{ast, AstNode, SyntaxNode, T}; +use syntax::{ast, AstNode, SyntaxNode, ToSmolStr, T}; use crate::{ context::{ 
@@ -424,7 +424,7 @@ fn compute_fuzzy_completion_order_key( cov_mark::hit!(certain_fuzzy_order_test); let import_name = match proposed_mod_path.segments().last() { // FIXME: nasty alloc, this is a hot path! - Some(name) => name.to_smol_str().to_ascii_lowercase(), + Some(name) => name.display_no_db().to_smolstr().to_ascii_lowercase(), None => return usize::MAX, }; match import_name.match_indices(user_input_lowercased).next() { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs index 5512ac21534..23affc36592 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs @@ -3,7 +3,7 @@ use hir::{ModuleDef, ScopeDef}; use ide_db::{syntax_helpers::format_string::is_format_string, SymbolKind}; use itertools::Itertools; -use syntax::{ast, AstToken, TextRange, TextSize}; +use syntax::{ast, AstToken, TextRange, TextSize, ToSmolStr}; use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, Completions}; @@ -31,9 +31,13 @@ pub(crate) fn format_string( }; let source_range = TextRange::new(brace_offset, cursor); - ctx.locals.iter().for_each(|(name, _)| { - CompletionItem::new(CompletionItemKind::Binding, source_range, name.to_smol_str()) - .add_to(acc, ctx.db); + ctx.locals.iter().sorted_by_key(|&(k, _)| k.clone()).for_each(|(name, _)| { + CompletionItem::new( + CompletionItemKind::Binding, + source_range, + name.display_no_db().to_smolstr(), + ) + .add_to(acc, ctx.db); }); ctx.scope.process_all_names(&mut |name, scope| { if let ScopeDef::ModuleDef(module_def) = scope { @@ -46,7 +50,7 @@ pub(crate) fn format_string( CompletionItem::new( CompletionItemKind::SymbolKind(symbol_kind), source_range, - name.to_smol_str(), + name.display_no_db().to_smolstr(), ) .add_to(acc, ctx.db); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index c48672e80ac..2fd7805e60d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -38,7 +38,7 @@ use ide_db::{ }; use syntax::{ ast::{self, edit_in_place::AttrsOwnerEdit, HasTypeBounds}, - format_smolstr, AstNode, SmolStr, SyntaxElement, SyntaxKind, TextRange, T, + format_smolstr, AstNode, SmolStr, SyntaxElement, SyntaxKind, TextRange, ToSmolStr, T, }; use text_edit::TextEdit; @@ -180,8 +180,10 @@ fn add_function_impl( ) { let fn_name = func.name(ctx.db); + let is_async = func.is_async(ctx.db); let label = format_smolstr!( - "fn {}({})", + "{}fn {}({})", + if is_async { "async " } else { "" }, fn_name.display(ctx.db), if func.assoc_fn_params(ctx.db).is_empty() { "" } else { ".." 
} ); @@ -193,9 +195,13 @@ fn add_function_impl( }); let mut item = CompletionItem::new(completion_kind, replacement_range, label); - item.lookup_by(format!("fn {}", fn_name.display(ctx.db))) - .set_documentation(func.docs(ctx.db)) - .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); + item.lookup_by(format!( + "{}fn {}", + if is_async { "async " } else { "" }, + fn_name.display(ctx.db) + )) + .set_documentation(func.docs(ctx.db)) + .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); if let Some(source) = ctx.sema.source(func) { let assoc_item = ast::AssocItem::Fn(source.value); @@ -252,7 +258,7 @@ fn add_type_alias_impl( type_alias: hir::TypeAlias, impl_def: hir::Impl, ) { - let alias_name = type_alias.name(ctx.db).unescaped().to_smol_str(); + let alias_name = type_alias.name(ctx.db).unescaped().display(ctx.db).to_smolstr(); let label = format_smolstr!("type {alias_name} ="); @@ -314,7 +320,7 @@ fn add_const_impl( const_: hir::Const, impl_def: hir::Impl, ) { - let const_name = const_.name(ctx.db).map(|n| n.to_smol_str()); + let const_name = const_.name(ctx.db).map(|n| n.display_no_db().to_smolstr()); if let Some(const_name) = const_name { if let Some(source) = ctx.sema.source(const_) { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs index 2c6cbf6146a..03fe93c563f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs @@ -7,8 +7,8 @@ //! there is no value in lifting these out into the outline module test since they will either not //! show up for normal completions, or they won't show completions other than lifetimes depending //! on the fixture input. 
-use hir::{known, ScopeDef}; -use syntax::{ast, TokenText}; +use hir::{sym, Name, ScopeDef}; +use syntax::{ast, ToSmolStr, TokenText}; use crate::{ completions::Completions, @@ -41,13 +41,13 @@ pub(crate) fn complete_lifetime( if matches!( res, ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) - if param_lifetime != Some(&*name.to_smol_str()) + if param_lifetime != Some(&*name.display_no_db().to_smolstr()) ) { acc.add_lifetime(ctx, name); } }); if param_lifetime.is_none() { - acc.add_lifetime(ctx, known::STATIC_LIFETIME); + acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_static.clone())); } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index f307ba9eb33..713968c1caf 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -7,7 +7,8 @@ use ide_db::{ base_db::{SourceDatabaseExt, VfsPath}, FxHashSet, RootDatabase, SymbolKind, }; -use syntax::{ast, AstNode, SyntaxKind}; +use stdx::IsNoneOr; +use syntax::{ast, AstNode, SyntaxKind, ToSmolStr}; use crate::{context::CompletionContext, CompletionItem, Completions}; @@ -43,11 +44,11 @@ pub(crate) fn complete_mod( let module_definition_file = current_module.definition_source_file_id(ctx.db).original_file(ctx.db); - let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file)); + let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file.file_id())); let directory_to_look_for_submodules = directory_to_look_for_submodules( current_module, ctx.db, - source_root.path_for_file(&module_definition_file)?, + source_root.path_for_file(&module_definition_file.file_id())?, )?; let existing_mod_declarations = current_module @@ -63,9 +64,9 @@ pub(crate) fn complete_mod( source_root .iter() - .filter(|submodule_candidate_file| submodule_candidate_file != &module_definition_file) - .filter(|submodule_candidate_file| { - Some(submodule_candidate_file) != module_declaration_file.as_ref() + .filter(|&submodule_candidate_file| submodule_candidate_file != module_definition_file) + .filter(|&submodule_candidate_file| { + IsNoneOr::is_none_or(module_declaration_file, |it| it != submodule_candidate_file) }) .filter_map(|submodule_file| { let submodule_path = source_root.path_for_file(&submodule_file)?; @@ -139,7 +140,7 @@ fn directory_to_look_for_submodules( module_chain_to_containing_module_file(module, db) .into_iter() .filter_map(|module| module.name(db)) - .try_fold(base_directory, |path, name| path.join(&name.to_smol_str())) + .try_fold(base_directory, |path, name| path.join(&name.display_no_db().to_smolstr())) } fn module_chain_to_containing_module_file( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs index 27e9d1d6cfe..8e5b55360dc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs @@ -48,16 +48,15 @@ pub(crate) fn complete_use_path( let unknown_is_current = |name: &hir::Name| { matches!( name_ref, - Some(name_ref) if name_ref.syntax().text() == name.to_smol_str().as_str() + Some(name_ref) if name_ref.syntax().text() == name.as_str() ) }; for (name, def) in module_scope { if !ctx.check_stability(def.attrs(ctx.db).as_deref()) { continue; } - let is_name_already_imported = name - 
.as_text() - .map_or(false, |text| already_imported_names.contains(text.as_str())); + let is_name_already_imported = + already_imported_names.contains(name.as_str()); let add_resolution = match def { ScopeDef::Unknown if unknown_is_current(&name) => { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 5782a4423a6..952d9217c71 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -10,14 +10,12 @@ use hir::{ HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo, }; use ide_db::{ - base_db::{FilePosition, SourceDatabase}, - famous_defs::FamousDefs, - helpers::is_editable_crate, + base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition, FxHashMap, FxHashSet, RootDatabase, }; use syntax::{ ast::{self, AttrKind, NameOrNameRef}, - AstNode, Edition, SmolStr, + AstNode, SmolStr, SyntaxKind::{self, *}, SyntaxToken, TextRange, TextSize, T, }; @@ -519,7 +517,7 @@ impl CompletionContext<'_> { I: hir::HasAttrs + Copy, { let attrs = item.attrs(self.db); - attrs.doc_aliases().collect() + attrs.doc_aliases().map(|it| it.as_str().into()).collect() } /// Check if an item is `#[doc(hidden)]`. @@ -543,7 +541,7 @@ impl CompletionContext<'_> { /// Whether the given trait is an operator trait or not. pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool { match trait_.attrs(self.db).lang() { - Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang), + Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()), None => false, } } @@ -643,7 +641,7 @@ impl CompletionContext<'_> { pub(crate) fn doc_aliases_in_scope(&self, scope_def: ScopeDef) -> Vec<SmolStr> { if let Some(attrs) = scope_def.attrs(self.db) { - attrs.doc_aliases().collect() + attrs.doc_aliases().map(|it| it.as_str().into()).collect() } else { vec![] } @@ -660,6 +658,7 @@ impl<'a> CompletionContext<'a> { let _p = tracing::info_span!("CompletionContext::new").entered(); let sema = Semantics::new(db); + let file_id = sema.attach_first_edition(file_id)?; let original_file = sema.parse(file_id); // Insert a fake ident to get a valid parse tree. We will use this file @@ -668,8 +667,7 @@ impl<'a> CompletionContext<'a> { let file_with_fake_ident = { let parse = db.parse(file_id); let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned()); - // FIXME: Edition - parse.reparse(&edit, Edition::CURRENT).tree() + parse.reparse(&edit, file_id.edition()).tree() }; // always pick the token to the immediate left of the cursor, as that is what we are actually diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index a14fe24fa75..1e972b9b4ce 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -823,13 +823,13 @@ fn classify_name_ref( for item in trait_.items_with_supertraits(sema.db) { match item { hir::AssocItem::TypeAlias(assoc_ty) => { - if assoc_ty.name(sema.db).as_str()? == arg_name { + if assoc_ty.name(sema.db).as_str() == arg_name { override_location = Some(TypeLocation::AssocTypeEq); return None; } }, hir::AssocItem::Const(const_) => { - if const_.name(sema.db)?.as_str()? 
== arg_name { + if const_.name(sema.db)?.as_str() == arg_name { override_location = Some(TypeLocation::AssocConstEq); return None; } @@ -867,7 +867,7 @@ fn classify_name_ref( let trait_items = trait_.items_with_supertraits(sema.db); let assoc_ty = trait_items.iter().find_map(|item| match item { hir::AssocItem::TypeAlias(assoc_ty) => { - (assoc_ty.name(sema.db).as_str()? == arg_name) + (assoc_ty.name(sema.db).as_str() == arg_name) .then_some(assoc_ty) }, _ => None, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index debfefc4801..3657a7d969b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -664,7 +664,7 @@ mod tests { /// If provided vec![vec![a], vec![b, c], vec![d]], then this will assert: /// a.score < b.score == c.score < d.score fn check_relevance_score_ordered(expected_relevance_order: Vec<Vec<CompletionRelevance>>) { - let expected = format!("{:#?}", &expected_relevance_order); + let expected = format!("{expected_relevance_order:#?}"); let actual_relevance_order = expected_relevance_order .into_iter() @@ -685,7 +685,7 @@ mod tests { ) .1; - let actual = format!("{:#?}", &actual_relevance_order); + let actual = format!("{actual_relevance_order:#?}"); assert_eq_text!(&expected, &actual); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 7d9c2c7c60d..424f94457e3 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -12,13 +12,12 @@ mod tests; use hir::ImportPathConfig; use ide_db::{ - base_db::FilePosition, helpers::mod_path_to_ast, imports::{ import_assets::NameToImport, insert_use::{self, ImportScope}, }, - items_locator, RootDatabase, + items_locator, FilePosition, RootDatabase, }; use syntax::algo; use text_edit::TextEdit; @@ -239,7 +238,7 @@ pub fn resolve_completion_edits( let _p = tracing::info_span!("resolve_completion_edits").entered(); let sema = hir::Semantics::new(db); - let original_file = sema.parse(file_id); + let original_file = sema.parse(sema.attach_first_edition(file_id)?); let original_token = syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?; let position_for_import = &original_token.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index fe9e2e5268a..abcff62341b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -10,14 +10,14 @@ pub(crate) mod type_alias; pub(crate) mod union_literal; pub(crate) mod variant; -use hir::{AsAssocItem, HasAttrs, HirDisplay, ImportPathConfig, ModuleDef, ScopeDef, Type}; +use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ImportPathConfig, ModuleDef, ScopeDef, Type}; use ide_db::{ documentation::{Documentation, HasDocs}, helpers::item_name, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind, }; -use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange}; +use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange, ToSmolStr}; use text_edit::TextEdit; use crate::{ @@ -95,7 +95,7 @@ impl<'a> RenderContext<'a> { fn is_deprecated(&self, def: impl HasAttrs) -> bool { let attrs = def.attrs(self.db()); - attrs.by_key("deprecated").exists() + 
attrs.by_key(&sym::deprecated).exists() } fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool { @@ -133,7 +133,8 @@ pub(crate) fn render_field( let db = ctx.db(); let is_deprecated = ctx.is_deprecated(field); let name = field.name(db); - let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); + let (name, escaped_name) = + (name.unescaped().display(db).to_smolstr(), name.display_no_db().to_smolstr()); let mut item = CompletionItem::new( SymbolKind::Field, ctx.source_range(), @@ -280,8 +281,7 @@ pub(crate) fn render_expr( let mut snippet_formatter = |ty: &hir::Type| { let arg_name = ty .as_adt() - .and_then(|adt| adt.name(ctx.db).as_text()) - .map(|s| stdx::to_lower_snake_case(s.as_str())) + .map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str())) .unwrap_or_else(|| String::from("_")); let res = format!("${{{i}:{arg_name}}}"); i += 1; @@ -290,8 +290,7 @@ pub(crate) fn render_expr( let mut label_formatter = |ty: &hir::Type| { ty.as_adt() - .and_then(|adt| adt.name(ctx.db).as_text()) - .map(|s| stdx::to_lower_snake_case(s.as_str())) + .map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str())) .unwrap_or_else(|| String::from("...")) }; @@ -401,10 +400,10 @@ fn render_resolution_path( let config = completion.config; let requires_import = import_to_add.is_some(); - let name = local_name.to_smol_str(); + let name = local_name.display_no_db().to_smolstr(); let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution); if local_name.is_escaped() { - item.insert_text(local_name.to_smol_str()); + item.insert_text(local_name.display_no_db().to_smolstr()); } // Add `<>` for generic types let type_path_no_ty_args = matches!( @@ -486,8 +485,11 @@ fn render_resolution_simple_( let ctx = ctx.import_to_add(import_to_add); let kind = res_to_kind(resolution); - let mut item = - CompletionItem::new(kind, ctx.source_range(), local_name.unescaped().to_smol_str()); + let mut item = CompletionItem::new( + kind, + ctx.source_range(), + local_name.unescaped().display(db).to_smolstr(), + ); item.set_relevance(ctx.completion_relevance()) .set_documentation(scope_def_docs(db, resolution)) .set_deprecated(scope_def_is_deprecated(&ctx, resolution)); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs index a6a1c79e668..3bfec0de6bc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs @@ -2,6 +2,7 @@ use hir::{AsAssocItem, HirDisplay}; use ide_db::SymbolKind; +use syntax::ToSmolStr; use crate::{item::CompletionItem, render::RenderContext}; @@ -13,7 +14,8 @@ pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> { let db = ctx.db(); let name = const_.name(db)?; - let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); + let (name, escaped_name) = + (name.unescaped().display(db).to_smolstr(), name.display(db).to_smolstr()); let detail = const_.display(db).to_string(); let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name); @@ -24,7 +26,7 @@ fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> if let Some(actm) = const_.as_assoc_item(db) { if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).to_smol_str()); + 
item.trait_name(trt.name(db).display_no_db().to_smolstr()); } } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 48c9d624f63..05b2d0ae386 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -4,7 +4,7 @@ use hir::{db::HirDatabase, AsAssocItem, HirDisplay}; use ide_db::{SnippetCap, SymbolKind}; use itertools::Itertools; use stdx::{format_to, to_lower_snake_case}; -use syntax::{format_smolstr, AstNode, SmolStr}; +use syntax::{format_smolstr, AstNode, SmolStr, ToSmolStr}; use crate::{ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind}, @@ -64,7 +64,7 @@ fn render( ), format_smolstr!("{}.{}", receiver.display(ctx.db()), name.display(ctx.db())), ), - _ => (name.unescaped().to_smol_str(), name.to_smol_str()), + _ => (name.unescaped().display(db).to_smolstr(), name.display(db).to_smolstr()), }; let has_self_param = func.self_param(db).is_some(); let mut item = CompletionItem::new( @@ -148,7 +148,7 @@ fn render( item.set_documentation(ctx.docs(func)) .set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func)) .detail(detail) - .lookup_by(name.unescaped().to_smol_str()); + .lookup_by(name.unescaped().display(db).to_smolstr()); if let Some((cap, (self_param, params))) = complete_call_parens { add_call_parens(&mut item, completion, cap, call, escaped_call, self_param, params); @@ -161,7 +161,7 @@ fn render( None => { if let Some(actm) = assoc_item { if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).to_smol_str()); + item.trait_name(trt.name(db).display_no_db().to_smolstr()); } } } @@ -188,7 +188,7 @@ fn compute_return_type_match( CompletionRelevanceReturnType::Constructor } else if ret_type .as_adt() - .and_then(|adt| adt.name(db).as_str().map(|name| name.ends_with("Builder"))) + .map(|adt| adt.name(db).as_str().ends_with("Builder")) .unwrap_or(false) { // fn([..]) -> [..]Builder @@ -219,7 +219,7 @@ pub(super) fn add_call_parens<'b>( params.iter().enumerate().format_with(", ", |(index, param), f| { match param.name(ctx.db) { Some(n) => { - let smol_str = n.to_smol_str(); + let smol_str = n.display_no_db().to_smolstr(); let text = smol_str.as_str().trim_start_matches('_'); let ref_ = ref_of_param(ctx, text, param.ty()); f(&format_args!("${{{}:{ref_}{text}}}", index + offset)) @@ -227,11 +227,7 @@ pub(super) fn add_call_parens<'b>( None => { let name = match param.ty().as_adt() { None => "_".to_owned(), - Some(adt) => adt - .name(ctx.db) - .as_text() - .map(|s| to_lower_snake_case(s.as_str())) - .unwrap_or_else(|| "_".to_owned()), + Some(adt) => to_lower_snake_case(adt.name(ctx.db).as_str()), }; f(&format_args!("${{{}:{name}}}", index + offset)) } @@ -263,8 +259,8 @@ pub(super) fn add_call_parens<'b>( fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'static str { if let Some(derefed_ty) = ty.remove_ref() { - for (name, local) in ctx.locals.iter() { - if name.as_text().as_deref() == Some(arg) { + for (name, local) in ctx.locals.iter().sorted_by_key(|&(k, _)| k.clone()) { + if name.as_str() == arg { return if local.ty(ctx.db) == derefed_ty { if ty.is_mutable_reference() { "&mut " diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs index 
a6c8c0e853c..de715bcbfaf 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs @@ -2,7 +2,7 @@ use hir::HirDisplay; use ide_db::{documentation::Documentation, SymbolKind}; -use syntax::{format_smolstr, SmolStr}; +use syntax::{format_smolstr, SmolStr, ToSmolStr}; use crate::{ context::{PathCompletionCtx, PathKind, PatternContext}, @@ -46,7 +46,8 @@ fn render( ctx.source_range() }; - let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); + let (name, escaped_name) = + (name.unescaped().display(ctx.db()).to_smolstr(), name.display(ctx.db()).to_smolstr()); let docs = ctx.docs(macro_); let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default(); let is_fn_like = macro_.is_fn_like(completion.db); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs index 942670be2a3..598b8762b68 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs @@ -3,7 +3,7 @@ use hir::{db::HirDatabase, Name, StructKind}; use ide_db::{documentation::HasDocs, SnippetCap}; use itertools::Itertools; -use syntax::SmolStr; +use syntax::{SmolStr, ToSmolStr}; use crate::{ context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext}, @@ -31,7 +31,8 @@ pub(crate) fn render_struct_pat( } let name = local_name.unwrap_or_else(|| strukt.name(ctx.db())); - let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); + let (name, escaped_name) = + (name.unescaped().display(ctx.db()).to_smolstr(), name.display(ctx.db()).to_smolstr()); let kind = strukt.kind(ctx.db()); let label = format_literal_label(name.as_str(), kind, ctx.snippet_cap()); let lookup = format_literal_lookup(name.as_str(), kind); @@ -63,7 +64,11 @@ pub(crate) fn render_variant_pat( ), None => { let name = local_name.unwrap_or_else(|| variant.name(ctx.db())); - (name.unescaped().to_smol_str(), name.to_smol_str()) + let it = ( + name.unescaped().display(ctx.db()).to_smolstr(), + name.display(ctx.db()).to_smolstr(), + ); + it } }; @@ -184,7 +189,7 @@ fn render_record_as_pat( None => { format!( "{name} {{ {}{} }}", - fields.map(|field| field.name(db).to_smol_str()).format(", "), + fields.map(|field| field.name(db).display_no_db().to_smolstr()).format(", "), if fields_omitted { ", .." 
} else { "" }, name = name ) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs index 47254e6a184..b81caf24220 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs @@ -2,7 +2,7 @@ use hir::{AsAssocItem, HirDisplay}; use ide_db::SymbolKind; -use syntax::SmolStr; +use syntax::{SmolStr, ToSmolStr}; use crate::{item::CompletionItem, render::RenderContext}; @@ -32,11 +32,11 @@ fn render( let name = type_alias.name(db); let (name, escaped_name) = if with_eq { ( - SmolStr::from_iter([&name.unescaped().to_smol_str(), " = "]), - SmolStr::from_iter([&name.to_smol_str(), " = "]), + SmolStr::from_iter([&name.unescaped().display(db).to_smolstr(), " = "]), + SmolStr::from_iter([&name.display_no_db().to_smolstr(), " = "]), ) } else { - (name.unescaped().to_smol_str(), name.to_smol_str()) + (name.unescaped().display(db).to_smolstr(), name.display_no_db().to_smolstr()) }; let detail = type_alias.display(db).to_string(); @@ -48,7 +48,7 @@ fn render( if let Some(actm) = type_alias.as_assoc_item(db) { if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).to_smol_str()); + item.trait_name(trt.name(db).display_no_db().to_smolstr()); } } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs index 93e943dbed9..ca7593c122e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs @@ -3,6 +3,7 @@ use hir::{HirDisplay, Name, StructKind}; use ide_db::SymbolKind; use itertools::Itertools; +use syntax::ToSmolStr; use crate::{ render::{ @@ -26,8 +27,12 @@ pub(crate) fn render_union_literal( (name.unescaped().display(ctx.db()).to_string(), name.display(ctx.db()).to_string()) } }; - let label = format_literal_label(&name.to_smol_str(), StructKind::Record, ctx.snippet_cap()); - let lookup = format_literal_lookup(&name.to_smol_str(), StructKind::Record); + let label = format_literal_label( + &name.display_no_db().to_smolstr(), + StructKind::Record, + ctx.snippet_cap(), + ); + let lookup = format_literal_lookup(&name.display_no_db().to_smolstr(), StructKind::Record); let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Union), ctx.source_range(), @@ -47,7 +52,10 @@ pub(crate) fn render_union_literal( format!( "{} {{ ${{1|{}|}}: ${{2:()}} }}$0", escaped_qualified_name, - fields.iter().map(|field| field.name(ctx.db()).to_smol_str()).format(",") + fields + .iter() + .map(|field| field.name(ctx.db()).display_no_db().to_smolstr()) + .format(",") ) } else { format!( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs index 28238de4559..bc2df9e39f3 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs @@ -1,7 +1,7 @@ //! Code common to structs, unions, and enum variants. 
use crate::context::CompletionContext; -use hir::{db::HirDatabase, HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind}; +use hir::{db::HirDatabase, sym, HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind}; use ide_db::SnippetCap; use itertools::Itertools; use syntax::SmolStr; @@ -86,7 +86,7 @@ pub(crate) fn visible_fields( .copied() .collect::<Vec<_>>(); let has_invisible_field = n_fields - fields.len() > 0; - let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key("non_exhaustive").exists() + let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(&sym::non_exhaustive).exists() && item.krate(ctx.db) != module.krate(); let fields_omitted = has_invisible_field || is_foreign_non_exhaustive; Some((fields, fields_omitted)) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs index 5885b74e09d..1eb8c574bd1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs @@ -206,10 +206,11 @@ fn validate_snippet( ) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> { let mut imports = Vec::with_capacity(requires.len()); for path in requires.iter() { - let use_path = ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT) - .syntax_node() - .descendants() - .find_map(ast::Path::cast)?; + let use_path = + ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT_FIXME) + .syntax_node() + .descendants() + .find_map(ast::Path::cast)?; if use_path.syntax().text() != path.as_str() { return None; } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index fcac6c7ce72..f6274cf5376 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -26,9 +26,9 @@ mod visibility; use expect_test::Expect; use hir::PrefixKind; use ide_db::{ - base_db::{FileLoader, FilePosition}, + base_db::FileLoader, imports::insert_use::{ImportGranularity, InsertUseConfig}, - RootDatabase, SnippetCap, + FilePosition, RootDatabase, SnippetCap, }; use itertools::Itertools; use stdx::{format_to, trim_indent}; @@ -131,7 +131,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) { database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (database, FilePosition { file_id, offset }) + (database, FilePosition { file_id: file_id.file_id(), offset }) } pub(crate) fn do_completion(code: &str, kind: CompletionItemKind) -> Vec<CompletionItem> { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 7d9c1ed98ac..158dbaf1b1d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -767,8 +767,8 @@ fn main() { } "#, expect![[r#" - fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED + fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED me random_method(…) (use dep::test_mod::TestTrait) fn(&self) DEPRECATED "#]], ); @@ -1618,3 +1618,18 @@ pub struct FooStruct; "#]], ); } + +#[test] +fn primitive_mod() { + check( + r#" +//- 
minicore: str +fn main() { + str::from$0 +} +"#, + expect![[r#" + fn from_utf8_unchecked(…) (use core::str) const unsafe fn(&[u8]) -> &str + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs index f138938b02b..8aad7bfc3ad 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs @@ -299,6 +299,7 @@ trait Test { const CONST1: (); fn function0(); fn function1(); + async fn function2(); } impl Test for () { @@ -310,8 +311,9 @@ impl Test for () { "#, expect![[r#" ct const CONST1: () = + fn async fn function2() fn fn function1() - ma makro!(…) macro_rules! makro + ma makro!(…) macro_rules! makro md module ta type Type1 = kw crate:: diff --git a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs index 7a7328f312d..1c40685ebb1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs @@ -29,7 +29,16 @@ pub struct Assist { /// cumbersome, especially if you want to embed an assist into another data /// structure, such as a diagnostic. pub source_change: Option<SourceChange>, - pub trigger_signature_help: bool, + /// The command to execute after the assist is applied. + pub command: Option<Command>, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Command { + /// Show the parameter hints popup. + TriggerParameterHints, + /// Rename the just inserted item. + Rename, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index a75a708d956..991bef344a3 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -17,7 +17,7 @@ use hir::{ use stdx::{format_to, impl_from}; use syntax::{ ast::{self, AstNode}, - match_ast, SyntaxKind, SyntaxNode, SyntaxToken, + match_ast, SyntaxKind, SyntaxNode, SyntaxToken, ToSmolStr, }; use crate::documentation::{Documentation, HasDocs}; @@ -144,7 +144,7 @@ impl Definition { Definition::Local(it) => it.name(db), Definition::GenericParam(it) => it.name(db), Definition::Label(it) => it.name(db), - Definition::BuiltinLifetime(StaticLifetime) => hir::known::STATIC_LIFETIME, + Definition::BuiltinLifetime(it) => it.name(), Definition::BuiltinAttr(_) => return None, // FIXME Definition::ToolModule(_) => return None, // FIXME Definition::DeriveHelper(it) => it.name(db), @@ -192,13 +192,13 @@ impl Definition { let AttributeTemplate { word, list, name_value_str } = it.template(db)?; let mut docs = "Valid forms are:".to_owned(); if word { - format_to!(docs, "\n - #\\[{}]", name); + format_to!(docs, "\n - #\\[{}]", name.display(db)); } if let Some(list) = list { - format_to!(docs, "\n - #\\[{}({})]", name, list); + format_to!(docs, "\n - #\\[{}({})]", name.display(db), list); } if let Some(name_value_str) = name_value_str { - format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str); + format_to!(docs, "\n - #\\[{} = {}]", name.display(db), name_value_str); } Some(Documentation::new(docs.replace('*', "\\*"))) } @@ -256,8 +256,8 @@ impl Definition { Definition::GenericParam(it) => it.display(db).to_string(), Definition::Label(it) => it.name(db).display(db).to_string(), Definition::ExternCrateDecl(it) => it.display(db).to_string(), - Definition::BuiltinAttr(it) => 
format!("#[{}]", it.name(db)), - Definition::ToolModule(it) => it.name(db).to_string(), + Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db).display(db)), + Definition::ToolModule(it) => it.name(db).display(db).to_string(), Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)), } } @@ -670,7 +670,7 @@ impl NameRefClass { hir::AssocItem::TypeAlias(it) => Some(it), _ => None, }) - .find(|alias| alias.name(sema.db).to_smol_str() == name_ref.text().as_str()) + .find(|alias| alias.name(sema.db).display_no_db().to_smolstr() == name_ref.text().as_str()) { return Some(NameRefClass::Definition(Definition::TypeAlias(ty))); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs index 58e77b95c32..5e443badf9e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs @@ -2,7 +2,7 @@ use either::Either; use hir::{ db::{DefDatabase, HirDatabase}, - resolve_doc_path_on, AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile, + resolve_doc_path_on, sym, AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile, }; use itertools::Itertools; use syntax::{ @@ -92,7 +92,7 @@ pub fn docs_with_rangemap( attrs: &AttrsWithOwner, ) -> Option<(Documentation, DocsRangeMap)> { let docs = attrs - .by_key("doc") + .by_key(&sym::doc) .attrs() .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id))); let indent = doc_indent(attrs); @@ -134,7 +134,7 @@ pub fn docs_with_rangemap( } pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> { - let docs = attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value_unescape()); + let docs = attrs.by_key(&sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()); let indent = doc_indent(attrs); let mut buf = String::new(); for doc in docs { @@ -269,12 +269,13 @@ fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> { } fn doc_indent(attrs: &hir::Attrs) -> usize { - attrs - .by_key("doc") - .attrs() - .filter_map(|attr| attr.string_value()) // no need to use unescape version here - .flat_map(|s| s.lines()) - .filter_map(|line| line.chars().position(|c| !c.is_whitespace())) - .min() - .unwrap_or(0) + let mut min = !0; + for val in attrs.by_key(&sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) { + if let Some(m) = + val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min() + { + min = min.min(m); + } + } + min } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs index 51ac0b71911..1a16a567f36 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs @@ -2,6 +2,7 @@ use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase}; use hir::{Crate, Enum, Function, Macro, Module, ScopeDef, Semantics, Trait}; +use syntax::ToSmolStr; use crate::RootDatabase; @@ -198,15 +199,18 @@ impl FamousDefs<'_, '_> { for segment in path { module = module.children(db).find_map(|child| { let name = child.name(db)?; - if name.to_smol_str() == segment { + if name.display_no_db().to_smolstr() == segment { Some(child) } else { None } })?; } - let def = - module.scope(db, None).into_iter().find(|(name, _def)| name.to_smol_str() == trait_)?.1; + let def = module + .scope(db, None) + .into_iter() + .find(|(name, _def)| name.display_no_db().to_smolstr() == trait_)? 
+ .1; Some(def) } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs index 0504f3caf5f..abf4438a71f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs @@ -187,7 +187,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "future_incompatible", - description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-eval-mutable-ptr-in-final-value, const-evaluatable-unchecked, dependency-on-unit-never-type-fallback, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, never-type-fallback-flowing-into-unsafe, order-dependent-trait-objects, patterns-in-fns-without-body, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, self-constructor-from-outer-item, semicolon-in-expressions-from-macros, soft-unstable, uncovered-param-in-projection, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, wasm-c-abi, writes-through-immutable-pointer"##, + description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-eval-mutable-ptr-in-final-value, const-evaluatable-unchecked, dependency-on-unit-never-type-fallback, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, never-type-fallback-flowing-into-unsafe, order-dependent-trait-objects, out-of-scope-macro-calls, patterns-in-fns-without-body, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, self-constructor-from-outer-item, semicolon-in-expressions-from-macros, soft-unstable, uncovered-param-in-projection, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, wasm-c-abi, writes-through-immutable-pointer"##, }, Lint { label: "fuzzy_provenance_casts", @@ -428,6 +428,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ label: "order_dependent_trait_objects", description: r##"trait-object types were treated as different depending on marker-trait order"##, }, + Lint { + label: "out_of_scope_macro_calls", + description: r##"detects out of scope calls to `macro_rules` in key-value attributes"##, + }, Lint { label: "overflowing_literals", description: r##"literal out of range for its type"## }, Lint { label: "overlapping_range_endpoints", @@ -791,7 +795,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "future_incompatible", - description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, 
conflicting-repr-hints, const-eval-mutable-ptr-in-final-value, const-evaluatable-unchecked, dependency-on-unit-never-type-fallback, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, never-type-fallback-flowing-into-unsafe, order-dependent-trait-objects, patterns-in-fns-without-body, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, self-constructor-from-outer-item, semicolon-in-expressions-from-macros, soft-unstable, uncovered-param-in-projection, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, wasm-c-abi, writes-through-immutable-pointer"##, + description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-eval-mutable-ptr-in-final-value, const-evaluatable-unchecked, dependency-on-unit-never-type-fallback, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, never-type-fallback-flowing-into-unsafe, order-dependent-trait-objects, out-of-scope-macro-calls, patterns-in-fns-without-body, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, self-constructor-from-outer-item, semicolon-in-expressions-from-macros, soft-unstable, uncovered-param-in-projection, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, wasm-c-abi, writes-through-immutable-pointer"##, }, children: &[ "deref_into_dyn_supertrait", @@ -815,6 +819,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ "missing_fragment_specifier", "never_type_fallback_flowing_into_unsafe", "order_dependent_trait_objects", + "out_of_scope_macro_calls", "patterns_in_fns_without_body", "proc_macro_derive_resolution_fallback", "pub_use_of_private_extern_crate", @@ -957,7 +962,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ pub const RUSTDOC_LINTS: &[Lint] = &[ Lint { label: "rustdoc::all", - description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links"##, + description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links, rustdoc::unportable-markdown"##, }, Lint { label: "rustdoc::bare_urls", description: r##"detects URLs that are not hyperlinks"## }, Lint { @@ -1000,12 +1005,16 @@ pub const RUSTDOC_LINTS: &[Lint] = &[ label: "rustdoc::unescaped_backticks", description: r##"detects unescaped backticks in 
doc comments"##, }, + Lint { + label: "rustdoc::unportable_markdown", + description: r##"detects markdown that is interpreted differently in different parser"##, + }, ]; pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &[LintGroup { lint: Lint { label: "rustdoc::all", - description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links"##, + description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links, rustdoc::unportable-markdown"##, }, children: &[ "rustdoc::broken_intra_doc_links", @@ -1018,6 +1027,7 @@ pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &[LintGroup { "rustdoc::missing_crate_level_docs", "rustdoc::unescaped_backticks", "rustdoc::redundant_explicit_links", + "rustdoc::unportable_markdown", ], }]; @@ -3065,17 +3075,6 @@ The tracking issue for this feature is: [#78729] "##, }, Lint { - label: "const_char_from_u32_unchecked", - description: r##"# `const_char_from_u32_unchecked` - -The tracking issue for this feature is: [#89259] - -[#89259]: https://github.com/rust-lang/rust/issues/89259 - ------------------------- -"##, - }, - Lint { label: "const_closures", description: r##"# `const_closures` @@ -3109,17 +3108,6 @@ The tracking issue for this feature is: [#65143] "##, }, Lint { - label: "const_cstr_from_ptr", - description: r##"# `const_cstr_from_ptr` - -The tracking issue for this feature is: [#113219] - -[#113219]: https://github.com/rust-lang/rust/issues/113219 - ------------------------- -"##, - }, - Lint { label: "const_eval_select", description: r##"# `const_eval_select` @@ -4324,6 +4312,15 @@ The tracking issue for this feature is: [#112115] "##, }, Lint { + label: "cstr_internals", + description: r##"# `cstr_internals` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ +"##, + }, + Lint { label: "cursor_remaining", description: r##"# `cursor_remaining` @@ -4998,6 +4995,15 @@ The tracking issue for this feature is: [#72631] "##, }, Lint { + label: "extend_one_unchecked", + description: r##"# `extend_one_unchecked` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+ +------------------------ +"##, + }, + Lint { label: "extended_varargs_abi_support", description: r##"# `extended_varargs_abi_support` @@ -5877,17 +5883,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "io_slice_advance", - description: r##"# `io_slice_advance` - -The tracking issue for this feature is: [#62726] - -[#62726]: https://github.com/rust-lang/rust/issues/62726 - ------------------------- -"##, - }, - Lint { label: "ip", description: r##"# `ip` @@ -6284,8 +6279,8 @@ The tracking issue for this feature is: [#69835] "##, }, Lint { - label: "lazy_cell_consume", - description: r##"# `lazy_cell_consume` + label: "lazy_cell_into_inner", + description: r##"# `lazy_cell_into_inner` The tracking issue for this feature is: [#125623] @@ -8121,6 +8116,17 @@ The tracking issue for this feature is: [#101730] "##, }, Lint { + label: "s390x_target_feature", + description: r##"# `s390x_target_feature` + +The tracking issue for this feature is: [#44839] + +[#44839]: https://github.com/rust-lang/rust/issues/44839 + +------------------------ +"##, + }, + Lint { label: "sealed", description: r##"# `sealed` @@ -9744,6 +9750,17 @@ The tracking issue for this feature is: [#18598] "##, }, Lint { + label: "unsized_const_params", + description: r##"# `unsized_const_params` + +The tracking issue for this feature is: [#95174] + +[#95174]: https://github.com/rust-lang/rust/issues/95174 + +------------------------ +"##, + }, + Lint { label: "unsized_fn_params", description: r##"# `unsized_fn_params` @@ -10095,17 +10112,6 @@ The tracking issue for this feature is: [#71213] "##, }, Lint { - label: "wasm_abi", - description: r##"# `wasm_abi` - -The tracking issue for this feature is: [#83788] - -[#83788]: https://github.com/rust-lang/rust/issues/83788 - ------------------------- -"##, - }, - Lint { label: "wasm_target_feature", description: r##"# `wasm_target_feature` @@ -10137,6 +10143,17 @@ This feature is internal to the Rust compiler and is not intended for general us "##, }, Lint { + label: "windows_change_time", + description: r##"# `windows_change_time` + +The tracking issue for this feature is: [#121478] + +[#121478]: https://github.com/rust-lang/rust/issues/121478 + +------------------------ +"##, + }, + Lint { label: "windows_handle", description: r##"# `windows_handle` @@ -10210,6 +10227,17 @@ The tracking issue for this feature is: [#114854] "##, }, Lint { + label: "windows_process_extensions_show_window", + description: r##"# `windows_process_extensions_show_window` + +The tracking issue for this feature is: [#127544] + +[#127544]: https://github.com/rust-lang/rust/issues/127544 + +------------------------ +"##, + }, + Lint { label: "windows_stdio", description: r##"# `windows_stdio` @@ -10261,6 +10289,28 @@ The tracking issue for this feature is: [#70436] "##, }, Lint { + label: "x86_amx_intrinsics", + description: r##"# `x86_amx_intrinsics` + +The tracking issue for this feature is: [#126622] + +[#126622]: https://github.com/rust-lang/rust/issues/126622 + +------------------------ +"##, + }, + Lint { + label: "xop_target_feature", + description: r##"# `xop_target_feature` + +The tracking issue for this feature is: [#127208] + +[#127208]: https://github.com/rust-lang/rust/issues/127208 + +------------------------ +"##, + }, + Lint { label: "yeet_desugar_details", description: r##"# `yeet_desugar_details` @@ -10377,7 +10427,7 @@ There is a good explanation the reason why this lint should work in this way and }, Lint { label: 
"clippy::as_ptr_cast_mut", - description: r##"Checks for the result of a `&self`-taking `as_ptr` being cast to a mutable pointer"##, + description: r##"Checks for the result of a `&self`-taking `as_ptr` being cast to a mutable pointer."##, }, Lint { label: "clippy::as_underscore", @@ -10411,16 +10461,17 @@ that can themselves be awaited."##, }, Lint { label: "clippy::await_holding_invalid_type", - description: r##"Allows users to configure types which should not be held across `await` + description: r##"Allows users to configure types which should not be held across await suspension points."##, }, Lint { label: "clippy::await_holding_lock", - description: r##"Checks for calls to await while holding a non-async-aware MutexGuard."##, + description: r##"Checks for calls to `await` while holding a non-async-aware +`MutexGuard`."##, }, Lint { label: "clippy::await_holding_refcell_ref", - description: r##"Checks for calls to await while holding a `RefCell` `Ref` or `RefMut`."##, + description: r##"Checks for calls to `await` while holding a `RefCell`, `Ref`, or `RefMut`."##, }, Lint { label: "clippy::bad_bit_mask", @@ -10446,8 +10497,8 @@ table: }, Lint { label: "clippy::bind_instead_of_map", - description: r##"Checks for usage of `_.and_then(|x| Some(y))`, `_.and_then(|x| Ok(y))` or -`_.or_else(|x| Err(y))`."##, + description: r##"Checks for usage of `_.and_then(|x| Some(y))`, `_.and_then(|x| Ok(y))` +or `_.or_else(|x| Err(y))`."##, }, Lint { label: "clippy::blanket_clippy_restriction_lints", @@ -10548,13 +10599,13 @@ and suggests to use a case-insensitive approach instead."##, }, Lint { label: "clippy::cast_enum_truncation", - description: r##"Checks for casts from an enum type to an integral type which will definitely truncate the + description: r##"Checks for casts from an enum type to an integral type that will definitely truncate the value."##, }, Lint { label: "clippy::cast_lossless", - description: r##"Checks for casts between numerical types that may -be replaced by safe conversion functions."##, + description: r##"Checks for casts between numeric types that can be replaced by safe +conversion functions."##, }, Lint { label: "clippy::cast_nan_to_int", @@ -10562,7 +10613,7 @@ be replaced by safe conversion functions."##, }, Lint { label: "clippy::cast_possible_truncation", - description: r##"Checks for casts between numerical types that may + description: r##"Checks for casts between numeric types that may truncate large values. This is expected behavior, so the cast is `Allow` by default. It suggests user either explicitly ignore the lint, or use `try_from()` and handle the truncation, default, or panic explicitly."##, @@ -10570,16 +10621,16 @@ or use `try_from()` and handle the truncation, default, or panic explicitly."##, Lint { label: "clippy::cast_possible_wrap", description: r##"Checks for casts from an unsigned type to a signed type of -the same size, or possibly smaller due to target dependent integers. -Performing such a cast is a 'no-op' for the compiler, i.e., nothing is -changed at the bit level, and the binary representation of the value is +the same size, or possibly smaller due to target-dependent integers. +Performing such a cast is a no-op for the compiler (that is, nothing is +changed at the bit level), and the binary representation of the value is reinterpreted. This can cause wrapping if the value is too big for the target signed type. 
However, the cast works as defined, so this lint is `Allow` by default."##, }, Lint { label: "clippy::cast_precision_loss", - description: r##"Checks for casts from any numerical to a float type where + description: r##"Checks for casts from any numeric type to a float type where the receiving type cannot store all values from the original type without rounding errors. This possible rounding is to be expected, so this lint is `Allow` by default. @@ -10589,14 +10640,14 @@ or any 64-bit integer to `f64`."##, }, Lint { label: "clippy::cast_ptr_alignment", - description: r##"Checks for casts, using `as` or `pointer::cast`, -from a less-strictly-aligned pointer to a more-strictly-aligned pointer"##, + description: r##"Checks for casts, using `as` or `pointer::cast`, from a +less strictly aligned pointer to a more strictly aligned pointer."##, }, Lint { label: "clippy::cast_sign_loss", - description: r##"Checks for casts from a signed to an unsigned numerical + description: r##"Checks for casts from a signed to an unsigned numeric type. In this case, negative values wrap around to large positive values, -which can be quite surprising in practice. However, as the cast works as +which can be quite surprising in practice. However, since the cast works as defined, this lint is `Allow` by default."##, }, Lint { @@ -11132,8 +11183,8 @@ implement equality for a type involving floats)."##, }, Lint { label: "clippy::float_cmp_const", - description: r##"Checks for (in-)equality comparisons on floating-point -value and constant, except in functions called `*eq*` (which probably + description: r##"Checks for (in-)equality comparisons on constant floating-point +values (apart from zero), except in functions called `*eq*` (which probably implement equality for a type involving floats)."##, }, Lint { @@ -11152,7 +11203,7 @@ bools in function definitions."##, }, Lint { label: "clippy::fn_to_numeric_cast", - description: r##"Checks for casts of function pointers to something other than usize"##, + description: r##"Checks for casts of function pointers to something other than `usize`."##, }, Lint { label: "clippy::fn_to_numeric_cast_any", @@ -11161,7 +11212,7 @@ bools in function definitions."##, Lint { label: "clippy::fn_to_numeric_cast_with_truncation", description: r##"Checks for casts of a function pointer to a numeric type not wide enough to -store address."##, +store an address."##, }, Lint { label: "clippy::for_kv_map", @@ -12139,7 +12190,7 @@ containing module's name."##, Lint { label: "clippy::modulo_arithmetic", description: r##"Checks for modulo arithmetic."## }, Lint { label: "clippy::modulo_one", - description: r##"Checks for getting the remainder of a division by one or minus + description: r##"Checks for getting the remainder of integer division by one or minus one."##, }, Lint { label: "clippy::multi_assignments", description: r##"Checks for nested assignments."## }, @@ -12531,10 +12582,6 @@ etc. 
instead."##, index."##, }, Lint { - label: "clippy::overflow_check_conditional", - description: r##"Detects classic underflow/overflow checks."##, - }, - Lint { label: "clippy::overly_complex_bool_expr", description: r##"Checks for boolean expressions that contain terminals that can be eliminated."##, @@ -12545,6 +12592,10 @@ can be eliminated."##, description: r##"Checks for usage of `panic!` or assertions in a function whose return type is `Result`."##, }, Lint { + label: "clippy::panicking_overflow_checks", + description: r##"Detects C-style underflow/overflow checks."##, + }, + Lint { label: "clippy::panicking_unwrap", description: r##"Checks for calls of `unwrap[_err]()` that will always fail."##, }, @@ -12577,6 +12628,10 @@ The `allowed-dotfiles` configuration can be used to allow additional file extensions that Clippy should not lint."##, }, Lint { + label: "clippy::pathbuf_init_then_push", + description: r##"Checks for calls to `push` immediately after creating a new `PathBuf`."##, + }, + Lint { label: "clippy::pattern_type_mismatch", description: r##"Checks for patterns that aren't exact representations of the types they are applied to. @@ -12653,12 +12708,12 @@ with the appropriate `.to_owned()`/`to_string()` calls."##, }, Lint { label: "clippy::ptr_as_ptr", - description: r##"Checks for `as` casts between raw pointers without changing its mutability, -namely `*const T` to `*const U` and `*mut T` to `*mut U`."##, + description: r##"Checks for `as` casts between raw pointers that don't change their +constness, namely `*const T` to `*const U` and `*mut T` to `*mut U`."##, }, Lint { label: "clippy::ptr_cast_constness", - description: r##"Checks for `as` casts between raw pointers which change its constness, namely `*const T` to + description: r##"Checks for `as` casts between raw pointers that change their constness, namely `*const T` to `*mut T` and `*mut T` to `*const T`."##, }, Lint { label: "clippy::ptr_eq", description: r##"Use `std::ptr::eq` when applicable"## }, @@ -12934,7 +12989,7 @@ value into a Vec."##, Lint { label: "clippy::same_name_method", description: r##"It lints if a struct has two methods with the same name: -one from a trait, another not from trait."##, +one from a trait, another not from a trait."##, }, Lint { label: "clippy::search_is_some", @@ -13496,8 +13551,9 @@ as returning a large `T` directly may be detrimental to performance."##, }, Lint { label: "clippy::unnecessary_cast", - description: r##"Checks for casts to the same type, casts of int literals to integer types, casts of float -literals to float types and casts between raw pointers without changing type or constness."##, + description: r##"Checks for casts to the same type, casts of int literals to integer +types, casts of float literals to float types, and casts between raw +pointers that don't change type or constness."##, }, Lint { label: "clippy::unnecessary_clippy_cfg", @@ -13602,8 +13658,9 @@ which compares the two arguments, either directly or indirectly."##, }, Lint { label: "clippy::unnecessary_struct_initialization", - description: r##"Checks for initialization of a `struct` by copying a base without setting -any field."##, + description: r##"Checks for initialization of an identical `struct` from another instance +of the type, either by copying a base without setting any field or by +moving all fields individually."##, }, Lint { label: "clippy::unnecessary_to_owned", @@ -13976,7 +14033,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: 
"clippy::complexity", - description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::implied_bounds_in_impls, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_clamp, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_inspect, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_filter_map, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_min_or_max, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, 
clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##, + description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::implied_bounds_in_impls, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_clamp, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_inspect, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_filter_map, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_min_or_max, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, 
clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##, }, children: &[ "clippy::bind_instead_of_map", @@ -14052,7 +14109,6 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::option_filter_map", "clippy::option_map_unit_fn", "clippy::or_then_unwrap", - "clippy::overflow_check_conditional", "clippy::partialeq_ne_impl", "clippy::precedence", "clippy::ptr_offset_with_cast", @@ -14113,7 +14169,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::correctness", - description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::eager_transmute, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::lint_groups_priority, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, + description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::eager_transmute, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, 
clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::lint_groups_priority, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_overflow_checks, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, }, children: &[ "clippy::absurd_extreme_comparisons", @@ -14159,6 +14215,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::option_env_unwrap", "clippy::out_of_bounds_indexing", "clippy::overly_complex_bool_expr", + "clippy::panicking_overflow_checks", "clippy::panicking_unwrap", "clippy::possible_missing_comma", "clippy::read_line_without_trim", @@ -14435,7 +14492,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::restriction", - description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::cfg_not_test, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_enum_variants_with_brackets, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::field_scoped_visibility_modifiers, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::integer_division_remainder_used, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, 
clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::renamed_function_params, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, + description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::cfg_not_test, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_enum_variants_with_brackets, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::field_scoped_visibility_modifiers, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::integer_division_remainder_used, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, 
clippy::pathbuf_init_then_push, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::renamed_function_params, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, }, children: &[ "clippy::absolute_paths", @@ -14507,6 +14564,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::panic", "clippy::panic_in_result_fn", "clippy::partial_pub_fields", + "clippy::pathbuf_init_then_push", "clippy::pattern_type_mismatch", "clippy::print_stderr", "clippy::print_stdout", diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index c069e1c25b6..f6a781907db 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -2,11 +2,12 @@ use std::collections::VecDeque; -use base_db::{FileId, SourceDatabaseExt}; +use base_db::SourceDatabaseExt; use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; +use span::FileId; use syntax::{ ast::{self, make}, - AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, + AstToken, SyntaxKind, SyntaxToken, ToSmolStr, TokenAtOffset, }; use crate::{ @@ -50,9 +51,9 @@ pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { } segments.extend( - path.segments() - .iter() - .map(|segment| make::path_segment(make::name_ref(&segment.to_smol_str()))), + path.segments().iter().map(|segment| { + make::path_segment(make::name_ref(&segment.display_no_db().to_smolstr())) + }), ); make::path_from_segments(segments, is_abs) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 088717a66e5..1c4c15f2557 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -9,13 +9,13 @@ use itertools::{EitherOrBoth, Itertools}; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{ ast::{self, make, HasName}, - AstNode, SmolStr, SyntaxNode, + AstNode, SmolStr, SyntaxNode, ToSmolStr, }; use crate::{ helpers::item_name, items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT}, - RootDatabase, + FxIndexSet, RootDatabase, }; /// A candidate for import, derived during various IDE activities: @@ -262,7 +262,7 @@ impl ImportAssets { let scope = match sema.scope(&self.candidate_node) { Some(it) => it, - None 
=> return <FxHashSet<_>>::default().into_iter(), + None => return <FxIndexSet<_>>::default().into_iter(), }; let krate = self.module_with_candidate.krate(); @@ -319,7 +319,7 @@ fn path_applicable_imports( path_candidate: &PathImportCandidate, mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy, scope_filter: impl Fn(ItemInNs) -> bool + Copy, -) -> FxHashSet<LocatedImport> { +) -> FxIndexSet<LocatedImport> { let _p = tracing::info_span!("ImportAssets::path_applicable_imports").entered(); match &path_candidate.qualifier { @@ -389,16 +389,16 @@ fn import_for_item( let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev(); let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it { // segments match, check next one - EitherOrBoth::Both(a, b) if b.as_str() == Some(&**a) => None, + EitherOrBoth::Both(a, b) if b.as_str() == &**a => None, // segments mismatch / qualifier is longer than the path, bail out EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false), // all segments match and we have exhausted the qualifier, proceed EitherOrBoth::Right(_) => Some(true), }; if item_as_assoc.is_none() { - let item_name = item_name(db, original_item)?.as_text()?; + let item_name = item_name(db, original_item)?; let last_segment = import_path_candidate_segments.next()?; - if last_segment.as_str() != Some(&*item_name) { + if *last_segment != item_name { return None; } } @@ -459,7 +459,7 @@ fn find_import_for_segment( unresolved_first_segment: &str, ) -> Option<ItemInNs> { let segment_is_name = item_name(db, original_item) - .map(|name| name.to_smol_str() == unresolved_first_segment) + .map(|name| name.display_no_db().to_smolstr() == unresolved_first_segment) .unwrap_or(false); Some(if segment_is_name { @@ -483,7 +483,7 @@ fn module_with_segment_name( }; while let Some(module) = current_module { if let Some(module_name) = module.name(db) { - if module_name.to_smol_str() == segment_name { + if module_name.display_no_db().to_smolstr() == segment_name { return Some(module); } } @@ -500,7 +500,7 @@ fn trait_applicable_items( trait_assoc_item: bool, mod_path: impl Fn(ItemInNs) -> Option<ModPath>, scope_filter: impl Fn(hir::Trait) -> bool, -) -> FxHashSet<LocatedImport> { +) -> FxIndexSet<LocatedImport> { let _p = tracing::info_span!("ImportAssets::trait_applicable_items").entered(); let db = sema.db; @@ -566,7 +566,7 @@ fn trait_applicable_items( definitions_exist_in_trait_crate || definitions_exist_in_receiver_crate() }); - let mut located_imports = FxHashSet::default(); + let mut located_imports = FxIndexSet::default(); let mut trait_import_paths = FxHashMap::default(); if trait_assoc_item { @@ -703,7 +703,7 @@ fn path_import_candidate( ) -> Option<ImportCandidate> { Some(match qualifier { Some(qualifier) => match sema.resolve_path(&qualifier) { - None => { + Some(PathResolution::Def(ModuleDef::BuiltinType(_))) | None => { if qualifier.first_qualifier().map_or(true, |it| sema.resolve_path(&it).is_none()) { let qualifier = qualifier .segments() diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs index 9102980677c..fc86d169a24 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs @@ -176,7 +176,7 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) { pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) { let text: &str = "use foo as 
_"; - let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT); + let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT_FIXME); let node = parse .tree() .syntax() diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs index 9cacb6b1a60..926fae0d317 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs @@ -93,17 +93,25 @@ fn try_merge_trees_mut(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehav let rhs_path = rhs.path()?; let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?; - if !(lhs.is_simple_path() + if lhs.is_simple_path() && rhs.is_simple_path() && lhs_path == lhs_prefix - && rhs_path == rhs_prefix) + && rhs_path == rhs_prefix { - lhs.split_prefix(&lhs_prefix); - rhs.split_prefix(&rhs_prefix); - } else { + // we can't merge if the renames are different (`A as a` and `A as b`), + // and we can safely return here + let lhs_name = lhs.rename().and_then(|lhs_name| lhs_name.name()); + let rhs_name = rhs.rename().and_then(|rhs_name| rhs_name.name()); + if lhs_name != rhs_name { + return None; + } + ted::replace(lhs.syntax(), rhs.syntax()); // we can safely return here, in this case `recursive_merge` doesn't do anything return Some(()); + } else { + lhs.split_prefix(&lhs_prefix); + rhs.split_prefix(&rhs_prefix); } } recursive_merge(lhs, rhs, merge) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index 8fac5baa57b..4c52ba39dec 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -48,10 +48,13 @@ use std::{fmt, mem::ManuallyDrop}; use base_db::{ salsa::{self, Durability}, - AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, + AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, DEFAULT_FILE_TEXT_LRU_CAP, }; -use hir::db::{DefDatabase, ExpandDatabase, HirDatabase}; +use hir::{ + db::{DefDatabase, ExpandDatabase, HirDatabase}, + FilePositionWrapper, FileRangeWrapper, +}; use triomphe::Arc; use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; @@ -61,11 +64,15 @@ pub use ::line_index; /// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience. 
pub use base_db; +pub use span::{EditionedFileId, FileId}; pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>; pub type FxIndexMap<K, V> = indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>; +pub type FilePosition = FilePositionWrapper<FileId>; +pub type FileRange = FileRangeWrapper<FileId>; + #[salsa::database( base_db::SourceDatabaseExtStorage, base_db::SourceDatabaseStorage, @@ -138,7 +145,7 @@ impl Default for RootDatabase { } impl RootDatabase { - pub fn new(lru_capacity: Option<usize>) -> RootDatabase { + pub fn new(lru_capacity: Option<u16>) -> RootDatabase { let mut db = RootDatabase { storage: ManuallyDrop::new(salsa::Storage::default()) }; db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); db.set_proc_macros_with_durability(Default::default(), Durability::HIGH); @@ -154,16 +161,17 @@ impl RootDatabase { self.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); } - pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<usize>) { + pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<u16>) { let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP); base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP); base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); // macro expansions are usually rather small, so we can afford to keep more of them alive hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP); + hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048); } - pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) { + pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, u16>) { use hir::db as hir_db; base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP); @@ -185,135 +193,7 @@ impl RootDatabase { .copied() .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP), ); - - macro_rules! 
update_lru_capacity_per_query { - ($( $module:ident :: $query:ident )*) => {$( - if let Some(&cap) = lru_capacities.get(stringify!($query)) { - $module::$query.in_db_mut(self).set_lru_capacity(cap); - } - )*} - } - update_lru_capacity_per_query![ - // SourceDatabase - // base_db::ParseQuery - // base_db::CrateGraphQuery - // base_db::ProcMacrosQuery - - // SourceDatabaseExt - base_db::FileTextQuery - // base_db::FileSourceRootQuery - // base_db::SourceRootQuery - base_db::SourceRootCratesQuery - - // ExpandDatabase - hir_db::AstIdMapQuery - // hir_db::ParseMacroExpansionQuery - // hir_db::InternMacroCallQuery - hir_db::MacroArgQuery - hir_db::DeclMacroExpanderQuery - // hir_db::MacroExpandQuery - hir_db::ExpandProcMacroQuery - hir_db::ParseMacroExpansionErrorQuery - - // DefDatabase - hir_db::FileItemTreeQuery - hir_db::BlockDefMapQuery - hir_db::StructDataWithDiagnosticsQuery - hir_db::UnionDataWithDiagnosticsQuery - hir_db::EnumDataQuery - hir_db::EnumVariantDataWithDiagnosticsQuery - hir_db::ImplDataWithDiagnosticsQuery - hir_db::TraitDataWithDiagnosticsQuery - hir_db::TraitAliasDataQuery - hir_db::TypeAliasDataQuery - hir_db::FunctionDataQuery - hir_db::ConstDataQuery - hir_db::StaticDataQuery - hir_db::Macro2DataQuery - hir_db::MacroRulesDataQuery - hir_db::ProcMacroDataQuery - hir_db::BodyWithSourceMapQuery - hir_db::BodyQuery - hir_db::ExprScopesQuery - hir_db::GenericParamsQuery - hir_db::FieldsAttrsQuery - hir_db::FieldsAttrsSourceMapQuery - hir_db::AttrsQuery - hir_db::CrateLangItemsQuery - hir_db::LangItemQuery - hir_db::ImportMapQuery - hir_db::FieldVisibilitiesQuery - hir_db::FunctionVisibilityQuery - hir_db::ConstVisibilityQuery - hir_db::CrateSupportsNoStdQuery - - // HirDatabase - hir_db::MirBodyQuery - hir_db::BorrowckQuery - hir_db::TyQuery - hir_db::ValueTyQuery - hir_db::ImplSelfTyQuery - hir_db::ConstParamTyQuery - hir_db::ConstEvalQuery - hir_db::ConstEvalDiscriminantQuery - hir_db::ImplTraitQuery - hir_db::FieldTypesQuery - hir_db::LayoutOfAdtQuery - hir_db::TargetDataLayoutQuery - hir_db::CallableItemSignatureQuery - hir_db::ReturnTypeImplTraitsQuery - hir_db::GenericPredicatesForParamQuery - hir_db::GenericPredicatesQuery - hir_db::TraitEnvironmentQuery - hir_db::GenericDefaultsQuery - hir_db::InherentImplsInCrateQuery - hir_db::InherentImplsInBlockQuery - hir_db::IncoherentInherentImplCratesQuery - hir_db::TraitImplsInCrateQuery - hir_db::TraitImplsInBlockQuery - hir_db::TraitImplsInDepsQuery - // hir_db::InternCallableDefQuery - // hir_db::InternLifetimeParamIdQuery - // hir_db::InternImplTraitIdQuery - // hir_db::InternTypeOrConstParamIdQuery - // hir_db::InternClosureQuery - // hir_db::InternCoroutineQuery - hir_db::AssociatedTyDataQuery - hir_db::TraitDatumQuery - hir_db::AdtDatumQuery - hir_db::ImplDatumQuery - hir_db::FnDefDatumQuery - hir_db::FnDefVarianceQuery - hir_db::AdtVarianceQuery - hir_db::AssociatedTyValueQuery - hir_db::ProgramClausesForChalkEnvQuery - - // SymbolsDatabase - symbol_index::ModuleSymbolsQuery - symbol_index::LibrarySymbolsQuery - // symbol_index::LocalRootsQuery - // symbol_index::LibraryRootsQuery - - // LineIndexDatabase - crate::LineIndexQuery - - // InternDatabase - // hir_db::InternFunctionQuery - // hir_db::InternStructQuery - // hir_db::InternUnionQuery - // hir_db::InternEnumQuery - // hir_db::InternConstQuery - // hir_db::InternStaticQuery - // hir_db::InternTraitQuery - // hir_db::InternTraitAliasQuery - // hir_db::InternTypeAliasQuery - // hir_db::InternImplQuery - // hir_db::InternExternBlockQuery - // 
hir_db::InternBlockQuery - // hir_db::InternMacro2Query - // hir_db::InternProcMacroQuery - // hir_db::InternMacroRulesQuery - ]; + hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048); } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index e21d54ccd0e..84a388a460b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -472,7 +472,7 @@ fn find_trait_for_assoc_item( }); for name in names { - if assoc_item_name.as_str() == name.as_text()?.as_str() { + if assoc_item_name.as_str() == name.as_str() { // It is fine to return the first match because in case of // multiple possibilities, the exact trait must be disambiguated // in the definition of trait being implemented, so this search diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index 484c65c2b01..232f2428287 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -22,10 +22,10 @@ //! Our current behavior is ¯\_(ツ)_/¯. use std::fmt; -use base_db::{AnchoredPathBuf, FileId, FileRange}; +use base_db::AnchoredPathBuf; use either::Either; -use hir::{FieldSource, HirFileIdExt, InFile, ModuleSource, Semantics}; -use span::SyntaxContextId; +use hir::{FieldSource, FileRange, HirFileIdExt, InFile, ModuleSource, Semantics}; +use span::{Edition, EditionedFileId, FileId, SyntaxContextId}; use stdx::{never, TupleExt}; use syntax::{ ast::{self, HasName}, @@ -227,7 +227,8 @@ fn rename_mod( module: hir::Module, new_name: &str, ) -> Result<SourceChange> { - if IdentifierKind::classify(new_name)? != IdentifierKind::Ident { + if IdentifierKind::classify(module.krate().edition(sema.db), new_name)? != IdentifierKind::Ident + { bail!("Invalid name `{0}`: cannot rename module to {0}", new_name); } @@ -240,7 +241,7 @@ fn rename_mod( let InFile { file_id, value: def_source } = module.definition_source(sema.db); if let ModuleSource::SourceFile(..) = def_source { let new_name = new_name.trim_start_matches("r#"); - let anchor = file_id.original_file(sema.db); + let anchor = file_id.original_file(sema.db).file_id(); let is_mod_rs = module.is_mod_rs(sema.db); let has_detached_child = module.children(sema.db).any(|child| !child.is_inline(sema.db)); @@ -289,7 +290,7 @@ fn rename_mod( .map(TupleExt::head) { source_change.insert_source_edit( - file_id, + file_id.file_id(), TextEdit::replace(file_range.range, new_name.to_owned()), ) }; @@ -300,8 +301,8 @@ fn rename_mod( let def = Definition::Module(module); let usages = def.usages(sema).all(); - let ref_edits = usages.iter().map(|(&file_id, references)| { - (file_id, source_edit_from_references(references, def, new_name)) + let ref_edits = usages.iter().map(|(file_id, references)| { + (EditionedFileId::file_id(file_id), source_edit_from_references(references, def, new_name)) }); source_change.extend(ref_edits); @@ -313,7 +314,12 @@ fn rename_reference( def: Definition, new_name: &str, ) -> Result<SourceChange> { - let ident_kind = IdentifierKind::classify(new_name)?; + let ident_kind = IdentifierKind::classify( + def.krate(sema.db) + .ok_or_else(|| RenameError("definition has no krate?".into()))? 
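The rename paths above now thread the defining crate's edition into `IdentifierKind::classify`, so keyword checks match the edition the code is written for. A minimal usage sketch of the new signature, assuming the rust-analyzer workspace crates (`hir`, `ide_db`, `span`); the helper name and the `Edition2021` fallback are illustrative, not part of the patch:

    use hir::Semantics;
    use ide_db::{defs::Definition, rename::IdentifierKind, RootDatabase};
    use span::Edition;

    // Sketch: is `new_name` a plain identifier in the edition of the crate that
    // defines `def`? Falls back to 2021 when the definition has no crate.
    fn new_name_is_plain_ident(
        sema: &Semantics<'_, RootDatabase>,
        def: Definition,
        new_name: &str,
    ) -> bool {
        let edition = def
            .krate(sema.db)
            .map(|krate| krate.edition(sema.db))
            .unwrap_or(Edition::Edition2021);
        matches!(IdentifierKind::classify(edition, new_name), Ok(IdentifierKind::Ident))
    }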
+ .edition(sema.db), + new_name, + )?; if matches!( def, @@ -344,8 +350,8 @@ fn rename_reference( bail!("Cannot rename reference to `_` as it is being referenced multiple times"); } let mut source_change = SourceChange::default(); - source_change.extend(usages.iter().map(|(&file_id, references)| { - (file_id, source_edit_from_references(references, def, new_name)) + source_change.extend(usages.iter().map(|(file_id, references)| { + (EditionedFileId::file_id(file_id), source_edit_from_references(references, def, new_name)) })); let mut insert_def_edit = |def| { @@ -578,7 +584,7 @@ fn source_edit_from_def( } } let Some(file_id) = file_id else { bail!("No file available to rename") }; - return Ok((file_id, edit.finish())); + return Ok((EditionedFileId::file_id(file_id), edit.finish())); } let FileRange { file_id, range } = def .range_for_rename(sema) @@ -594,7 +600,7 @@ fn source_edit_from_def( _ => (range, new_name.to_owned()), }; edit.replace(range, new_name); - Ok((file_id, edit.finish())) + Ok((file_id.file_id(), edit.finish())) } #[derive(Copy, Clone, Debug, PartialEq)] @@ -605,8 +611,8 @@ pub enum IdentifierKind { } impl IdentifierKind { - pub fn classify(new_name: &str) -> Result<IdentifierKind> { - match parser::LexedStr::single_token(new_name) { + pub fn classify(edition: Edition, new_name: &str) -> Result<IdentifierKind> { + match parser::LexedStr::single_token(edition, new_name) { Some(res) => match res { (SyntaxKind::IDENT, _) => { if let Some(inner) = new_name.strip_prefix("r#") { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index e1cfe048983..05b32e2a854 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -6,16 +6,17 @@ use std::mem; -use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; +use base_db::{salsa::Database, SourceDatabase, SourceDatabaseExt}; use hir::{ - AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile, - InRealFile, ModuleSource, PathResolution, Semantics, Visibility, + sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt, + InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; -use nohash_hasher::IntMap; use once_cell::unsync::Lazy; use parser::SyntaxKind; -use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize}; +use rustc_hash::FxHashMap; +use span::EditionedFileId; +use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize, ToSmolStr}; use triomphe::Arc; use crate::{ @@ -26,7 +27,7 @@ use crate::{ #[derive(Debug, Default, Clone)] pub struct UsageSearchResult { - pub references: IntMap<FileId, Vec<FileReference>>, + pub references: FxHashMap<EditionedFileId, Vec<FileReference>>, } impl UsageSearchResult { @@ -38,8 +39,8 @@ impl UsageSearchResult { self.references.len() } - pub fn iter(&self) -> impl Iterator<Item = (&FileId, &[FileReference])> + '_ { - self.references.iter().map(|(file_id, refs)| (file_id, &**refs)) + pub fn iter(&self) -> impl Iterator<Item = (EditionedFileId, &[FileReference])> + '_ { + self.references.iter().map(|(&file_id, refs)| (file_id, &**refs)) } pub fn file_ranges(&self) -> impl Iterator<Item = FileRange> + '_ { @@ -50,8 +51,8 @@ impl UsageSearchResult { } impl IntoIterator for UsageSearchResult { - type Item = (FileId, Vec<FileReference>); - type IntoIter = <IntMap<FileId, 
Vec<FileReference>> as IntoIterator>::IntoIter; + type Item = (EditionedFileId, Vec<FileReference>); + type IntoIter = <FxHashMap<EditionedFileId, Vec<FileReference>> as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { self.references.into_iter() @@ -142,36 +143,40 @@ bitflags::bitflags! { /// e.g. for things like local variables. #[derive(Clone, Debug)] pub struct SearchScope { - entries: IntMap<FileId, Option<TextRange>>, + entries: FxHashMap<EditionedFileId, Option<TextRange>>, } impl SearchScope { - fn new(entries: IntMap<FileId, Option<TextRange>>) -> SearchScope { + fn new(entries: FxHashMap<EditionedFileId, Option<TextRange>>) -> SearchScope { SearchScope { entries } } /// Build a search scope spanning the entire crate graph of files. fn crate_graph(db: &RootDatabase) -> SearchScope { - let mut entries = IntMap::default(); + let mut entries = FxHashMap::default(); let graph = db.crate_graph(); for krate in graph.iter() { let root_file = graph[krate].root_file_id; let source_root_id = db.file_source_root(root_file); let source_root = db.source_root(source_root_id); - entries.extend(source_root.iter().map(|id| (id, None))); + entries.extend( + source_root.iter().map(|id| (EditionedFileId::new(id, graph[krate].edition), None)), + ); } SearchScope { entries } } /// Build a search scope spanning all the reverse dependencies of the given crate. fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope { - let mut entries = IntMap::default(); + let mut entries = FxHashMap::default(); for rev_dep in of.transitive_reverse_dependencies(db) { let root_file = rev_dep.root_file(db); let source_root_id = db.file_source_root(root_file); let source_root = db.source_root(source_root_id); - entries.extend(source_root.iter().map(|id| (id, None))); + entries.extend( + source_root.iter().map(|id| (EditionedFileId::new(id, rev_dep.edition(db)), None)), + ); } SearchScope { entries } } @@ -181,12 +186,17 @@ impl SearchScope { let root_file = of.root_file(db); let source_root_id = db.file_source_root(root_file); let source_root = db.source_root(source_root_id); - SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() } + SearchScope { + entries: source_root + .iter() + .map(|id| (EditionedFileId::new(id, of.edition(db)), None)) + .collect(), + } } /// Build a search scope spanning the given module and all its submodules. pub fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope { - let mut entries = IntMap::default(); + let mut entries = FxHashMap::default(); let (file_id, range) = { let InFile { file_id, value } = module.definition_source_range(db); @@ -211,11 +221,11 @@ impl SearchScope { /// Build an empty search scope. pub fn empty() -> SearchScope { - SearchScope::new(IntMap::default()) + SearchScope::new(FxHashMap::default()) } /// Build a empty search scope spanning the given file. - pub fn single_file(file: FileId) -> SearchScope { + pub fn single_file(file: EditionedFileId) -> SearchScope { SearchScope::new(std::iter::once((file, None)).collect()) } @@ -225,7 +235,7 @@ impl SearchScope { } /// Build a empty search scope spanning the given files. 
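With `IntMap<FileId, _>` replaced by `FxHashMap<EditionedFileId, _>`, search scopes and usage results now carry the edition alongside the file. A rough sketch of the new shapes, assuming a `FileId` and a `UsageSearchResult` obtained elsewhere; the function name and the hard-coded 2021 edition are illustrative:

    use ide_db::{search::{SearchScope, UsageSearchResult}, EditionedFileId, FileId};
    use span::Edition;

    fn walk_usages(file_id: FileId, usages: &UsageSearchResult) {
        // Scopes are keyed by file *and* edition now.
        let editioned = EditionedFileId::new(file_id, Edition::Edition2021);
        let _scope = SearchScope::single_file(editioned);

        // Iteration yields the editioned id by value; `.file_id()` recovers the plain FileId.
        for (editioned_file_id, refs) in usages.iter() {
            let _plain: FileId = editioned_file_id.file_id();
            let _reference_count = refs.len();
        }
    }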
- pub fn files(files: &[FileId]) -> SearchScope { + pub fn files(files: &[EditionedFileId]) -> SearchScope { SearchScope::new(files.iter().map(|f| (*f, None)).collect()) } @@ -256,8 +266,8 @@ impl SearchScope { } impl IntoIterator for SearchScope { - type Item = (FileId, Option<TextRange>); - type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>; + type Item = (EditionedFileId, Option<TextRange>); + type IntoIter = std::collections::hash_map::IntoIter<EditionedFileId, Option<TextRange>>; fn into_iter(self) -> Self::IntoIter { self.entries.into_iter() @@ -333,7 +343,7 @@ impl Definition { if let Definition::Macro(macro_def) = self { return match macro_def.kind(db) { hir::MacroKind::Declarative => { - if macro_def.attrs(db).by_key("macro_export").exists() { + if macro_def.attrs(db).by_key(&sym::macro_export).exists() { SearchScope::reverse_dependencies(db, module.krate()) } else { SearchScope::krate(db, module.krate()) @@ -432,7 +442,7 @@ impl<'a> FindUsages<'a> { res } - pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) { + pub fn search(&self, sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool) { let _p = tracing::info_span!("FindUsages:search").entered(); let sema = self.sema; @@ -456,7 +466,7 @@ impl<'a> FindUsages<'a> { module .krate() .display_name(self.sema.db) - .map(|crate_name| crate_name.crate_name().as_smol_str().clone()) + .map(|crate_name| crate_name.crate_name().symbol().as_str().into()) } _ => { let self_kw_refs = || { @@ -468,7 +478,10 @@ impl<'a> FindUsages<'a> { }; // We need to unescape the name in case it is written without "r#" in earlier // editions of Rust where it isn't a keyword. - self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.unescaped().to_smol_str()) + self.def + .name(sema.db) + .or_else(self_kw_refs) + .map(|it| it.unescaped().display(sema.db).to_smolstr()) } }; let name = match &name { @@ -494,13 +507,13 @@ impl<'a> FindUsages<'a> { }) } - // for<'a> |scope: &'a SearchScope| -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a { ... } + // for<'a> |scope: &'a SearchScope| -> impl Iterator<Item = (Arc<String>, EditionedFileId, TextRange)> + 'a { ... 
} fn scope_files<'a>( sema: &'a Semantics<'_, RootDatabase>, scope: &'a SearchScope, - ) -> impl Iterator<Item = (Arc<str>, FileId, TextRange)> + 'a { + ) -> impl Iterator<Item = (Arc<str>, EditionedFileId, TextRange)> + 'a { scope.entries.iter().map(|(&file_id, &search_range)| { - let text = sema.db.file_text(file_id); + let text = sema.db.file_text(file_id.file_id()); let search_range = search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); @@ -624,7 +637,7 @@ impl<'a> FindUsages<'a> { return; }; - let text = sema.db.file_text(file_id); + let text = sema.db.file_text(file_id.file_id()); let search_range = search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); @@ -648,7 +661,7 @@ impl<'a> FindUsages<'a> { &self, self_ty: &hir::Type, name_ref: &ast::NameRef, - sink: &mut dyn FnMut(FileId, FileReference) -> bool, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { match NameRefClass::classify(self.sema, name_ref) { Some(NameRefClass::Definition(Definition::SelfType(impl_))) @@ -669,7 +682,7 @@ impl<'a> FindUsages<'a> { fn found_self_module_name_ref( &self, name_ref: &ast::NameRef, - sink: &mut dyn FnMut(FileId, FileReference) -> bool, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { match NameRefClass::classify(self.sema, name_ref) { Some(NameRefClass::Definition(def @ Definition::Module(_))) if def == self.def => { @@ -692,11 +705,11 @@ impl<'a> FindUsages<'a> { fn found_format_args_ref( &self, - file_id: FileId, + file_id: EditionedFileId, range: TextRange, token: ast::String, res: Option<PathResolution>, - sink: &mut dyn FnMut(FileId, FileReference) -> bool, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { match res.map(Definition::from) { Some(def) if def == self.def => { @@ -714,7 +727,7 @@ impl<'a> FindUsages<'a> { fn found_lifetime( &self, lifetime: &ast::Lifetime, - sink: &mut dyn FnMut(FileId, FileReference) -> bool, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { match NameRefClass::classify_lifetime(self.sema, lifetime) { Some(NameRefClass::Definition(def)) if def == self.def => { @@ -733,7 +746,7 @@ impl<'a> FindUsages<'a> { fn found_name_ref( &self, name_ref: &ast::NameRef, - sink: &mut dyn FnMut(FileId, FileReference) -> bool, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { match NameRefClass::classify(self.sema, name_ref) { Some(NameRefClass::Definition(def)) @@ -807,7 +820,7 @@ impl<'a> FindUsages<'a> { fn found_name( &self, name: &ast::Name, - sink: &mut dyn FnMut(FileId, FileReference) -> bool, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { match NameClass::classify(self.sema, name) { Some(NameClass::PatFieldShorthand { local_def: _, field_ref }) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs index 7ef7b7ae1d0..a83f8473c39 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs @@ -5,10 +5,11 @@ use std::{collections::hash_map::Entry, iter, mem}; -use crate::SnippetCap; -use base_db::{AnchoredPathBuf, FileId}; +use crate::{assists::Command, SnippetCap}; +use base_db::AnchoredPathBuf; use itertools::Itertools; use nohash_hasher::IntMap; +use span::FileId; use stdx::never; use syntax::{ algo, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, @@ -32,28 +33,28 @@ impl SourceChange { 
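The `FindUsages` sink callbacks above now receive an `EditionedFileId` instead of a `FileId`. A hedged sketch of the streaming API under that change; `sema` and `def` stand in for values the caller already has, and the early-stop meaning of the returned bool follows the existing `FindUsages` convention:

    use hir::Semantics;
    use ide_db::{defs::Definition, search::FileReference, EditionedFileId, RootDatabase};

    fn count_references(sema: &Semantics<'_, RootDatabase>, def: Definition) -> usize {
        let mut n = 0;
        def.usages(sema).search(&mut |_file_id: EditionedFileId, _reference: FileReference| {
            n += 1;
            false // keep searching; returning true stops early
        });
        n
    }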
SourceChange { source_file_edits, file_system_edits, is_snippet: false } } - pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self { + pub fn from_text_edit(file_id: impl Into<FileId>, edit: TextEdit) -> Self { SourceChange { - source_file_edits: iter::once((file_id, (edit, None))).collect(), + source_file_edits: iter::once((file_id.into(), (edit, None))).collect(), ..Default::default() } } /// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing /// edits for a file if some already exist. - pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) { - self.insert_source_and_snippet_edit(file_id, edit, None) + pub fn insert_source_edit(&mut self, file_id: impl Into<FileId>, edit: TextEdit) { + self.insert_source_and_snippet_edit(file_id.into(), edit, None) } /// Inserts a [`TextEdit`] and potentially a [`SnippetEdit`] for the given [`FileId`]. /// This properly handles merging existing edits for a file if some already exist. pub fn insert_source_and_snippet_edit( &mut self, - file_id: FileId, + file_id: impl Into<FileId>, edit: TextEdit, snippet_edit: Option<SnippetEdit>, ) { - match self.source_file_edits.entry(file_id) { + match self.source_file_edits.entry(file_id.into()) { Entry::Occupied(mut entry) => { let value = entry.get_mut(); never!(value.0.union(edit).is_err(), "overlapping edits for same file"); @@ -194,7 +195,7 @@ pub struct SourceChangeBuilder { pub edit: TextEditBuilder, pub file_id: FileId, pub source_change: SourceChange, - pub trigger_signature_help: bool, + pub command: Option<Command>, /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin. pub mutated_tree: Option<TreeMutator>, @@ -231,20 +232,20 @@ impl TreeMutator { } impl SourceChangeBuilder { - pub fn new(file_id: FileId) -> SourceChangeBuilder { + pub fn new(file_id: impl Into<FileId>) -> SourceChangeBuilder { SourceChangeBuilder { edit: TextEdit::builder(), - file_id, + file_id: file_id.into(), source_change: SourceChange::default(), - trigger_signature_help: false, + command: None, mutated_tree: None, snippet_builder: None, } } - pub fn edit_file(&mut self, file_id: FileId) { + pub fn edit_file(&mut self, file_id: impl Into<FileId>) { self.commit(); - self.file_id = file_id; + self.file_id = file_id.into(); } fn commit(&mut self) { @@ -300,12 +301,19 @@ impl SourceChangeBuilder { let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() }; self.source_change.push_file_system_edit(file_system_edit); } - pub fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) { - let file_system_edit = FileSystemEdit::MoveFile { src, dst }; + pub fn move_file(&mut self, src: impl Into<FileId>, dst: AnchoredPathBuf) { + let file_system_edit = FileSystemEdit::MoveFile { src: src.into(), dst }; self.source_change.push_file_system_edit(file_system_edit); } - pub fn trigger_signature_help(&mut self) { - self.trigger_signature_help = true; + + /// Triggers the parameter hint popup after the assist is applied + pub fn trigger_parameter_hints(&mut self) { + self.command = Some(Command::TriggerParameterHints); + } + + /// Renames the item at the cursor position after the assist is applied + pub fn rename(&mut self) { + self.command = Some(Command::Rename); } /// Adds a tabstop snippet to place the cursor before `node` diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index 4f706e26af2..37238cc61d3 100644 --- 
a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -36,10 +36,35 @@ pub fn walk_expr(expr: &ast::Expr, cb: &mut dyn FnMut(ast::Expr)) { }) } +pub fn is_closure_or_blk_with_modif(expr: &ast::Expr) -> bool { + match expr { + ast::Expr::BlockExpr(block_expr) => { + matches!( + block_expr.modifier(), + Some( + ast::BlockModifier::Async(_) + | ast::BlockModifier::Try(_) + | ast::BlockModifier::Const(_) + ) + ) + } + ast::Expr::ClosureExpr(_) => true, + _ => false, + } +} + /// Preorder walk all the expression's child expressions preserving events. /// If the callback returns true on an [`WalkEvent::Enter`], the subtree of the expression will be skipped. /// Note that the subtree may already be skipped due to the context analysis this function does. pub fn preorder_expr(start: &ast::Expr, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool) { + preorder_expr_with_ctx_checker(start, &is_closure_or_blk_with_modif, cb); +} + +pub fn preorder_expr_with_ctx_checker( + start: &ast::Expr, + check_ctx: &dyn Fn(&ast::Expr) -> bool, + cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool, +) { let mut preorder = start.syntax().preorder(); while let Some(event) = preorder.next() { let node = match event { @@ -71,20 +96,7 @@ pub fn preorder_expr(start: &ast::Expr, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) if ast::GenericArg::can_cast(node.kind()) { preorder.skip_subtree(); } else if let Some(expr) = ast::Expr::cast(node) { - let is_different_context = match &expr { - ast::Expr::BlockExpr(block_expr) => { - matches!( - block_expr.modifier(), - Some( - ast::BlockModifier::Async(_) - | ast::BlockModifier::Try(_) - | ast::BlockModifier::Const(_) - ) - ) - } - ast::Expr::ClosureExpr(_) => true, - _ => false, - } && expr.syntax() != start.syntax(); + let is_different_context = check_ctx(&expr) && expr.syntax() != start.syntax(); let skip = cb(WalkEvent::Enter(expr)); if skip || is_different_context { preorder.skip_subtree(); @@ -277,6 +289,8 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { }); } Some(ast::BlockModifier::Unsafe(_)) => (), + Some(ast::BlockModifier::Gen(_)) => (), + Some(ast::BlockModifier::AsyncGen(_)) => (), None => (), } if let Some(stmt_list) = b.stmt_list() { @@ -392,7 +406,7 @@ fn for_each_break_expr( } } -fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool { +pub fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool { lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text()) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt index 7c01ac06939..efcf53ded64 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt @@ -20,8 +20,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -50,8 +53,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -80,8 +86,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -110,8 +119,11 @@ ), ), loc: 
DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -140,8 +152,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -170,8 +185,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -200,8 +218,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt index f0b97779c73..9d70942199c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -18,8 +18,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, @@ -46,8 +49,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: CONST, @@ -74,8 +80,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: CONST, @@ -104,8 +113,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: ENUM, @@ -134,8 +146,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -164,8 +179,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, @@ -192,8 +210,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STATIC, @@ -222,8 +243,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -282,8 +306,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -314,8 +341,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -346,8 +376,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -376,8 +409,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -404,8 +440,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: TRAIT, @@ 
-434,8 +473,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -464,8 +506,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: UNION, @@ -494,8 +539,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -524,8 +572,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -554,8 +605,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -582,8 +636,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -612,8 +669,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -644,8 +704,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -672,8 +735,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -702,8 +768,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -730,8 +799,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -773,8 +845,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 0, + hir_file_id: EditionedFileId( + FileId( + 0, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -814,8 +889,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 1, + hir_file_id: EditionedFileId( + FileId( + 1, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -844,8 +922,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 1, + hir_file_id: EditionedFileId( + FileId( + 1, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -874,8 +955,11 @@ }, ), loc: DeclarationLocation { - hir_file_id: FileId( - 1, + hir_file_id: EditionedFileId( + FileId( + 1, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -904,8 +988,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 1, + hir_file_id: EditionedFileId( + FileId( + 1, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -934,8 +1021,11 @@ ), ), loc: DeclarationLocation { - hir_file_id: FileId( - 1, + hir_file_id: EditionedFileId( + FileId( + 1, + ), + Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs index ee7c448bb89..48a585bf333 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs @@ -113,8 +113,8 @@ fn assoc_item_of_trait( #[cfg(test)] mod tests { - use base_db::FilePosition; 
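The test-data dumps above reflect the same shift: `DeclarationLocation` now records an `EditionedFileId(FileId(..), Edition2021)` instead of a bare `FileId`. Converting such an id back to the VFS level is a single call, as the rename edits earlier in this patch do; a small sketch, with the function name purely illustrative:

    use ide_db::FileRange; // FileRangeWrapper<FileId>, the VFS-level range

    // hir::FileRange carries an EditionedFileId; drop the edition with .file_id()
    // (or with `.into()`, as the diagnostics handlers below do).
    fn to_vfs_range(range: hir::FileRange) -> FileRange {
        FileRange { file_id: range.file_id.file_id(), range: range.range }
    }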
use expect_test::{expect, Expect}; + use hir::FilePosition; use hir::Semantics; use syntax::ast::{self, AstNode}; use test_fixture::ChangeFixture; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs index 46f47f258bd..5b566c5067d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs @@ -5,7 +5,10 @@ use std::iter; use hir::Semantics; -use syntax::ast::{self, make, Pat}; +use syntax::{ + ast::{self, make, Pat}, + ToSmolStr, +}; use crate::RootDatabase; @@ -26,7 +29,7 @@ impl TryEnum { _ => return None, }; TryEnum::ALL.iter().find_map(|&var| { - if enum_.name(sema.db).to_smol_str() == var.type_name() { + if enum_.name(sema.db).display_no_db().to_smolstr() == var.type_name() { return Some(var); } None diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs index a915391ad90..965f432407b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs @@ -1,7 +1,10 @@ //! Functionality for generating trivial constructors use hir::StructKind; -use syntax::ast::{make, Expr, Path}; +use syntax::{ + ast::{make, Expr, Path}, + ToSmolStr, +}; /// given a type return the trivial constructor (if one exists) pub fn use_trivial_constructor( @@ -15,7 +18,9 @@ pub fn use_trivial_constructor( if variant.kind(db) == hir::StructKind::Unit { let path = make::path_qualified( path, - make::path_segment(make::name_ref(&variant.name(db).to_smol_str())), + make::path_segment(make::name_ref( + &variant.name(db).display_no_db().to_smolstr(), + )), ); return Some(make::expr_path(path)); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs index 45fc6f8e68d..c7071d1ce47 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs @@ -1,16 +1,17 @@ //! Suggests shortening `Foo { field: field }` to `Foo { field }` in both //! expressions and patterns. -use ide_db::{ - base_db::{FileId, FileRange}, - source_change::SourceChange, -}; +use ide_db::{source_change::SourceChange, EditionedFileId, FileRange}; use syntax::{ast, match_ast, AstNode, SyntaxNode}; use text_edit::TextEdit; use crate::{fix, Diagnostic, DiagnosticCode}; -pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: &SyntaxNode) { +pub(crate) fn field_shorthand( + acc: &mut Vec<Diagnostic>, + file_id: EditionedFileId, + node: &SyntaxNode, +) { match_ast! 
{ match node { ast::RecordExpr(it) => check_expr_field_shorthand(acc, file_id, it), @@ -22,7 +23,7 @@ pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: fn check_expr_field_shorthand( acc: &mut Vec<Diagnostic>, - file_id: FileId, + file_id: EditionedFileId, record_expr: ast::RecordExpr, ) { let record_field_list = match record_expr.record_expr_field_list() { @@ -52,7 +53,7 @@ fn check_expr_field_shorthand( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct initialization", - FileRange { file_id, range: field_range }, + FileRange { file_id: file_id.into(), range: field_range }, ) .with_fixes(Some(vec![fix( "use_expr_field_shorthand", @@ -66,7 +67,7 @@ fn check_expr_field_shorthand( fn check_pat_field_shorthand( acc: &mut Vec<Diagnostic>, - file_id: FileId, + file_id: EditionedFileId, record_pat: ast::RecordPat, ) { let record_pat_field_list = match record_pat.record_pat_field_list() { @@ -96,7 +97,7 @@ fn check_pat_field_shorthand( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct pattern", - FileRange { file_id, range: field_range }, + FileRange { file_id: file_id.into(), range: field_range }, ) .with_fixes(Some(vec![fix( "use_pat_field_shorthand", diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs index 785a42352bf..acff8116961 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -15,7 +15,7 @@ pub(crate) fn inactive_code( return None; } - let inactive = DnfExpr::new(d.cfg.clone()).why_inactive(&d.opts); + let inactive = DnfExpr::new(&d.cfg).why_inactive(&d.opts); let mut message = "code is inactive due to #[cfg] directives".to_owned(); if let Some(inactive) = inactive { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs index a0fad7c850c..18a95f0963d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -332,6 +332,7 @@ impl someStruct { check_diagnostics( r#" enum Option { Some, None } +use Option::{Some, None}; #[allow(unused)] fn main() { @@ -345,24 +346,6 @@ fn main() { } #[test] - fn non_let_bind() { - check_diagnostics( - r#" -enum Option { Some, None } - -#[allow(unused)] -fn main() { - match Option::None { - SOME_VAR @ None => (), - // ^^^^^^^^ 💡 warn: Variable `SOME_VAR` should have snake_case name, e.g. `some_var` - Some => (), - } -} -"#, - ); - } - - #[test] fn allow_attributes_crate_attr() { check_diagnostics( r#" @@ -427,7 +410,12 @@ fn qualify() { #[test] // Issue #8809. fn parenthesized_parameter() { - check_diagnostics(r#"fn f((O): _) { _ = O; }"#) + check_diagnostics( + r#" +fn f((_O): u8) {} + // ^^ 💡 warn: Variable `_O` should have snake_case name, e.g. `_o` +"#, + ) } #[test] @@ -766,4 +754,106 @@ mod Foo; "#, ) } + + #[test] + fn test_field_shorthand() { + check_diagnostics( + r#" +struct Foo { _nonSnake: u8 } + // ^^^^^^^^^ 💡 warn: Field `_nonSnake` should have snake_case name, e.g. 
`_non_snake` +fn func(Foo { _nonSnake }: Foo) {} +"#, + ); + } + + #[test] + fn test_match() { + check_diagnostics( + r#" +enum Foo { Variant { nonSnake1: u8 } } + // ^^^^^^^^^ 💡 warn: Field `nonSnake1` should have snake_case name, e.g. `non_snake1` +fn func() { + match (Foo::Variant { nonSnake1: 1 }) { + Foo::Variant { nonSnake1: _nonSnake2 } => {}, + // ^^^^^^^^^^ 💡 warn: Variable `_nonSnake2` should have snake_case name, e.g. `_non_snake2` + } +} +"#, + ); + + check_diagnostics( + r#" +struct Foo(u8); + +fn func() { + match Foo(1) { + Foo(_nonSnake) => {}, + // ^^^^^^^^^ 💡 warn: Variable `_nonSnake` should have snake_case name, e.g. `_non_snake` + } +} +"#, + ); + + check_diagnostics( + r#" +fn main() { + match 1 { + _Bad1 @ _Bad2 => {} + // ^^^^^ 💡 warn: Variable `_Bad1` should have snake_case name, e.g. `_bad1` + // ^^^^^ 💡 warn: Variable `_Bad2` should have snake_case name, e.g. `_bad2` + } +} +"#, + ); + check_diagnostics( + r#" +fn main() { + match 1 { _Bad1 => () } + // ^^^^^ 💡 warn: Variable `_Bad1` should have snake_case name, e.g. `_bad1` +} +"#, + ); + + check_diagnostics( + r#" +enum Foo { V1, V2 } +use Foo::V1; + +fn main() { + match V1 { + _Bad1 @ V1 => {}, + // ^^^^^ 💡 warn: Variable `_Bad1` should have snake_case name, e.g. `_bad1` + Foo::V2 => {} + } +} +"#, + ); + } + + #[test] + fn test_for_loop() { + check_diagnostics( + r#" +//- minicore: iterators +fn func() { + for _nonSnake in [] {} + // ^^^^^^^^^ 💡 warn: Variable `_nonSnake` should have snake_case name, e.g. `_non_snake` +} +"#, + ); + + check_fix( + r#" +//- minicore: iterators +fn func() { + for nonSnake$0 in [] { nonSnake; } +} +"#, + r#" +fn func() { + for non_snake in [] { non_snake; } +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index a9c0e3b7319..117088ca09c 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -3,11 +3,10 @@ use hir::{ImportPathConfig, PathResolution, Semantics}; use ide_db::{ - base_db::{FileId, FileRange}, helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, source_change::SourceChangeBuilder, - FxHashMap, RootDatabase, + EditionedFileId, FileRange, FxHashMap, RootDatabase, }; use itertools::Itertools; use stdx::{format_to, never}; @@ -102,7 +101,7 @@ impl State { pub(crate) fn json_in_items( sema: &Semantics<'_, RootDatabase>, acc: &mut Vec<Diagnostic>, - file_id: FileId, + file_id: EditionedFileId, node: &SyntaxNode, config: &DiagnosticsConfig, ) { @@ -132,7 +131,7 @@ pub(crate) fn json_in_items( Diagnostic::new( DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning), "JSON syntax is not valid as a Rust item", - FileRange { file_id, range }, + FileRange { file_id: file_id.into(), range }, ) .with_fixes(Some(vec![{ let mut scb = SourceChangeBuilder::new(file_id); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index 2cd6a71c001..e59b63f288d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -7,7 +7,10 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> // Use more accurate position if available. 
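Several of these hunks also switch name rendering from `Name::to_smol_str` to the display API (`display_no_db()` / `display(db)`) plus `ToSmolStr`. A short sketch of the pattern used in `ty_filter.rs` and `use_trivial_constructor.rs` above (and again in `missing_fields.rs` below); the helper name is illustrative:

    use syntax::{ast::{self, make}, ToSmolStr};

    // Render a hir::Name for a `make::` constructor without a database at hand.
    fn name_ref_for(name: &hir::Name) -> ast::NameRef {
        make::name_ref(&name.display_no_db().to_smolstr())
    }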
let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); Diagnostic::new( - DiagnosticCode::Ra("macro-error", Severity::Error), + DiagnosticCode::Ra( + "macro-error", + if d.error { Severity::Error } else { Severity::WeakWarning }, + ), d.message.clone(), display_range, ) @@ -45,7 +48,7 @@ macro_rules! include { () => {} } macro_rules! compile_error { () => {} } include!("doesntexist"); -//^^^^^^^ error: failed to load file `doesntexist` + //^^^^^^^^^^^^^ error: failed to load file `doesntexist` compile_error!("compile_error macro works"); //^^^^^^^^^^^^^ error: compile_error macro works @@ -125,7 +128,7 @@ macro_rules! env { () => {} } macro_rules! concat { () => {} } include!(concat!(env!("OUT_DIR"), "/out.rs")); -//^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix + //^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix "#, ); } @@ -160,20 +163,25 @@ macro_rules! include {} #[rustc_builtin_macro] macro_rules! compile_error {} +#[rustc_builtin_macro] +macro_rules! concat {} fn main() { // Test a handful of built-in (eager) macros: include!(invalid); - //^^^^^^^ error: could not convert tokens + //^^^^^^^ error: expected string literal include!("does not exist"); - //^^^^^^^ error: failed to load file `does not exist` + //^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` + + include!(concat!("does ", "not ", "exist")); + //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` env!(invalid); - //^^^ error: could not convert tokens + //^^^^^^^ error: expected string literal env!("OUT_DIR"); - //^^^ error: `OUT_DIR` not set, enable "build scripts" to fix + //^^^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix compile_error!("compile_error works"); //^^^^^^^^^^^^^ error: compile_error works @@ -198,7 +206,7 @@ fn f() { m!(); m!(hi); - //^ error: leftover tokens + //^ error: leftover tokens } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 41c762c85b2..56ec45c8984 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -1,6 +1,6 @@ use either::Either; use hir::InFile; -use ide_db::base_db::FileRange; +use ide_db::FileRange; use syntax::{ ast::{self, HasArgList}, AstNode, AstPtr, @@ -257,6 +257,75 @@ impl Foo { } #[test] + fn rest_pat_in_macro_expansion() { + check_diagnostics( + r#" +// issue #17292 +#![allow(dead_code)] + +macro_rules! replace_with_2_dots { + ( $( $input:tt )* ) => { + .. + }; +} + +macro_rules! enum_str { + ( + $( + $variant:ident ( + $( $tfield:ty ),* + ) + ) + , + * + ) => { + enum Foo { + $( + $variant ( $( $tfield ),* ), + )* + } + + impl Foo { + fn variant_name_as_str(&self) -> &str { + match self { + $( + Self::$variant ( replace_with_2_dots!( $( $tfield ),* ) ) + => "", + )* + } + } + } + }; +} + +enum_str! { + TupleVariant1(i32), + TupleVariant2(), + TupleVariant3(i8,u8,i128) +} +"#, + ); + + check_diagnostics( + r#" +#![allow(dead_code)] +macro_rules! two_dots1 { + () => { .. }; +} + +macro_rules! 
two_dots2 { + () => { two_dots1!() }; +} + +fn test() { + let (_, _, two_dots1!()) = ((), 42); + let (_, two_dots2!(), _) = (1, true, 2, false, (), (), 3); +} +"#, + ); + } + + #[test] fn varargs() { check_diagnostics( r#" diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 6a809cb0cef..ea7908525ae 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -1,7 +1,7 @@ use either::Either; use hir::{ db::{ExpandDatabase, HirDatabase}, - known, AssocItem, HirDisplay, HirFileIdExt, ImportPathConfig, InFile, Type, + sym, AssocItem, HirDisplay, HirFileIdExt, ImportPathConfig, InFile, Type, }; use ide_db::{ assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, @@ -11,7 +11,7 @@ use stdx::format_to; use syntax::{ algo, ast::{self, make}, - AstNode, SyntaxNode, SyntaxNodePtr, + AstNode, SyntaxNode, SyntaxNodePtr, ToSmolStr, }; use text_edit::TextEdit; @@ -146,7 +146,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass } }; let field = make::record_expr_field( - make::name_ref(&f.name(ctx.sema.db).to_smol_str()), + make::name_ref(&f.name(ctx.sema.db).display_no_db().to_smolstr()), field_expr, ); new_field_list.add_field(field.clone_for_update()); @@ -160,7 +160,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass let new_field_list = old_field_list.clone_for_update(); for (f, _) in missing_fields.iter() { let field = make::record_pat_field_shorthand(make::name_ref( - &f.name(ctx.sema.db).to_smol_str(), + &f.name(ctx.sema.db).display_no_db().to_smolstr(), )); new_field_list.add_field(field.clone_for_update()); } @@ -210,7 +210,7 @@ fn get_default_constructor( let has_new_func = ty .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| { if let AssocItem::Function(func) = assoc_item { - if func.name(ctx.sema.db) == known::new + if func.name(ctx.sema.db) == sym::new.clone() && func.assoc_fn_params(ctx.sema.db).is_empty() { return Some(()); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 6d0119fb57c..97296278c3a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -608,8 +608,8 @@ fn main() { // `Never` is deliberately not defined so that it's an uninferred type. // We ignore these to avoid triggering bugs in the analysis. 
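String-keyed lookups are likewise replaced with pre-interned symbols: `by_key("macro_export")` becomes `by_key(&sym::macro_export)` in search.rs, and the `known::new` comparison becomes `sym::new` in missing_fields.rs. A hedged sketch of that comparison; the wrapper function is illustrative:

    use hir::{db::HirDatabase, sym};

    // Does `func` look like a zero-argument `new` constructor?
    fn is_nullary_new(db: &dyn HirDatabase, func: hir::Function) -> bool {
        func.name(db) == sym::new.clone() && func.assoc_fn_params(db).is_empty()
    }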
match Option::<Never>::None { - None => (), - Some(never) => match never {}, + Option::None => (), + Option::Some(never) => match never {}, } match Option::<Never>::None { Option::Some(_never) => {}, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs index 5a3206445c5..dfadef11fde 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -1,6 +1,6 @@ use either::Either; use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics, VariantId}; -use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase}; +use ide_db::{source_change::SourceChange, EditionedFileId, RootDatabase}; use syntax::{ ast::{self, edit::IndentLevel, make}, AstNode, @@ -51,7 +51,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assis fn missing_record_expr_field_fixes( sema: &Semantics<'_, RootDatabase>, - usage_file_id: FileId, + usage_file_id: EditionedFileId, record_expr_field: &ast::RecordExprField, ) -> Option<Vec<Assist>> { let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index d831878044d..62bc1f3d06f 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -1,5 +1,5 @@ -use hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn}; -use ide_db::{assists::Assist, base_db::FileRange, source_change::SourceChange}; +use hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn, FileRange}; +use ide_db::{assists::Assist, source_change::SourceChange}; use syntax::{ast, AstNode}; use text_edit::TextEdit; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index a470ce72fc3..6d756484ebc 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -4,6 +4,7 @@ use ide_db::{ label::Label, source_change::SourceChangeBuilder, }; +use syntax::ToSmolStr; use text_edit::TextRange; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; @@ -21,7 +22,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( let assoc_item = d.assoc_item.1; let default_range = d.impl_.syntax_node_ptr().text_range(); - let trait_name = d.trait_.name(db).to_smol_str(); + let trait_name = d.trait_.name(db).display_no_db().to_smolstr(); let (redundant_item_name, diagnostic_range, redundant_item_def) = match assoc_item { hir::AssocItem::Function(id) => { @@ -45,7 +46,10 @@ pub(crate) fn trait_impl_redundant_assoc_item( ( format!("`type {redundant_assoc_item_name}`"), type_alias.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), - format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()), + format!( + "\n type {};", + type_alias.name(ctx.sema.db).display_no_db().to_smolstr() + ), ) } }; @@ -98,7 +102,7 @@ fn quickfix_for_redundant_assoc_item( group: None, target: range, 
source_change: Some(source_change_builder.finish()), - trigger_signature_help: false, + command: None, }]) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 4c255322280..4e52d28051b 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -748,4 +748,16 @@ fn f() { "#, ); } + + #[test] + fn regression_17585() { + check_diagnostics( + r#" +fn f() { + let (_, _, _, ..) = (true, 42); + // ^^^^^^^^^^^^^ error: expected (bool, i32), found (bool, i32, {unknown}) +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs index 4f04267adb1..b4a566e3188 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -75,14 +75,14 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist> .unique() .map(|code| Assist { id: AssistId("typed-hole", AssistKind::QuickFix), - label: Label::new(format!("Replace `_` with `{}`", &code)), + label: Label::new(format!("Replace `_` with `{code}`")), group: Some(GroupLabel("Replace `_` with a term".to_owned())), target: original_range.range, source_change: Some(SourceChange::from_text_edit( original_range.file_id, TextEdit::replace(original_range.range, code), )), - trigger_signature_help: false, + command: None, }) .collect(); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index 77ffd0fd968..1b71a3a3e69 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -4,14 +4,14 @@ use std::iter; use hir::{db::DefDatabase, DefMap, InFile, ModuleSource}; use ide_db::{ - base_db::{FileId, FileLoader, FileRange, SourceDatabase, SourceDatabaseExt}, + base_db::{FileLoader, SourceDatabaseExt}, source_change::SourceChange, - RootDatabase, + FileId, FileRange, LineIndexDatabase, }; use paths::Utf8Component; use syntax::{ ast::{self, edit::IndentLevel, HasModuleItem, HasName}, - AstNode, TextRange, + AstNode, TextRange, ToSmolStr, }; use text_edit::TextEdit; @@ -26,7 +26,8 @@ pub(crate) fn unlinked_file( acc: &mut Vec<Diagnostic>, file_id: FileId, ) { - let fixes = fixes(ctx, file_id); + let mut range = TextRange::up_to(ctx.sema.db.line_index(file_id).len()); + let fixes = fixes(ctx, file_id, range); // FIXME: This is a hack for the vscode extension to notice whether there is an autofix or not before having to resolve diagnostics. // This is to prevent project linking popups from appearing when there is an autofix. 
https://github.com/rust-lang/rust-analyzer/issues/14523 let message = if fixes.is_none() { @@ -37,7 +38,6 @@ pub(crate) fn unlinked_file( let message = format!("{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings."); - let mut range = ctx.sema.db.parse(file_id).syntax_node().text_range(); let mut unused = true; if fixes.is_none() { @@ -70,7 +70,11 @@ pub(crate) fn unlinked_file( ); } -fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { +fn fixes( + ctx: &DiagnosticsContext<'_>, + file_id: FileId, + trigger_range: TextRange, +) -> Option<Vec<Assist>> { // If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file, // suggest that as a fix. @@ -94,7 +98,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { let root_module = &crate_def_map[DefMap::ROOT]; let Some(root_file_id) = root_module.origin.file_id() else { continue }; - let Some(crate_root_path) = source_root.path_for_file(&root_file_id) else { continue }; + let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id()) else { + continue; + }; let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) else { continue }; // try resolving the relative difference of the paths as inline modules @@ -106,7 +112,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { // shouldn't occur _ => continue 'crates, }; - match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) { + match current.children.iter().find(|(name, _)| name.display_no_db().to_smolstr() == seg) + { Some((_, &child)) => current = &crate_def_map[child], None => continue 'crates, } @@ -118,7 +125,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { let InFile { file_id: parent_file_id, value: source } = current.definition_source(ctx.sema.db); let parent_file_id = parent_file_id.file_id()?; - return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id); + return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range); } // if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible @@ -138,25 +145,28 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { 'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { let crate_def_map = ctx.sema.db.crate_def_map(krate); let Some((_, module)) = crate_def_map.modules().find(|(_, module)| { - module.origin.file_id() == Some(parent_id) && !module.origin.is_inline() + module.origin.file_id().map(Into::into) == Some(parent_id) && !module.origin.is_inline() }) else { continue; }; if stack.is_empty() { return make_fixes( - ctx.sema.db, parent_id, module.definition_source(ctx.sema.db).value, &module_name, - file_id, + trigger_range, ); } else { // direct parent file is missing, // try finding a parent that has an inline tree from here on let mut current = module; for s in stack.iter().rev() { - match module.children.iter().find(|(name, _)| name.to_smol_str() == s) { + match module + .children + .iter() + .find(|(name, _)| name.display_no_db().to_smolstr() == s) + { Some((_, child)) => { current = &crate_def_map[*child]; } @@ -169,7 +179,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { let InFile { file_id: parent_file_id, value: source } = 
current.definition_source(ctx.sema.db); let parent_file_id = parent_file_id.file_id()?; - return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id); + return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range); } } @@ -177,11 +187,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { } fn make_fixes( - db: &RootDatabase, parent_file_id: FileId, source: ModuleSource, new_mod_name: &str, - added_file_id: FileId, + trigger_range: TextRange, ) -> Option<Vec<Assist>> { fn is_outline_mod(item: &ast::Item) -> bool { matches!(item, ast::Item::Module(m) if m.item_list().is_none()) @@ -252,7 +261,6 @@ fn make_fixes( } } - let trigger_range = db.parse(added_file_id).tree().syntax().text_range(); Some(vec![ fix( "add_mod_declaration", diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 41357b59622..eb8eea69f67 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -1,9 +1,8 @@ use std::iter; -use hir::{db::ExpandDatabase, Adt, HasSource, HirDisplay, InFile, Struct, Union}; +use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union}; use ide_db::{ assists::{Assist, AssistId, AssistKind}, - base_db::FileRange, helpers::is_editable_crate, label::Label, source_change::{SourceChange, SourceChangeBuilder}, @@ -76,7 +75,7 @@ fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<A let expr = d.expr.value.to_node(&root); let error_range = ctx.sema.original_range_opt(expr.syntax())?; - let field_name = d.name.as_str()?; + let field_name = d.name.as_str(); // Convert the receiver to an ADT let adt = d.receiver.strip_references().as_adt()?; let target_module = adt.module(ctx.sema.db); @@ -130,7 +129,7 @@ fn add_variant_to_union( group: None, target: error_range.range, source_change: Some(src_change_builder.finish()), - trigger_signature_help: false, + command: None, }) } @@ -153,7 +152,12 @@ fn add_field_to_struct_fix( } else { Some(make::visibility_pub_crate()) }; - let field_name = make::name(field_name); + + let field_name = match field_name.chars().next() { + Some(ch) if ch.is_numeric() => return None, + Some(_) => make::name(field_name), + None => return None, + }; let (offset, record_field) = record_field_layout( visibility, @@ -173,13 +177,18 @@ fn add_field_to_struct_fix( group: None, target: error_range.range, source_change: Some(src_change_builder.finish()), - trigger_signature_help: false, + command: None, }) } None => { // Add a field list to the Unit Struct let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id); - let field_name = make::name(field_name); + let field_name = match field_name.chars().next() { + // FIXME : See match arm below regarding tuple structs. 
+ Some(ch) if ch.is_numeric() => return None, + Some(_) => make::name(field_name), + None => return None, + }; let visibility = if error_range.file_id == struct_range.file_id { None } else { @@ -204,7 +213,7 @@ fn add_field_to_struct_fix( group: None, target: error_range.range, source_change: Some(src_change_builder.finish()), - trigger_signature_help: false, + command: None, }) } Some(FieldList::TupleFieldList(_tuple)) => { @@ -266,7 +275,7 @@ fn method_fix( file_id, TextEdit::insert(range.end(), "()".to_owned()), )), - trigger_signature_help: false, + command: None, }) } #[cfg(test)] @@ -275,7 +284,7 @@ mod tests { use crate::{ tests::{ check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, - check_fix, + check_fix, check_no_fix, }, DiagnosticsConfig, }; @@ -460,4 +469,36 @@ fn foo() { "#, ); } + + #[test] + fn no_fix_when_indexed() { + check_no_fix( + r#" + struct Kek {} +impl Kek { + pub fn foo(self) { + self.$00 + } +} + +fn main() {} + "#, + ) + } + + #[test] + fn no_fix_when_without_field() { + check_no_fix( + r#" + struct Kek {} +impl Kek { + pub fn foo(self) { + self.$0 + } +} + +fn main() {} + "#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 42211cdbe5d..387d56b890b 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -1,13 +1,12 @@ -use hir::{db::ExpandDatabase, AssocItem, HirDisplay, InFile}; +use hir::{db::ExpandDatabase, AssocItem, FileRange, HirDisplay, InFile}; use ide_db::{ assists::{Assist, AssistId, AssistKind}, - base_db::FileRange, label::Label, source_change::SourceChange, }; use syntax::{ ast::{self, make, HasArgList}, - AstNode, SmolStr, TextRange, + format_smolstr, AstNode, SmolStr, TextRange, ToSmolStr, }; use text_edit::TextEdit; @@ -105,10 +104,10 @@ fn field_fix( group: None, target: range, source_change: Some(SourceChange::from_iter([ - (file_id, TextEdit::insert(range.start(), "(".to_owned())), - (file_id, TextEdit::insert(range.end(), ")".to_owned())), + (file_id.into(), TextEdit::insert(range.start(), "(".to_owned())), + (file_id.into(), TextEdit::insert(range.end(), ")".to_owned())), ])), - trigger_signature_help: false, + command: None, }) } @@ -154,14 +153,16 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) - _ => false, }; - let mut receiver_type_adt_name = receiver_type.as_adt()?.name(db).to_smol_str().to_string(); + let mut receiver_type_adt_name = + receiver_type.as_adt()?.name(db).display_no_db().to_smolstr(); let generic_parameters: Vec<SmolStr> = receiver_type.generic_parameters(db).collect(); // if receiver should be pass as first arg in the assoc func, // we could omit generic parameters cause compiler can deduce it automatically if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() { let generic_parameters = generic_parameters.join(", "); - receiver_type_adt_name = format!("{receiver_type_adt_name}::<{generic_parameters}>"); + receiver_type_adt_name = + format_smolstr!("{receiver_type_adt_name}::<{generic_parameters}>"); } let method_name = call.name_ref()?; @@ -191,7 +192,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) - file_id, TextEdit::replace(range, assoc_func_call_expr_string), )), - trigger_signature_help: false, + command: None, }) } else { None 
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs index 11556883249..2bd8e484f85 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec< &format!("Create module at `{candidate}`"), FileSystemEdit::CreateFile { dst: AnchoredPathBuf { - anchor: d.decl.file_id.original_file(ctx.sema.db), + anchor: d.decl.file_id.original_file(ctx.sema.db).file_id(), path: candidate.clone(), }, initial_contents: "".to_owned(), diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs deleted file mode 100644 index 7ea50c496fb..00000000000 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs +++ /dev/null @@ -1,45 +0,0 @@ -use hir::db::DefDatabase; - -use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; - -// Diagnostic: unresolved-proc-macro -// -// This diagnostic is shown when a procedural macro can not be found. This usually means that -// procedural macro support is simply disabled (and hence is only a weak hint instead of an error), -// but can also indicate project setup problems. -// -// If you are seeing a lot of "proc macro not expanded" warnings, you can add this option to the -// `rust-analyzer.diagnostics.disabled` list to prevent them from showing. Alternatively you can -// enable support for procedural macros (see `rust-analyzer.procMacro.attributes.enable`). -pub(crate) fn unresolved_proc_macro( - ctx: &DiagnosticsContext<'_>, - d: &hir::UnresolvedProcMacro, - proc_macros_enabled: bool, - proc_attr_macros_enabled: bool, -) -> Diagnostic { - // Use more accurate position if available. 
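As an aside on the `add_field_to_struct_fix` hunks in `unresolved_field.rs` further above: the new match on the first character means no fix is offered when the "field" is really a tuple index such as `self.0`, or when nothing follows the dot. A minimal standalone sketch of that guard (the real code goes on to call `make::name` and build an assist, which is omitted here):

```rust
// Sketch only: mirrors the guard added in unresolved_field.rs; `make::name`
// and the assist plumbing are left out.
fn usable_field_name(name: &str) -> Option<&str> {
    match name.chars().next() {
        // A leading digit means a tuple-index access like `self.0`;
        // a named struct field cannot be generated for it, so bail.
        Some(ch) if ch.is_numeric() => None,
        // Ordinary identifier: keep it.
        Some(_) => Some(name),
        // Nothing typed after the dot yet (e.g. `self.`): no fix either.
        None => None,
    }
}

fn main() {
    assert_eq!(usable_field_name("bar"), Some("bar"));
    assert_eq!(usable_field_name("0"), None);
    assert_eq!(usable_field_name(""), None);
}
```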
- let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); - - let config_enabled = match d.kind { - hir::MacroKind::Attr => proc_macros_enabled && proc_attr_macros_enabled, - _ => proc_macros_enabled, - }; - - let not_expanded_message = match &d.macro_name { - Some(name) => format!("proc macro `{name}` not expanded"), - None => "proc macro not expanded".to_owned(), - }; - let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning }; - let def_map = ctx.sema.db.crate_def_map(d.krate); - let message = if config_enabled { - def_map.proc_macro_loading_error().unwrap_or("internal error") - } else { - match d.kind { - hir::MacroKind::Attr if proc_macros_enabled => "attribute macro expansion is disabled", - _ => "proc-macro expansion is disabled", - } - }; - let message = format!("{not_expanded_message}: {message}"); - - Diagnostic::new(DiagnosticCode::Ra("unresolved-proc-macro", severity), message, display_range) -} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs index fdd4e862caf..bf19331d9fd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs @@ -1,10 +1,9 @@ use hir::Name; use ide_db::{ assists::{Assist, AssistId, AssistKind}, - base_db::FileRange, label::Label, source_change::SourceChange, - RootDatabase, + FileRange, RootDatabase, }; use syntax::TextRange; use text_edit::TextEdit; @@ -43,7 +42,7 @@ pub(crate) fn unused_variables( ast, ) .with_fixes(name_range.and_then(|it| { - fixes(ctx.sema.db, var_name, it.range, diagnostic_range, ast.file_id.is_macro()) + fixes(ctx.sema.db, var_name, it.range, diagnostic_range.into(), ast.file_id.is_macro()) })) .experimental(), ) @@ -73,7 +72,7 @@ fn fixes( diagnostic_range.file_id, TextEdit::replace(name_range, format!("_{}", var_name.display(db))), )), - trigger_signature_help: false, + command: None, }]) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs index 79bcaa0a9c4..2d380ae0457 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -1,8 +1,5 @@ use hir::InFile; -use ide_db::{ - base_db::{FileId, FileRange}, - source_change::SourceChange, -}; +use ide_db::{source_change::SourceChange, EditionedFileId, FileRange}; use itertools::Itertools; use syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; use text_edit::TextEdit; @@ -14,7 +11,7 @@ use crate::{fix, Diagnostic, DiagnosticCode}; // Diagnostic for unnecessary braces in `use` items. 
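The `unused_variables` fix above boils down to a single text edit that puts `_` in front of the binding's name. A self-contained sketch of that edit, using a plain string and byte offsets instead of the real `TextEdit`/`FileRange` types:

```rust
// Sketch only: the real fix emits a TextEdit keyed by file id; a plain string
// replacement over a byte range stands in for it here.
fn prefix_with_underscore(source: &str, name_start: usize, name_end: usize) -> String {
    let mut out = String::with_capacity(source.len() + 1);
    out.push_str(&source[..name_start]);
    out.push('_'); // the whole fix: `unused` becomes `_unused`
    out.push_str(&source[name_start..name_end]);
    out.push_str(&source[name_end..]);
    out
}

fn main() {
    let src = "fn f() { let unused = 1; }";
    // 13..19 is the byte range of `unused` in `src`
    assert_eq!(prefix_with_underscore(src, 13, 19), "fn f() { let _unused = 1; }");
}
```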
pub(crate) fn useless_braces( acc: &mut Vec<Diagnostic>, - file_id: FileId, + file_id: EditionedFileId, node: &SyntaxNode, ) -> Option<()> { let use_tree_list = ast::UseTreeList::cast(node.clone())?; @@ -41,7 +38,7 @@ pub(crate) fn useless_braces( Diagnostic::new( DiagnosticCode::RustcLint("unused_braces"), "Unnecessary braces in use statement".to_owned(), - FileRange { file_id, range: use_range }, + FileRange { file_id: file_id.into(), range: use_range }, ) .with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node))) .with_fixes(Some(vec![fix( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 6d1226d65c5..263ab747559 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -62,7 +62,6 @@ mod handlers { pub(crate) mod unresolved_macro_call; pub(crate) mod unresolved_method; pub(crate) mod unresolved_module; - pub(crate) mod unresolved_proc_macro; pub(crate) mod unused_variables; // The handlers below are unusual, the implement the diagnostics as well. @@ -78,13 +77,13 @@ mod tests; use hir::{diagnostics::AnyDiagnostic, InFile, Semantics}; use ide_db::{ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy}, - base_db::{FileId, FileRange, SourceDatabase}, + base_db::SourceDatabase, generated::lints::{LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS}, imports::insert_use::InsertUseConfig, label::Label, source_change::SourceChange, syntax_helpers::node_ext::parse_tt_as_comma_sep_paths, - FxHashMap, FxHashSet, RootDatabase, SnippetCap, + EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, SnippetCap, }; use once_cell::sync::Lazy; use stdx::never; @@ -144,12 +143,16 @@ pub struct Diagnostic { } impl Diagnostic { - fn new(code: DiagnosticCode, message: impl Into<String>, range: FileRange) -> Diagnostic { + fn new( + code: DiagnosticCode, + message: impl Into<String>, + range: impl Into<FileRange>, + ) -> Diagnostic { let message = message.into(); Diagnostic { code, message, - range, + range: range.into(), severity: match code { DiagnosticCode::RustcHardError(_) => Severity::Error, // FIXME: Rustc lints are not always warning, but the ones that are currently implemented are all warnings. @@ -290,6 +293,7 @@ impl DiagnosticsContext<'_> { } })() .unwrap_or_else(|| sema.diagnostics_display_range(*node)) + .into() } } @@ -303,6 +307,9 @@ pub fn diagnostics( ) -> Vec<Diagnostic> { let _p = tracing::info_span!("diagnostics").entered(); let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let mut res = Vec::new(); // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. @@ -310,7 +317,7 @@ pub fn diagnostics( Diagnostic::new( DiagnosticCode::RustcHardError("syntax-error"), format!("Syntax Error: {err}"), - FileRange { file_id, range: err.range() }, + FileRange { file_id: file_id.into(), range: err.range() }, ) })); let parse_errors = res.len(); @@ -336,7 +343,7 @@ pub fn diagnostics( // file, so we skip semantic diagnostics so we can show these faster. 
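Several hunks above (`useless_braces`, the new prologue of `diagnostics()`) follow the same pattern: take a raw `FileId`, ask the semantics layer which edition it was first seen under, and fall back to the current edition when no crate owns the file. A standalone sketch of that control flow with mock types (the real `EditionedFileId` and `attach_first_edition` live in ide-db/span and hir, so the shapes below are illustrative only):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, Debug)]
enum Edition { Edition2015, Edition2021 }

// Mock stand-in for the real EditionedFileId: a file plus the edition to parse it with.
#[derive(Clone, Copy, Debug)]
struct EditionedFileId { file_id: FileId, edition: Edition }

impl EditionedFileId {
    // Fallback used when no crate claims the file: assume the latest edition.
    fn current_edition(file_id: FileId) -> Self {
        EditionedFileId { file_id, edition: Edition::Edition2021 }
    }
}

// Mock stand-in for Semantics::attach_first_edition: the edition of the first
// crate that owns the file, if any.
fn attach_first_edition(
    editions: &HashMap<FileId, Edition>,
    file_id: FileId,
) -> Option<EditionedFileId> {
    editions.get(&file_id).map(|&edition| EditionedFileId { file_id, edition })
}

fn main() {
    let mut editions = HashMap::new();
    editions.insert(FileId(1), Edition::Edition2015);

    for raw in [FileId(1), FileId(2)] {
        // The pattern from the diff: prefer the owning crate's edition,
        // otherwise fall back so parsing can still proceed.
        let file_id = attach_first_edition(&editions, raw)
            .unwrap_or_else(|| EditionedFileId::current_edition(raw));
        println!("{file_id:?}");
    }
}
```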
Some(m) if parse_errors < 16 => m.diagnostics(db, &mut diags, config.style_lints), Some(_) => (), - None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id), + None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()), } for diag in diags { @@ -397,7 +404,6 @@ pub fn diagnostics( AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d), AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d), AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d), - AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled), AnyDiagnostic::UnusedMut(d) => match handlers::mutability_errors::unused_mut(&ctx, &d) { Some(it) => it, None => continue, @@ -613,7 +619,7 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist { group: None, target, source_change: None, - trigger_signature_help: false, + command: None, } } @@ -627,4 +633,5 @@ fn adjusted_display_range<N: AstNode>( diag_ptr .with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range())) .original_node_file_range_rooted(ctx.sema.db) + .into() } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index cd5e95cc1e3..e56fca1e500 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -60,7 +60,7 @@ fn check_nth_fix_with_config( let (db, file_position) = RootDatabase::with_position(ra_fixture_before); let diagnostic = - super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id) + super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id.into()) .pop() .expect("no diagnostics"); let fix = &diagnostic @@ -102,34 +102,37 @@ pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture_before); let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; - let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) - .into_iter() - .find(|d| { - d.fixes - .as_ref() - .and_then(|fixes| { - fixes.iter().find(|fix| { - if !fix.target.contains_inclusive(file_position.offset) { - return false; - } - let actual = { - let source_change = fix.source_change.as_ref().unwrap(); - let file_id = *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let fix = + super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into()) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = + *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); - for (edit, snippet_edit) in source_change.source_file_edits.values() { - edit.apply(&mut actual); - if let Some(snippet_edit) = snippet_edit { - snippet_edit.apply(&mut actual); + for (edit, snippet_edit) in source_change.source_file_edits.values() + { + edit.apply(&mut actual); + if let Some(snippet_edit) = 
snippet_edit { + snippet_edit.apply(&mut actual); + } } - } - actual - }; - after == actual + actual + }; + after == actual + }) }) - }) - .is_some() - }); + .is_some() + }); assert!(fix.is_some(), "no diagnostic with desired fix"); } @@ -141,35 +144,38 @@ pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &s let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; let mut n_fixes = 0; - let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) - .into_iter() - .find(|d| { - d.fixes - .as_ref() - .and_then(|fixes| { - n_fixes += fixes.len(); - fixes.iter().find(|fix| { - if !fix.target.contains_inclusive(file_position.offset) { - return false; - } - let actual = { - let source_change = fix.source_change.as_ref().unwrap(); - let file_id = *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let fix = + super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into()) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + n_fixes += fixes.len(); + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = + *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); - for (edit, snippet_edit) in source_change.source_file_edits.values() { - edit.apply(&mut actual); - if let Some(snippet_edit) = snippet_edit { - snippet_edit.apply(&mut actual); + for (edit, snippet_edit) in source_change.source_file_edits.values() + { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } } - } - actual - }; - after == actual + actual + }; + after == actual + }) }) - }) - .is_some() - }); + .is_some() + }); assert!(fix.is_some(), "no diagnostic with desired fix"); assert!(n_fixes == 1, "Too many fixes suggested"); } @@ -181,7 +187,7 @@ pub(crate) fn check_no_fix(ra_fixture: &str) { &db, &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, - file_position.file_id, + file_position.file_id.into(), ) .pop() .unwrap(); @@ -209,8 +215,9 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur .iter() .copied() .flat_map(|file_id| { - super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id).into_iter().map( - |d| { + super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into()) + .into_iter() + .map(|d| { let mut annotation = String::new(); if let Some(fixes) = &d.fixes { assert!(!fixes.is_empty()); @@ -225,12 +232,12 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur annotation.push_str(": "); annotation.push_str(&d.message); (d.range, annotation) - }, - ) + }) }) .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation))) .into_group_map(); for file_id in files { + let file_id = file_id.into(); let line_index = db.line_index(file_id); let mut actual = annotations.remove(&file_id).unwrap_or_default(); @@ -268,6 +275,7 @@ fn test_disabled_diagnostics() { config.disabled.insert("E0583".into()); let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#); + let file_id = file_id.into(); let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); assert!(diagnostics.is_empty()); diff --git 
a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs index 5b6e016251b..a14e69030e3 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs @@ -1,10 +1,7 @@ //! This module allows building an SSR MatchFinder by parsing the SSR rule //! from a comment. -use ide_db::{ - base_db::{FilePosition, FileRange, SourceDatabase}, - RootDatabase, -}; +use ide_db::{base_db::SourceDatabase, EditionedFileId, FilePosition, FileRange, RootDatabase}; use syntax::{ ast::{self, AstNode, AstToken}, TextRange, @@ -20,7 +17,7 @@ pub fn ssr_from_comment( frange: FileRange, ) -> Option<(MatchFinder<'_>, TextRange)> { let comment = { - let file = db.parse(frange.file_id); + let file = db.parse(EditionedFileId::current_edition(frange.file_id)); file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast) }?; let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap(); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index 407433ed192..e62ef604336 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -83,9 +83,8 @@ mod tests; pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Match}; use crate::{errors::bail, matching::MatchFailureReason}; -use hir::Semantics; -use ide_db::base_db::{FileId, FilePosition, FileRange}; -use nohash_hasher::IntMap; +use hir::{FileRange, Semantics}; +use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase}; use resolving::ResolvedRule; use syntax::{ast, AstNode, SyntaxNode, TextRange}; use text_edit::TextEdit; @@ -116,21 +115,27 @@ pub struct MatchFinder<'db> { sema: Semantics<'db, ide_db::RootDatabase>, rules: Vec<ResolvedRule>, resolution_scope: resolving::ResolutionScope<'db>, - restrict_ranges: Vec<FileRange>, + restrict_ranges: Vec<ide_db::FileRange>, } impl<'db> MatchFinder<'db> { /// Constructs a new instance where names will be looked up as if they appeared at /// `lookup_context`. pub fn in_context( - db: &'db ide_db::RootDatabase, - lookup_context: FilePosition, - mut restrict_ranges: Vec<FileRange>, + db: &'db RootDatabase, + lookup_context: ide_db::FilePosition, + mut restrict_ranges: Vec<ide_db::FileRange>, ) -> Result<MatchFinder<'db>, SsrError> { restrict_ranges.retain(|range| !range.range.is_empty()); let sema = Semantics::new(db); - let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context) - .ok_or_else(|| SsrError("no resolution scope for file".into()))?; + let file_id = sema + .attach_first_edition(lookup_context.file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(lookup_context.file_id)); + let resolution_scope = resolving::ResolutionScope::new( + &sema, + hir::FilePosition { file_id, offset: lookup_context.offset }, + ) + .ok_or_else(|| SsrError("no resolution scope for file".into()))?; Ok(MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges }) } @@ -143,7 +148,7 @@ impl<'db> MatchFinder<'db> { { MatchFinder::in_context( db, - FilePosition { file_id: first_file_id, offset: 0.into() }, + ide_db::FilePosition { file_id: first_file_id, offset: 0.into() }, vec![], ) } else { @@ -166,12 +171,12 @@ impl<'db> MatchFinder<'db> { } /// Finds matches for all added rules and returns edits for all found matches. 
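The `edits` hunk that follows switches the per-file grouping from an `IntMap` to an `FxHashMap`, but the shape stays the same: bucket every match by the file it was found in, then build one edit per file. A minimal sketch of that grouping, with std types standing in for `FxHashMap` and `TextEdit`:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

struct Match { file_id: FileId, replacement: &'static str }

// Sketch of MatchFinder::edits: one bucket per file, and every match found in
// that file contributes to the single edit eventually produced for it.
fn edits(matches: Vec<Match>) -> HashMap<FileId, Vec<&'static str>> {
    let mut by_file: HashMap<FileId, Vec<&'static str>> = HashMap::new();
    for m in matches {
        by_file.entry(m.file_id).or_default().push(m.replacement);
    }
    by_file
}

fn main() {
    let matches = vec![
        Match { file_id: FileId(0), replacement: "bar(1+2)" },
        Match { file_id: FileId(0), replacement: "bar(3)" },
        Match { file_id: FileId(1), replacement: "bar(4)" },
    ];
    let grouped = edits(matches);
    assert_eq!(grouped[&FileId(0)].len(), 2);
    assert_eq!(grouped[&FileId(1)].len(), 1);
}
```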
- pub fn edits(&self) -> IntMap<FileId, TextEdit> { + pub fn edits(&self) -> FxHashMap<FileId, TextEdit> { use ide_db::base_db::SourceDatabaseExt; - let mut matches_by_file = IntMap::default(); + let mut matches_by_file = FxHashMap::default(); for m in self.matches().matches { matches_by_file - .entry(m.range.file_id) + .entry(m.range.file_id.file_id()) .or_insert_with(SsrMatches::default) .matches .push(m); @@ -218,11 +223,15 @@ impl<'db> MatchFinder<'db> { /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match /// them, while recording reasons why they don't match. This API is useful for command /// line-based debugging where providing a range is difficult. - pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> { + pub fn debug_where_text_equal( + &self, + file_id: EditionedFileId, + snippet: &str, + ) -> Vec<MatchDebugInfo> { use ide_db::base_db::SourceDatabaseExt; let file = self.sema.parse(file_id); let mut res = Vec::new(); - let file_text = self.sema.db.file_text(file_id); + let file_text = self.sema.db.file_text(file_id.into()); let mut remaining_text = &*file_text; let mut base = 0; let len = snippet.len() as u32; @@ -349,7 +358,7 @@ impl std::error::Error for SsrError {} #[cfg(test)] impl MatchDebugInfo { - pub(crate) fn match_failure_reason(&self) -> Option<&str> { + pub fn match_failure_reason(&self) -> Option<&str> { self.matched.as_ref().err().map(|r| r.reason.as_str()) } } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs index 7c357b3c217..5f6d77c064c 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs @@ -6,8 +6,8 @@ use crate::{ resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, SsrMatches, }; -use hir::{ImportPathConfig, Semantics}; -use ide_db::{base_db::FileRange, FxHashMap}; +use hir::{FileRange, ImportPathConfig, Semantics}; +use ide_db::FxHashMap; use std::{cell::Cell, iter::Peekable}; use syntax::{ ast::{self, AstNode, AstToken, HasGenericArgs}, @@ -801,7 +801,12 @@ mod tests { let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }"; let (db, position, selections) = crate::tests::single_file(input); - let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap(); + let mut match_finder = MatchFinder::in_context( + &db, + position.into(), + selections.into_iter().map(Into::into).collect(), + ) + .unwrap(); match_finder.add_rule(rule).unwrap(); let matches = match_finder.matches(); assert_eq!(matches.matches.len(), 1); @@ -810,7 +815,7 @@ mod tests { let edits = match_finder.edits(); assert_eq!(edits.len(), 1); - let edit = &edits[&position.file_id]; + let edit = &edits[&position.file_id.into()]; let mut after = input.to_owned(); edit.apply(&mut after); assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }"); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs index 2f91271c465..e752ee3d775 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs @@ -255,7 +255,7 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { } fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> { - let lexed = parser::LexedStr::new(source); + let lexed = parser::LexedStr::new(parser::Edition::CURRENT, source); if let Some((_, first_error)) = lexed.errors().next() { 
bail!("Failed to parse pattern: {}", first_error); } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs index d3c1af1f31e..270ee0b3ec9 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs @@ -1,7 +1,7 @@ //! This module is responsible for resolving paths within rules. use hir::AsAssocItem; -use ide_db::{base_db::FilePosition, FxHashMap}; +use ide_db::FxHashMap; use parsing::Placeholder; use syntax::{ ast::{self, HasGenericArgs}, @@ -195,7 +195,7 @@ impl Resolver<'_, '_> { impl<'db> ResolutionScope<'db> { pub(crate) fn new( sema: &hir::Semantics<'db, ide_db::RootDatabase>, - resolve_context: FilePosition, + resolve_context: hir::FilePosition, ) -> Option<ResolutionScope<'db>> { use syntax::ast::AstNode; let file = sema.parse(resolve_context.file_id); @@ -238,7 +238,7 @@ impl<'db> ResolutionScope<'db> { None, |assoc_item| { let item_name = assoc_item.name(self.scope.db)?; - if item_name.to_smol_str().as_str() == name.text() { + if item_name.as_str() == name.text() { Some(hir::PathResolution::Def(assoc_item.into())) } else { None diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs index 55a49da2424..832386685d7 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs @@ -5,11 +5,11 @@ use crate::{ resolving::{ResolvedPath, ResolvedPattern, ResolvedRule}, Match, MatchFinder, }; +use hir::FileRange; use ide_db::{ - base_db::{FileId, FileRange}, defs::Definition, search::{SearchScope, UsageSearchResult}, - FxHashSet, + EditionedFileId, FileId, FxHashSet, }; use syntax::{ast, AstNode, SyntaxKind, SyntaxNode}; @@ -136,14 +136,18 @@ impl MatchFinder<'_> { // seems to get put into a single source root. 
let mut files = Vec::new(); self.search_files_do(|file_id| { - files.push(file_id); + files.push( + self.sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)), + ); }); SearchScope::files(&files) } fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) { self.search_files_do(|file_id| { - let file = self.sema.parse(file_id); + let file = self.sema.parse_guess_edition(file_id); let code = file.syntax(); self.slow_scan_node(code, rule, &None, matches_out); }) diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs index e608b0a7c42..4477a268b29 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs @@ -1,7 +1,8 @@ use expect_test::{expect, Expect}; +use hir::{FilePosition, FileRange}; use ide_db::{ - base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt}, - FxHashSet, + base_db::{salsa::Durability, SourceDatabaseExt}, + EditionedFileId, FxHashSet, }; use test_utils::RangeOrOffset; use triomphe::Arc; @@ -97,7 +98,12 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) { fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { let (db, position, selections) = single_file(input); - let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap(); + let mut match_finder = MatchFinder::in_context( + &db, + position.into(), + selections.into_iter().map(Into::into).collect(), + ) + .unwrap(); for rule in rules { let rule: SsrRule = rule.parse().unwrap(); match_finder.add_rule(rule).unwrap(); @@ -108,13 +114,13 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { } // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters // stuff. 
- let mut actual = db.file_text(position.file_id).to_string(); - edits[&position.file_id].apply(&mut actual); + let mut actual = db.file_text(position.file_id.into()).to_string(); + edits[&position.file_id.into()].apply(&mut actual); expected.assert_eq(&actual); } #[allow(clippy::print_stdout)] -fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snippet: &str) { +fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: EditionedFileId, snippet: &str) { let debug_info = match_finder.debug_where_text_equal(file_id, snippet); println!( "Match debug info: {} nodes had text exactly equal to '{}'", @@ -128,7 +134,12 @@ fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snipp fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { let (db, position, selections) = single_file(code); - let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap(); + let mut match_finder = MatchFinder::in_context( + &db, + position.into(), + selections.into_iter().map(Into::into).collect(), + ) + .unwrap(); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); let matched_strings: Vec<String> = match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); @@ -140,7 +151,12 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { fn assert_no_match(pattern: &str, code: &str) { let (db, position, selections) = single_file(code); - let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap(); + let mut match_finder = MatchFinder::in_context( + &db, + position.into(), + selections.into_iter().map(Into::into).collect(), + ) + .unwrap(); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); let matches = match_finder.matches().flattened().matches; if !matches.is_empty() { @@ -151,7 +167,12 @@ fn assert_no_match(pattern: &str, code: &str) { fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) { let (db, position, selections) = single_file(code); - let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap(); + let mut match_finder = MatchFinder::in_context( + &db, + position.into(), + selections.into_iter().map(Into::into).collect(), + ) + .unwrap(); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); let mut reasons = Vec::new(); for d in match_finder.debug_where_text_equal(position.file_id, snippet) { @@ -452,7 +473,7 @@ fn match_struct_instantiation() { fn match_path() { let code = r#" mod foo { - pub fn bar() {} + pub(crate) fn bar() {} } fn f() {foo::bar(42)}"#; assert_matches("foo::bar", code, &["foo::bar"]); @@ -471,8 +492,8 @@ fn match_pattern() { fn match_fully_qualified_fn_path() { let code = r#" mod a { - pub mod b { - pub fn c(_: i32) {} + pub(crate) mod b { + pub(crate) fn c(_: i32) {} } } use a::b::c; @@ -487,8 +508,8 @@ fn match_fully_qualified_fn_path() { fn match_resolved_type_name() { let code = r#" mod m1 { - pub mod m2 { - pub trait Foo<T> {} + pub(crate) mod m2 { + pub(crate) trait Foo<T> {} } } mod m3 { @@ -508,9 +529,9 @@ fn type_arguments_within_path() { cov_mark::check!(type_arguments_within_path); let code = r#" mod foo { - pub struct Bar<T> {t: T} + pub(crate) struct Bar<T> {t: T} impl<T> Bar<T> { - pub fn baz() {} + pub(crate) fn baz() {} } } fn f1() {foo::Bar::<i32>::baz();} @@ -659,9 +680,9 @@ fn replace_associated_trait_default_function_call() { "Bar2::foo() ==>> Bar2::foo2()", r#" trait Foo { fn foo() {} } - pub struct Bar {} + pub(crate) 
struct Bar {} impl Foo for Bar {} - pub struct Bar2 {} + pub(crate) struct Bar2 {} impl Foo for Bar2 {} impl Bar2 { fn foo2() {} } fn main() { @@ -671,9 +692,9 @@ fn replace_associated_trait_default_function_call() { "#, expect![[r#" trait Foo { fn foo() {} } - pub struct Bar {} + pub(crate) struct Bar {} impl Foo for Bar {} - pub struct Bar2 {} + pub(crate) struct Bar2 {} impl Foo for Bar2 {} impl Bar2 { fn foo2() {} } fn main() { @@ -691,9 +712,9 @@ fn replace_associated_trait_constant() { "Bar2::VALUE ==>> Bar2::VALUE_2222", r#" trait Foo { const VALUE: i32; const VALUE_2222: i32; } - pub struct Bar {} + pub(crate) struct Bar {} impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; } - pub struct Bar2 {} + pub(crate) struct Bar2 {} impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; } impl Bar2 { fn foo2() {} } fn main() { @@ -703,9 +724,9 @@ fn replace_associated_trait_constant() { "#, expect![[r#" trait Foo { const VALUE: i32; const VALUE_2222: i32; } - pub struct Bar {} + pub(crate) struct Bar {} impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; } - pub struct Bar2 {} + pub(crate) struct Bar2 {} impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; } impl Bar2 { fn foo2() {} } fn main() { @@ -726,10 +747,10 @@ fn replace_path_in_different_contexts() { "c::foo() ==>> c::bar()", r#" mod a { - pub mod b {$0 - pub mod c { - pub fn foo() {} - pub fn bar() {} + pub(crate) mod b {$0 + pub(crate) mod c { + pub(crate) fn foo() {} + pub(crate) fn bar() {} fn f1() { foo() } } fn f2() { c::foo() } @@ -741,10 +762,10 @@ fn replace_path_in_different_contexts() { "#, expect![[r#" mod a { - pub mod b { - pub mod c { - pub fn foo() {} - pub fn bar() {} + pub(crate) mod b { + pub(crate) mod c { + pub(crate) fn foo() {} + pub(crate) fn bar() {} fn f1() { bar() } } fn f2() { c::bar() } @@ -763,15 +784,15 @@ fn replace_associated_function_with_generics() { "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()", r#" mod c { - pub struct Foo<T> {v: T} - impl<T> Foo<T> { pub fn new() {} } + pub(crate) struct Foo<T> {v: T} + impl<T> Foo<T> { pub(crate) fn new() {} } fn f1() { Foo::<i32>::new(); } } mod d { - pub struct Bar<T> {v: T} - impl<T> Bar<T> { pub fn default() {} } + pub(crate) struct Bar<T> {v: T} + impl<T> Bar<T> { pub(crate) fn default() {} } fn f1() { super::c::Foo::<i32>::new(); } @@ -779,15 +800,15 @@ fn replace_associated_function_with_generics() { "#, expect![[r#" mod c { - pub struct Foo<T> {v: T} - impl<T> Foo<T> { pub fn new() {} } + pub(crate) struct Foo<T> {v: T} + impl<T> Foo<T> { pub(crate) fn new() {} } fn f1() { crate::d::Bar::<i32>::default(); } } mod d { - pub struct Bar<T> {v: T} - impl<T> Bar<T> { pub fn default() {} } + pub(crate) struct Bar<T> {v: T} + impl<T> Bar<T> { pub(crate) fn default() {} } fn f1() { Bar::<i32>::default(); } @@ -823,9 +844,9 @@ fn f1() -> DynTrait<Vec<Error>> {foo()} #[test] fn replace_macro_invocations() { assert_ssr_transform( - "try!($a) ==>> $a?", - "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}", - expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]], + "try_!($a) ==>> $a?", + "macro_rules! try_ {() => {}} fn f1() -> Result<(), E> {bar(try_!(foo()));}", + expect![["macro_rules! 
try_ {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]], ); // FIXME: Figure out why this doesn't work anymore // assert_ssr_transform( @@ -1029,14 +1050,14 @@ fn use_declaration_with_braces() { assert_ssr_transform( "foo::bar ==>> foo2::bar2", r#" - mod foo { pub fn bar() {} pub fn baz() {} } - mod foo2 { pub fn bar2() {} } + mod foo { pub(crate) fn bar() {} pub(crate) fn baz() {} } + mod foo2 { pub(crate) fn bar2() {} } use foo::{baz, bar}; fn main() { bar() } "#, expect![[" - mod foo { pub fn bar() {} pub fn baz() {} } - mod foo2 { pub fn bar2() {} } + mod foo { pub(crate) fn bar() {} pub(crate) fn baz() {} } + mod foo2 { pub(crate) fn bar2() {} } use foo::{baz, bar}; fn main() { foo2::bar2() } "]], @@ -1266,9 +1287,9 @@ fn match_trait_method_call() { // `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type // matches what's in the pattern. Also checks that we handle autoderef. let code = r#" - pub struct Bar {} - pub struct Bar2 {} - pub trait Foo { + pub(crate) struct Bar {} + pub(crate) struct Bar2 {} + pub(crate) trait Foo { fn foo(&self, _: i32) {} } impl Foo for Bar {} diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs index f49c5af0af1..8e0166a4a76 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs @@ -1,9 +1,7 @@ use hir::{HasSource, InFile, InRealFile, Semantics}; use ide_db::{ - base_db::{FileId, FilePosition, FileRange}, - defs::Definition, - helpers::visit_file_defs, - FxHashSet, RootDatabase, + defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxHashSet, + RootDatabase, }; use itertools::Itertools; use syntax::{ast::HasName, AstNode, TextRange}; @@ -256,7 +254,7 @@ fn main() { Annotation { range: 6..10, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -264,7 +262,7 @@ fn main() { }, data: Some( [ - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -277,7 +275,7 @@ fn main() { Annotation { range: 30..36, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -310,7 +308,7 @@ fn main() { Annotation { range: 53..57, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -341,7 +339,7 @@ fn main() { Annotation { range: 7..11, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -349,7 +347,7 @@ fn main() { }, data: Some( [ - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -362,7 +360,7 @@ fn main() { Annotation { range: 7..11, kind: HasImpls { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -395,7 +393,7 @@ fn main() { Annotation { range: 17..21, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -430,7 +428,7 @@ fn main() { Annotation { range: 7..11, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -438,13 +436,13 @@ fn main() { }, data: Some( [ - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), range: 57..61, }, - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -457,7 +455,7 @@ fn main() { Annotation { range: 7..11, kind: HasImpls { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -481,7 +479,7 @@ fn main() { Annotation { range: 20..31, kind: HasImpls { - pos: FilePosition { + pos: 
FilePositionWrapper { file_id: FileId( 0, ), @@ -505,7 +503,7 @@ fn main() { Annotation { range: 20..31, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -513,7 +511,7 @@ fn main() { }, data: Some( [ - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -526,7 +524,7 @@ fn main() { Annotation { range: 69..73, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -591,7 +589,7 @@ fn main() {} Annotation { range: 3..7, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -626,7 +624,7 @@ fn main() { Annotation { range: 7..11, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -634,13 +632,13 @@ fn main() { }, data: Some( [ - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), range: 19..23, }, - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -653,7 +651,7 @@ fn main() { Annotation { range: 7..11, kind: HasImpls { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -677,7 +675,7 @@ fn main() { Annotation { range: 33..44, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -685,7 +683,7 @@ fn main() { }, data: Some( [ - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -698,7 +696,7 @@ fn main() { Annotation { range: 61..65, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -749,7 +747,7 @@ mod tests { Annotation { range: 3..7, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -879,7 +877,7 @@ struct Foo; Annotation { range: 0..71, kind: HasReferences { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), @@ -893,7 +891,7 @@ struct Foo; Annotation { range: 0..71, kind: HasImpls { - pos: FilePosition { + pos: FilePositionWrapper { file_id: FileId( 0, ), diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs index 2e7e230e5ac..08cc10509cb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs @@ -13,7 +13,7 @@ pub(super) fn find_all_methods( file_id: FileId, ) -> Vec<(TextRange, Option<TextRange>)> { let sema = Semantics::new(db); - let source_file = sema.parse(file_id); + let source_file = sema.parse_guess_edition(file_id); source_file.syntax().descendants().filter_map(method_range).collect() } diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs index 3c29f2f4276..87093104852 100644 --- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs +++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs @@ -7,9 +7,8 @@ use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, search::FileReference, - FxIndexMap, RootDatabase, + FileRange, FxIndexMap, RootDatabase, }; -use span::FileRange; use syntax::{ast, AstNode, SyntaxKind::IDENT}; use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav}; @@ -33,7 +32,7 @@ pub(crate) fn incoming_calls( ) -> Option<Vec<CallItem>> { let sema = &Semantics::new(db); - let file = sema.parse(file_id); + let file = sema.parse_guess_edition(file_id); let file = file.syntax(); let mut calls = CallLocations::default(); @@ -63,9 +62,9 @@ 
pub(crate) fn incoming_calls( }); if let Some(nav) = nav { let range = sema.original_range(name.syntax()); - calls.add(nav.call_site, range); + calls.add(nav.call_site, range.into()); if let Some(other) = nav.def_site { - calls.add(other, range); + calls.add(other, range.into()); } } } @@ -79,7 +78,7 @@ pub(crate) fn outgoing_calls( FilePosition { file_id, offset }: FilePosition, ) -> Option<Vec<CallItem>> { let sema = Semantics::new(db); - let file = sema.parse(file_id); + let file = sema.parse_guess_edition(file_id); let file = file.syntax(); let token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT => 1, @@ -121,7 +120,7 @@ pub(crate) fn outgoing_calls( Some(nav_target.into_iter().zip(iter::repeat(range))) }) .flatten() - .for_each(|(nav, range)| calls.add(nav, range)); + .for_each(|(nav, range)| calls.add(nav, range.into())); Some(calls.into_items()) } @@ -144,7 +143,7 @@ impl CallLocations { #[cfg(test)] mod tests { use expect_test::{expect, Expect}; - use ide_db::base_db::FilePosition; + use ide_db::FilePosition; use itertools::Itertools; use crate::fixture; diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index f42613637b2..e9e5240897e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -11,7 +11,8 @@ use stdx::format_to; use url::Url; use hir::{ - db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, HasAttrs, + db::HirDatabase, sym, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, + HasAttrs, }; use ide_db::{ base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase}, @@ -136,7 +137,7 @@ pub(crate) fn external_docs( sysroot: Option<&str>, ) -> Option<DocumentationLinks> { let sema = &Semantics::new(db); - let file = sema.parse(file_id).syntax().clone(); + let file = sema.parse_guess_edition(file_id).syntax().clone(); let token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER | T![self] => 3, T!['('] | T![')'] => 2, @@ -593,12 +594,14 @@ fn filename_and_frag_for_def( }, Definition::Module(m) => match m.name(db) { // `#[doc(keyword = "...")]` is internal used only by rust compiler - Some(name) => match m.attrs(db).by_key("doc").find_string_value_in_tt("keyword") { - Some(kw) => { - format!("keyword.{}.html", kw.trim_matches('"')) + Some(name) => { + match m.attrs(db).by_key(&sym::doc).find_string_value_in_tt(&sym::keyword) { + Some(kw) => { + format!("keyword.{}.html", kw) + } + None => format!("{}/index.html", name.display(db.upcast())), } - None => format!("{}/index.html", name.display(db.upcast())), - }, + } None => String::from("index.html"), }, Definition::Trait(t) => format!("trait.{}.html", t.name(db).display(db.upcast())), diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs index ac44908a902..fe91c81a615 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs @@ -3,10 +3,9 @@ use std::iter; use expect_test::{expect, Expect}; use hir::Semantics; use ide_db::{ - base_db::{FilePosition, FileRange}, defs::Definition, documentation::{Documentation, HasDocs}, - RootDatabase, + FilePosition, FileRange, RootDatabase, }; use itertools::Itertools; use syntax::{ast, match_ast, AstNode, SyntaxNode}; @@ -80,7 +79,7 @@ fn def_under_cursor( position: &FilePosition, ) 
-> (Definition, Documentation) { let (docs, def) = sema - .parse(position.file_id) + .parse_guess_edition(position.file_id) .syntax() .token_at_offset(position.offset) .left_biased() diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index 4b54c057bf3..c8fe45c9cf0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -1,7 +1,7 @@ use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics}; use ide_db::{ - base_db::FileId, helpers::pick_best_token, - syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, + helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, FileId, + RootDatabase, }; use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T}; @@ -25,7 +25,7 @@ pub struct ExpandedMacro { // image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[] pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> { let sema = Semantics::new(db); - let file = sema.parse(position.file_id); + let file = sema.parse_guess_edition(position.file_id); let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind { SyntaxKind::IDENT => 1, diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs index e8d6dc97341..5f6aaeaabb6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs +++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs @@ -26,7 +26,7 @@ use crate::FileRange; // image::https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif[] pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { let sema = Semantics::new(db); - let src = sema.parse(frange.file_id); + let src = sema.parse_guess_edition(frange.file_id); try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range) } diff --git a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs index 14c2655f84d..37b3cb03b33 100644 --- a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs +++ b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs @@ -1,6 +1,6 @@ use ide_db::{ - base_db::{CrateOrigin, FileId, SourceDatabase}, - FxIndexSet, RootDatabase, + base_db::{CrateOrigin, SourceDatabase}, + FileId, FxIndexSet, RootDatabase, }; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -38,5 +38,5 @@ fn crate_info(data: &ide_db::base_db::CrateData) -> CrateInfo { } fn crate_name(data: &ide_db::base_db::CrateData) -> Option<String> { - data.display_name.as_ref().map(|it| it.canonical_name().to_owned()) + data.display_name.as_ref().map(|it| it.canonical_name().as_str().to_owned()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs index 568906a098e..92458185849 100644 --- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs +++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs @@ -197,7 +197,9 @@ fn structure_token(token: SyntaxToken) -> Option<StructureNode> { if let Some(comment) = ast::Comment::cast(token) { let text = comment.text().trim(); - if let Some(region_name) = text.strip_prefix("// region:").map(str::trim) { + if let Some(region_name) = + text.strip_prefix("// 
region:").map(str::trim).filter(|it| !it.is_empty()) + { return Some(StructureNode { parent: None, label: region_name.to_owned(), diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs index 3b19b85c4bc..b16511072bd 100644 --- a/src/tools/rust-analyzer/crates/ide/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs @@ -10,7 +10,7 @@ pub(crate) fn file(ra_fixture: &str) -> (Analysis, FileId) { let change_fixture = ChangeFixture::parse(ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); - (host.analysis(), change_fixture.files[0]) + (host.analysis(), change_fixture.files[0].into()) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. @@ -21,7 +21,7 @@ pub(crate) fn position(ra_fixture: &str) -> (Analysis, FilePosition) { host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (host.analysis(), FilePosition { file_id, offset }) + (host.analysis(), FilePosition { file_id: file_id.into(), offset }) } /// Creates analysis for a single file, returns range marked with a pair of $0. @@ -32,7 +32,7 @@ pub(crate) fn range(ra_fixture: &str) -> (Analysis, FileRange) { host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let range = range_or_offset.expect_range(); - (host.analysis(), FileRange { file_id, range }) + (host.analysis(), FileRange { file_id: file_id.into(), range }) } /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0. @@ -42,7 +42,7 @@ pub(crate) fn range_or_position(ra_fixture: &str) -> (Analysis, FileId, RangeOrO host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); - (host.analysis(), file_id, range_or_offset) + (host.analysis(), file_id.into(), range_or_offset) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. 
@@ -58,12 +58,14 @@ pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(Fil .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id).unwrap(); + let file_text = host.analysis().file_text(file_id.into()).unwrap(); let annotations = extract_annotations(&file_text); - annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data)) + annotations + .into_iter() + .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data)) }) .collect(); - (host.analysis(), FilePosition { file_id, offset }, annotations) + (host.analysis(), FilePosition { file_id: file_id.into(), offset }, annotations) } /// Creates analysis from a multi-file fixture with annotations without $0 @@ -77,9 +79,11 @@ pub(crate) fn annotations_without_marker(ra_fixture: &str) -> (Analysis, Vec<(Fi .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id).unwrap(); + let file_text = host.analysis().file_text(file_id.into()).unwrap(); let annotations = extract_annotations(&file_text); - annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data)) + annotations + .into_iter() + .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data)) }) .collect(); (host.analysis(), annotations) diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs index c19f19803f1..6076de54eba 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs @@ -23,7 +23,7 @@ pub(crate) fn goto_declaration( position @ FilePosition { file_id, offset }: FilePosition, ) -> Option<RangeInfo<Vec<NavigationTarget>>> { let sema = Semantics::new(db); - let file = sema.parse(file_id).syntax().clone(); + let file = sema.parse_guess_edition(file_id).syntax().clone(); let original_token = file .token_at_offset(offset) .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; @@ -78,7 +78,7 @@ pub(crate) fn goto_declaration( #[cfg(test)] mod tests { - use ide_db::base_db::FileRange; + use ide_db::FileRange; use itertools::Itertools; use crate::fixture; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index f57cb1cb730..d0701a45b10 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -1,18 +1,29 @@ use std::{iter, mem::discriminant}; use crate::{ - doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget, - RangeInfo, TryToNav, + doc_links::token_as_doc_comment, + navigation_target::{self, ToNav}, + FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult, +}; +use hir::{ + AsAssocItem, AssocItem, DescendPreference, FileRange, InFile, MacroFileIdExt, ModuleDef, + Semantics, }; -use hir::{AsAssocItem, AssocItem, DescendPreference, MacroFileIdExt, ModuleDef, Semantics}; use ide_db::{ - base_db::{AnchoredPath, FileId, FileLoader}, + base_db::{AnchoredPath, FileLoader}, defs::{Definition, IdentClass}, helpers::pick_best_token, - RootDatabase, + RootDatabase, SymbolKind, }; use itertools::Itertools; -use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T}; + +use span::FileId; +use syntax::{ + ast::{self, HasLoopBody}, + match_ast, AstNode, AstToken, + SyntaxKind::*, + SyntaxNode, SyntaxToken, 
TextRange, T, +}; // Feature: Go to Definition // @@ -32,7 +43,7 @@ pub(crate) fn goto_definition( FilePosition { file_id, offset }: FilePosition, ) -> Option<RangeInfo<Vec<NavigationTarget>>> { let sema = &Semantics::new(db); - let file = sema.parse(file_id).syntax().clone(); + let file = sema.parse_guess_edition(file_id).syntax().clone(); let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER @@ -68,6 +79,10 @@ pub(crate) fn goto_definition( )); } + if let Some(navs) = handle_control_flow_keywords(sema, &original_token) { + return Some(RangeInfo::new(original_token.text_range(), navs)); + } + let navs = sema .descend_into_macros(DescendPreference::None, original_token.clone()) .into_iter() @@ -150,7 +165,7 @@ fn try_lookup_macro_def_in_macro_use( for mod_def in krate.root_module().declarations(sema.db) { if let ModuleDef::Macro(mac) = mod_def { - if mac.name(sema.db).as_str() == Some(token.text()) { + if mac.name(sema.db).as_str() == token.text() { if let Some(nav) = mac.try_to_nav(sema.db) { return Some(nav.call_site); } @@ -190,13 +205,224 @@ fn try_filter_trait_item_definition( } } +fn handle_control_flow_keywords( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, +) -> Option<Vec<NavigationTarget>> { + match token.kind() { + // For `fn` / `loop` / `while` / `for` / `async`, return the keyword it self, + // so that VSCode will find the references when using `ctrl + click` + T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token), + T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token), + T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => { + nav_for_break_points(sema, token) + } + _ => None, + } +} + +pub(crate) fn find_fn_or_blocks( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, +) -> Vec<SyntaxNode> { + let find_ancestors = |token: SyntaxToken| { + let token_kind = token.kind(); + + for anc in sema.token_ancestors_with_macros(token) { + let node = match_ast! { + match anc { + ast::Fn(fn_) => fn_.syntax().clone(), + ast::ClosureExpr(c) => c.syntax().clone(), + ast::BlockExpr(blk) => { + match blk.modifier() { + Some(ast::BlockModifier::Async(_)) => blk.syntax().clone(), + Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => blk.syntax().clone(), + _ => continue, + } + }, + _ => continue, + } + }; + + return Some(node); + } + None + }; + + sema.descend_into_macros(DescendPreference::None, token.clone()) + .into_iter() + .filter_map(find_ancestors) + .collect_vec() +} + +fn nav_for_exit_points( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, +) -> Option<Vec<NavigationTarget>> { + let db = sema.db; + let token_kind = token.kind(); + + let navs = find_fn_or_blocks(sema, token) + .into_iter() + .filter_map(|node| { + let file_id = sema.hir_file_for(&node); + + match_ast! { + match node { + ast::Fn(fn_) => { + let mut nav = sema.to_def(&fn_)?.try_to_nav(db)?; + // For async token, we navigate to itself, which triggers + // VSCode to find the references + let focus_token = if matches!(token_kind, T![async]) { + fn_.async_token()? + } else { + fn_.fn_token()? 
+ }; + + let focus_frange = InFile::new(file_id, focus_token.text_range()) + .original_node_file_range_opt(db) + .map(|(frange, _)| frange); + + if let Some(FileRange { file_id, range }) = focus_frange { + let contains_frange = |nav: &NavigationTarget| { + nav.file_id == file_id && nav.full_range.contains_range(range) + }; + + if let Some(def_site) = nav.def_site.as_mut() { + if contains_frange(def_site) { + def_site.focus_range = Some(range); + } + } else if contains_frange(&nav.call_site) { + nav.call_site.focus_range = Some(range); + } + } + + Some(nav) + }, + ast::ClosureExpr(c) => { + let pipe_tok = c.param_list().and_then(|it| it.pipe_token())?.text_range(); + let closure_in_file = InFile::new(file_id, c.into()); + Some(expr_to_nav(db, closure_in_file, Some(pipe_tok))) + }, + ast::BlockExpr(blk) => { + match blk.modifier() { + Some(ast::BlockModifier::Async(_)) => { + let async_tok = blk.async_token()?.text_range(); + let blk_in_file = InFile::new(file_id, blk.into()); + Some(expr_to_nav(db, blk_in_file, Some(async_tok))) + }, + Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => { + let try_tok = blk.try_token()?.text_range(); + let blk_in_file = InFile::new(file_id, blk.into()); + Some(expr_to_nav(db, blk_in_file, Some(try_tok))) + }, + _ => None, + } + }, + _ => None, + } + } + }) + .flatten() + .collect_vec(); + + Some(navs) +} + +pub(crate) fn find_loops( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, +) -> Option<Vec<ast::Expr>> { + let parent = token.parent()?; + let lbl = match_ast! { + match parent { + ast::BreakExpr(break_) => break_.lifetime(), + ast::ContinueExpr(continue_) => continue_.lifetime(), + _ => None, + } + }; + let label_matches = + |it: Option<ast::Label>| match (lbl.as_ref(), it.and_then(|it| it.lifetime())) { + (Some(lbl), Some(it)) => lbl.text() == it.text(), + (None, _) => true, + (Some(_), None) => false, + }; + + let find_ancestors = |token: SyntaxToken| { + for anc in sema.token_ancestors_with_macros(token).filter_map(ast::Expr::cast) { + let node = match &anc { + ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => anc, + ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => anc, + ast::Expr::ForExpr(for_) if label_matches(for_.label()) => anc, + ast::Expr::BlockExpr(blk) + if blk.label().is_some() && label_matches(blk.label()) => + { + anc + } + _ => continue, + }; + + return Some(node); + } + None + }; + + sema.descend_into_macros(DescendPreference::None, token.clone()) + .into_iter() + .filter_map(find_ancestors) + .collect_vec() + .into() +} + +fn nav_for_break_points( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, +) -> Option<Vec<NavigationTarget>> { + let db = sema.db; + + let navs = find_loops(sema, token)? 
+ .into_iter() + .filter_map(|expr| { + let file_id = sema.hir_file_for(expr.syntax()); + let expr_in_file = InFile::new(file_id, expr.clone()); + let focus_range = match expr { + ast::Expr::LoopExpr(loop_) => loop_.loop_token()?.text_range(), + ast::Expr::WhileExpr(while_) => while_.while_token()?.text_range(), + ast::Expr::ForExpr(for_) => for_.for_token()?.text_range(), + // We guarantee that the label exists + ast::Expr::BlockExpr(blk) => blk.label().unwrap().syntax().text_range(), + _ => return None, + }; + let nav = expr_to_nav(db, expr_in_file, Some(focus_range)); + Some(nav) + }) + .flatten() + .collect_vec(); + + Some(navs) +} + fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> { def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default() } +fn expr_to_nav( + db: &RootDatabase, + InFile { file_id, value }: InFile<ast::Expr>, + focus_range: Option<TextRange>, +) -> UpmappingResult<NavigationTarget> { + let kind = SymbolKind::Label; + + let value_range = value.syntax().text_range(); + let navs = navigation_target::orig_range_with_focus_r(db, file_id, value_range, focus_range); + navs.map(|(hir::FileRangeWrapper { file_id, range }, focus_range)| { + NavigationTarget::from_syntax(file_id, "<expr>".into(), focus_range, range, kind) + }) +} + #[cfg(test)] mod tests { - use ide_db::base_db::FileRange; + use ide_db::FileRange; use itertools::Itertools; use crate::fixture; @@ -2313,4 +2539,200 @@ pub mod prelude { "#, ); } + + #[test] + fn goto_def_on_return_kw() { + check( + r#" +macro_rules! N { + ($i:ident, $x:expr, $blk:expr) => { + for $i in 0..$x { + $blk + } + }; +} + +fn main() { + fn f() { + // ^^ + N!(i, 5, { + println!("{}", i); + return$0; + }); + + for i in 1..5 { + return; + } + (|| { + return; + })(); + } +} +"#, + ) + } + + #[test] + fn goto_def_on_return_kw_in_closure() { + check( + r#" +macro_rules! 
N { + ($i:ident, $x:expr, $blk:expr) => { + for $i in 0..$x { + $blk + } + }; +} + +fn main() { + fn f() { + N!(i, 5, { + println!("{}", i); + return; + }); + + for i in 1..5 { + return; + } + (|| { + // ^ + return$0; + })(); + } +} +"#, + ) + } + + #[test] + fn goto_def_on_break_kw() { + check( + r#" +fn main() { + for i in 1..5 { + // ^^^ + break$0; + } +} +"#, + ) + } + + #[test] + fn goto_def_on_continue_kw() { + check( + r#" +fn main() { + for i in 1..5 { + // ^^^ + continue$0; + } +} +"#, + ) + } + + #[test] + fn goto_def_on_break_kw_for_block() { + check( + r#" +fn main() { + 'a:{ + // ^^^ + break$0 'a; + } +} +"#, + ) + } + + #[test] + fn goto_def_on_break_with_label() { + check( + r#" +fn foo() { + 'outer: loop { + // ^^^^ + 'inner: loop { + 'innermost: loop { + } + break$0 'outer; + } + } +} +"#, + ); + } + + #[test] + fn goto_def_on_return_in_try() { + check( + r#" +fn main() { + fn f() { + // ^^ + try { + return$0; + } + + return; + } +} +"#, + ) + } + + #[test] + fn goto_def_on_break_in_try() { + check( + r#" +fn main() { + for i in 1..100 { + // ^^^ + let x: Result<(), ()> = try { + break$0; + }; + } +} +"#, + ) + } + + #[test] + fn goto_def_on_return_in_async_block() { + check( + r#" +fn main() { + async { + // ^^^^^ + return$0; + } +} +"#, + ) + } + + #[test] + fn goto_def_on_for_kw() { + check( + r#" +fn main() { + for$0 i in 1..5 {} + // ^^^ +} +"#, + ) + } + + #[test] + fn goto_def_on_fn_kw() { + check( + r#" +fn main() { + fn$0 foo() {} + // ^^ +} +"#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs index 76e5e9dd928..2eff7796d54 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs @@ -24,7 +24,7 @@ pub(crate) fn goto_implementation( FilePosition { file_id, offset }: FilePosition, ) -> Option<RangeInfo<Vec<NavigationTarget>>> { let sema = Semantics::new(db); - let source_file = sema.parse(file_id); + let source_file = sema.parse_guess_edition(file_id); let syntax = source_file.syntax().clone(); let original_token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { @@ -117,7 +117,7 @@ fn impls_for_trait_item( #[cfg(test)] mod tests { - use ide_db::base_db::FileRange; + use ide_db::FileRange; use itertools::Itertools; use crate::fixture; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index ad393d98001..f75b8fb7d02 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -21,7 +21,7 @@ pub(crate) fn goto_type_definition( ) -> Option<RangeInfo<Vec<NavigationTarget>>> { let sema = hir::Semantics::new(db); - let file: ast::SourceFile = sema.parse(file_id); + let file: ast::SourceFile = sema.parse_guess_edition(file_id); let token: SyntaxToken = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER | T![self] => 2, @@ -113,7 +113,7 @@ pub(crate) fn goto_type_definition( #[cfg(test)] mod tests { - use ide_db::base_db::FileRange; + use ide_db::FileRange; use itertools::Itertools; use crate::fixture; diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index a5689403ee1..8fcd38b4e34 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ 
b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -1,24 +1,25 @@ use std::iter; -use hir::{DescendPreference, Semantics}; +use hir::{db, DescendPreference, FilePosition, FileRange, HirFileId, InFile, Semantics}; use ide_db::{ - base_db::{FileId, FilePosition, FileRange}, defs::{Definition, IdentClass}, helpers::pick_best_token, search::{FileReference, ReferenceCategory, SearchScope}, syntax_helpers::node_ext::{ - for_each_break_and_continue_expr, for_each_tail_expr, full_path_of_name_ref, walk_expr, + eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif, + preorder_expr_with_ctx_checker, }, - FxHashSet, RootDatabase, + FxHashMap, FxHashSet, RootDatabase, }; +use span::EditionedFileId; use syntax::{ ast::{self, HasLoopBody}, match_ast, AstNode, SyntaxKind::{self, IDENT, INT_NUMBER}, - SyntaxToken, TextRange, T, + SyntaxToken, TextRange, WalkEvent, T, }; -use crate::{navigation_target::ToNav, NavigationTarget, TryToNav}; +use crate::{goto_definition, navigation_target::ToNav, NavigationTarget, TryToNav}; #[derive(PartialEq, Eq, Hash)] pub struct HighlightedRange { @@ -53,9 +54,12 @@ pub struct HighlightRelatedConfig { pub(crate) fn highlight_related( sema: &Semantics<'_, RootDatabase>, config: HighlightRelatedConfig, - pos @ FilePosition { offset, file_id }: FilePosition, + ide_db::FilePosition { offset, file_id }: ide_db::FilePosition, ) -> Option<Vec<HighlightedRange>> { let _p = tracing::info_span!("highlight_related").entered(); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let syntax = sema.parse(file_id).syntax().clone(); let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { @@ -69,19 +73,25 @@ pub(crate) fn highlight_related( // most if not all of these should be re-implemented with information seeded from hir match token.kind() { T![?] 
if config.exit_points && token.parent().and_then(ast::TryExpr::cast).is_some() => { - highlight_exit_points(sema, token) + highlight_exit_points(sema, token).remove(&file_id) + } + T![fn] | T![return] | T![->] if config.exit_points => { + highlight_exit_points(sema, token).remove(&file_id) + } + T![await] | T![async] if config.yield_points => { + highlight_yield_points(sema, token).remove(&file_id) } - T![fn] | T![return] | T![->] if config.exit_points => highlight_exit_points(sema, token), - T![await] | T![async] if config.yield_points => highlight_yield_points(token), T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => { - highlight_break_points(token) + highlight_break_points(sema, token).remove(&file_id) } T![break] | T![loop] | T![while] | T![continue] if config.break_points => { - highlight_break_points(token) + highlight_break_points(sema, token).remove(&file_id) } T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id), T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id), - _ if config.references => highlight_references(sema, token, pos), + _ if config.references => { + highlight_references(sema, token, FilePosition { file_id, offset }) + } _ => None, } } @@ -89,7 +99,7 @@ pub(crate) fn highlight_related( fn highlight_closure_captures( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, - file_id: FileId, + file_id: EditionedFileId, ) -> Option<Vec<HighlightedRange>> { let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?; let search_range = closure.body()?.syntax().text_range(); @@ -271,50 +281,66 @@ fn highlight_references( } } -fn highlight_exit_points( +// If `file_id` is None, +pub(crate) fn highlight_exit_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> Option<Vec<HighlightedRange>> { +) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> { fn hl( sema: &Semantics<'_, RootDatabase>, - def_ranges: [Option<TextRange>; 2], - body: Option<ast::Expr>, - ) -> Option<Vec<HighlightedRange>> { - let mut highlights = Vec::new(); - highlights.extend( - def_ranges - .into_iter() - .flatten() - .map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }), - ); - let body = body?; - walk_expr(&body, &mut |expr| match expr { - ast::Expr::ReturnExpr(expr) => { - if let Some(token) = expr.return_token() { - highlights.push(HighlightedRange { - category: ReferenceCategory::empty(), - range: token.text_range(), - }); - } + def_token: Option<SyntaxToken>, + body: ast::Expr, + ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> { + let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default(); + + let mut push_to_highlights = |file_id, range| { + if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { + let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; + highlights.entry(file_id).or_default().insert(hrange); } - ast::Expr::TryExpr(try_) => { - if let Some(token) = try_.question_mark_token() { - highlights.push(HighlightedRange { - category: ReferenceCategory::empty(), - range: token.text_range(), - }); + }; + + if let Some(tok) = def_token { + let file_id = sema.hir_file_for(&tok.parent()?); + let range = Some(tok.text_range()); + push_to_highlights(file_id, range); + } + + WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| { + let file_id = sema.hir_file_for(expr.syntax()); + + let range = match &expr { + 
ast::Expr::TryExpr(try_) => { + try_.question_mark_token().map(|token| token.text_range()) } - } - ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_) => { - if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) { - highlights.push(HighlightedRange { - category: ReferenceCategory::empty(), - range: expr.syntax().text_range(), - }); + ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_) + if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) => + { + Some(expr.syntax().text_range()) } - } - _ => (), + _ => None, + }; + + push_to_highlights(file_id, range); }); + + // We should handle `return` separately, because when it is used in a `try` block, + // it will exit the outside function instead of the block itself. + WalkExpandedExprCtx::new(sema) + .with_check_ctx(&WalkExpandedExprCtx::is_async_const_block_or_closure) + .walk(&body, &mut |_, expr| { + let file_id = sema.hir_file_for(expr.syntax()); + + let range = match &expr { + ast::Expr::ReturnExpr(expr) => { + expr.return_token().map(|token| token.text_range()) + } + _ => None, + }; + + push_to_highlights(file_id, range); + }); + let tail = match body { ast::Expr::BlockExpr(b) => b.tail_expr(), e => Some(e), @@ -322,171 +348,188 @@ fn highlight_exit_points( if let Some(tail) = tail { for_each_tail_expr(&tail, &mut |tail| { + let file_id = sema.hir_file_for(tail.syntax()); let range = match tail { ast::Expr::BreakExpr(b) => b .break_token() .map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()), _ => tail.syntax().text_range(), }; - highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range }) + push_to_highlights(file_id, Some(range)); }); } Some(highlights) } - for anc in token.parent_ancestors() { - return match_ast! { - match anc { - ast::Fn(fn_) => hl(sema, [fn_.fn_token().map(|it| it.text_range()), None], fn_.body().map(ast::Expr::BlockExpr)), - ast::ClosureExpr(closure) => hl( - sema, - closure.param_list().map_or([None; 2], |p| [p.l_paren_token().map(|it| it.text_range()), p.r_paren_token().map(|it| it.text_range())]), - closure.body() - ), - ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) { - hl( - sema, - [block_expr.modifier().and_then(|modifier| match modifier { - ast::BlockModifier::Async(t) | ast::BlockModifier::Try(t) | ast::BlockModifier::Const(t) => Some(t.text_range()), - _ => None, - }), None], - Some(block_expr.into()) - ) - } else { - continue; + + let mut res = FxHashMap::default(); + for def in goto_definition::find_fn_or_blocks(sema, &token) { + let new_map = match_ast! 
{ + match def { + ast::Fn(fn_) => fn_.body().and_then(|body| hl(sema, fn_.fn_token(), body.into())), + ast::ClosureExpr(closure) => { + let pipe_tok = closure.param_list().and_then(|p| p.pipe_token()); + closure.body().and_then(|body| hl(sema, pipe_tok, body)) + }, + ast::BlockExpr(blk) => match blk.modifier() { + Some(ast::BlockModifier::Async(t)) => hl(sema, Some(t), blk.into()), + Some(ast::BlockModifier::Try(t)) if token.kind() != T![return] => { + hl(sema, Some(t), blk.into()) + }, + _ => continue, }, _ => continue, } }; + merge_map(&mut res, new_map); } - None + + res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect() } -fn highlight_break_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> { - fn hl( +pub(crate) fn highlight_break_points( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, +) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> { + pub(crate) fn hl( + sema: &Semantics<'_, RootDatabase>, cursor_token_kind: SyntaxKind, - token: Option<SyntaxToken>, + loop_token: Option<SyntaxToken>, label: Option<ast::Label>, - body: Option<ast::StmtList>, - ) -> Option<Vec<HighlightedRange>> { - let mut highlights = Vec::new(); - let range = cover_range( - token.map(|tok| tok.text_range()), + expr: ast::Expr, + ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> { + let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default(); + + let mut push_to_highlights = |file_id, range| { + if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { + let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; + highlights.entry(file_id).or_default().insert(hrange); + } + }; + + let label_lt = label.as_ref().and_then(|it| it.lifetime()); + + if let Some(range) = cover_range( + loop_token.as_ref().map(|tok| tok.text_range()), label.as_ref().map(|it| it.syntax().text_range()), - ); - highlights.extend( - range.map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }), - ); - for_each_break_and_continue_expr(label, body, &mut |expr| { - let range: Option<TextRange> = match (cursor_token_kind, expr) { - (T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => { - cover_range( - break_.break_token().map(|it| it.text_range()), - break_.lifetime().map(|it| it.syntax().text_range()), - ) + ) { + let file_id = loop_token + .and_then(|tok| Some(sema.hir_file_for(&tok.parent()?))) + .unwrap_or_else(|| sema.hir_file_for(label.unwrap().syntax())); + push_to_highlights(file_id, Some(range)); + } + + WalkExpandedExprCtx::new(sema) + .with_check_ctx(&WalkExpandedExprCtx::is_async_const_block_or_closure) + .walk(&expr, &mut |depth, expr| { + let file_id = sema.hir_file_for(expr.syntax()); + + // Only highlight the `break`s for `break` and `continue`s for `continue` + let (token, token_lt) = match expr { + ast::Expr::BreakExpr(b) if cursor_token_kind != T![continue] => { + (b.break_token(), b.lifetime()) + } + ast::Expr::ContinueExpr(c) if cursor_token_kind != T![break] => { + (c.continue_token(), c.lifetime()) + } + _ => return, + }; + + if !(depth == 1 && token_lt.is_none() || eq_label_lt(&label_lt, &token_lt)) { + return; } - ( - T![for] | T![while] | T![loop] | T![continue], - ast::Expr::ContinueExpr(continue_), - ) => cover_range( - continue_.continue_token().map(|it| it.text_range()), - continue_.lifetime().map(|it| it.syntax().text_range()), - ), - _ => None, - }; - highlights.extend( - range.map(|range| HighlightedRange { 
category: ReferenceCategory::empty(), range }), - ); - }); + + let text_range = cover_range( + token.map(|it| it.text_range()), + token_lt.map(|it| it.syntax().text_range()), + ); + + push_to_highlights(file_id, text_range); + }); + Some(highlights) } - let parent = token.parent()?; - let lbl = match_ast! { - match parent { - ast::BreakExpr(b) => b.lifetime(), - ast::ContinueExpr(c) => c.lifetime(), - ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()), - ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()), - ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()), - ast::BlockExpr(b) => Some(b.label().and_then(|it| it.lifetime())?), - _ => return None, - } - }; - let lbl = lbl.as_ref(); - let label_matches = |def_lbl: Option<ast::Label>| match lbl { - Some(lbl) => { - Some(lbl.text()) == def_lbl.and_then(|it| it.lifetime()).as_ref().map(|it| it.text()) - } - None => true, + + let Some(loops) = goto_definition::find_loops(sema, &token) else { + return FxHashMap::default(); }; + + let mut res = FxHashMap::default(); let token_kind = token.kind(); - for anc in token.parent_ancestors().flat_map(ast::Expr::cast) { - return match anc { - ast::Expr::LoopExpr(l) if label_matches(l.label()) => hl( - token_kind, - l.loop_token(), - l.label(), - l.loop_body().and_then(|it| it.stmt_list()), - ), - ast::Expr::ForExpr(f) if label_matches(f.label()) => hl( - token_kind, - f.for_token(), - f.label(), - f.loop_body().and_then(|it| it.stmt_list()), - ), - ast::Expr::WhileExpr(w) if label_matches(w.label()) => hl( - token_kind, - w.while_token(), - w.label(), - w.loop_body().and_then(|it| it.stmt_list()), - ), - ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => { - hl(token_kind, None, e.label(), e.stmt_list()) - } + for expr in loops { + let new_map = match &expr { + ast::Expr::LoopExpr(l) => hl(sema, token_kind, l.loop_token(), l.label(), expr), + ast::Expr::ForExpr(f) => hl(sema, token_kind, f.for_token(), f.label(), expr), + ast::Expr::WhileExpr(w) => hl(sema, token_kind, w.while_token(), w.label(), expr), + ast::Expr::BlockExpr(e) => hl(sema, token_kind, None, e.label(), expr), _ => continue, }; + merge_map(&mut res, new_map); } - None + + res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect() } -fn highlight_yield_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> { +pub(crate) fn highlight_yield_points( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, +) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> { fn hl( + sema: &Semantics<'_, RootDatabase>, async_token: Option<SyntaxToken>, body: Option<ast::Expr>, - ) -> Option<Vec<HighlightedRange>> { - let mut highlights = vec![HighlightedRange { - category: ReferenceCategory::empty(), - range: async_token?.text_range(), - }]; - if let Some(body) = body { - walk_expr(&body, &mut |expr| { - if let ast::Expr::AwaitExpr(expr) = expr { - if let Some(token) = expr.await_token() { - highlights.push(HighlightedRange { - category: ReferenceCategory::empty(), - range: token.text_range(), - }); - } - } - }); - } + ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> { + let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default(); + + let mut push_to_highlights = |file_id, range| { + if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { + let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; + highlights.entry(file_id).or_default().insert(hrange); + } + }; + + let 
async_token = async_token?; + let async_tok_file_id = sema.hir_file_for(&async_token.parent()?); + push_to_highlights(async_tok_file_id, Some(async_token.text_range())); + + let Some(body) = body else { + return Some(highlights); + }; + + WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| { + let file_id = sema.hir_file_for(expr.syntax()); + + let text_range = match expr { + ast::Expr::AwaitExpr(expr) => expr.await_token(), + ast::Expr::ReturnExpr(expr) => expr.return_token(), + _ => None, + } + .map(|it| it.text_range()); + + push_to_highlights(file_id, text_range); + }); + Some(highlights) } - for anc in token.parent_ancestors() { - return match_ast! { + + let mut res = FxHashMap::default(); + for anc in goto_definition::find_fn_or_blocks(sema, &token) { + let new_map = match_ast! { match anc { - ast::Fn(fn_) => hl(fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)), + ast::Fn(fn_) => hl(sema, fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)), ast::BlockExpr(block_expr) => { if block_expr.async_token().is_none() { continue; } - hl(block_expr.async_token(), Some(block_expr.into())) + hl(sema, block_expr.async_token(), Some(block_expr.into())) }, - ast::ClosureExpr(closure) => hl(closure.async_token(), closure.body()), + ast::ClosureExpr(closure) => hl(sema, closure.async_token(), closure.body()), _ => continue, } }; + merge_map(&mut res, new_map); } - None + + res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect() } fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> { @@ -506,6 +549,115 @@ fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSe .collect() } +fn original_frange( + db: &dyn db::ExpandDatabase, + file_id: HirFileId, + text_range: Option<TextRange>, +) -> Option<FileRange> { + InFile::new(file_id, text_range?).original_node_file_range_opt(db).map(|(frange, _)| frange) +} + +fn merge_map( + res: &mut FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>, + new: Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>>, +) { + let Some(new) = new else { + return; + }; + new.into_iter().for_each(|(file_id, ranges)| { + res.entry(file_id).or_default().extend(ranges); + }); +} + +/// Preorder walk all the expression's child expressions. +/// For macro calls, the callback will be called on the expanded expressions after +/// visiting the macro call itself. +struct WalkExpandedExprCtx<'a> { + sema: &'a Semantics<'a, RootDatabase>, + depth: usize, + check_ctx: &'static dyn Fn(&ast::Expr) -> bool, +} + +impl<'a> WalkExpandedExprCtx<'a> { + fn new(sema: &'a Semantics<'a, RootDatabase>) -> Self { + Self { sema, depth: 0, check_ctx: &is_closure_or_blk_with_modif } + } + + fn with_check_ctx(&self, check_ctx: &'static dyn Fn(&ast::Expr) -> bool) -> Self { + Self { check_ctx, ..*self } + } + + fn walk(&mut self, expr: &ast::Expr, cb: &mut dyn FnMut(usize, ast::Expr)) { + preorder_expr_with_ctx_checker(expr, self.check_ctx, &mut |ev: WalkEvent<ast::Expr>| { + match ev { + syntax::WalkEvent::Enter(expr) => { + cb(self.depth, expr.clone()); + + if Self::should_change_depth(&expr) { + self.depth += 1; + } + + if let ast::Expr::MacroExpr(expr) = expr { + if let Some(expanded) = + expr.macro_call().and_then(|call| self.sema.expand(&call)) + { + match_ast! 
{ + match expanded { + ast::MacroStmts(it) => { + self.handle_expanded(it, cb); + }, + ast::Expr(it) => { + self.walk(&it, cb); + }, + _ => {} + } + } + } + } + } + syntax::WalkEvent::Leave(expr) if Self::should_change_depth(&expr) => { + self.depth -= 1; + } + _ => {} + } + false + }) + } + + fn handle_expanded(&mut self, expanded: ast::MacroStmts, cb: &mut dyn FnMut(usize, ast::Expr)) { + if let Some(expr) = expanded.expr() { + self.walk(&expr, cb); + } + + for stmt in expanded.statements() { + if let ast::Stmt::ExprStmt(stmt) = stmt { + if let Some(expr) = stmt.expr() { + self.walk(&expr, cb); + } + } + } + } + + fn should_change_depth(expr: &ast::Expr) -> bool { + match expr { + ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => true, + ast::Expr::BlockExpr(blk) if blk.label().is_some() => true, + _ => false, + } + } + + fn is_async_const_block_or_closure(expr: &ast::Expr) -> bool { + match expr { + ast::Expr::BlockExpr(b) => matches!( + b.modifier(), + Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Const(_)) + ), + ast::Expr::ClosureExpr(_) => true, + _ => false, + } + } +} + #[cfg(test)] mod tests { use itertools::Itertools; @@ -892,6 +1044,7 @@ impl Never { } macro_rules! never { () => { never() } + // ^^^^^^^ } fn never() -> ! { loop {} } fn foo() ->$0 u32 { @@ -1718,4 +1871,140 @@ fn test() { "#, ); } + + #[test] + fn return_in_macros() { + check( + r#" +macro_rules! N { + ($i:ident, $x:expr, $blk:expr) => { + for $i in 0..$x { + $blk + } + }; +} + +fn main() { + fn f() { + // ^^ + N!(i, 5, { + println!("{}", i); + return$0; + // ^^^^^^ + }); + + for i in 1..5 { + return; + // ^^^^^^ + } + (|| { + return; + })(); + } +} +"#, + ) + } + + #[test] + fn return_in_closure() { + check( + r#" +macro_rules! N { + ($i:ident, $x:expr, $blk:expr) => { + for $i in 0..$x { + $blk + } + }; +} + +fn main() { + fn f() { + N!(i, 5, { + println!("{}", i); + return; + }); + + for i in 1..5 { + return; + } + (|| { + // ^ + return$0; + // ^^^^^^ + })(); + } +} +"#, + ) + } + + #[test] + fn return_in_try() { + check( + r#" +fn main() { + fn f() { + // ^^ + try { + return$0; + // ^^^^^^ + } + + return; + // ^^^^^^ + } +} +"#, + ) + } + + #[test] + fn break_in_try() { + check( + r#" +fn main() { + for i in 1..100 { + // ^^^ + let x: Result<(), ()> = try { + break$0; + // ^^^^^ + }; + } +} +"#, + ) + } + + #[test] + fn no_highlight_on_return_in_macro_call() { + check( + r#" +//- minicore:include +//- /lib.rs +macro_rules! 
M { + ($blk:expr) => { + $blk + }; +} + +fn main() { + fn f() { + // ^^ + M!({ return$0; }); + // ^^^^^^ + // ^^^^^^^^^^^^^^^ + + include!("a.rs") + // ^^^^^^^^^^^^^^^^ + } +} + +//- /a.rs +{ + return; +} +"#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 2006baa30a8..500674e32b3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -8,11 +8,10 @@ use std::{iter, ops::Not}; use either::Either; use hir::{db::DefDatabase, DescendPreference, HasCrate, HasSource, LangItem, Semantics}; use ide_db::{ - base_db::FileRange, defs::{Definition, IdentClass, NameRefClass, OperatorClass}, famous_defs::FamousDefs, helpers::pick_best_token, - FxIndexSet, RootDatabase, + FileRange, FxIndexSet, RootDatabase, }; use itertools::{multizip, Itertools}; use syntax::{ast, AstNode, SyntaxKind::*, SyntaxNode, T}; @@ -110,7 +109,7 @@ pub(crate) fn hover( config: &HoverConfig, ) -> Option<RangeInfo<HoverResult>> { let sema = &hir::Semantics::new(db); - let file = sema.parse(file_id).syntax().clone(); + let file = sema.parse_guess_edition(file_id).syntax().clone(); let mut res = if range.is_empty() { hover_simple(sema, FilePosition { file_id, offset: range.start() }, file, config) } else { @@ -454,7 +453,7 @@ fn runnable_action( Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable), Definition::Function(func) => { let src = func.source(sema.db)?; - if src.file_id != file_id.into() { + if src.file_id != file_id { cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment); cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr); return None; diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 5036770fec8..3257305184e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -1,5 +1,5 @@ use expect_test::{expect, Expect}; -use ide_db::base_db::{FileLoader, FileRange}; +use ide_db::{base_db::FileLoader, FileRange}; use syntax::TextRange; use crate::{ @@ -2358,17 +2358,17 @@ fn test_hover_trait_show_qualifiers() { check_actions( r"unsafe trait foo$0() {}", expect![[r#" - [ - Implementation( - FilePosition { - file_id: FileId( - 0, - ), - offset: 13, - }, - ), - ] - "#]], + [ + Implementation( + FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 13, + }, + ), + ] + "#]], ); } @@ -2925,17 +2925,17 @@ fn test_hover_trait_has_impl_action() { check_actions( r#"trait foo$0() {}"#, expect![[r#" - [ - Implementation( - FilePosition { - file_id: FileId( - 0, - ), - offset: 6, - }, - ), - ] - "#]], + [ + Implementation( + FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 6, + }, + ), + ] + "#]], ); } @@ -2944,17 +2944,17 @@ fn test_hover_struct_has_impl_action() { check_actions( r"struct foo$0() {}", expect![[r#" - [ - Implementation( - FilePosition { - file_id: FileId( - 0, - ), - offset: 7, - }, - ), - ] - "#]], + [ + Implementation( + FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 7, + }, + ), + ] + "#]], ); } @@ -2963,17 +2963,17 @@ fn test_hover_union_has_impl_action() { check_actions( r#"union foo$0() {}"#, expect![[r#" - [ - Implementation( - FilePosition { - file_id: FileId( - 0, - ), - offset: 6, - }, - ), - ] - "#]], + [ + Implementation( + FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 6, + }, + ), + ] + "#]], ); } @@ -2982,17 +2982,17 @@ fn 
test_hover_enum_has_impl_action() { check_actions( r"enum foo$0() { A, B }", expect![[r#" - [ - Implementation( - FilePosition { - file_id: FileId( - 0, - ), - offset: 5, - }, - ), - ] - "#]], + [ + Implementation( + FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 5, + }, + ), + ] + "#]], ); } @@ -3001,17 +3001,17 @@ fn test_hover_self_has_impl_action() { check_actions( r#"struct foo where Self$0:;"#, expect![[r#" - [ - Implementation( - FilePosition { - file_id: FileId( - 0, - ), - offset: 7, - }, - ), - ] - "#]], + [ + Implementation( + FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 7, + }, + ), + ] + "#]], ); } @@ -3025,7 +3025,7 @@ fn foo_$0test() {} expect![[r#" [ Reference( - FilePosition { + FilePositionWrapper { file_id: FileId( 0, ), @@ -8450,7 +8450,7 @@ impl Iterator for S { expect![[r#" [ Implementation( - FilePosition { + FilePositionWrapper { file_id: FileId( 0, ), diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 944951f26a2..0a8d2727575 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -5,12 +5,13 @@ use std::{ use either::Either; use hir::{ - known, ClosureStyle, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef, + sym, ClosureStyle, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef, ModuleDefId, Semantics, }; -use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase}; +use ide_db::{famous_defs::FamousDefs, FileRange, RootDatabase}; use itertools::Itertools; use smallvec::{smallvec, SmallVec}; +use span::EditionedFileId; use stdx::never; use syntax::{ ast::{self, AstNode}, @@ -493,6 +494,9 @@ pub(crate) fn inlay_hints( ) -> Vec<InlayHint> { let _p = tracing::info_span!("inlay_hints").entered(); let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let file = sema.parse(file_id); let file = file.syntax(); @@ -527,6 +531,9 @@ pub(crate) fn inlay_hints_resolve( ) -> Option<InlayHint> { let _p = tracing::info_span!("inlay_hints_resolve").entered(); let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let file = sema.parse(file_id); let file = file.syntax(); @@ -551,7 +558,7 @@ fn hints( hints: &mut Vec<InlayHint>, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: FileId, + file_id: EditionedFileId, node: SyntaxNode, ) { closing_brace::hints(hints, sema, config, file_id, node.clone()); @@ -633,7 +640,7 @@ fn hint_iterator( if ty.impls_trait(db, iter_trait, &[]) { let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item { - hir::AssocItem::TypeAlias(alias) if alias.name(db) == known::Item => Some(alias), + hir::AssocItem::TypeAlias(alias) if alias.name(db) == sym::Item.clone() => Some(alias), _ => None, })?; if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 1118f11d99d..5775abaeb18 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -4,9 +4,10 @@ //! let _x /* i32 */= f(4, 4); //! 
``` use hir::Semantics; -use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase}; +use ide_db::{famous_defs::FamousDefs, RootDatabase}; use itertools::Itertools; +use span::EditionedFileId; use syntax::{ ast::{self, AstNode, HasGenericArgs, HasName}, match_ast, @@ -21,7 +22,7 @@ pub(super) fn hints( acc: &mut Vec<InlayHint>, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: FileId, + _file_id: EditionedFileId, pat: &ast::IdentPat, ) -> Option<()> { if !config.type_hints { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index d86487d4b41..4e15213b8bb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -1,11 +1,12 @@ //! Implementation of "chaining" inlay hints. use ide_db::famous_defs::FamousDefs; +use span::EditionedFileId; use syntax::{ ast::{self, AstNode}, Direction, NodeOrToken, SyntaxKind, T, }; -use crate::{FileId, InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind}; +use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind}; use super::label_of_ty; @@ -13,7 +14,7 @@ pub(super) fn hints( acc: &mut Vec<InlayHint>, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: FileId, + _file_id: EditionedFileId, expr: &ast::Expr, ) -> Option<()> { if !config.chaining_hints { @@ -142,7 +143,7 @@ fn main() { InlayHintLabelPart { text: "B", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -161,7 +162,7 @@ fn main() { InlayHintLabelPart { text: "A", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -225,7 +226,7 @@ fn main() { InlayHintLabelPart { text: "C", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -244,7 +245,7 @@ fn main() { InlayHintLabelPart { text: "B", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -292,7 +293,7 @@ fn main() { InlayHintLabelPart { text: "C", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -311,7 +312,7 @@ fn main() { InlayHintLabelPart { text: "B", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -360,7 +361,7 @@ fn main() { InlayHintLabelPart { text: "B", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -373,7 +374,7 @@ fn main() { InlayHintLabelPart { text: "X", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -392,7 +393,7 @@ fn main() { InlayHintLabelPart { text: "A", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -405,7 +406,7 @@ fn main() { InlayHintLabelPart { text: "X", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -456,7 +457,7 @@ fn main() { InlayHintLabelPart { text: "Iterator", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 1, ), @@ -469,7 +470,7 @@ fn main() { InlayHintLabelPart { text: "Item", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 1, ), @@ -488,7 +489,7 @@ fn main() { InlayHintLabelPart { text: "Iterator", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 1, ), @@ -501,7 +502,7 @@ fn main() { InlayHintLabelPart { text: "Item", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 1, ), @@ 
-520,7 +521,7 @@ fn main() { InlayHintLabelPart { text: "Iterator", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 1, ), @@ -533,7 +534,7 @@ fn main() { InlayHintLabelPart { text: "Item", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 1, ), @@ -552,7 +553,7 @@ fn main() { InlayHintLabelPart { text: "MyIter", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -600,7 +601,7 @@ fn main() { InlayHintLabelPart { text: "Struct", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -619,7 +620,7 @@ fn main() { InlayHintLabelPart { text: "Struct", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -638,7 +639,7 @@ fn main() { InlayHintLabelPart { text: "Struct", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), @@ -656,7 +657,7 @@ fn main() { InlayHintLabelPart { text: "self", linked_location: Some( - FileRange { + FileRangeWrapper { file_id: FileId( 0, ), diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index 2cefd5acdc2..ea96c9504e5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -4,20 +4,21 @@ //! } /* fn g */ //! ``` use hir::{HirDisplay, Semantics}; -use ide_db::{base_db::FileRange, RootDatabase}; +use ide_db::{FileRange, RootDatabase}; +use span::EditionedFileId; use syntax::{ ast::{self, AstNode, HasName}, match_ast, SyntaxKind, SyntaxNode, T, }; -use crate::{FileId, InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; +use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec<InlayHint>, sema: &Semantics<'_, RootDatabase>, config: &InlayHintsConfig, - file_id: FileId, - node: SyntaxNode, + file_id: EditionedFileId, + mut node: SyntaxNode, ) -> Option<()> { let min_lines = config.closing_brace_hints_min_lines?; @@ -51,6 +52,15 @@ pub(super) fn hints( let module = ast::Module::cast(list.syntax().parent()?)?; (format!("mod {}", module.name()?), module.name().map(name)) + } else if let Some(label) = ast::Label::cast(node.clone()) { + // in this case, `ast::Label` could be seen as a part of `ast::BlockExpr` + // the actual number of lines in this case should be the line count of the parent BlockExpr, + // which the `min_lines` config cares about + node = node.parent()?; + let block = label.syntax().parent().and_then(ast::BlockExpr::cast)?; + closing_token = block.stmt_list()?.r_curly_token()?; + let lifetime = label.lifetime().map_or_else(String::new, |it| it.to_string()); + (lifetime, Some(label.syntax().text_range())) } else if let Some(block) = ast::BlockExpr::cast(node.clone()) { closing_token = block.stmt_list()?.r_curly_token()?; @@ -107,7 +117,7 @@ pub(super) fn hints( return None; } - let linked_location = name_range.map(|range| FileRange { file_id, range }); + let linked_location = name_range.map(|range| FileRange { file_id: file_id.into(), range }); acc.push(InlayHint { range: closing_token.text_range(), kind: InlayKind::ClosingBrace, @@ -191,4 +201,27 @@ fn f() { "#, ); } + + #[test] + fn hints_closing_brace_for_block_expr() { + check_with_config( + InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG }, + r#" +fn test() { + 'end: { + 'do_a: { + 'do_b: { + + } + //^ 'do_b + break 'end; 
+ } + //^ 'do_a + } + //^ 'end + } +//^ fn test +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs index f1b524e0880..e87e10d8504 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs @@ -1,7 +1,8 @@ //! Implementation of "closure return type" inlay hints. //! //! Tests live in [`bind_pat`][super::bind_pat] module. -use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use ide_db::famous_defs::FamousDefs; +use span::EditionedFileId; use stdx::TupleExt; use syntax::ast::{self, AstNode}; use text_edit::{TextRange, TextSize}; @@ -12,7 +13,7 @@ pub(super) fn hints( acc: &mut Vec<InlayHint>, FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: FileId, + _file_id: EditionedFileId, closure: ast::ClosureExpr, ) -> Option<()> { if !config.closure_capture_hints { @@ -73,7 +74,7 @@ pub(super) fn hints( ), None, source.name().and_then(|name| { - name.syntax().original_file_range_opt(sema.db).map(TupleExt::head) + name.syntax().original_file_range_opt(sema.db).map(TupleExt::head).map(Into::into) }), ); acc.push(InlayHint { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs index 3b41db0f13d..f6bd7ca064f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs @@ -1,7 +1,8 @@ //! Implementation of "closure return type" inlay hints. //! //! Tests live in [`bind_pat`][super::bind_pat] module. -use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use ide_db::famous_defs::FamousDefs; +use span::EditionedFileId; use syntax::ast::{self, AstNode}; use crate::{ @@ -13,7 +14,7 @@ pub(super) fn hints( acc: &mut Vec<InlayHint>, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: FileId, + _file_id: EditionedFileId, closure: ast::ClosureExpr, ) -> Option<()> { if config.closure_return_type_hints == ClosureReturnTypeHints::Never { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs index 202954100fb..eca0ebe629f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs @@ -5,7 +5,8 @@ //! } //! 
``` use hir::Semantics; -use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase}; +use ide_db::{famous_defs::FamousDefs, RootDatabase}; +use span::EditionedFileId; use syntax::ast::{self, AstNode, HasName}; use crate::{ @@ -17,7 +18,7 @@ pub(super) fn enum_hints( acc: &mut Vec<InlayHint>, FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _: FileId, + _: EditionedFileId, enum_: ast::Enum, ) -> Option<()> { if let DiscriminantHints::Never = config.discriminant_hints { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs index 51855eeae23..b60a80a8ac6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs @@ -46,7 +46,7 @@ pub(crate) fn hints( } let name = param.name(sema.db); - let param_name = name.as_str()?; + let param_name = name.as_str(); let should_hide = { let argument = get_string_representation(&arg)?; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index 31f0c790374..7f901db28d3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -10,11 +10,11 @@ use hir::{ mir::{MirSpan, TerminatorKind}, ChalkTyInterner, DefWithBody, Semantics, }; -use ide_db::{base_db::FileRange, RootDatabase}; +use ide_db::{FileRange, RootDatabase}; use syntax::{ ast::{self, AstNode}, - match_ast, + match_ast, ToSmolStr, }; use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; @@ -32,9 +32,8 @@ pub(super) fn hints( let def = sema.to_def(def)?; let def: DefWithBody = def.into(); - let source_map = sema.db.body_with_source_map(def.into()).1; + let (hir, source_map) = sema.db.body_with_source_map(def.into()); - let hir = sema.db.body(def.into()); let mir = sema.db.mir_body(def.into()).ok()?; let local_to_binding = mir.local_to_binding_map(); @@ -74,21 +73,34 @@ pub(super) fn hints( Ok(s) => s.value.text_range(), Err(_) => continue, }, + MirSpan::BindingId(b) => { + match source_map + .patterns_for_binding(b) + .iter() + .find_map(|p| source_map.pat_syntax(*p).ok()) + { + Some(s) => s.value.text_range(), + None => continue, + } + } MirSpan::SelfParam => match source_map.self_param_syntax() { Some(s) => s.value.text_range(), None => continue, }, MirSpan::Unknown => continue, }; - let binding = &hir.bindings[*binding]; - let binding_source = binding - .definitions + let binding_source = source_map + .patterns_for_binding(*binding) .first() .and_then(|d| source_map.pat_syntax(*d).ok()) .and_then(|d| { - Some(FileRange { file_id: d.file_id.file_id()?, range: d.value.text_range() }) + Some(FileRange { + file_id: d.file_id.file_id()?.into(), + range: d.value.text_range(), + }) }); - let name = binding.name.to_smol_str(); + let binding = &hir.bindings[*binding]; + let name = binding.name.display_no_db().to_smolstr(); if name.starts_with("<ra@") { continue; // Ignore desugared variables } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 2e2a64c5520..70d790efad3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -7,10 +7,13 @@ use std::fmt::Display; use 
either::Either; use hir::{Callable, Semantics}; -use ide_db::{base_db::FileRange, RootDatabase}; +use ide_db::RootDatabase; use stdx::to_lower_snake_case; -use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp}; +use syntax::{ + ast::{self, AstNode, HasArgList, HasName, UnaryOp}, + ToSmolStr, +}; use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; @@ -45,7 +48,7 @@ pub(super) fn hints( .filter(|(_, param_name, arg, _)| { !should_hide_param_name_hint(sema, &callable, ¶m_name.text(), arg) }) - .map(|(param, param_name, _, FileRange { range, .. })| { + .map(|(param, param_name, _, hir::FileRange { range, .. })| { let linked_location = param.and_then(|name| sema.original_range_opt(name.syntax())); let label = render_label(¶m_name, config, linked_location); @@ -67,11 +70,11 @@ pub(super) fn hints( pub(super) fn render_label( param_name: impl Display, config: &InlayHintsConfig, - linked_location: Option<FileRange>, + linked_location: Option<hir::FileRange>, ) -> InlayHintLabel { let colon = if config.render_colons { ":" } else { "" }; - InlayHintLabel::simple(format!("{param_name}{colon}"), None, linked_location) + InlayHintLabel::simple(format!("{param_name}{colon}"), None, linked_location.map(Into::into)) } fn get_callable( @@ -118,7 +121,7 @@ fn should_hide_param_name_hint( } let fn_name = match callable.kind() { - hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()), + hir::CallableKind::Function(it) => Some(it.name(sema.db).display_no_db().to_smolstr()), _ => None, }; let fn_name = fn_name.as_deref(); diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs index 7bd2da9f88a..aeb3c8c1ee5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs +++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs @@ -1,8 +1,5 @@ use hir::Semantics; -use ide_db::{ - base_db::{FilePosition, SourceDatabaseExt}, - LineIndexDatabase, RootDatabase, -}; +use ide_db::{base_db::SourceDatabaseExt, FilePosition, LineIndexDatabase, RootDatabase}; use std::{fmt::Write, time::Instant}; use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange}; @@ -26,7 +23,7 @@ pub(crate) fn interpret_function(db: &RootDatabase, position: FilePosition) -> S fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<String> { let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); + let source_file = sema.parse_guess_edition(position.file_id); let item = ancestors_at_offset(source_file.syntax(), position.offset) .filter(|it| !ast::MacroCall::can_cast(it.kind())) diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index f0b35903f38..8cb81a9cc45 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -43,7 +43,6 @@ mod parent_module; mod references; mod rename; mod runnables; -mod shuffle_crate_graph; mod signature_help; mod ssr; mod static_index; @@ -62,7 +61,7 @@ use std::panic::UnwindSafe; use cfg::CfgOptions; use fetch_crates::CrateInfo; -use hir::ChangeWithProcMacros; +use hir::{sym, ChangeWithProcMacros}; use ide_db::{ base_db::{ salsa::{self, ParallelDatabase}, @@ -70,6 +69,7 @@ use ide_db::{ }, prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase, }; +use span::EditionedFileId; use syntax::SourceFile; use triomphe::Arc; use view_memory_layout::{view_memory_layout, RecursiveMemoryLayout}; 
@@ -120,10 +120,7 @@ pub use ide_completion::{ Snippet, SnippetScope, }; pub use ide_db::{ - base_db::{ - Cancelled, CrateGraph, CrateId, FileChange, FileId, FilePosition, FileRange, SourceRoot, - SourceRootId, - }, + base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId}, documentation::Documentation, label::Label, line_index::{LineCol, LineIndex}, @@ -131,7 +128,7 @@ pub use ide_db::{ search::{ReferenceCategory, SearchScope}, source_change::{FileSystemEdit, SnippetEdit, SourceChange}, symbol_index::Query, - RootDatabase, SymbolKind, + FileId, FilePosition, FileRange, RootDatabase, SymbolKind, }; pub use ide_diagnostics::{ Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity, @@ -163,7 +160,7 @@ pub struct AnalysisHost { } impl AnalysisHost { - pub fn new(lru_capacity: Option<usize>) -> AnalysisHost { + pub fn new(lru_capacity: Option<u16>) -> AnalysisHost { AnalysisHost { db: RootDatabase::new(lru_capacity) } } @@ -171,11 +168,11 @@ impl AnalysisHost { AnalysisHost { db } } - pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { + pub fn update_lru_capacity(&mut self, lru_capacity: Option<u16>) { self.db.update_base_query_lru_capacities(lru_capacity); } - pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) { + pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, u16>) { self.db.update_lru_capacities(lru_capacities); } @@ -204,10 +201,6 @@ impl AnalysisHost { pub fn raw_database_mut(&mut self) -> &mut RootDatabase { &mut self.db } - - pub fn shuffle_crate_graph(&mut self) { - shuffle_crate_graph::shuffle_crate_graph(&mut self.db); - } } impl Default for AnalysisHost { @@ -248,7 +241,7 @@ impl Analysis { // FIXME: cfg options // Default to enable test for single file. let mut cfg_options = CfgOptions::default(); - cfg_options.insert_atom("test".into()); + cfg_options.insert_atom(sym::test.clone()); crate_graph.add_crate_root( file_id, Edition::CURRENT, @@ -298,7 +291,8 @@ impl Analysis { /// Gets the syntax tree of the file. pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> { - self.with_db(|db| db.parse(file_id).tree()) + // FIXME editiojn + self.with_db(|db| db.parse(EditionedFileId::current_edition(file_id)).tree()) } /// Returns true if this file belongs to an immutable library. @@ -321,7 +315,7 @@ impl Analysis { /// supported). pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> { self.with_db(|db| { - let parse = db.parse(position.file_id); + let parse = db.parse(EditionedFileId::current_edition(position.file_id)); let file = parse.tree(); matching_brace::matching_brace(&file, position.offset) }) @@ -386,7 +380,7 @@ impl Analysis { /// stuff like trailing commas. pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> { self.with_db(|db| { - let parse = db.parse(frange.file_id); + let parse = db.parse(EditionedFileId::current_edition(frange.file_id)); join_lines::join_lines(config, &parse.tree(), frange.range) }) } @@ -422,7 +416,12 @@ impl Analysis { /// Returns a tree representation of symbols in the file. Useful to draw a /// file outline. 
pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> { - self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree())) + // FIXME: Edition + self.with_db(|db| { + file_structure::file_structure( + &db.parse(EditionedFileId::current_edition(file_id)).tree(), + ) + }) } /// Returns a list of the places in the file where type hints can be displayed. @@ -449,7 +448,11 @@ impl Analysis { /// Returns the set of folding ranges. pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> { - self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree())) + self.with_db(|db| { + folding_ranges::folding_ranges( + &db.parse(EditionedFileId::current_edition(file_id)).tree(), + ) + }) } /// Fuzzy searches for a symbol. @@ -751,7 +754,7 @@ impl Analysis { ide_ssr::MatchFinder::in_context(db, resolve_context, selections)?; match_finder.add_rule(rule)?; let edits = if parse_only { Default::default() } else { match_finder.edits() }; - Ok(SourceChange::from(edits)) + Ok(SourceChange::from_iter(edits)) }) } diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index 68854c33cef..1b64bc92603 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -5,10 +5,10 @@ use core::fmt; use hir::{Adt, AsAssocItem, AssocItemContainer, Crate, DescendPreference, MacroKind, Semantics}; use ide_db::{ - base_db::{CrateOrigin, FilePosition, LangCrateOrigin}, + base_db::{CrateOrigin, LangCrateOrigin}, defs::{Definition, IdentClass}, helpers::pick_best_token, - RootDatabase, + FilePosition, RootDatabase, }; use itertools::Itertools; use syntax::{AstNode, SyntaxKind::*, T}; @@ -133,7 +133,7 @@ pub(crate) fn moniker( FilePosition { file_id, offset }: FilePosition, ) -> Option<RangeInfo<Vec<MonikerResult>>> { let sema = &Semantics::new(db); - let file = sema.parse(file_id).syntax().clone(); + let file = sema.parse_guess_edition(file_id).syntax().clone(); let current_crate: hir::Crate = crates_for(db, file_id).pop()?.into(); let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT @@ -408,7 +408,7 @@ pub(crate) fn def_to_moniker( }), ), }; - PackageInformation { name, repo, version } + PackageInformation { name: name.as_str().to_owned(), repo, version } }, }) } diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs index b955ea99f0c..ea6cc9d6de2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs +++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs @@ -1,7 +1,7 @@ use std::{iter::once, mem}; use hir::Semantics; -use ide_db::{base_db::FileRange, helpers::pick_best_token, RootDatabase}; +use ide_db::{helpers::pick_best_token, FileRange, RootDatabase}; use itertools::Itertools; use syntax::{algo, ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange}; use text_edit::{TextEdit, TextEditBuilder}; @@ -30,7 +30,7 @@ pub(crate) fn move_item( direction: Direction, ) -> Option<TextEdit> { let sema = Semantics::new(db); - let file = sema.parse(range.file_id); + let file = sema.parse_guess_edition(range.file_id); let item = if range.range.is_empty() { SyntaxElement::Token(pick_best_token( diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index bfd62e76243..066141d36f1 100644 --- 
a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -9,15 +9,14 @@ use hir::{ HirDisplay, HirFileId, InFile, LocalSource, ModuleSource, }; use ide_db::{ - base_db::{FileId, FileRange}, defs::Definition, documentation::{Documentation, HasDocs}, - RootDatabase, SymbolKind, + FileId, FileRange, RootDatabase, SymbolKind, }; use stdx::never; use syntax::{ ast::{self, HasName}, - format_smolstr, AstNode, SmolStr, SyntaxNode, TextRange, + format_smolstr, AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr, }; /// `NavigationTarget` represents an element in the editor's UI which you can @@ -98,7 +97,7 @@ impl NavigationTarget { db: &RootDatabase, module: hir::Module, ) -> UpmappingResult<NavigationTarget> { - let name = module.name(db).map(|it| it.to_smol_str()).unwrap_or_default(); + let name = module.name(db).map(|it| it.display_no_db().to_smolstr()).unwrap_or_default(); match module.declaration_source(db) { Some(InFile { value, file_id }) => { orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( @@ -153,7 +152,7 @@ impl NavigationTarget { ) } - fn from_syntax( + pub(crate) fn from_syntax( file_id: FileId, name: SmolStr, focus_range: Option<TextRange>, @@ -190,7 +189,7 @@ impl TryToNav for FileSymbol { .is_alias .then(|| self.def.name(db)) .flatten() - .map_or_else(|| self.name.clone(), |it| it.to_smol_str()), + .map_or_else(|| self.name.clone(), |it| it.display_no_db().to_smolstr()), alias: self.is_alias.then(|| self.name.clone()), kind: Some(hir::ModuleDefId::from(self.def).into()), full_range, @@ -274,9 +273,9 @@ pub(crate) trait ToNavFromAst: Sized { fn container_name(db: &RootDatabase, t: impl HasContainer) -> Option<SmolStr> { match t.container(db) { - hir::ItemContainer::Trait(it) => Some(it.name(db).to_smol_str()), + hir::ItemContainer::Trait(it) => Some(it.name(db).display_no_db().to_smolstr()), // FIXME: Handle owners of blocks correctly here - hir::ItemContainer::Module(it) => it.name(db).map(|name| name.to_smol_str()), + hir::ItemContainer::Module(it) => it.name(db).map(|name| name.display_no_db().to_smolstr()), _ => None, } } @@ -367,7 +366,7 @@ impl ToNav for hir::Module { fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> { let InFile { file_id, value } = self.definition_source(db); - let name = self.name(db).map(|it| it.to_smol_str()).unwrap_or_default(); + let name = self.name(db).map(|it| it.display_no_db().to_smolstr()).unwrap_or_default(); let (syntax, focus) = match &value { ModuleSource::SourceFile(node) => (node.syntax(), None), ModuleSource::Module(node) => (node.syntax(), node.name()), @@ -424,7 +423,10 @@ impl TryToNav for hir::ExternCrateDecl { |(FileRange { file_id, range: full_range }, focus_range)| { let mut res = NavigationTarget::from_syntax( file_id, - self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(), + self.alias_or_name(db) + .unwrap_or_else(|| self.name(db)) + .display_no_db() + .to_smolstr(), focus_range, full_range, SymbolKind::Module, @@ -532,7 +534,7 @@ impl ToNav for LocalSource { orig_range_with_focus(db, file_id, node, name).map( |(FileRange { file_id, range: full_range }, focus_range)| { - let name = local.name(db).to_smol_str(); + let name = local.name(db).display_no_db().to_smolstr(); let kind = if local.is_self(db) { SymbolKind::SelfParam } else if local.is_param(db) { @@ -565,7 +567,7 @@ impl ToNav for hir::Local { impl TryToNav for hir::Label { fn try_to_nav(&self, db: &RootDatabase) -> 
Option<UpmappingResult<NavigationTarget>> { let InFile { file_id, value } = self.source(db)?; - let name = self.name(db).to_smol_str(); + let name = self.name(db).display_no_db().to_smolstr(); Some(orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map( |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { @@ -586,7 +588,7 @@ impl TryToNav for hir::Label { impl TryToNav for hir::TypeParam { fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> { let InFile { file_id, value } = self.merge().source(db)?; - let name = self.name(db).to_smol_str(); + let name = self.name(db).display_no_db().to_smolstr(); let value = match value { Either::Left(ast::TypeOrConstParam::Type(x)) => Either::Left(x), @@ -628,7 +630,7 @@ impl TryToNav for hir::TypeOrConstParam { impl TryToNav for hir::LifetimeParam { fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> { let InFile { file_id, value } = self.source(db)?; - let name = self.name(db).to_smol_str(); + let name = self.name(db).display_no_db().to_smolstr(); Some(orig_range(db, file_id, value.syntax()).map( |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { @@ -649,7 +651,7 @@ impl TryToNav for hir::LifetimeParam { impl TryToNav for hir::ConstParam { fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> { let InFile { file_id, value } = self.merge().source(db)?; - let name = self.name(db).to_smol_str(); + let name = self.name(db).display_no_db().to_smolstr(); let value = match value { Either::Left(ast::TypeOrConstParam::Const(x)) => x, @@ -708,7 +710,7 @@ impl<T> IntoIterator for UpmappingResult<T> { } impl<T> UpmappingResult<T> { - fn map<U>(self, f: impl Fn(T) -> U) -> UpmappingResult<U> { + pub(crate) fn map<U>(self, f: impl Fn(T) -> U) -> UpmappingResult<U> { UpmappingResult { call_site: f(self.call_site), def_site: self.def_site.map(f) } } } @@ -730,13 +732,13 @@ fn orig_range_with_focus( ) } -fn orig_range_with_focus_r( +pub(crate) fn orig_range_with_focus_r( db: &RootDatabase, hir_file: HirFileId, value: TextRange, - name: Option<TextRange>, + focus_range: Option<TextRange>, ) -> UpmappingResult<(FileRange, Option<TextRange>)> { - let Some(name) = name else { return orig_range_r(db, hir_file, value) }; + let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) }; let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind; @@ -821,8 +823,8 @@ fn orig_range_with_focus_r( UpmappingResult { call_site: ( - call_site_range, - call_site_focus.and_then(|FileRange { file_id, range }| { + call_site_range.into(), + call_site_focus.and_then(|hir::FileRange { file_id, range }| { if call_site_range.file_id == file_id && call_site_range.range.contains_range(range) { Some(range) @@ -833,8 +835,8 @@ fn orig_range_with_focus_r( ), def_site: def_site.map(|(def_site_range, def_site_focus)| { ( - def_site_range, - def_site_focus.and_then(|FileRange { file_id, range }| { + def_site_range.into(), + def_site_focus.and_then(|hir::FileRange { file_id, range }| { if def_site_range.file_id == file_id && def_site_range.range.contains_range(range) { @@ -854,7 +856,7 @@ fn orig_range( value: &SyntaxNode, ) -> UpmappingResult<(FileRange, Option<TextRange>)> { UpmappingResult { - call_site: (InFile::new(hir_file, value).original_file_range_rooted(db), None), + call_site: (InFile::new(hir_file, value).original_file_range_rooted(db).into(), None), def_site: None, } } @@ 
-865,7 +867,7 @@ fn orig_range_r( value: TextRange, ) -> UpmappingResult<(FileRange, Option<TextRange>)> { UpmappingResult { - call_site: (InFile::new(hir_file, value).original_node_file_range(db).0, None), + call_site: (InFile::new(hir_file, value).original_node_file_range(db).0.into(), None), def_site: None, } } diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs index ce7a6779e27..74c50fcac35 100644 --- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs +++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs @@ -1,7 +1,7 @@ use hir::{db::DefDatabase, Semantics}; use ide_db::{ - base_db::{CrateId, FileId, FileLoader, FilePosition}, - RootDatabase, + base_db::{CrateId, FileLoader}, + FileId, FilePosition, RootDatabase, }; use itertools::Itertools; use syntax::{ @@ -26,7 +26,7 @@ use crate::NavigationTarget; /// This returns `Vec` because a module may be included from several places. pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); + let source_file = sema.parse_guess_edition(position.file_id); let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset); @@ -66,7 +66,7 @@ pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { #[cfg(test)] mod tests { - use ide_db::base_db::FileRange; + use ide_db::FileRange; use crate::fixture; diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 6f9e1b3740c..46c2d47ee82 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -11,10 +11,9 @@ use hir::{DescendPreference, PathResolution, Semantics}; use ide_db::{ - base_db::FileId, defs::{Definition, NameClass, NameRefClass}, search::{ReferenceCategory, SearchScope, UsageSearchResult}, - RootDatabase, + FileId, RootDatabase, }; use itertools::Itertools; use nohash_hasher::IntMap; @@ -25,7 +24,7 @@ use syntax::{ SyntaxNode, TextRange, TextSize, T, }; -use crate::{FilePosition, NavigationTarget, TryToNav}; +use crate::{highlight_related, FilePosition, HighlightedRange, NavigationTarget, TryToNav}; #[derive(Debug, Clone)] pub struct ReferenceSearchResult { @@ -56,7 +55,7 @@ pub(crate) fn find_all_refs( search_scope: Option<SearchScope>, ) -> Option<Vec<ReferenceSearchResult>> { let _p = tracing::info_span!("find_all_refs").entered(); - let syntax = sema.parse(position.file_id).syntax().clone(); + let syntax = sema.parse_guess_edition(position.file_id).syntax().clone(); let make_searcher = |literal_search: bool| { move |def: Definition| { let mut usages = @@ -70,7 +69,7 @@ pub(crate) fn find_all_refs( .into_iter() .map(|(file_id, refs)| { ( - file_id, + file_id.into(), refs.into_iter() .map(|file_ref| (file_ref.range, file_ref.category)) .unique() @@ -104,6 +103,11 @@ pub(crate) fn find_all_refs( } }; + // Find references for control-flow keywords. + if let Some(res) = handle_control_flow_keywords(sema, position) { + return Some(vec![res]); + } + match name_for_constructor_search(&syntax, position) { Some(name) => { let def = match NameClass::classify(sema, &name)? 
{ @@ -297,10 +301,42 @@ fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool { }).unwrap_or(false) } +fn handle_control_flow_keywords( + sema: &Semantics<'_, RootDatabase>, + FilePosition { file_id, offset }: FilePosition, +) -> Option<ReferenceSearchResult> { + let file = sema.parse_guess_edition(file_id); + let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword())?; + + let references = match token.kind() { + T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token), + T![async] => highlight_related::highlight_yield_points(sema, token), + T![loop] | T![while] | T![break] | T![continue] => { + highlight_related::highlight_break_points(sema, token) + } + T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => { + highlight_related::highlight_break_points(sema, token) + } + _ => return None, + } + .into_iter() + .map(|(file_id, ranges)| { + let ranges = ranges + .into_iter() + .map(|HighlightedRange { range, category }| (range, category)) + .collect(); + (file_id.into(), ranges) + }) + .collect(); + + Some(ReferenceSearchResult { declaration: None, references }) +} + #[cfg(test)] mod tests { use expect_test::{expect, Expect}; - use ide_db::base_db::FileId; + use ide_db::FileId; + use span::EditionedFileId; use stdx::format_to; use crate::{fixture, SearchScope}; @@ -941,7 +977,7 @@ pub(super) struct Foo$0 { check_with_scope( code, - Some(SearchScope::single_file(FileId::from_raw(2))), + Some(SearchScope::single_file(EditionedFileId::current_edition(FileId::from_raw(2)))), expect![[r#" quux Function FileId(0) 19..35 26..30 @@ -1200,7 +1236,7 @@ impl Foo { let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap(); let mut actual = String::new(); - for refs in refs { + for mut refs in refs { actual += "\n\n"; if let Some(decl) = refs.declaration { @@ -1211,7 +1247,8 @@ impl Foo { actual += "\n\n"; } - for (file_id, references) in &refs.references { + for (file_id, references) in &mut refs.references { + references.sort_by_key(|(range, _)| range.start()); for (range, category) in references { format_to!(actual, "{:?} {:?}", file_id, range); for (name, _flag) in category.iter_names() { @@ -2187,4 +2224,264 @@ fn test() { "#]], ); } + + #[test] + fn goto_ref_fn_kw() { + check( + r#" +macro_rules! 
N { + ($i:ident, $x:expr, $blk:expr) => { + for $i in 0..$x { + $blk + } + }; +} + +fn main() { + $0fn f() { + N!(i, 5, { + println!("{}", i); + return; + }); + + for i in 1..5 { + return; + } + + (|| { + return; + })(); + } +} +"#, + expect![[r#" + FileId(0) 136..138 + FileId(0) 207..213 + FileId(0) 264..270 + "#]], + ) + } + + #[test] + fn goto_ref_exit_points() { + check( + r#" +fn$0 foo() -> u32 { + if true { + return 0; + } + + 0?; + 0xDEAD_BEEF +} +"#, + expect![[r#" + FileId(0) 0..2 + FileId(0) 40..46 + FileId(0) 62..63 + FileId(0) 69..80 + "#]], + ); + } + + #[test] + fn test_ref_yield_points() { + check( + r#" +pub async$0 fn foo() { + let x = foo() + .await + .await; + || { 0.await }; + (async { 0.await }).await +} +"#, + expect![[r#" + FileId(0) 4..9 + FileId(0) 48..53 + FileId(0) 63..68 + FileId(0) 114..119 + "#]], + ); + } + + #[test] + fn goto_ref_for_kw() { + check( + r#" +fn main() { + $0for i in 1..5 { + break; + continue; + } +} +"#, + expect![[r#" + FileId(0) 16..19 + FileId(0) 40..45 + FileId(0) 55..63 + "#]], + ) + } + + #[test] + fn goto_ref_on_break_kw() { + check( + r#" +fn main() { + for i in 1..5 { + $0break; + continue; + } +} +"#, + expect![[r#" + FileId(0) 16..19 + FileId(0) 40..45 + "#]], + ) + } + + #[test] + fn goto_ref_on_break_kw_for_block() { + check( + r#" +fn main() { + 'a:{ + $0break 'a; + } +} +"#, + expect![[r#" + FileId(0) 16..19 + FileId(0) 29..37 + "#]], + ) + } + + #[test] + fn goto_ref_on_break_with_label() { + check( + r#" +fn foo() { + 'outer: loop { + break; + 'inner: loop { + 'innermost: loop { + } + $0break 'outer; + break; + } + break; + } +} +"#, + expect![[r#" + FileId(0) 15..27 + FileId(0) 39..44 + FileId(0) 127..139 + FileId(0) 178..183 + "#]], + ); + } + + #[test] + fn goto_ref_on_return_in_try() { + check( + r#" +fn main() { + fn f() { + try { + $0return; + } + + return; + } + return; +} +"#, + expect![[r#" + FileId(0) 16..18 + FileId(0) 51..57 + FileId(0) 78..84 + "#]], + ) + } + + #[test] + fn goto_ref_on_break_in_try() { + check( + r#" +fn main() { + for i in 1..100 { + let x: Result<(), ()> = try { + $0break; + }; + } +} +"#, + expect![[r#" + FileId(0) 16..19 + FileId(0) 84..89 + "#]], + ) + } + + #[test] + fn goto_ref_on_return_in_async_block() { + check( + r#" +fn main() { + $0async { + return; + } +} +"#, + expect![[r#" + FileId(0) 16..21 + FileId(0) 32..38 + "#]], + ) + } + + #[test] + fn goto_ref_on_return_in_macro_call() { + check( + r#" +//- minicore:include +//- /lib.rs +macro_rules! 
M { + ($blk:expr) => { + fn f() { + $blk + } + + $blk + }; +} + +fn main() { + M!({ + return$0; + }); + + f(); + include!("a.rs") +} + +//- /a.rs +{ + return; +} +"#, + expect![[r#" + FileId(0) 46..48 + FileId(0) 106..108 + FileId(0) 122..149 + FileId(0) 135..141 + FileId(0) 165..181 + FileId(1) 6..12 + "#]], + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index 3d08e2f3718..9581474ca7b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -6,16 +6,17 @@ use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ - base_db::{FileId, FileRange}, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, source_change::SourceChangeBuilder, - RootDatabase, + FileId, FileRange, RootDatabase, }; use itertools::Itertools; +use span::Edition; use stdx::{always, never}; use syntax::{ ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize, + ToSmolStr, }; use text_edit::TextEdit; @@ -33,7 +34,7 @@ pub(crate) fn prepare_rename( position: FilePosition, ) -> RenameResult<RangeInfo<()>> { let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); + let source_file = sema.parse_guess_edition(position.file_id); let syntax = source_file.syntax(); let res = find_definitions(&sema, syntax, position)? @@ -87,7 +88,10 @@ pub(crate) fn rename( new_name: &str, ) -> RenameResult<SourceChange> { let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); + let file_id = sema + .attach_first_edition(position.file_id) + .ok_or_else(|| format_err!("No references found at position"))?; + let source_file = sema.parse(file_id); let syntax = source_file.syntax(); let defs = find_definitions(&sema, syntax, position)?; @@ -98,7 +102,7 @@ pub(crate) fn rename( // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can // properly find "direct" usages/references. .map(|(.., def)| { - match IdentifierKind::classify(new_name)? { + match IdentifierKind::classify(Edition::CURRENT_FIXME, new_name)? { IdentifierKind::Ident => (), IdentifierKind::Lifetime => { bail!("Cannot alias reference to a lifetime identifier") @@ -109,7 +113,7 @@ pub(crate) fn rename( let mut usages = def.usages(&sema).all(); // FIXME: hack - removes the usage that triggered this rename operation. 
- match usages.references.get_mut(&position.file_id).and_then(|refs| { + match usages.references.get_mut(&file_id).and_then(|refs| { refs.iter() .position(|ref_| ref_.range.contains_inclusive(position.offset)) .map(|idx| refs.remove(idx)) @@ -119,9 +123,9 @@ pub(crate) fn rename( }; let mut source_change = SourceChange::default(); - source_change.extend(usages.references.get_mut(&position.file_id).iter().map( - |refs| (position.file_id, source_edit_from_references(refs, def, new_name)), - )); + source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| { + (position.file_id, source_edit_from_references(refs, def, new_name)) + })); Ok(source_change) }) @@ -266,7 +270,7 @@ fn find_definitions( // if the name differs from the definitions name it has to be an alias if def .name(sema.db) - .map_or(false, |it| it.to_smol_str() != name_ref.text().as_str()) + .map_or(false, |it| it.display_no_db().to_smolstr() != name_ref.text().as_str()) { Err(format_err!("Renaming aliases is currently unsupported")) } else { @@ -300,7 +304,11 @@ fn find_definitions( Err(format_err!("No references found at position")) } else { // remove duplicates, comparing `Definition`s - Ok(v.into_iter().unique_by(|&(.., def)| def).collect::<Vec<_>>().into_iter()) + Ok(v.into_iter() + .unique_by(|&(.., def)| def) + .map(|(a, b, c)| (a.into(), b, c)) + .collect::<Vec<_>>() + .into_iter()) } } Err(e) => Err(e), @@ -368,8 +376,8 @@ fn rename_to_self( let def = Definition::Local(local); let usages = def.usages(sema).all(); let mut source_change = SourceChange::default(); - source_change.extend(usages.iter().map(|(&file_id, references)| { - (file_id, source_edit_from_references(references, def, "self")) + source_change.extend(usages.iter().map(|(file_id, references)| { + (file_id.into(), source_edit_from_references(references, def, "self")) })); source_change.insert_source_edit( file_id.original_file(sema.db), @@ -390,7 +398,7 @@ fn rename_self_to_param( return Ok(SourceChange::default()); } - let identifier_kind = IdentifierKind::classify(new_name)?; + let identifier_kind = IdentifierKind::classify(Edition::CURRENT_FIXME, new_name)?; let InFile { file_id, value: self_param } = sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?; @@ -404,8 +412,8 @@ fn rename_self_to_param( } let mut source_change = SourceChange::default(); source_change.insert_source_edit(file_id.original_file(sema.db), edit); - source_change.extend(usages.iter().map(|(&file_id, references)| { - (file_id, source_edit_from_references(references, def, new_name)) + source_change.extend(usages.iter().map(|(file_id, references)| { + (file_id.into(), source_edit_from_references(references, def, new_name)) })); Ok(source_change) } diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index a68ee4f8671..5d4b8b36439 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -3,23 +3,22 @@ use std::fmt; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; use hir::{ - db::HirDatabase, AsAssocItem, AttrsWithOwner, HasAttrs, HasSource, HirFileIdExt, Semantics, + db::HirDatabase, sym, AsAssocItem, AttrsWithOwner, HasAttrs, HasSource, HirFileIdExt, Semantics, }; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::{ - base_db::{FilePosition, FileRange}, defs::Definition, documentation::docs_from_attrs, helpers::visit_file_defs, search::{FileReferenceNode, 
SearchScope}, - FxHashMap, FxHashSet, RootDatabase, SymbolKind, + FilePosition, FxHashMap, FxHashSet, RootDatabase, SymbolKind, }; use itertools::Itertools; use span::TextSize; use stdx::{always, format_to}; use syntax::{ ast::{self, AstNode}, - SmolStr, SyntaxNode, + SmolStr, SyntaxNode, ToSmolStr, }; use crate::{references, FileId, NavigationTarget, ToNav, TryToNav}; @@ -229,7 +228,7 @@ pub(crate) fn related_tests( ) -> Vec<Runnable> { let sema = Semantics::new(db); let mut res: FxHashSet<Runnable> = FxHashSet::default(); - let syntax = sema.parse(position.file_id).syntax().clone(); + let syntax = sema.parse_guess_edition(position.file_id).syntax().clone(); find_related_tests(&sema, &syntax, position, search_scope, &mut res); @@ -290,8 +289,9 @@ fn find_related_tests_in_module( let mod_source = parent_module.definition_source_range(sema.db); let file_id = mod_source.file_id.original_file(sema.db); - let mod_scope = SearchScope::file_range(FileRange { file_id, range: mod_source.value }); - let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() }; + let mod_scope = SearchScope::file_range(hir::FileRange { file_id, range: mod_source.value }); + let fn_pos = + FilePosition { file_id: file_id.into(), offset: fn_name.syntax().text_range().start() }; find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests) } @@ -332,7 +332,7 @@ pub(crate) fn runnable_fn( }; canonical_path .map(TestId::Path) - .unwrap_or(TestId::Name(def.name(sema.db).to_smol_str())) + .unwrap_or(TestId::Name(def.name(sema.db).display_no_db().to_smolstr())) }; if def.is_test(sema.db) { @@ -403,7 +403,7 @@ pub(crate) fn runnable_impl( } fn has_cfg_test(attrs: AttrsWithOwner) -> bool { - attrs.cfgs().any(|cfg| matches!(cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if s == "test")) + attrs.cfgs().any(|cfg| matches!(&cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if *s == sym::test)) } /// Creates a test mod runnable for outline modules at the top of their definition. @@ -481,7 +481,8 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> { Some(path) })(); - let test_id = path.map_or_else(|| TestId::Name(def_name.to_smol_str()), TestId::Path); + let test_id = + path.map_or_else(|| TestId::Name(def_name.display_no_db().to_smolstr()), TestId::Path); let mut nav = match def { Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def), diff --git a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs deleted file mode 100644 index 453d1836e16..00000000000 --- a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs +++ /dev/null @@ -1,58 +0,0 @@ -use hir::{db::ExpandDatabase, ProcMacros}; -use ide_db::{ - base_db::{salsa::Durability, CrateGraph, SourceDatabase}, - FxHashMap, RootDatabase, -}; -use triomphe::Arc; - -// Feature: Shuffle Crate Graph -// -// Randomizes all crate IDs in the crate graph, for debugging. 
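Several hunks in this update also replace string-based cfg atoms with interned symbols: `Analysis::from_single_file` now inserts `sym::test.clone()`, and `has_cfg_test` in runnables.rs compares against `sym::test` rather than the literal string. A hedged sketch of that pattern, assuming `hir::sym` and the `cfg` imports shown in the runnables hunk:

```rust
use cfg::{CfgAtom, CfgExpr, CfgOptions};
use hir::sym;

fn cfg_symbol_sketch() {
    // Enable cfg(test) the way the single-file setup above now does it.
    let mut cfg_options = CfgOptions::default();
    cfg_options.insert_atom(sym::test.clone());

    // Check for it by comparing against the prefilled symbol, not a string.
    let is_test =
        |cfg: &CfgExpr| matches!(cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if *s == sym::test);
    let _ = is_test;
}
```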
-// -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: Shuffle Crate Graph** -// |=== -pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { - let crate_graph = db.crate_graph(); - let proc_macros = db.proc_macros(); - - let mut shuffled_ids = crate_graph.iter().collect::<Vec<_>>(); - - let mut rng = oorandom::Rand32::new(stdx::rand::seed()); - stdx::rand::shuffle(&mut shuffled_ids, |i| rng.rand_range(0..i as u32) as usize); - - let mut new_graph = CrateGraph::default(); - let mut new_proc_macros = ProcMacros::default(); - - let mut map = FxHashMap::default(); - for old_id in shuffled_ids.iter().copied() { - let data = &crate_graph[old_id]; - let new_id = new_graph.add_crate_root( - data.root_file_id, - data.edition, - data.display_name.clone(), - data.version.clone(), - data.cfg_options.clone(), - data.potential_cfg_options.clone(), - data.env.clone(), - data.is_proc_macro, - data.origin.clone(), - ); - new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); - map.insert(old_id, new_id); - } - - for old_id in shuffled_ids.iter().copied() { - let data = &crate_graph[old_id]; - for dep in &data.dependencies { - let mut new_dep = dep.clone(); - new_dep.crate_id = map[&dep.crate_id]; - new_graph.add_dep(map[&old_id], new_dep).unwrap(); - } - } - - db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH); - db.set_proc_macros_with_durability(Arc::new(new_proc_macros), Durability::HIGH); -} diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index c5eaacdb10d..b6c9e2f6366 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -10,16 +10,15 @@ use hir::{ }; use ide_db::{ active_parameter::{callable_for_node, generic_def_for_node}, - base_db::FilePosition, documentation::{Documentation, HasDocs}, - FxIndexMap, + FilePosition, FxIndexMap, }; use stdx::format_to; use syntax::{ algo, ast::{self, AstChildren, HasArgList}, match_ast, AstNode, Direction, NodeOrToken, SyntaxElementChildren, SyntaxNode, SyntaxToken, - TextRange, TextSize, T, + TextRange, TextSize, ToSmolStr, T, }; use crate::RootDatabase; @@ -74,7 +73,7 @@ pub(crate) fn signature_help( FilePosition { file_id, offset }: FilePosition, ) -> Option<SignatureHelp> { let sema = Semantics::new(db); - let file = sema.parse(file_id); + let file = sema.parse_guess_edition(file_id); let file = file.syntax(); let token = file .token_at_offset(offset) @@ -379,7 +378,7 @@ fn add_assoc_type_bindings( for item in tr.items_with_supertraits(db) { if let AssocItem::TypeAlias(ty) = item { - let name = ty.name(db).to_smol_str(); + let name = ty.name(db).display_no_db().to_smolstr(); if !present_bindings.contains(&*name) { buf.clear(); format_to!(buf, "{} = …", name); @@ -660,7 +659,7 @@ mod tests { use std::iter; use expect_test::{expect, Expect}; - use ide_db::base_db::FilePosition; + use ide_db::FilePosition; use stdx::format_to; use test_fixture::ChangeFixture; @@ -674,7 +673,7 @@ mod tests { let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (database, FilePosition { file_id, offset }) + (database, FilePosition { file_id: file_id.into(), offset }) } #[track_caller] diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs index b49fe391bf2..41cc9c067d3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/ssr.rs 
+++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs @@ -3,7 +3,7 @@ //! depend on the ide_ssr crate. use ide_assists::{Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel}; -use ide_db::{base_db::FileRange, label::Label, source_change::SourceChange, RootDatabase}; +use ide_db::{label::Label, source_change::SourceChange, FileRange, RootDatabase}; pub(crate) fn ssr_assists( db: &RootDatabase, @@ -26,7 +26,7 @@ pub(crate) fn ssr_assists( SourceChange::from_text_edit(frange.file_id, text_edit_for_file) }; - let source_change_for_workspace = SourceChange::from(match_finder.edits()); + let source_change_for_workspace = SourceChange::from_iter(match_finder.edits()); (Some(source_change_for_file), Some(source_change_for_workspace)) } else { @@ -45,7 +45,7 @@ pub(crate) fn ssr_assists( group: Some(GroupLabel("Apply SSR".into())), target: comment_range, source_change, - trigger_signature_help: false, + command: None, }; ssr_assists.push(assist); @@ -59,9 +59,8 @@ mod tests { use expect_test::expect; use ide_assists::{Assist, AssistResolveStrategy}; use ide_db::{ - base_db::{salsa::Durability, FileRange}, - symbol_index::SymbolsDatabase, - FxHashSet, RootDatabase, + base_db::salsa::Durability, symbol_index::SymbolsDatabase, FileRange, FxHashSet, + RootDatabase, }; use test_fixture::WithFixture; use triomphe::Arc; @@ -73,7 +72,11 @@ mod tests { let mut local_roots = FxHashSet::default(); local_roots.insert(test_fixture::WORKSPACE); db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); - ssr_assists(&db, &resolve, FileRange { file_id, range: range_or_offset.into() }) + ssr_assists( + &db, + &resolve, + FileRange { file_id: file_id.into(), range: range_or_offset.into() }, + ) } #[test] @@ -143,7 +146,7 @@ mod tests { is_snippet: false, }, ), - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&apply_in_file_assist); @@ -196,7 +199,7 @@ mod tests { is_snippet: false, }, ), - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&apply_in_workspace_assist); @@ -236,7 +239,7 @@ mod tests { ), target: 10..21, source_change: None, - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&apply_in_file_assist); @@ -256,7 +259,7 @@ mod tests { ), target: 10..21, source_change: None, - trigger_signature_help: false, + command: None, } "#]] .assert_debug_eq(&apply_in_workspace_assist); diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 5eb5c87f13e..cd9b7ae2f62 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -3,12 +3,9 @@ use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; use ide_db::{ - base_db::{FileId, FileRange, SourceDatabaseExt}, - defs::Definition, - documentation::Documentation, - famous_defs::FamousDefs, - helpers::get_definition, - FxHashMap, FxHashSet, RootDatabase, + base_db::SourceDatabaseExt, defs::Definition, documentation::Documentation, + famous_defs::FamousDefs, helpers::get_definition, FileId, FileRange, FxHashMap, FxHashSet, + RootDatabase, }; use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; @@ -160,7 +157,7 @@ impl StaticIndex<'_> { .unwrap(); // hovers let sema = hir::Semantics::new(self.db); - let tokens_or_nodes = sema.parse(file_id).syntax().clone(); + let tokens_or_nodes = sema.parse_guess_edition(file_id).syntax().clone(); let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it { 
syntax::NodeOrToken::Node(_) => None, syntax::NodeOrToken::Token(it) => Some(it), @@ -234,7 +231,7 @@ impl StaticIndex<'_> { let db = &*analysis.db; let work = all_modules(db).into_iter().filter(|module| { let file_id = module.definition_source_file_id(db).original_file(db); - let source_root = db.file_source_root(file_id); + let source_root = db.file_source_root(file_id.into()); let source_root = db.source_root(source_root); !source_root.is_library }); @@ -251,7 +248,7 @@ impl StaticIndex<'_> { if visited_files.contains(&file_id) { continue; } - this.add_file(file_id); + this.add_file(file_id.into()); // mark the file visited_files.insert(file_id); } @@ -262,7 +259,7 @@ impl StaticIndex<'_> { #[cfg(test)] mod tests { use crate::{fixture, StaticIndex}; - use ide_db::{base_db::FileRange, FxHashSet}; + use ide_db::{FileRange, FxHashSet}; use syntax::TextSize; fn check_all_ranges(ra_fixture: &str) { diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index b998c0bfc65..67d6932da96 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -10,7 +10,7 @@ use ide_db::{ debug::{DebugQueryTable, TableEntry}, Query, QueryTable, }, - CompressedFileTextQuery, CrateData, FileId, ParseQuery, SourceDatabase, SourceRootId, + CompressedFileTextQuery, CrateData, ParseQuery, SourceDatabase, SourceRootId, }, symbol_index::ModuleSymbolsQuery, }; @@ -20,6 +20,7 @@ use ide_db::{ }; use itertools::Itertools; use profile::{memory_usage, Bytes}; +use span::{EditionedFileId, FileId}; use stdx::format_to; use syntax::{ast, Parse, SyntaxNode}; use triomphe::Arc; @@ -209,8 +210,8 @@ impl<const MACROS: bool> fmt::Display for SyntaxTreeStats<MACROS> { } } -impl StatCollect<FileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> { - fn collect_entry(&mut self, _: FileId, value: Option<Parse<ast::SourceFile>>) { +impl StatCollect<EditionedFileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> { + fn collect_entry(&mut self, _: EditionedFileId, value: Option<Parse<ast::SourceFile>>) { self.total += 1; self.retained += value.is_some() as usize; } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index fd8e6f40465..23185920058 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -15,6 +15,7 @@ mod tests; use hir::{DescendPreference, Name, Semantics}; use ide_db::{FxHashMap, RootDatabase, SymbolKind}; +use span::EditionedFileId; use syntax::{ ast::{self, IsString}, AstNode, AstToken, NodeOrToken, @@ -188,11 +189,14 @@ pub(crate) fn highlight( ) -> Vec<HlRange> { let _p = tracing::info_span!("highlight").entered(); let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); // Determine the root based on the given range. 
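The `highlight` hunk just above (and the later `highlight_as_html` and `view_item_tree` hunks) resolve the edition the same way: ask the semantics layer which edition the file was attached with and fall back to the current edition. A sketch of that recurring pattern; the helper name is hypothetical:

```rust
use hir::Semantics;
use ide_db::{FileId, RootDatabase};
use span::EditionedFileId;

// Hypothetical helper summarizing the pattern repeated across these hunks.
fn resolve_editioned_file_id(
    sema: &Semantics<'_, RootDatabase>,
    file_id: FileId,
) -> EditionedFileId {
    sema.attach_first_edition(file_id)
        .unwrap_or_else(|| EditionedFileId::current_edition(file_id))
}
```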
let (root, range_to_highlight) = { - let source_file = sema.parse(file_id); - let source_file = source_file.syntax(); + let file = sema.parse(file_id); + let source_file = file.syntax(); match range_to_highlight { Some(range) => { let node = match source_file.covering_element(range) { @@ -218,7 +222,7 @@ fn traverse( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, config: HighlightConfig, - file_id: FileId, + file_id: EditionedFileId, root: &SyntaxNode, krate: hir::Crate, range_to_highlight: TextRange, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index c73b6acb0d0..291073f8773 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -6,6 +6,7 @@ use ide_db::{ defs::{Definition, IdentClass, NameClass, NameRefClass}, FxHashMap, RootDatabase, SymbolKind, }; +use stdx::hash_once; use syntax::{ ast, match_ast, AstNode, AstToken, NodeOrToken, SyntaxKind::{self, *}, @@ -358,17 +359,7 @@ fn highlight_name( } fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 { - fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { - use ide_db::FxHasher; - - use std::hash::Hasher; - - let mut hasher = FxHasher::default(); - x.hash(&mut hasher); - hasher.finish() - } - - hash((name, shadow_count)) + hash_once::<ide_db::FxHasher>((name.as_str(), shadow_count)) } pub(super) fn highlight_def( diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs index e754b702dee..47ad54759a8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs @@ -1,7 +1,8 @@ //! Renders a bit of code as HTML. 
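The `calc_binding_hash` hunk above folds a local hashing closure into `stdx::hash_once`. Reconstructed from the deleted code, the helper presumably amounts to the following (sketch only, not the actual `stdx` source):

```rust
use std::hash::{Hash, Hasher};

// Hash a value once with a fresh hasher of type H and return the digest.
fn hash_once<H: Hasher + Default>(value: impl Hash) -> u64 {
    let mut hasher = H::default();
    value.hash(&mut hasher);
    hasher.finish()
}
```

With that, `hash_once::<ide_db::FxHasher>((name.as_str(), shadow_count))` hashes the (text, shadow count) pair in a single call, as the replaced code did with its inline helper.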
-use ide_db::base_db::SourceDatabase; +use hir::Semantics; use oorandom::Rand32; +use span::EditionedFileId; use stdx::format_to; use syntax::AstNode; @@ -11,8 +12,12 @@ use crate::{ }; pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { - let parse = db.parse(file_id); - + let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + let file = sema.parse(file_id); + let file = file.syntax(); fn rainbowify(seed: u64) -> String { let mut rng = Rand32::new(seed); format!( @@ -35,10 +40,10 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo macro_bang: true, syntactic_name_ref_highlighting: false, }, - file_id, + file_id.into(), None, ); - let text = parse.tree().syntax().to_string(); + let text = file.to_string(); let mut buf = String::new(); buf.push_str(STYLE); buf.push_str("<pre><code>"); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs index f9b8a22a3c0..bc1ec530076 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs @@ -3,11 +3,12 @@ use std::mem; use either::Either; -use hir::{InFile, Semantics}; +use hir::{sym, HirFileId, InFile, Semantics}; use ide_db::{ - active_parameter::ActiveParameter, base_db::FileId, defs::Definition, - documentation::docs_with_rangemap, rust_doc::is_rust_fence, SymbolKind, + active_parameter::ActiveParameter, defs::Definition, documentation::docs_with_rangemap, + rust_doc::is_rust_fence, SymbolKind, }; +use span::EditionedFileId; use syntax::{ ast::{self, AstNode, IsString, QuoteOffsets}, AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize, @@ -108,14 +109,14 @@ pub(super) fn doc_comment( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, config: HighlightConfig, - src_file_id: FileId, + src_file_id: EditionedFileId, node: &SyntaxNode, ) { let (attributes, def) = match doc_attributes(sema, node) { Some(it) => it, None => return, }; - let src_file_id = src_file_id.into(); + let src_file_id: HirFileId = src_file_id.into(); // Extract intra-doc links and emit highlights for them. 
if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) { @@ -153,7 +154,7 @@ pub(super) fn doc_comment( let mut new_comments = Vec::new(); let mut string; - for attr in attributes.by_key("doc").attrs() { + for attr in attributes.by_key(&sym::doc).attrs() { let InFile { file_id, value: src } = attrs_source_map.source_of(attr); if file_id != src_file_id { continue; @@ -271,7 +272,7 @@ fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::Stri // #[cfg_attr(..., doc = "", ...)] None => { // We gotta hunt the string token manually here - let text = attr.string_value()?; + let text = attr.string_value()?.as_str(); // FIXME: We just pick the first string literal that has the same text as the doc attribute // This means technically we might highlight the wrong one it.syntax() diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index 5f711600a29..2070022d418 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -1238,7 +1238,7 @@ fn benchmark_syntax_highlighting_parser() { }) .count() }; - assert_eq!(hash, 1169); + assert_eq!(hash, 1167); } #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs index 05cdf430efb..e241cb82bd5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs @@ -1,7 +1,5 @@ -use ide_db::{ - base_db::{FileId, SourceDatabase}, - RootDatabase, -}; +use hir::Semantics; +use ide_db::{FileId, RootDatabase}; use syntax::{ AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize, }; @@ -22,9 +20,10 @@ pub(crate) fn syntax_tree( file_id: FileId, text_range: Option<TextRange>, ) -> String { - let parse = db.parse(file_id); + let sema = Semantics::new(db); + let parse = sema.parse_guess_edition(file_id); if let Some(text_range) = text_range { - let node = match parse.tree().syntax().covering_element(text_range) { + let node = match parse.syntax().covering_element(text_range) { NodeOrToken::Node(node) => node, NodeOrToken::Token(token) => { if let Some(tree) = syntax_tree_for_string(&token, text_range) { @@ -36,7 +35,7 @@ pub(crate) fn syntax_tree( format!("{node:#?}") } else { - format!("{:#?}", parse.tree().syntax()) + format!("{:#?}", parse.syntax()) } } @@ -88,7 +87,7 @@ fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<St // Remove custom markers .replace("$0", ""); - let parsed = SourceFile::parse(&text, span::Edition::CURRENT); + let parsed = SourceFile::parse(&text, span::Edition::CURRENT_FIXME); // If the "file" parsed without errors, // return its syntax diff --git a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs index 99e24308607..30b1d4c39b3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs +++ b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs @@ -2,8 +2,8 @@ use hir::{Crate, Module, ModuleDef, Semantics}; use ide_db::{ - base_db::{CrateGraph, CrateId, FileId, SourceDatabase}, - RootDatabase, + base_db::{CrateGraph, CrateId, SourceDatabase}, + FileId, RootDatabase, }; use syntax::TextRange; @@ -66,8 +66,11 @@ fn discover_tests_in_module( let mut r = vec![]; for c in module.children(db) { - let module_name = - c.name(db).as_ref().and_then(|n| 
n.as_str()).unwrap_or("[mod without name]").to_owned(); + let module_name = c + .name(db) + .as_ref() + .map(|n| n.as_str().to_owned()) + .unwrap_or_else(|| "[mod without name]".to_owned()); let module_id = format!("{prefix_id}::{module_name}"); let module_children = discover_tests_in_module(db, c, module_id.clone(), only_in_this_file); if !module_children.is_empty() { @@ -94,7 +97,7 @@ fn discover_tests_in_module( continue; } let nav = f.try_to_nav(db).map(|r| r.call_site); - let fn_name = f.name(db).as_str().unwrap_or("[function without name]").to_owned(); + let fn_name = f.name(db).as_str().to_owned(); r.push(TestItem { id: format!("{prefix_id}::{fn_name}"), kind: TestItemKind::Function, @@ -153,7 +156,7 @@ fn find_module_id_and_test_parents( let parent = Some(id.clone()); id += "::"; let module_name = &module.name(sema.db); - let module_name = module_name.as_ref().and_then(|n| n.as_str()).unwrap_or("[mod without name]"); + let module_name = module_name.as_ref().map(|n| n.as_str()).unwrap_or("[mod without name]"); id += module_name; let nav = NavigationTarget::from_module_to_decl(sema.db, module).call_site; r.push(TestItem { diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs index b899304ef28..a09e1e85ae1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs @@ -15,10 +15,8 @@ mod on_enter; -use ide_db::{ - base_db::{FilePosition, SourceDatabase}, - RootDatabase, -}; +use ide_db::{base_db::SourceDatabase, FilePosition, RootDatabase}; +use span::EditionedFileId; use syntax::{ algo::{ancestors_at_offset, find_node_at_offset}, ast::{self, edit::IndentLevel, AstToken}, @@ -68,7 +66,7 @@ pub(crate) fn on_char_typed( if !stdx::always!(TRIGGER_CHARS.contains(char_typed)) { return None; } - let file = &db.parse(position.file_id); + let file = &db.parse(EditionedFileId::current_edition(position.file_id)); if !stdx::always!(file.tree().syntax().text().char_at(position.offset) == Some(char_typed)) { return None; } @@ -128,7 +126,7 @@ fn on_opening_bracket_typed( return None; } // FIXME: Edition - let file = file.reparse(&Indel::delete(range), span::Edition::CURRENT); + let file = file.reparse(&Indel::delete(range), span::Edition::CURRENT_FIXME); if let Some(edit) = bracket_expr(&file.tree(), offset, opening_bracket, closing_bracket) { return Some(edit); @@ -412,7 +410,7 @@ mod tests { let (offset, mut before) = extract_offset(before); let edit = TextEdit::insert(offset, char_typed.to_string()); edit.apply(&mut before); - let parse = SourceFile::parse(&before, span::Edition::CURRENT); + let parse = SourceFile::parse(&before, span::Edition::CURRENT_FIXME); on_char_typed_inner(&parse, offset, char_typed).map(|it| { it.apply(&mut before); before.to_string() diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs index 298482f2ab5..6e56bd61850 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs @@ -1,8 +1,9 @@ //! Handles the `Enter` key press. At the momently, this only continues //! comments, but should handle indent some time in the future as well. 
-use ide_db::base_db::{FilePosition, SourceDatabase}; use ide_db::RootDatabase; +use ide_db::{base_db::SourceDatabase, FilePosition}; +use span::EditionedFileId; use syntax::{ algo::find_node_at_offset, ast::{self, edit::IndentLevel, AstToken}, @@ -52,7 +53,7 @@ use text_edit::TextEdit; // // image::https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif[] pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> { - let parse = db.parse(position.file_id); + let parse = db.parse(EditionedFileId::current_edition(position.file_id)); let file = parse.tree(); let token = file.syntax().token_at_offset(position.offset).left_biased()?; diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs index 51cf45bd22b..fe532f4cc55 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs @@ -1,6 +1,5 @@ use hir::{DefWithBody, Semantics}; -use ide_db::base_db::FilePosition; -use ide_db::RootDatabase; +use ide_db::{FilePosition, RootDatabase}; use syntax::{algo::ancestors_at_offset, ast, AstNode}; // Feature: View Hir @@ -17,7 +16,7 @@ pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String { fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> { let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); + let source_file = sema.parse_guess_edition(position.file_id); let item = ancestors_at_offset(source_file.syntax(), position.offset) .filter(|it| !ast::MacroCall::can_cast(it.kind())) diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs index e072df430fc..dae79998dc4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs @@ -1,6 +1,6 @@ -use hir::db::DefDatabase; -use ide_db::base_db::FileId; -use ide_db::RootDatabase; +use hir::{db::DefDatabase, Semantics}; +use ide_db::{FileId, RootDatabase}; +use span::EditionedFileId; // Feature: Debug ItemTree // @@ -12,5 +12,9 @@ use ide_db::RootDatabase; // | VS Code | **rust-analyzer: Debug ItemTree** // |=== pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String { + let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); db.file_item_tree(file_id.into()).pretty_print(db) } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs index 826447d058d..df3f2f18b4c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs @@ -64,11 +64,7 @@ enum FieldOrTupleIdx { impl FieldOrTupleIdx { fn name(&self, db: &RootDatabase) -> String { match *self { - FieldOrTupleIdx::Field(f) => f - .name(db) - .as_str() - .map(|s| s.to_owned()) - .unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())), + FieldOrTupleIdx::Field(f) => f.name(db).as_str().to_owned(), FieldOrTupleIdx::TupleIdx(i) => format!(".{i}"), } } @@ -88,7 +84,7 @@ pub(crate) fn view_memory_layout( position: FilePosition, ) -> Option<RecursiveMemoryLayout> { let sema = Semantics::new(db); - let file = sema.parse(position.file_id); + let file = sema.parse_guess_edition(position.file_id); let token = 
pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind { SyntaxKind::IDENT => 3, @@ -189,14 +185,7 @@ pub(crate) fn view_memory_layout( | Definition::SelfType(_) => "[ROOT]".to_owned(), // def is an item - def => def - .name(db) - .map(|n| { - n.as_str() - .map(|s| s.to_owned()) - .unwrap_or_else(|| format!(".{}", n.as_tuple_index().unwrap())) - }) - .unwrap_or("[ROOT]".to_owned()), + def => def.name(db).map(|n| n.as_str().to_owned()).unwrap_or("[ROOT]".to_owned()), }; let typename = ty.display(db).to_string(); diff --git a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs index 5fb47039890..7a228375d5e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs @@ -1,6 +1,5 @@ use hir::{DefWithBody, Semantics}; -use ide_db::base_db::FilePosition; -use ide_db::RootDatabase; +use ide_db::{FilePosition, RootDatabase}; use syntax::{algo::ancestors_at_offset, ast, AstNode}; // Feature: View Mir @@ -16,7 +15,7 @@ pub(crate) fn view_mir(db: &RootDatabase, position: FilePosition) -> String { fn body_mir(db: &RootDatabase, position: FilePosition) -> Option<String> { let sema = Semantics::new(db); - let source_file = sema.parse(position.file_id); + let source_file = sema.parse_guess_edition(position.file_id); let item = ancestors_at_offset(source_file.syntax(), position.offset) .filter(|it| !ast::MacroCall::can_cast(it.kind())) diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml index 67b4164ce1f..c08ecb5c307 100644 --- a/src/tools/rust-analyzer/crates/intern/Cargo.toml +++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml @@ -18,6 +18,7 @@ dashmap.workspace = true hashbrown.workspace = true rustc-hash.workspace = true triomphe.workspace = true +sptr = "0.3.2" [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/intern/src/lib.rs b/src/tools/rust-analyzer/crates/intern/src/lib.rs index 40d18b1cf86..58327419f63 100644 --- a/src/tools/rust-analyzer/crates/intern/src/lib.rs +++ b/src/tools/rust-analyzer/crates/intern/src/lib.rs @@ -20,6 +20,9 @@ type Guard<T> = dashmap::RwLockWriteGuard< HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>, >; +mod symbol; +pub use self::symbol::{symbols as sym, Symbol}; + pub struct Interned<T: Internable + ?Sized> { arc: Arc<T>, } diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol.rs b/src/tools/rust-analyzer/crates/intern/src/symbol.rs new file mode 100644 index 00000000000..ef76192ba83 --- /dev/null +++ b/src/tools/rust-analyzer/crates/intern/src/symbol.rs @@ -0,0 +1,360 @@ +//! Attempt at flexible symbol interning, allowing to intern and free strings at runtime while also +//! supporting compile time declaration of symbols that will never be freed. 
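The new `intern::symbol` module that starts here stores either a `&'static &'static str` or an `Arc<Box<str>>` behind a single thin pointer, using the pointer's alignment niche to record which variant is live. A deliberately simplified, address-only model of that tagging idea (assumptions: both variants are at least 2-byte aligned, so the low bit is free; this is not the real `TaggedArcPtr` that follows):

```rust
const TAG: usize = 0b1;

// Pack a one-bit variant tag into the low bit of an aligned address.
fn pack(addr: usize, is_arc: bool) -> usize {
    debug_assert_eq!(addr & TAG, 0, "address must leave the low bit free");
    addr | (is_arc as usize)
}

// Recover the original address and the variant tag.
fn unpack(packed: usize) -> (usize, bool) {
    (packed & !TAG, packed & TAG != 0)
}
```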
+ +use std::{ + borrow::Borrow, + fmt, + hash::{BuildHasherDefault, Hash, Hasher}, + mem::{self, ManuallyDrop}, + ptr::NonNull, + sync::OnceLock, +}; + +use dashmap::{DashMap, SharedValue}; +use hashbrown::{hash_map::RawEntryMut, HashMap}; +use rustc_hash::FxHasher; +use sptr::Strict; +use triomphe::Arc; + +pub mod symbols; + +// some asserts for layout compatibility +const _: () = assert!(std::mem::size_of::<Box<str>>() == std::mem::size_of::<&str>()); +const _: () = assert!(std::mem::align_of::<Box<str>>() == std::mem::align_of::<&str>()); + +const _: () = assert!(std::mem::size_of::<Arc<Box<str>>>() == std::mem::size_of::<&&str>()); +const _: () = assert!(std::mem::align_of::<Arc<Box<str>>>() == std::mem::align_of::<&&str>()); + +const _: () = + assert!(std::mem::size_of::<*const *const str>() == std::mem::size_of::<TaggedArcPtr>()); +const _: () = + assert!(std::mem::align_of::<*const *const str>() == std::mem::align_of::<TaggedArcPtr>()); + +const _: () = assert!(std::mem::size_of::<Arc<Box<str>>>() == std::mem::size_of::<TaggedArcPtr>()); +const _: () = + assert!(std::mem::align_of::<Arc<Box<str>>>() == std::mem::align_of::<TaggedArcPtr>()); + +/// A pointer that points to a pointer to a `str`, it may be backed as a `&'static &'static str` or +/// `Arc<Box<str>>` but its size is that of a thin pointer. The active variant is encoded as a tag +/// in the LSB of the alignment niche. +// Note, Ideally this would encode a `ThinArc<str>` and `ThinRef<str>`/`ThinConstPtr<str>` instead of the double indirection. +#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)] +struct TaggedArcPtr { + packed: NonNull<*const str>, +} + +unsafe impl Send for TaggedArcPtr {} +unsafe impl Sync for TaggedArcPtr {} + +impl TaggedArcPtr { + const BOOL_BITS: usize = true as usize; + + const fn non_arc(r: &'static &'static str) -> Self { + assert!( + mem::align_of::<&'static &'static str>().trailing_zeros() as usize > Self::BOOL_BITS + ); + // SAFETY: The pointer is non-null as it is derived from a reference + // Ideally we would call out to `pack_arc` but for a `false` tag, unfortunately the + // packing stuff requires reading out the pointer to an integer which is not supported + // in const contexts, so here we make use of the fact that for the non-arc version the + // tag is false (0) and thus does not need touching the actual pointer value.ext) + + let packed = + unsafe { NonNull::new_unchecked((r as *const &str).cast::<*const str>().cast_mut()) }; + Self { packed } + } + + fn arc(arc: Arc<Box<str>>) -> Self { + assert!( + mem::align_of::<&'static &'static str>().trailing_zeros() as usize > Self::BOOL_BITS + ); + Self { + packed: Self::pack_arc( + // Safety: `Arc::into_raw` always returns a non null pointer + unsafe { NonNull::new_unchecked(Arc::into_raw(arc).cast_mut().cast()) }, + ), + } + } + + /// Retrieves the tag. 
+ #[inline] + pub(crate) fn try_as_arc_owned(self) -> Option<ManuallyDrop<Arc<Box<str>>>> { + // Unpack the tag from the alignment niche + let tag = Strict::addr(self.packed.as_ptr()) & Self::BOOL_BITS; + if tag != 0 { + // Safety: We checked that the tag is non-zero -> true, so we are pointing to the data offset of an `Arc` + Some(ManuallyDrop::new(unsafe { + Arc::from_raw(self.pointer().as_ptr().cast::<Box<str>>()) + })) + } else { + None + } + } + + #[inline] + fn pack_arc(ptr: NonNull<*const str>) -> NonNull<*const str> { + let packed_tag = true as usize; + + // can't use this strict provenance stuff here due to trait methods not being const + // unsafe { + // // Safety: The pointer is derived from a non-null + // NonNull::new_unchecked(Strict::map_addr(ptr.as_ptr(), |addr| { + // // Safety: + // // - The pointer is `NonNull` => it's address is `NonZero<usize>` + // // - `P::BITS` least significant bits are always zero (`Pointer` contract) + // // - `T::BITS <= P::BITS` (from `Self::ASSERTION`) + // // + // // Thus `addr >> T::BITS` is guaranteed to be non-zero. + // // + // // `{non_zero} | packed_tag` can't make the value zero. + + // (addr >> Self::BOOL_BITS) | packed_tag + // })) + // } + // so what follows is roughly what the above looks like but inlined + + let self_addr = ptr.as_ptr() as *const *const str as usize; + let addr = self_addr | packed_tag; + let dest_addr = addr as isize; + let offset = dest_addr.wrapping_sub(self_addr as isize); + + // SAFETY: The resulting pointer is guaranteed to be NonNull as we only modify the niche bytes + unsafe { NonNull::new_unchecked(ptr.as_ptr().cast::<u8>().wrapping_offset(offset).cast()) } + } + + #[inline] + pub(crate) fn pointer(self) -> NonNull<*const str> { + // SAFETY: The resulting pointer is guaranteed to be NonNull as we only modify the niche bytes + unsafe { + NonNull::new_unchecked(Strict::map_addr(self.packed.as_ptr(), |addr| { + addr & !Self::BOOL_BITS + })) + } + } + + #[inline] + pub(crate) fn as_str(&self) -> &str { + // SAFETY: We always point to a pointer to a str no matter what variant is active + unsafe { *self.pointer().as_ptr().cast::<&str>() } + } +} + +#[derive(PartialEq, Eq, Hash)] +pub struct Symbol { + repr: TaggedArcPtr, +} + +impl fmt::Debug for Symbol { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_str().fmt(f) + } +} + +const _: () = assert!(std::mem::size_of::<Symbol>() == std::mem::size_of::<NonNull<()>>()); +const _: () = assert!(std::mem::align_of::<Symbol>() == std::mem::align_of::<NonNull<()>>()); + +static MAP: OnceLock<DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>> = OnceLock::new(); + +impl Symbol { + pub fn intern(s: &str) -> Self { + let (mut shard, hash) = Self::select_shard(s); + // Atomically, + // - check if `obj` is already in the map + // - if so, copy out its entry, conditionally bumping the backing Arc and return it + // - if not, put it into a box and then into an Arc, insert it, bump the ref-count and return the copy + // This needs to be atomic (locking the shard) to avoid races with other thread, which could + // insert the same object between us looking it up and inserting it. 
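`Symbol::intern` performs the lookup-or-insert atomically under the shard's write lock, as the comment above spells out. A simplified single-lock model of that fast path (the real code uses `DashMap` shards, raw hashed entries, and manual refcount bumps; this sketch only mirrors the control flow):

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

fn intern_model(map: &Mutex<HashMap<Arc<str>, ()>>, s: &str) -> Arc<str> {
    let mut map = map.lock().unwrap();
    if let Some((key, _)) = map.get_key_value(s) {
        return Arc::clone(key); // already interned: hand out another reference
    }
    let key: Arc<str> = Arc::from(s);
    map.insert(Arc::clone(&key), ()); // first sighting: insert while still holding the lock
    key
}
```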
+ match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) { + RawEntryMut::Occupied(occ) => Self { repr: increase_arc_refcount(occ.key().0) }, + RawEntryMut::Vacant(vac) => Self { + repr: increase_arc_refcount( + vac.insert_hashed_nocheck( + hash, + SymbolProxy(TaggedArcPtr::arc(Arc::new(Box::<str>::from(s)))), + SharedValue::new(()), + ) + .0 + .0, + ), + }, + } + } + + pub fn integer(i: usize) -> Self { + match i { + 0 => symbols::INTEGER_0.clone(), + 1 => symbols::INTEGER_1.clone(), + 2 => symbols::INTEGER_2.clone(), + 3 => symbols::INTEGER_3.clone(), + 4 => symbols::INTEGER_4.clone(), + 5 => symbols::INTEGER_5.clone(), + 6 => symbols::INTEGER_6.clone(), + 7 => symbols::INTEGER_7.clone(), + 8 => symbols::INTEGER_8.clone(), + 9 => symbols::INTEGER_9.clone(), + 10 => symbols::INTEGER_10.clone(), + 11 => symbols::INTEGER_11.clone(), + 12 => symbols::INTEGER_12.clone(), + 13 => symbols::INTEGER_13.clone(), + 14 => symbols::INTEGER_14.clone(), + 15 => symbols::INTEGER_15.clone(), + i => Symbol::intern(&format!("{i}")), + } + } + + pub fn empty() -> Self { + symbols::__empty.clone() + } + + pub fn as_str(&self) -> &str { + self.repr.as_str() + } + + #[inline] + fn select_shard( + s: &str, + ) -> ( + dashmap::RwLockWriteGuard< + 'static, + HashMap<SymbolProxy, SharedValue<()>, BuildHasherDefault<FxHasher>>, + >, + u64, + ) { + let storage = MAP.get_or_init(symbols::prefill); + let hash = { + let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher()); + s.hash(&mut hasher); + hasher.finish() + }; + let shard_idx = storage.determine_shard(hash as usize); + let shard = &storage.shards()[shard_idx]; + (shard.write(), hash) + } + + #[cold] + fn drop_slow(arc: &Arc<Box<str>>) { + let (mut shard, hash) = Self::select_shard(arc); + + match Arc::count(arc) { + 0 => unreachable!(), + 1 => unreachable!(), + 2 => (), + _ => { + // Another thread has interned another copy + return; + } + } + + ManuallyDrop::into_inner( + match shard.raw_entry_mut().from_key_hashed_nocheck::<str>(hash, arc.as_ref()) { + RawEntryMut::Occupied(occ) => occ.remove_entry(), + RawEntryMut::Vacant(_) => unreachable!(), + } + .0 + .0 + .try_as_arc_owned() + .unwrap(), + ); + debug_assert_eq!(Arc::count(arc), 1); + + // Shrink the backing storage if the shard is less than 50% occupied. + if shard.len() * 2 < shard.capacity() { + shard.shrink_to_fit(); + } + } +} + +impl Drop for Symbol { + #[inline] + fn drop(&mut self) { + let Some(arc) = self.repr.try_as_arc_owned() else { + return; + }; + // When the last `Ref` is dropped, remove the object from the global map. + if Arc::count(&arc) == 2 { + // Only `self` and the global map point to the object. 
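Editor's note: before the `drop_slow` call just below completes this branch, here is the whole eviction protocol in one place, as a reduced model with a `Mutex<HashMap>`. It is a sketch only; the real `drop_slow` additionally re-checks the reference count under the shard lock to cope with a concurrent re-intern.

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex, OnceLock};

static MAP: OnceLock<Mutex<HashMap<String, Arc<str>>>> = OnceLock::new();

fn map() -> &'static Mutex<HashMap<String, Arc<str>>> {
    MAP.get_or_init(|| Mutex::new(HashMap::new()))
}

fn intern(s: &str) -> Arc<str> {
    map().lock().unwrap().entry(s.to_owned()).or_insert_with(|| Arc::from(s)).clone()
}

// Hypothetical release step standing in for `Symbol::drop`: evict the entry once the
// caller's handle and the map's own copy are the only owners left (strong count == 2).
fn release(handle: Arc<str>) {
    if Arc::strong_count(&handle) == 2 {
        map().lock().unwrap().remove(&*handle);
    }
}

fn main() {
    let a = intern("hello");
    let b = intern("hello");
    assert!(Arc::ptr_eq(&a, &b));
    release(b); // `a` still exists outside the map, so the entry stays
    assert!(map().lock().unwrap().contains_key("hello"));
    release(a); // last external handle: the entry is evicted
    assert!(!map().lock().unwrap().contains_key("hello"));
}
```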
+ + Self::drop_slow(&arc); + } + // decrement the ref count + ManuallyDrop::into_inner(arc); + } +} + +impl Clone for Symbol { + fn clone(&self) -> Self { + Self { repr: increase_arc_refcount(self.repr) } + } +} + +fn increase_arc_refcount(repr: TaggedArcPtr) -> TaggedArcPtr { + let Some(arc) = repr.try_as_arc_owned() else { + return repr; + }; + // increase the ref count + mem::forget(Arc::clone(&arc)); + repr +} + +impl fmt::Display for Symbol { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_str().fmt(f) + } +} + +// only exists so we can use `from_key_hashed_nocheck` with a &str +#[derive(Debug, PartialEq, Eq)] +struct SymbolProxy(TaggedArcPtr); + +impl Hash for SymbolProxy { + fn hash<H: Hasher>(&self, state: &mut H) { + self.0.as_str().hash(state); + } +} + +impl Borrow<str> for SymbolProxy { + fn borrow(&self) -> &str { + self.0.as_str() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn smoke_test() { + Symbol::intern("isize"); + let base_len = MAP.get().unwrap().len(); + let hello = Symbol::intern("hello"); + let world = Symbol::intern("world"); + let more_worlds = world.clone(); + let bang = Symbol::intern("!"); + let q = Symbol::intern("?"); + assert_eq!(MAP.get().unwrap().len(), base_len + 4); + let bang2 = Symbol::intern("!"); + assert_eq!(MAP.get().unwrap().len(), base_len + 4); + drop(bang2); + assert_eq!(MAP.get().unwrap().len(), base_len + 4); + drop(q); + assert_eq!(MAP.get().unwrap().len(), base_len + 3); + let default = Symbol::intern("default"); + let many_worlds = world.clone(); + assert_eq!(MAP.get().unwrap().len(), base_len + 3); + assert_eq!( + "hello default world!", + format!("{} {} {}{}", hello.as_str(), default.as_str(), world.as_str(), bang.as_str()) + ); + drop(default); + assert_eq!( + "hello world!", + format!("{} {}{}", hello.as_str(), world.as_str(), bang.as_str()) + ); + drop(many_worlds); + drop(more_worlds); + drop(hello); + drop(world); + drop(bang); + assert_eq!(MAP.get().unwrap().len(), base_len); + } +} diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs new file mode 100644 index 00000000000..2feca32ff86 --- /dev/null +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -0,0 +1,462 @@ +//! Module defining all known symbols required by the rest of rust-analyzer. +#![allow(non_upper_case_globals)] + +use std::hash::{BuildHasherDefault, Hash as _, Hasher as _}; + +use dashmap::{DashMap, SharedValue}; +use rustc_hash::FxHasher; + +use crate::{ + symbol::{SymbolProxy, TaggedArcPtr}, + Symbol, +}; + +macro_rules! define_symbols { + (@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => { + // Ideally we would be emitting `const` here, but then we no longer have stable addresses + // which is what we are relying on for equality! In the future if consts can refer to + // statics we should swap these for `const`s and have the the string literal being pointed + // to be statics to refer to such that their address is stable. 
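Editor's note: the expansion arms below generate one such `static` per known name plus a `prefill` function that seeds the interner map. A stripped-down version of the same pattern, with the `SymbolProxy`/shard plumbing of the real macro omitted and illustrative names:

```rust
use std::collections::HashSet;

macro_rules! known_names {
    ($($name:ident => $text:literal,)*) => {
        // Statics (not consts) so every entry has a stable address that can be packed
        // into a tagged pointer and compared cheaply.
        $( pub static $name: &&str = &$text; )*

        pub fn prefill() -> HashSet<&'static str> {
            let mut set = HashSet::new();
            $( set.insert(*$name); )*
            set
        }
    };
}

known_names! {
    SELF_ => "self",
    CRATE_ => "crate",
    UNDERSCORE => "_",
}

fn main() {
    let set = prefill();
    assert!(set.contains("crate"));
    // The extra indirection is what yields a `&'static &'static str` for the non-Arc variant.
    let stable: &'static &'static str = CRATE_;
    assert_eq!(*stable, "crate");
}
```

In the real macro, `prefill` also hashes each entry into the correct DashMap shard up front, so `Symbol::intern` never re-allocates a known name.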
+ $( + pub static $name: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&stringify!($name)) }; + )* + $( + pub static $alias: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&$value) }; + )* + + + pub(super) fn prefill() -> DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>> { + let mut dashmap_ = <DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>>::with_hasher(BuildHasherDefault::default()); + + let hash_thing_ = |hasher_: &BuildHasherDefault<FxHasher>, it_: &SymbolProxy| { + let mut hasher_ = std::hash::BuildHasher::build_hasher(hasher_); + it_.hash(&mut hasher_); + hasher_.finish() + }; + { + $( + + let proxy_ = SymbolProxy($name.repr); + let hash_ = hash_thing_(dashmap_.hasher(), &proxy_); + let shard_idx_ = dashmap_.determine_shard(hash_ as usize); + dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(())); + )* + $( + + let proxy_ = SymbolProxy($alias.repr); + let hash_ = hash_thing_(dashmap_.hasher(), &proxy_); + let shard_idx_ = dashmap_.determine_shard(hash_ as usize); + dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(())); + )* + } + dashmap_ + } + }; +} +define_symbols! { + @WITH_NAME: + + dotdotdot = "...", + INTEGER_0 = "0", + INTEGER_1 = "1", + INTEGER_2 = "2", + INTEGER_3 = "3", + INTEGER_4 = "4", + INTEGER_5 = "5", + INTEGER_6 = "6", + INTEGER_7 = "7", + INTEGER_8 = "8", + INTEGER_9 = "9", + INTEGER_10 = "10", + INTEGER_11 = "11", + INTEGER_12 = "12", + INTEGER_13 = "13", + INTEGER_14 = "14", + INTEGER_15 = "15", + __empty = "", + unsafe_ = "unsafe", + in_ = "in", + super_ = "super", + self_ = "self", + Self_ = "Self", + tick_static = "'static", + dollar_crate = "$crate", + MISSING_NAME = "[missing name]", + fn_ = "fn", + crate_ = "crate", + underscore = "_", + true_ = "true", + false_ = "false", + let_ = "let", + const_ = "const", + proc_dash_macro = "proc-macro", + aapcs_dash_unwind = "aapcs-unwind", + avr_dash_interrupt = "avr-interrupt", + avr_dash_non_dash_blocking_dash_interrupt = "avr-non-blocking-interrupt", + C_dash_cmse_dash_nonsecure_dash_call = "C-cmse-nonsecure-call", + C_dash_unwind = "C-unwind", + cdecl_dash_unwind = "cdecl-unwind", + fastcall_dash_unwind = "fastcall-unwind", + msp430_dash_interrupt = "msp430-interrupt", + platform_dash_intrinsic = "platform-intrinsic", + ptx_dash_kernel = "ptx-kernel", + riscv_dash_interrupt_dash_m = "riscv-interrupt-m", + riscv_dash_interrupt_dash_s = "riscv-interrupt-s", + rust_dash_call = "rust-call", + rust_dash_cold = "rust-cold", + rust_dash_intrinsic = "rust-intrinsic", + stdcall_dash_unwind = "stdcall-unwind", + system_dash_unwind = "system-unwind", + sysv64_dash_unwind = "sysv64-unwind", + thiscall_dash_unwind = "thiscall-unwind", + vectorcall_dash_unwind = "vectorcall-unwind", + win64_dash_unwind = "win64-unwind", + x86_dash_interrupt = "x86-interrupt", + + @PLAIN: + __ra_fixup, + aapcs, + add_assign, + add, + alias, + align_offset, + align, + all, + alloc_layout, + alloc, + allow_internal_unsafe, + allow, + any, + as_str, + asm, + assert, + attributes, + begin_panic, + bench, + bitand_assign, + bitand, + bitor_assign, + bitor, + bitxor_assign, + bitxor, + bool, + box_free, + Box, + boxed, + branch, + Break, + c_void, + C, + call_mut, + call_once, + call, + cdecl, + Center, + cfg_accessible, + cfg_attr, + cfg_eval, + cfg, + char, + clone, + Clone, + coerce_unsized, + column, + compile_error, + concat_bytes, + concat_idents, + concat, + const_format_args, + 
const_panic_fmt, + const_param_ty, + Context, + Continue, + copy, + Copy, + core_panic, + core, + coroutine_state, + coroutine, + count, + crate_type, + CStr, + debug_assertions, + Debug, + default, + Default, + deprecated, + deref_mut, + deref_target, + deref, + derive_const, + derive, + discriminant_kind, + discriminant_type, + dispatch_from_dyn,destruct, + div_assign, + div, + doc, + drop_in_place, + drop, + dyn_metadata, + efiapi, + eh_catch_typeinfo, + eh_personality, + env, + eq, + Eq, + Err, + exchange_malloc, + exhaustive_patterns, + export_name, + f128, + f16, + f32, + f64, + fastcall, + feature, + file, + filter_map, + fmt, + fn_mut, + fn_once_output, + fn_once, + fn_ptr_addr, + fn_ptr_trait, + format_alignment, + format_args_nl, + format_args, + format_argument, + format_arguments, + format_count, + format_placeholder, + format_unsafe_arg, + format, + freeze, + from_output, + from_residual, + from_usize, + from_yeet, + fundamental, + future_trait, + future, + Future, + ge, + get_context, + global_allocator, + global_asm, + gt, + Hash, + hidden, + html_root_url, + i128, + i16, + i32, + i64, + i8, + ignore, + Implied, + include_bytes, + include_str, + include, + index_mut, + index, + Index, + into_future, + into_iter, + IntoFuture, + IntoIter, + IntoIterator, + is_empty, + Is, + isize, + Item, + iter_mut, + iter, + Iterator, + keyword, + lang, + le, + Left, + len, + line, + llvm_asm, + local_inner_macros, + log_syntax, + lt, + macro_export, + macro_rules, + macro_use, + main, + manually_drop, + may_dangle, + maybe_uninit, + metadata_type, + min_exhaustive_patterns, + miri, + missing, + module_path, + mul_assign, + mul, + ne, + neg, + Neg, + new_binary, + new_debug, + new_display, + new_lower_exp, + new_lower_hex, + new_octal, + new_pointer, + new_unchecked, + new_upper_exp, + new_upper_hex, + new_v1_formatted, + new, + next, + no_core, + no_mangle, + no_std, + non_exhaustive, + none, + None, + not, + Not, + notable_trait, + Ok, + opaque, + ops, + option_env, + option, + Option, + Ord, + Output, + owned_box, + packed, + panic_2015, + panic_2021, + panic_bounds_check, + panic_cannot_unwind, + panic_display, + panic_fmt, + panic_impl, + panic_info, + panic_location, + panic_misaligned_pointer_dereference, + panic_nounwind, + panic, + Param, + partial_ord, + PartialEq, + PartialOrd, + path, + Pending, + phantom_data, + pieces, + pin, + pointee_trait, + pointer_like, + poll, + Poll, + prelude_import, + prelude, + proc_macro_attribute, + proc_macro_derive, + proc_macro, + quote, + range_inclusive_new, + Range, + RangeFrom, + RangeFull, + RangeInclusive, + RangeTo, + RangeToInclusive, + Ready, + receiver, + recursion_limit, + register_attr, + register_tool, + rem_assign, + rem, + repr, + result, + Result, + ResumeTy, + Right, + rust_2015, + rust_2018, + rust_2021, + rust_2024, + rust_analyzer, + Rust, + rustc_allow_incoherent_impl, + rustc_builtin_macro, + rustc_coherence_is_core, + rustc_const_panic_str, + rustc_deprecated_safe_2024, + rustc_has_incoherent_inherent_impls, + rustc_layout_scalar_valid_range_end, + rustc_layout_scalar_valid_range_start, + rustc_legacy_const_generics, + rustc_macro_transparency, + rustc_reservation_impl, + rustc_safe_intrinsic, + rustc_skip_array_during_method_dispatch, + rustc_skip_during_method_dispatch, + semitransparent, + shl_assign, + shl, + shr_assign, + shr, + simd, + sized, + slice_len_fn, + Some, + start, + std_panic, + std, + stdcall, + str, + string, + String, + stringify, + structural_peq, + structural_teq, + sub_assign, + sub, + sync, + 
system, + sysv64, + Target, + termination, + test_case, + test, + thiscall, + trace_macros, + transmute_opts, + transmute_trait, + transparent, + Try, + tuple_trait, + u128, + u16, + u32, + u64, + u8, + unadjusted, + Unknown, + unpin, + unreachable_2015, + unreachable_2021, + unreachable, + unsafe_cell, + unsize, + unstable, + usize, + v1, + va_list, + vectorcall, + wasm, + win64, + array, + boxed_slice, +} diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml index b6f90ec53b8..64ed93bbb16 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml +++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml @@ -27,6 +27,7 @@ span.workspace = true tt.workspace = true vfs-notify.workspace = true vfs.workspace = true +intern.workspace = true [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index de68b867145..8737f2246be 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -15,9 +15,10 @@ use ide_db::{ }; use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; -use project_model::{CargoConfig, ManifestPath, PackageRoot, ProjectManifest, ProjectWorkspace}; +use project_model::{ + CargoConfig, ManifestPath, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind, +}; use span::Span; -use tracing::instrument; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; pub struct LoadCargoConfig { @@ -51,7 +52,6 @@ pub fn load_workspace_at( load_workspace(workspace, &cargo_config.extra_env, load_config) } -#[instrument(skip_all)] pub fn load_workspace( ws: ProjectWorkspace, extra_env: &FxHashMap<String, String>, @@ -68,11 +68,14 @@ pub fn load_workspace( let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() - .and_then(|it| ProcMacroServer::spawn(&it, extra_env).map_err(Into::into)), + .and_then(|it| ProcMacroServer::spawn(&it, extra_env).map_err(Into::into)) + .map_err(|e| (e, true)), ProcMacroServerChoice::Explicit(path) => { - ProcMacroServer::spawn(path, extra_env).map_err(Into::into) + ProcMacroServer::spawn(path, extra_env).map_err(Into::into).map_err(|e| (e, true)) + } + ProcMacroServerChoice::None => { + Err((anyhow::format_err!("proc macro server disabled"), false)) } - ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")), }; let (crate_graph, proc_macros) = ws.to_crate_graph( @@ -87,7 +90,7 @@ pub fn load_workspace( let proc_macros = { let proc_macro_server = match &proc_macro_server { Ok(it) => Ok(it), - Err(e) => Err(e.to_string()), + Err((e, hard_err)) => Err((e.to_string(), *hard_err)), }; proc_macros .into_iter() @@ -95,7 +98,7 @@ pub fn load_workspace( ( crate_id, path.map_or_else( - |_| Err("proc macro crate is missing dylib".to_owned()), + |e| Err((e, true)), |(_, path)| { proc_macro_server.as_ref().map_err(Clone::clone).and_then( |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]), @@ -240,15 +243,34 @@ impl ProjectFolders { // register the workspace manifest as well, note that this currently causes duplicates for // non-virtual cargo workspaces! 
We ought to fix that - for manifest in workspaces.iter().filter_map(|ws| ws.manifest().map(ManifestPath::as_ref)) { - let file_set_roots: Vec<VfsPath> = vec![VfsPath::from(manifest.to_owned())]; + for ws in workspaces.iter() { + let mut file_set_roots: Vec<VfsPath> = vec![]; + let mut entries = vec![]; - let entry = vfs::loader::Entry::Files(vec![manifest.to_owned()]); + if let Some(manifest) = ws.manifest().map(ManifestPath::as_ref) { + file_set_roots.push(VfsPath::from(manifest.to_owned())); + entries.push(manifest.to_owned()); + } - res.watch.push(res.load.len()); - res.load.push(entry); - local_filesets.push(fsc.len() as u64); - fsc.add_file_set(file_set_roots) + // In case of detached files we do **not** look for a rust-analyzer.toml. + if !matches!(ws.kind, ProjectWorkspaceKind::DetachedFile { .. }) { + let ws_root = ws.workspace_root(); + let ratoml_path = { + let mut p = ws_root.to_path_buf(); + p.push("rust-analyzer.toml"); + p + }; + file_set_roots.push(VfsPath::from(ratoml_path.to_owned())); + entries.push(ratoml_path.to_owned()); + } + + if !file_set_roots.is_empty() { + let entry = vfs::loader::Entry::Files(entries); + res.watch.push(res.load.len()); + res.load.push(entry); + local_filesets.push(fsc.len() as u64); + fsc.add_file_set(file_set_roots) + } } let fsc = fsc.build(); @@ -336,8 +358,7 @@ impl SourceRootConfig { } } -/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace` -/// with an identity dummy expander. +/// Load the proc-macros for the given lib path, disabling all expanders whose names are in `ignored_macros`. pub fn load_proc_macro( server: &ProcMacroServer, path: &AbsPath, @@ -364,7 +385,7 @@ pub fn load_proc_macro( } Err(e) => { tracing::warn!("proc-macro loading for {path} failed: {e}"); - Err(e) + Err((e, true)) } } } @@ -379,7 +400,7 @@ fn load_crate_graph( ) -> RootDatabase { let ProjectWorkspace { toolchain, target_layout, .. 
} = ws; - let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); + let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok()); let mut db = RootDatabase::new(lru_cap); let mut analysis_change = ChangeWithProcMacros::new(); @@ -428,14 +449,19 @@ fn expander_to_proc_macro( expander: proc_macro_api::ProcMacro, ignored_macros: &[Box<str>], ) -> ProcMacro { - let name = From::from(expander.name()); + let name = expander.name(); let kind = match expander.kind() { proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive, proc_macro_api::ProcMacroKind::Bang => ProcMacroKind::Bang, proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, }; - let disabled = ignored_macros.iter().any(|replace| **replace == name); - ProcMacro { name, kind, expander: sync::Arc::new(Expander(expander)), disabled } + let disabled = ignored_macros.iter().any(|replace| **replace == *name); + ProcMacro { + name: intern::Symbol::intern(name), + kind, + expander: sync::Arc::new(Expander(expander)), + disabled, + } } #[derive(Debug)] diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml index 18444018e1b..57834623e84 100644 --- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml +++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml @@ -24,12 +24,13 @@ parser.workspace = true tt.workspace = true stdx.workspace = true span.workspace = true +intern.workspace = true [dev-dependencies] test-utils.workspace = true [features] -in-rust-tree = ["parser/in-rust-tree", "syntax/in-rust-tree"] +in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index 27dbc84a2b1..b6db4d2e76c 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -1,10 +1,11 @@ //! 
This module add real world mbe example for benchmark tests +use intern::Symbol; use rustc_hash::FxHashMap; use span::{Edition, Span}; use syntax::{ ast::{self, HasName}, - AstNode, SmolStr, + AstNode, }; use test_utils::{bench, bench_fixture, skip_slow_tests}; @@ -24,9 +25,7 @@ fn benchmark_parse_macro_rules() { rules .values() .map(|it| { - DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT, true) - .rules - .len() + DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT).rules.len() }) .sum() }; @@ -46,7 +45,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(&tt, |_| (), true, DUMMY, Edition::CURRENT); + let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT); assert!(res.err.is_none()); res.value.0.token_trees.len() }) @@ -58,9 +57,7 @@ fn benchmark_expand_macro_rules() { fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> { macro_rules_fixtures_tt() .into_iter() - .map(|(id, tt)| { - (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT, true)) - }) + .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT))) .collect() } @@ -121,7 +118,7 @@ fn invocation_fixtures( }, token_trees: token_trees.into_boxed_slice(), }; - if it.expand(&subtree, |_| (), true, DUMMY, Edition::CURRENT).err.is_none() { + if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() { res.push((name.clone(), subtree)); break; } @@ -226,13 +223,24 @@ fn invocation_fixtures( *seed } fn make_ident(ident: &str) -> tt::TokenTree<Span> { - tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into() + tt::Leaf::Ident(tt::Ident { + span: DUMMY, + sym: Symbol::intern(ident), + is_raw: tt::IdentIsRaw::No, + }) + .into() } fn make_punct(char: char) -> tt::TokenTree<Span> { tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into() } fn make_literal(lit: &str) -> tt::TokenTree<Span> { - tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into() + tt::Leaf::Literal(tt::Literal { + span: DUMMY, + symbol: Symbol::intern(lit), + kind: tt::LitKind::Str, + suffix: None, + }) + .into() } fn make_subtree( kind: tt::DelimiterKind, diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs index cfad8bcc0b4..1979e5171ab 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs @@ -5,17 +5,16 @@ mod matcher; mod transcriber; +use intern::Symbol; use rustc_hash::FxHashMap; use span::{Edition, Span}; -use syntax::SmolStr; -use crate::{parser::MetaVarKind, ExpandError, ExpandResult, MatchedArmIndex}; +use crate::{parser::MetaVarKind, ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex}; pub(crate) fn expand_rules( rules: &[crate::Rule], input: &tt::Subtree<Span>, marker: impl Fn(&mut Span) + Copy, - new_meta_vars: bool, call_site: Span, def_site_edition: Edition, ) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> { @@ -27,13 +26,8 @@ pub(crate) fn expand_rules( // If we find a rule that applies without errors, we're done. // Unconditionally returning the transcription here makes the // `test_repeat_bad_var` test fail. 
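Editor's note: the loop below tries each rule in order, and the comment above records why a rule that matches but fails to transcribe must not be returned unconditionally. A generic model of that "first clean expansion wins, otherwise keep the best errored match" selection (purely illustrative, not the expander's types):

```rust
// Each attempt records (value if the rule matched, error from transcription if any).
fn expand_first_clean<T: Clone, E: Clone>(
    attempts: &[(Option<T>, Option<E>)],
) -> Option<(usize, T, Option<E>)> {
    let mut fallback = None;
    for (idx, (value, err)) in attempts.iter().enumerate() {
        if let Some(value) = value {
            if err.is_none() {
                // A rule matched and transcribed without errors: done.
                return Some((idx, value.clone(), None));
            }
            if fallback.is_none() {
                // Remember the first rule that at least matched, together with its error.
                fallback = Some((idx, value.clone(), err.clone()));
            }
        }
    }
    fallback
}

fn main() {
    // Rule 0 matches but errors during transcription, rule 1 expands cleanly.
    let attempts = [(Some("partial"), Some("bad repeat")), (Some("ok"), None)];
    assert_eq!(expand_first_clean(&attempts), Some((1, "ok", None)));
}
```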
- let ExpandResult { value, err: transcribe_err } = transcriber::transcribe( - &rule.rhs, - &new_match.bindings, - marker, - new_meta_vars, - call_site, - ); + let ExpandResult { value, err: transcribe_err } = + transcriber::transcribe(&rule.rhs, &new_match.bindings, marker, call_site); if transcribe_err.is_none() { return ExpandResult::ok((value, Some(idx as u32))); } @@ -52,7 +46,7 @@ pub(crate) fn expand_rules( if let Some((match_, rule, idx)) = match_ { // if we got here, there was no match without errors let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site); + transcriber::transcribe(&rule.rhs, &match_.bindings, marker, call_site); ExpandResult { value: (value, idx.try_into().ok()), err: match_.err.or(transcribe_err) } } else { ExpandResult::new( @@ -63,7 +57,7 @@ pub(crate) fn expand_rules( }, None, ), - ExpandError::NoMatchingRule, + ExpandError::new(call_site, ExpandErrorKind::NoMatchingRule), ) } } @@ -110,12 +104,12 @@ pub(crate) fn expand_rules( /// the `Bindings` we should take. We push to the stack when we enter a /// repetition. /// -/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to +/// In other words, `Bindings` is a *multi* mapping from `Symbol` to /// `tt::TokenTree`, where the index to select a particular `TokenTree` among /// many is not a plain `usize`, but a `&[usize]`. #[derive(Debug, Default, Clone, PartialEq, Eq)] struct Bindings { - inner: FxHashMap<SmolStr, Binding>, + inner: FxHashMap<Symbol, Binding>, } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index b20d5579ca6..e69d7d14e25 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -61,25 +61,25 @@ use std::{rc::Rc, sync::Arc}; +use intern::{sym, Symbol}; use smallvec::{smallvec, SmallVec}; use span::{Edition, Span}; -use syntax::SmolStr; use tt::{iter::TtIter, DelimSpan}; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, expect_fragment, parser::{MetaVarKind, Op, RepeatKind, Separator}, - ExpandError, MetaTemplate, ValueResult, + ExpandError, ExpandErrorKind, MetaTemplate, ValueResult, }; impl Bindings { - fn push_optional(&mut self, name: &SmolStr) { - self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty)); + fn push_optional(&mut self, name: Symbol) { + self.inner.insert(name, Binding::Fragment(Fragment::Empty)); } - fn push_empty(&mut self, name: &SmolStr) { - self.inner.insert(name.clone(), Binding::Empty); + fn push_empty(&mut self, name: Symbol) { + self.inner.insert(name, Binding::Empty); } fn bindings(&self) -> impl Iterator<Item = &Binding> { @@ -127,10 +127,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>, edition: #[derive(Debug, Clone)] enum BindingKind { - Empty(SmolStr), - Optional(SmolStr), - Fragment(SmolStr, Fragment), - Missing(SmolStr, MetaVarKind), + Empty(Symbol), + Optional(Symbol), + Fragment(Symbol, Fragment), + Missing(Symbol, MetaVarKind), Nested(usize, usize), } @@ -178,20 +178,20 @@ impl BindingsBuilder { } } - fn push_empty(&mut self, idx: &mut BindingsIdx, var: &SmolStr) { + fn push_empty(&mut self, idx: &mut BindingsIdx, var: &Symbol) { self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Empty(var.clone())))); } - fn push_optional(&mut self, idx: &mut BindingsIdx, var: &SmolStr) { + fn push_optional(&mut 
self, idx: &mut BindingsIdx, var: &Symbol) { self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone())))); } - fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { + fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &Symbol, fragment: Fragment) { self.nodes[idx.0] .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment)))); } - fn push_missing(&mut self, idx: &mut BindingsIdx, var: &SmolStr, kind: MetaVarKind) { + fn push_missing(&mut self, idx: &mut BindingsIdx, var: &Symbol, kind: MetaVarKind) { self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Missing(var.clone(), kind)))); } @@ -219,10 +219,10 @@ impl BindingsBuilder { for cmd in nodes { match cmd { BindingKind::Empty(name) => { - bindings.push_empty(name); + bindings.push_empty(name.clone()); } BindingKind::Optional(name) => { - bindings.push_optional(name); + bindings.push_optional(name.clone()); } BindingKind::Fragment(name, fragment) => { bindings.inner.insert(name.clone(), Binding::Fragment(fragment.clone())); @@ -507,28 +507,40 @@ fn match_loop_inner<'t>( } OpDelimited::Op(Op::Literal(lhs)) => { if let Ok(rhs) = src.clone().expect_leaf() { - if matches!(rhs, tt::Leaf::Literal(it) if it.text == lhs.text) { + if matches!(rhs, tt::Leaf::Literal(it) if it.symbol == lhs.symbol) { item.dot.next(); } else { - res.add_err(ExpandError::UnexpectedToken); + res.add_err(ExpandError::new( + *rhs.span(), + ExpandErrorKind::UnexpectedToken, + )); item.is_error = true; } } else { - res.add_err(ExpandError::binding_error(format!("expected literal: `{lhs}`"))); + res.add_err(ExpandError::binding_error( + src.clone().next().map_or(delim_span.close, |it| it.first_span()), + format!("expected literal: `{lhs}`"), + )); item.is_error = true; } try_push!(next_items, item); } OpDelimited::Op(Op::Ident(lhs)) => { if let Ok(rhs) = src.clone().expect_leaf() { - if matches!(rhs, tt::Leaf::Ident(it) if it.text == lhs.text) { + if matches!(rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) { item.dot.next(); } else { - res.add_err(ExpandError::UnexpectedToken); + res.add_err(ExpandError::new( + *rhs.span(), + ExpandErrorKind::UnexpectedToken, + )); item.is_error = true; } } else { - res.add_err(ExpandError::binding_error(format!("expected ident: `{lhs}`"))); + res.add_err(ExpandError::binding_error( + src.clone().next().map_or(delim_span.close, |it| it.first_span()), + format!("expected ident: `{lhs}`"), + )); item.is_error = true; } try_push!(next_items, item); @@ -538,8 +550,8 @@ fn match_loop_inner<'t>( let error = if let Ok(rhs) = fork.expect_glued_punct() { let first_is_single_quote = rhs[0].char == '\''; let lhs = lhs.iter().map(|it| it.char); - let rhs = rhs.iter().map(|it| it.char); - if lhs.clone().eq(rhs) { + let rhs_ = rhs.iter().map(|it| it.char); + if lhs.clone().eq(rhs_) { // HACK: here we use `meta_result` to pass `TtIter` back to caller because // it might have been advanced multiple times. `ValueResult` is // insignificant. @@ -552,13 +564,19 @@ fn match_loop_inner<'t>( if first_is_single_quote { // If the first punct token is a single quote, that's a part of a lifetime // ident, not a punct. 
- ExpandError::UnexpectedToken + ExpandError::new( + rhs.get(1).map_or(rhs[0].span, |it| it.span), + ExpandErrorKind::UnexpectedToken, + ) } else { - let lhs: SmolStr = lhs.collect(); - ExpandError::binding_error(format!("expected punct: `{lhs}`")) + let lhs = lhs.collect::<String>(); + ExpandError::binding_error(rhs[0].span, format!("expected punct: `{lhs}`")) } } else { - ExpandError::UnexpectedToken + ExpandError::new( + src.clone().next().map_or(delim_span.close, |it| it.first_span()), + ExpandErrorKind::UnexpectedToken, + ) }; res.add_err(error); @@ -651,7 +669,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition) if let Some(item) = error_recover_item { res.bindings = bindings_builder.build(&item); } - res.add_err(ExpandError::UnexpectedToken); + res.add_err(ExpandError::new(span.open, ExpandErrorKind::UnexpectedToken)); } return res; } @@ -670,7 +688,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition) src = it; res.unmatched_tts += src.len(); } - res.add_err(ExpandError::LeftoverTokens); + res.add_err(ExpandError::new(span.open, ExpandErrorKind::LeftoverTokens)); if let Some(error_recover_item) = error_recover_item { res.bindings = bindings_builder.build(&error_recover_item); @@ -746,9 +764,10 @@ fn match_meta_var( ) -> ExpandResult<Option<Fragment>> { let fragment = match kind { MetaVarKind::Path => { - return expect_fragment(input, parser::PrefixEntryPoint::Path, edition).map(|it| { - it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) - }); + return expect_fragment(input, parser::PrefixEntryPoint::Path, edition, delim_span) + .map(|it| { + it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) + }); } MetaVarKind::Expr => { // `expr` should not match underscores, let expressions, or inline const. 
The latter @@ -759,39 +778,58 @@ fn match_meta_var( // [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576 match input.peek_n(0) { Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) - if it.text == "_" || it.text == "let" || it.text == "const" => + if it.sym == sym::underscore + || it.sym == sym::let_ + || it.sym == sym::const_ => { - return ExpandResult::only_err(ExpandError::NoMatchingRule) + return ExpandResult::only_err(ExpandError::new( + it.span, + ExpandErrorKind::NoMatchingRule, + )) } _ => {} }; - return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition).map(|tt| { - tt.map(|tt| match tt { - tt::TokenTree::Leaf(leaf) => tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), - token_trees: Box::new([leaf.into()]), - }, - tt::TokenTree::Subtree(mut s) => { - if s.delimiter.kind == tt::DelimiterKind::Invisible { - s.delimiter.kind = tt::DelimiterKind::Parenthesis; + return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition, delim_span) + .map(|tt| { + tt.map(|tt| match tt { + tt::TokenTree::Leaf(leaf) => tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), + token_trees: Box::new([leaf.into()]), + }, + tt::TokenTree::Subtree(mut s) => { + if s.delimiter.kind == tt::DelimiterKind::Invisible { + s.delimiter.kind = tt::DelimiterKind::Parenthesis; + } + s } - s - } - }) - .map(Fragment::Expr) - }); + }) + .map(Fragment::Expr) + }); } MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => { + let span = input.next_span(); let tt_result = match kind { MetaVarKind::Ident => input .expect_ident() .map(|ident| tt::Leaf::from(ident.clone()).into()) - .map_err(|()| ExpandError::binding_error("expected ident")), - MetaVarKind::Tt => { - expect_tt(input).map_err(|()| ExpandError::binding_error("expected token tree")) - } - MetaVarKind::Lifetime => expect_lifetime(input) - .map_err(|()| ExpandError::binding_error("expected lifetime")), + .map_err(|()| { + ExpandError::binding_error( + span.unwrap_or(delim_span.close), + "expected ident", + ) + }), + MetaVarKind::Tt => expect_tt(input).map_err(|()| { + ExpandError::binding_error( + span.unwrap_or(delim_span.close), + "expected token tree", + ) + }), + MetaVarKind::Lifetime => expect_lifetime(input).map_err(|()| { + ExpandError::binding_error( + span.unwrap_or(delim_span.close), + "expected lifetime", + ) + }), MetaVarKind::Literal => { let neg = eat_char(input, '-'); input @@ -806,7 +844,12 @@ fn match_meta_var( }), } }) - .map_err(|()| ExpandError::binding_error("expected literal")) + .map_err(|()| { + ExpandError::binding_error( + span.unwrap_or(delim_span.close), + "expected literal", + ) + }) } _ => unreachable!(), }; @@ -821,10 +864,10 @@ fn match_meta_var( MetaVarKind::Item => parser::PrefixEntryPoint::Item, MetaVarKind::Vis => parser::PrefixEntryPoint::Vis, }; - expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens)) + expect_fragment(input, fragment, edition, delim_span).map(|it| it.map(Fragment::Tokens)) } -fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { +fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) { for op in pattern.iter() { match op { Op::Var { name, .. 
} => collector_fun(name.clone()), @@ -908,13 +951,13 @@ fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) -> let mut fork = iter.clone(); let ok = match separator { Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { - Ok(rhs) => rhs.text == lhs.text, + Ok(rhs) => rhs.sym == lhs.sym, Err(_) => false, }, Separator::Literal(lhs) => match fork.expect_literal() { Ok(rhs) => match rhs { - tt::Leaf::Literal(rhs) => rhs.text == lhs.text, - tt::Leaf::Ident(rhs) => rhs.text == lhs.text, + tt::Leaf::Literal(rhs) => rhs.symbol == lhs.symbol, + tt::Leaf::Ident(rhs) => rhs.sym == lhs.symbol, tt::Leaf::Punct(_) => false, }, Err(_) => false, diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs index c09cbd1d071..286bd748cbe 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs @@ -1,36 +1,39 @@ //! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` +use intern::{sym, Symbol}; use span::Span; -use syntax::SmolStr; use tt::Delimiter; use crate::{ expander::{Binding, Bindings, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, - CountError, ExpandError, ExpandResult, MetaTemplate, + ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate, }; impl Bindings { - fn get(&self, name: &str) -> Result<&Binding, ExpandError> { + fn get(&self, name: &Symbol, span: Span) -> Result<&Binding, ExpandError> { match self.inner.get(name) { Some(binding) => Ok(binding), - None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))), + None => Err(ExpandError::new( + span, + ExpandErrorKind::UnresolvedBinding(Box::new(Box::from(name.as_str()))), + )), } } fn get_fragment( &self, - name: &str, + name: &Symbol, mut span: Span, nesting: &mut [NestingState], marker: impl Fn(&mut Span), ) -> Result<Fragment, ExpandError> { macro_rules! 
binding_err { - ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) }; + ($($arg:tt)*) => { ExpandError::binding_error(span, format!($($arg)*)) }; } - let mut b = self.get(name)?; + let mut b = self.get(name, span)?; for nesting_state in nesting.iter_mut() { nesting_state.hit = true; b = match b { @@ -97,8 +100,9 @@ impl Bindings { | MetaVarKind::Expr | MetaVarKind::Ident => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_static("missing"), + sym: sym::missing.clone(), span, + is_raw: tt::IdentIsRaw::No, }))) } MetaVarKind::Lifetime => { @@ -111,16 +115,18 @@ impl Bindings { spacing: tt::Spacing::Joint, })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_static("missing"), + sym: sym::missing.clone(), span, + is_raw: tt::IdentIsRaw::No, })), ]), })) } MetaVarKind::Literal => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_static("\"missing\""), + sym: sym::missing.clone(), span, + is_raw: tt::IdentIsRaw::No, }))) } } @@ -139,10 +145,9 @@ pub(super) fn transcribe( template: &MetaTemplate, bindings: &Bindings, marker: impl Fn(&mut Span) + Copy, - new_meta_vars: bool, call_site: Span, ) -> ExpandResult<tt::Subtree<Span>> { - let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site }; + let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site }; let mut arena: Vec<tt::TokenTree<Span>> = Vec::new(); expand_subtree(&mut ctx, template, None, &mut arena, marker) } @@ -162,7 +167,6 @@ struct NestingState { struct ExpandCtx<'a> { bindings: &'a Bindings, nesting: Vec<NestingState>, - new_meta_vars: bool, call_site: Span, } @@ -236,8 +240,10 @@ fn expand_subtree( ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |nest| nest.idx); arena.push( tt::Leaf::Literal(tt::Literal { - text: index.to_string().into(), + symbol: Symbol::integer(index), span: ctx.call_site, + kind: tt::LitKind::Integer, + suffix: None, }) .into(), ); @@ -249,14 +255,16 @@ fn expand_subtree( }); arena.push( tt::Leaf::Literal(tt::Literal { - text: length.to_string().into(), + symbol: Symbol::integer(length), span: ctx.call_site, + kind: tt::LitKind::Integer, + suffix: None, }) .into(), ); } Op::Count { name, depth } => { - let mut binding = match ctx.bindings.get(name.as_str()) { + let mut binding = match ctx.bindings.get(name, ctx.call_site) { Ok(b) => b, Err(e) => { if err.is_none() { @@ -292,30 +300,14 @@ fn expand_subtree( } } - let res = if ctx.new_meta_vars { - count(binding, 0, depth.unwrap_or(0)) - } else { - count_old(binding, 0, *depth) - }; + let res = count(binding, 0, depth.unwrap_or(0)); - let c = match res { - Ok(c) => c, - Err(e) => { - // XXX: It *might* make sense to emit a dummy integer value like `0` here. - // That would type inference a bit more robust in cases like - // `v[${count(t)}]` where index doesn't matter, but also could lead to - // wrong infefrence for cases like `tup.${count(t)}` where index itself - // does matter. 
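Editor's note: the `${index()}` / `${len()}` arms above now build their integer literals through `Symbol::integer`, which clones a pre-interned symbol for 0..=15 (the `INTEGER_*` entries in `symbols.rs`) and only formats and interns larger values. A small model of that fast path (a sketch over plain `String`s, not the `Symbol` type):

```rust
// Stand-in for `Symbol::integer`: small indices reuse pre-made values, larger ones allocate.
fn integer_text(i: usize) -> String {
    const SMALL: [&str; 16] = [
        "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15",
    ];
    match SMALL.get(i) {
        Some(s) => (*s).to_owned(), // real code: clone the pre-interned `Symbol` static
        None => i.to_string(),      // real code: `Symbol::intern(&format!("{i}"))`
    }
}

fn main() {
    assert_eq!(integer_text(7), "7");
    assert_eq!(integer_text(1234), "1234");
}
```

(The removed lines that follow are the old error handling for `count`, which can no longer fail now that it returns a plain `usize`.)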
- if err.is_none() { - err = Some(e.into()); - } - continue; - } - }; arena.push( tt::Leaf::Literal(tt::Literal { - text: c.to_string().into(), + symbol: Symbol::integer(res), span: ctx.call_site, + suffix: None, + kind: tt::LitKind::Integer, }) .into(), ); @@ -335,16 +327,16 @@ fn expand_subtree( fn expand_var( ctx: &mut ExpandCtx<'_>, - v: &SmolStr, + v: &Symbol, id: Span, marker: impl Fn(&mut Span), ) -> ExpandResult<Fragment> { // We already handle $crate case in mbe parser - debug_assert!(v != "crate"); + debug_assert!(*v != sym::crate_); match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) { Ok(it) => ExpandResult::ok(it), - Err(ExpandError::UnresolvedBinding(_)) => { + Err(e) if matches!(e.inner.1, ExpandErrorKind::UnresolvedBinding(_)) => { // Note that it is possible to have a `$var` inside a macro which is not bound. // For example: // ``` @@ -363,7 +355,12 @@ fn expand_var( token_trees: Box::new([ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) .into(), - tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(), + tt::Leaf::from(tt::Ident { + sym: v.clone(), + span: id, + is_raw: tt::IdentIsRaw::No, + }) + .into(), ]), } .into(); @@ -421,7 +418,7 @@ fn expand_repeat( } .into(), ), - err: Some(ExpandError::LimitExceeded), + err: Some(ExpandError::new(ctx.call_site, ExpandErrorKind::LimitExceeded)), }; } @@ -467,16 +464,16 @@ fn expand_repeat( let tt = tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(ctx.call_site), token_trees: buf.into_boxed_slice(), - } - .into(); + }; if RepeatKind::OneOrMore == kind && counter == 0 { + let span = tt.delimiter.open; return ExpandResult { - value: Fragment::Tokens(tt), - err: Some(ExpandError::UnexpectedToken), + value: Fragment::Tokens(tt.into()), + err: Some(ExpandError::new(span, ExpandErrorKind::UnexpectedToken)), }; } - ExpandResult { value: Fragment::Tokens(tt), err } + ExpandResult { value: Fragment::Tokens(tt.into()), err } } fn push_fragment(ctx: &ExpandCtx<'_>, buf: &mut Vec<tt::TokenTree<Span>>, fragment: Fragment) { @@ -543,44 +540,16 @@ fn fix_up_and_push_path_tt( /// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth /// defined by the metavar expression. -fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> Result<usize, CountError> { +fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> usize { match binding { Binding::Nested(bs) => { if depth_curr == depth_max { - Ok(bs.len()) + bs.len() } else { bs.iter().map(|b| count(b, depth_curr + 1, depth_max)).sum() } } - Binding::Empty => Ok(0), - Binding::Fragment(_) | Binding::Missing(_) => Ok(1), - } -} - -fn count_old( - binding: &Binding, - our_depth: usize, - count_depth: Option<usize>, -) -> Result<usize, CountError> { - match binding { - Binding::Nested(bs) => match count_depth { - None => bs.iter().map(|b| count_old(b, our_depth + 1, None)).sum(), - Some(0) => Ok(bs.len()), - Some(d) => bs.iter().map(|b| count_old(b, our_depth + 1, Some(d - 1))).sum(), - }, - Binding::Empty => Ok(0), - Binding::Fragment(_) | Binding::Missing(_) => { - if our_depth == 0 { - // `${count(t)}` is placed inside the innermost repetition. This includes cases - // where `t` is not a repeated fragment. - Err(CountError::Misplaced) - } else if count_depth.is_none() { - Ok(1) - } else { - // We've reached at the innermost repeated fragment, but the user wants us to go - // further! 
- Err(CountError::OutOfBounds) - } - } + Binding::Empty => 0, + Binding::Fragment(_) | Binding::Missing(_) => 1, } } diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs index b06c6cee12d..568490d5734 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs @@ -15,10 +15,11 @@ mod to_parser_input; mod benchmark; use span::{Edition, Span, SyntaxContextId}; -use stdx::impl_from; use tt::iter::TtIter; +use tt::DelimSpan; use std::fmt; +use std::sync::Arc; use crate::parser::{MetaTemplate, MetaVarKind, Op}; @@ -27,9 +28,9 @@ pub use ::parser::TopEntryPoint; pub use tt::{Delimiter, DelimiterKind, Punct}; pub use crate::syntax_bridge::{ - parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, - syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, - DocCommentDesugarMode, SpanMapper, + desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree, + parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified, + token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper, }; pub use crate::syntax_bridge::dummy_test_span_utils::*; @@ -64,39 +65,45 @@ impl fmt::Display for ParseError { } #[derive(Debug, PartialEq, Eq, Clone, Hash)] -pub enum ExpandError { +pub struct ExpandError { + pub inner: Arc<(Span, ExpandErrorKind)>, +} +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +pub enum ExpandErrorKind { BindingError(Box<Box<str>>), UnresolvedBinding(Box<Box<str>>), LeftoverTokens, - ConversionError, LimitExceeded, NoMatchingRule, UnexpectedToken, - CountError(CountError), } -impl_from!(CountError for ExpandError); - impl ExpandError { - fn binding_error(e: impl Into<Box<str>>) -> ExpandError { - ExpandError::BindingError(Box::new(e.into())) + fn new(span: Span, kind: ExpandErrorKind) -> ExpandError { + ExpandError { inner: Arc::new((span, kind)) } + } + fn binding_error(span: Span, e: impl Into<Box<str>>) -> ExpandError { + ExpandError { inner: Arc::new((span, ExpandErrorKind::BindingError(Box::new(e.into())))) } } } - impl fmt::Display for ExpandError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.1.fmt(f) + } +} + +impl fmt::Display for ExpandErrorKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"), - ExpandError::UnexpectedToken => f.write_str("unexpected token in input"), - ExpandError::BindingError(e) => f.write_str(e), - ExpandError::UnresolvedBinding(binding) => { + ExpandErrorKind::NoMatchingRule => f.write_str("no rule matches input tokens"), + ExpandErrorKind::UnexpectedToken => f.write_str("unexpected token in input"), + ExpandErrorKind::BindingError(e) => f.write_str(e), + ExpandErrorKind::UnresolvedBinding(binding) => { f.write_str("could not find binding ")?; f.write_str(binding) } - ExpandError::ConversionError => f.write_str("could not convert tokens"), - ExpandError::LimitExceeded => f.write_str("Expand exceed limit"), - ExpandError::LeftoverTokens => f.write_str("leftover tokens"), - ExpandError::CountError(e) => e.fmt(f), + ExpandErrorKind::LimitExceeded => f.write_str("Expand exceed limit"), + ExpandErrorKind::LeftoverTokens => f.write_str("leftover tokens"), } } } @@ -144,9 +151,7 @@ impl DeclarativeMacro { /// The old, `macro_rules! m {}` flavor. 
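Editor's note: the reworked error type above attaches the offending `Span` to every `ExpandErrorKind` and shares the pair behind an `Arc`, so cloning an error (which happens freely during matching) stays a pointer bump. A reduced, standalone sketch of that layout, with a toy `Span` standing in for `span::Span`:

```rust
use std::fmt;
use std::sync::Arc;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct Span(u32); // stand-in for the real `span::Span`

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum ExpandErrorKind {
    UnexpectedToken,
    LeftoverTokens,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct ExpandError {
    inner: Arc<(Span, ExpandErrorKind)>,
}

impl ExpandError {
    fn new(span: Span, kind: ExpandErrorKind) -> Self {
        Self { inner: Arc::new((span, kind)) }
    }
    fn span(&self) -> Span {
        self.inner.0
    }
}

impl fmt::Display for ExpandError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.inner.1 {
            ExpandErrorKind::UnexpectedToken => f.write_str("unexpected token in input"),
            ExpandErrorKind::LeftoverTokens => f.write_str("leftover tokens"),
        }
    }
}

fn main() {
    let err = ExpandError::new(Span(42), ExpandErrorKind::UnexpectedToken);
    assert_eq!(err.span(), Span(42));
    assert_eq!(err.to_string(), "unexpected token in input");
    let cloned = err.clone(); // cheap: bumps the Arc, no new allocation
    assert!(Arc::ptr_eq(&err.inner, &cloned.inner));
    assert_eq!(ExpandError::new(Span(7), ExpandErrorKind::LeftoverTokens).to_string(), "leftover tokens");
}
```

The `parse_macro_rules`/`parse_macro2` signatures that follow also drop the `new_meta_vars` flag, now that only the newer metavariable-expression behaviour is kept.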
pub fn parse_macro_rules( tt: &tt::Subtree<Span>, - edition: impl Copy + Fn(SyntaxContextId) -> Edition, - // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) - new_meta_vars: bool, + ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition, ) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing @@ -156,7 +161,7 @@ impl DeclarativeMacro { let mut err = None; while src.len() > 0 { - let rule = match Rule::parse(edition, &mut src, new_meta_vars) { + let rule = match Rule::parse(ctx_edition, &mut src) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -186,9 +191,7 @@ impl DeclarativeMacro { pub fn parse_macro2( args: Option<&tt::Subtree<Span>>, body: &tt::Subtree<Span>, - edition: impl Copy + Fn(SyntaxContextId) -> Edition, - // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) - new_meta_vars: bool, + ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition, ) -> DeclarativeMacro { let mut rules = Vec::new(); let mut err = None; @@ -197,8 +200,8 @@ impl DeclarativeMacro { cov_mark::hit!(parse_macro_def_simple); let rule = (|| { - let lhs = MetaTemplate::parse_pattern(edition, args)?; - let rhs = MetaTemplate::parse_template(edition, body, new_meta_vars)?; + let lhs = MetaTemplate::parse_pattern(ctx_edition, args)?; + let rhs = MetaTemplate::parse_template(ctx_edition, body)?; Ok(crate::Rule { lhs, rhs }) })(); @@ -211,7 +214,7 @@ impl DeclarativeMacro { cov_mark::hit!(parse_macro_def_rules); let mut src = TtIter::new(body); while src.len() > 0 { - let rule = match Rule::parse(edition, &mut src, new_meta_vars) { + let rule = match Rule::parse(ctx_edition, &mut src) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -252,11 +255,10 @@ impl DeclarativeMacro { &self, tt: &tt::Subtree<Span>, marker: impl Fn(&mut Span) + Copy, - new_meta_vars: bool, call_site: Span, def_site_edition: Edition, ) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> { - expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site, def_site_edition) + expander::expand_rules(&self.rules, tt, marker, call_site, def_site_edition) } } @@ -264,7 +266,6 @@ impl Rule { fn parse( edition: impl Copy + Fn(SyntaxContextId) -> Edition, src: &mut TtIter<'_, Span>, - new_meta_vars: bool, ) -> Result<Self, ParseError> { let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; @@ -272,7 +273,7 @@ impl Rule { let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; let lhs = MetaTemplate::parse_pattern(edition, lhs)?; - let rhs = MetaTemplate::parse_template(edition, rhs, new_meta_vars)?; + let rhs = MetaTemplate::parse_template(edition, rhs)?; Ok(crate::Rule { lhs, rhs }) } @@ -360,14 +361,15 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> { } } -fn expect_fragment<S: Copy + fmt::Debug>( - tt_iter: &mut TtIter<'_, S>, +fn expect_fragment( + tt_iter: &mut TtIter<'_, Span>, entry_point: ::parser::PrefixEntryPoint, edition: ::parser::Edition, -) -> ExpandResult<Option<tt::TokenTree<S>>> { + delim_span: DelimSpan<Span>, +) -> ExpandResult<Option<tt::TokenTree<Span>>> { use ::parser; let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice()); - let parser_input = to_parser_input::to_parser_input(&buffer); + let parser_input = to_parser_input::to_parser_input(edition, &buffer); let 
tree_traversal = entry_point.parse(&parser_input, edition); let mut cursor = buffer.begin(); let mut error = false; @@ -392,7 +394,10 @@ fn expect_fragment<S: Copy + fmt::Debug>( } let err = if error || !cursor.is_root() { - Some(ExpandError::binding_error(format!("expected {entry_point:?}"))) + Some(ExpandError::binding_error( + buffer.begin().token_tree().map_or(delim_span.close, |tt| tt.span()), + format!("expected {entry_point:?}"), + )) } else { None }; diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs index 5c499c06b15..218c04640f1 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs @@ -4,8 +4,8 @@ use std::sync::Arc; use arrayvec::ArrayVec; +use intern::{sym, Symbol}; use span::{Edition, Span, SyntaxContextId}; -use syntax::SmolStr; use tt::iter::TtIter; use crate::ParseError; @@ -31,15 +31,14 @@ impl MetaTemplate { edition: impl Copy + Fn(SyntaxContextId) -> Edition, pattern: &tt::Subtree<Span>, ) -> Result<Self, ParseError> { - MetaTemplate::parse(edition, pattern, Mode::Pattern, false) + MetaTemplate::parse(edition, pattern, Mode::Pattern) } pub(crate) fn parse_template( edition: impl Copy + Fn(SyntaxContextId) -> Edition, template: &tt::Subtree<Span>, - new_meta_vars: bool, ) -> Result<Self, ParseError> { - MetaTemplate::parse(edition, template, Mode::Template, new_meta_vars) + MetaTemplate::parse(edition, template, Mode::Template) } pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> { @@ -50,13 +49,12 @@ impl MetaTemplate { edition: impl Copy + Fn(SyntaxContextId) -> Edition, tt: &tt::Subtree<Span>, mode: Mode, - new_meta_vars: bool, ) -> Result<Self, ParseError> { let mut src = TtIter::new(tt); let mut res = Vec::new(); while let Some(first) = src.peek_n(0) { - let op = next_op(edition, first, &mut src, mode, new_meta_vars)?; + let op = next_op(edition, first, &mut src, mode)?; res.push(op); } @@ -67,12 +65,12 @@ impl MetaTemplate { #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum Op { Var { - name: SmolStr, + name: Symbol, kind: Option<MetaVarKind>, id: Span, }, Ignore { - name: SmolStr, + name: Symbol, id: Span, }, Index { @@ -82,7 +80,7 @@ pub(crate) enum Op { depth: usize, }, Count { - name: SmolStr, + name: Symbol, // FIXME: `usize`` once we drop support for 1.76 depth: Option<usize>, }, @@ -138,8 +136,8 @@ impl PartialEq for Separator { use Separator::*; match (self, other) { - (Ident(a), Ident(b)) => a.text == b.text, - (Literal(a), Literal(b)) => a.text == b.text, + (Ident(a), Ident(b)) => a.sym == b.sym, + (Literal(a), Literal(b)) => a.symbol == b.symbol, (Puncts(a), Puncts(b)) if a.len() == b.len() => { let a_iter = a.iter().map(|a| a.char); let b_iter = b.iter().map(|b| b.char); @@ -161,7 +159,6 @@ fn next_op( first_peeked: &tt::TokenTree<Span>, src: &mut TtIter<'_, Span>, mode: Mode, - new_meta_vars: bool, ) -> Result<Op, ParseError> { let res = match first_peeked { tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. 
})) => { @@ -181,14 +178,14 @@ fn next_op( tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => { let (separator, kind) = parse_repeat(src)?; - let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?; + let tokens = MetaTemplate::parse(edition, subtree, mode)?; Op::Repeat { tokens, separator: separator.map(Arc::new), kind } } tt::DelimiterKind::Brace => match mode { Mode::Template => { - parse_metavar_expr(new_meta_vars, &mut TtIter::new(subtree)).map_err( - |()| ParseError::unexpected("invalid metavariable expression"), - )? + parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| { + ParseError::unexpected("invalid metavariable expression") + })? } Mode::Pattern => { return Err(ParseError::unexpected( @@ -203,19 +200,23 @@ fn next_op( } }, tt::TokenTree::Leaf(leaf) => match leaf { - tt::Leaf::Ident(ident) if ident.text == "crate" => { + tt::Leaf::Ident(ident) if ident.sym == sym::crate_ => { // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. - Op::Ident(tt::Ident { text: "$crate".into(), span: ident.span }) + Op::Ident(tt::Ident { + sym: sym::dollar_crate.clone(), + span: ident.span, + is_raw: tt::IdentIsRaw::No, + }) } tt::Leaf::Ident(ident) => { let kind = eat_fragment_kind(edition, src, mode)?; - let name = ident.text.clone(); + let name = ident.sym.clone(); let id = ident.span; Op::Var { name, kind, id } } tt::Leaf::Literal(lit) if is_boolean_literal(lit) => { let kind = eat_fragment_kind(edition, src, mode)?; - let name = lit.text.clone(); + let name = lit.symbol.clone(); let id = lit.span; Op::Var { name, kind, id } } @@ -256,7 +257,7 @@ fn next_op( tt::TokenTree::Subtree(subtree) => { src.next().expect("first token already peeked"); - let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?; + let tokens = MetaTemplate::parse(edition, subtree, mode)?; Op::Subtree { tokens, delimiter: subtree.delimiter } } }; @@ -273,7 +274,7 @@ fn eat_fragment_kind( let ident = src .expect_ident() .map_err(|()| ParseError::unexpected("missing fragment specifier"))?; - let kind = match ident.text.as_str() { + let kind = match ident.sym.as_str() { "path" => MetaVarKind::Path, "ty" => MetaVarKind::Ty, "pat" => match edition(ident.span.ctx) { @@ -299,7 +300,7 @@ fn eat_fragment_kind( } fn is_boolean_literal(lit: &tt::Literal<Span>) -> bool { - matches!(lit.text.as_str(), "true" | "false") + matches!(lit.symbol.as_str(), "true" | "false") } fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, RepeatKind), ParseError> { @@ -339,7 +340,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat Err(ParseError::InvalidRepeat) } -fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result<Op, ()> { +fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> { let func = src.expect_ident()?; let args = src.expect_subtree()?; @@ -349,23 +350,19 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result let mut args = TtIter::new(args); - let op = match &*func.text { - "ignore" => { - if new_meta_vars { - args.expect_dollar()?; - } + let op = match &func.sym { + s if sym::ignore == *s => { + args.expect_dollar()?; let ident = args.expect_ident()?; - Op::Ignore { name: ident.text.clone(), id: ident.span } + Op::Ignore { name: ident.sym.clone(), id: ident.span } } - "index" => Op::Index { depth: parse_depth(&mut args)? }, - "len" => Op::Len { depth: parse_depth(&mut args)? 
}, - "count" => { - if new_meta_vars { - args.expect_dollar()?; - } + s if sym::index == *s => Op::Index { depth: parse_depth(&mut args)? }, + s if sym::len == *s => Op::Len { depth: parse_depth(&mut args)? }, + s if sym::count == *s => { + args.expect_dollar()?; let ident = args.expect_ident()?; let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None }; - Op::Count { name: ident.text.clone(), depth } + Op::Count { name: ident.sym.clone(), depth } } _ => return Err(()), }; @@ -380,9 +377,11 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> { if src.len() == 0 { Ok(0) - } else if let tt::Leaf::Literal(lit) = src.expect_literal()? { + } else if let tt::Leaf::Literal(tt::Literal { symbol: text, suffix: None, .. }) = + src.expect_literal()? + { // Suffixes are not allowed. - lit.text.parse().map_err(|_| ()) + text.as_str().parse().map_err(|_| ()) } else { Err(()) } diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs index 73a04f00d93..a29efdd4ef7 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs @@ -2,18 +2,20 @@ use std::fmt; +use intern::Symbol; use rustc_hash::{FxHashMap, FxHashSet}; use span::{Edition, SpanAnchor, SpanData, SpanMap}; -use stdx::{never, non_empty_vec::NonEmptyVec}; +use stdx::{format_to, never, non_empty_vec::NonEmptyVec}; use syntax::{ ast::{self, make::tokens::doc_comment}, - AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, - SyntaxKind::*, + format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, + SyntaxKind::{self, *}, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; use tt::{ buffer::{Cursor, TokenBuffer}, iter::TtIter, + token_to_literal, }; use crate::to_parser_input::to_parser_input; @@ -50,7 +52,10 @@ pub(crate) mod dummy_test_span_utils { pub const DUMMY: Span = Span { range: TextRange::empty(TextSize::new(0)), anchor: span::SpanAnchor { - file_id: span::FileId::from_raw(0xe4e4e), + file_id: span::EditionedFileId::new( + span::FileId::from_raw(0xe4e4e), + span::Edition::CURRENT, + ), ast_id: span::ROOT_ERASED_FILE_AST_ID, }, ctx: SyntaxContextId::ROOT, @@ -63,7 +68,10 @@ pub(crate) mod dummy_test_span_utils { Span { range, anchor: span::SpanAnchor { - file_id: span::FileId::from_raw(0xe4e4e), + file_id: span::EditionedFileId::new( + span::FileId::from_raw(0xe4e4e), + span::Edition::CURRENT, + ), ast_id: span::ROOT_ERASED_FILE_AST_ID, }, ctx: SyntaxContextId::ROOT, @@ -145,7 +153,7 @@ where } => TokenBuffer::from_tokens(token_trees), _ => TokenBuffer::from_subtree(tt), }; - let parser_input = to_parser_input(&buffer); + let parser_input = to_parser_input(edition, &buffer); let parser_output = entry_point.parse(&parser_input, edition); let mut tree_sink = TtTreeSink::new(buffer.begin()); for event in parser_output.iter() { @@ -167,6 +175,7 @@ where /// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided /// anchor with the given context. 
pub fn parse_to_token_tree<Ctx>( + edition: Edition, anchor: SpanAnchor, ctx: Ctx, text: &str, @@ -175,7 +184,7 @@ where SpanData<Ctx>: Copy + fmt::Debug, Ctx: Copy, { - let lexed = parser::LexedStr::new(text); + let lexed = parser::LexedStr::new(edition, text); if lexed.errors().next().is_some() { return None; } @@ -185,11 +194,15 @@ where } /// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree. -pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>> +pub fn parse_to_token_tree_static_span<S>( + edition: Edition, + span: S, + text: &str, +) -> Option<tt::Subtree<S>> where S: Copy + fmt::Debug, { - let lexed = parser::LexedStr::new(text); + let lexed = parser::LexedStr::new(edition, text); if lexed.errors().next().is_some() { return None; } @@ -199,15 +212,12 @@ where } /// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep<S>( - tt: &tt::Subtree<S>, +pub fn parse_exprs_with_sep( + tt: &tt::Subtree<span::Span>, sep: char, - span: S, + span: span::Span, edition: Edition, -) -> Vec<tt::Subtree<S>> -where - S: Copy + fmt::Debug, -{ +) -> Vec<tt::Subtree<span::Span>> { if tt.token_trees.is_empty() { return Vec::new(); } @@ -216,7 +226,12 @@ where let mut res = Vec::new(); while iter.peek_n(0).is_some() { - let expanded = crate::expect_fragment(&mut iter, parser::PrefixEntryPoint::Expr, edition); + let expanded = crate::expect_fragment( + &mut iter, + parser::PrefixEntryPoint::Expr, + edition, + tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close }, + ); res.push(match expanded.value { None => break, @@ -317,18 +332,29 @@ where .into() } kind => { - macro_rules! make_leaf { - ($i:ident) => { - tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) } - .into() + macro_rules! 
make_ident { + () => { + tt::Ident { + span: conv.span_for(abs_range), + sym: Symbol::intern(&token.to_text(conv)), + is_raw: tt::IdentIsRaw::No, + } + .into() }; } let leaf: tt::Leaf<_> = match kind { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - UNDERSCORE => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), + T![true] | T![false] => make_ident!(), + IDENT => { + let text = token.to_text(conv); + tt::Ident::new(&text, conv.span_for(abs_range)).into() + } + UNDERSCORE => make_ident!(), + k if k.is_keyword() => make_ident!(), + k if k.is_literal() => { + let text = token.to_text(conv); + let span = conv.span_for(abs_range); + token_to_literal(&text, span).into() + } LIFETIME_IDENT => { let apostrophe = tt::Leaf::from(tt::Punct { char: '\'', @@ -339,11 +365,12 @@ where token_trees.push(apostrophe.into()); let ident = tt::Leaf::from(tt::Ident { - text: SmolStr::new(&token.to_text(conv)[1..]), + sym: Symbol::intern(&token.to_text(conv)[1..]), span: conv.span_for(TextRange::new( abs_range.start() + TextSize::of('\''), abs_range.end(), )), + is_raw: tt::IdentIsRaw::No, }); token_trees.push(ident.into()); continue; @@ -421,16 +448,10 @@ fn is_single_token_op(kind: SyntaxKind) -> bool { /// That is, strips leading `///` (or `/**`, etc) /// and strips the ending `*/` /// And then quote the string, which is needed to convert to `tt::Literal` -fn doc_comment_text(comment: &ast::Comment, mode: DocCommentDesugarMode) -> SmolStr { - let prefix_len = comment.prefix().len(); - let mut text = &comment.text()[prefix_len..]; - - // Remove ending "*/" - if comment.kind().shape == ast::CommentShape::Block { - text = &text[0..text.len() - 2]; - } - - let text = match mode { +/// +/// Note that proc-macros desugar with string literals where as macro_rules macros desugar with raw string literals. 
+pub fn desugar_doc_comment_text(text: &str, mode: DocCommentDesugarMode) -> (Symbol, tt::LitKind) { + match mode { DocCommentDesugarMode::Mbe => { let mut num_of_hashes = 0; let mut count = 0; @@ -444,14 +465,13 @@ fn doc_comment_text(comment: &ast::Comment, mode: DocCommentDesugarMode) -> Smol } // Quote raw string with delimiters - // Note that `tt::Literal` expect an escaped string - format!(r#"r{delim}"{text}"{delim}"#, delim = "#".repeat(num_of_hashes)) + (Symbol::intern(text), tt::LitKind::StrRaw(num_of_hashes)) } // Quote string with delimiters - // Note that `tt::Literal` expect an escaped string - DocCommentDesugarMode::ProcMacro => format!(r#""{}""#, text.escape_debug()), - }; - text.into() + DocCommentDesugarMode::ProcMacro => { + (Symbol::intern(&format_smolstr!("{}", text.escape_debug())), tt::LitKind::Str) + } + } } fn convert_doc_comment<S: Copy>( @@ -463,8 +483,13 @@ fn convert_doc_comment<S: Copy>( let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; - let mk_ident = - |s: &str| tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })); + let mk_ident = |s: &str| { + tt::TokenTree::from(tt::Leaf::from(tt::Ident { + sym: Symbol::intern(s), + span, + is_raw: tt::IdentIsRaw::No, + })) + }; let mk_punct = |c: char| { tt::TokenTree::from(tt::Leaf::from(tt::Punct { @@ -475,7 +500,15 @@ fn convert_doc_comment<S: Copy>( }; let mk_doc_literal = |comment: &ast::Comment| { - let lit = tt::Literal { text: doc_comment_text(comment, mode), span }; + let prefix_len = comment.prefix().len(); + let mut text = &comment.text()[prefix_len..]; + + // Remove ending "*/" + if comment.kind().shape == ast::CommentShape::Block { + text = &text[0..text.len() - 2]; + } + let (text, kind) = desugar_doc_comment_text(text, mode); + let lit = tt::Literal { symbol: text, span, kind, suffix: None }; tt::TokenTree::from(tt::Leaf::from(lit)) }; @@ -902,16 +935,22 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { impl<Ctx> TtTreeSink<'_, Ctx> where - SpanData<Ctx>: Copy, + SpanData<Ctx>: Copy + fmt::Debug, { /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. 
fn float_split(&mut self, has_pseudo_dot: bool) { let (text, span) = match self.cursor.token_tree() { - Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => { - (lit.text.as_str(), lit.span) - } - _ => unreachable!(), + Some(tt::buffer::TokenTreeRef::Leaf( + tt::Leaf::Literal(tt::Literal { + symbol: text, + span, + kind: tt::LitKind::Float, + suffix: _, + }), + _, + )) => (text.as_str(), *span), + tt => unreachable!("{tt:?}"), }; // FIXME: Span splitting match text.split_once('.') { @@ -954,7 +993,7 @@ where } let mut last = self.cursor; - for _ in 0..n_tokens { + 'tokens: for _ in 0..n_tokens { let tmp: u8; if self.cursor.eof() { break; @@ -962,23 +1001,36 @@ where last = self.cursor; let (text, span) = loop { break match self.cursor.token_tree() { - Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { - // Mark the range if needed - let (text, span) = match leaf { - tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span), - tt::Leaf::Punct(punct) => { - assert!(punct.char.is_ascii()); - tmp = punct.char as u8; - ( - std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), - punct.span, - ) + Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => match leaf { + tt::Leaf::Ident(ident) => { + if ident.is_raw.yes() { + self.buf.push_str("r#"); + self.text_pos += TextSize::of("r#"); } - tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span), - }; - self.cursor = self.cursor.bump(); - (text, span) - } + let r = (ident.sym.as_str(), ident.span); + self.cursor = self.cursor.bump(); + r + } + tt::Leaf::Punct(punct) => { + assert!(punct.char.is_ascii()); + tmp = punct.char as u8; + let r = ( + std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), + punct.span, + ); + self.cursor = self.cursor.bump(); + r + } + tt::Leaf::Literal(lit) => { + let buf_l = self.buf.len(); + format_to!(self.buf, "{lit}"); + debug_assert_ne!(self.buf.len() - buf_l, 0); + self.text_pos += TextSize::new((self.buf.len() - buf_l) as u32); + self.token_map.push(self.text_pos, lit.span); + self.cursor = self.cursor.bump(); + continue 'tokens; + } + }, Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { self.cursor = self.cursor.subtree().unwrap(); match delim_to_str(subtree.delimiter.kind, false) { diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs index 3f70149aa5e..c35b28527a0 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs @@ -3,11 +3,15 @@ use std::fmt; +use span::Edition; use syntax::{SyntaxKind, SyntaxKind::*, T}; use tt::buffer::TokenBuffer; -pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>) -> parser::Input { +pub(crate) fn to_parser_input<S: Copy + fmt::Debug>( + edition: Edition, + buffer: &TokenBuffer<'_, S>, +) -> parser::Input { let mut res = parser::Input::default(); let mut current = buffer.begin(); @@ -35,35 +39,39 @@ pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>) Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { match leaf { tt::Leaf::Literal(lit) => { - let is_negated = lit.text.starts_with('-'); - let inner_text = &lit.text[if is_negated { 1 } else { 0 }..]; - - let kind = parser::LexedStr::single_token(inner_text) - .map(|(kind, _error)| kind) - .filter(|kind| { - kind.is_literal() - && (!is_negated || matches!(kind, FLOAT_NUMBER | INT_NUMBER)) - }) - .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &lit)); - + let kind = match lit.kind { + 
tt::LitKind::Byte => SyntaxKind::BYTE, + tt::LitKind::Char => SyntaxKind::CHAR, + tt::LitKind::Integer => SyntaxKind::INT_NUMBER, + tt::LitKind::Float => SyntaxKind::FLOAT_NUMBER, + tt::LitKind::Str | tt::LitKind::StrRaw(_) => SyntaxKind::STRING, + tt::LitKind::ByteStr | tt::LitKind::ByteStrRaw(_) => { + SyntaxKind::BYTE_STRING + } + tt::LitKind::CStr | tt::LitKind::CStrRaw(_) => SyntaxKind::C_STRING, + tt::LitKind::Err(_) => SyntaxKind::ERROR, + }; res.push(kind); - if kind == FLOAT_NUMBER && !inner_text.ends_with('.') { + if kind == FLOAT_NUMBER && !lit.symbol.as_str().ends_with('.') { // Tag the token as joint if it is float with a fractional part // we use this jointness to inform the parser about what token split // event to emit when we encounter a float literal in a field access res.was_joint(); } } - tt::Leaf::Ident(ident) => match ident.text.as_ref() { + tt::Leaf::Ident(ident) => match ident.sym.as_str() { "_" => res.push(T![_]), i if i.starts_with('\'') => res.push(LIFETIME_IDENT), - _ => match SyntaxKind::from_keyword(&ident.text) { + _ if ident.is_raw.yes() => res.push(IDENT), + "gen" if !edition.at_least_2024() => res.push(IDENT), + "dyn" if !edition.at_least_2018() => res.push_ident(DYN_KW), + "async" | "await" | "try" if !edition.at_least_2018() => res.push(IDENT), + text => match SyntaxKind::from_keyword(text) { Some(kind) => res.push(kind), None => { - let contextual_keyword = - SyntaxKind::from_contextual_keyword(&ident.text) - .unwrap_or(SyntaxKind::IDENT); + let contextual_keyword = SyntaxKind::from_contextual_keyword(text) + .unwrap_or(SyntaxKind::IDENT); res.push_ident(contextual_keyword); } }, diff --git a/src/tools/rust-analyzer/crates/parser/src/edition.rs b/src/tools/rust-analyzer/crates/parser/src/edition.rs index 26178544f9b..be0a2c794e5 100644 --- a/src/tools/rust-analyzer/crates/parser/src/edition.rs +++ b/src/tools/rust-analyzer/crates/parser/src/edition.rs @@ -4,6 +4,7 @@ use std::fmt; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(u8)] pub enum Edition { Edition2015, Edition2018, @@ -12,8 +13,23 @@ pub enum Edition { } impl Edition { - pub const CURRENT: Edition = Edition::Edition2021; pub const DEFAULT: Edition = Edition::Edition2015; + pub const LATEST: Edition = Edition::Edition2024; + pub const CURRENT: Edition = Edition::Edition2021; + /// The current latest stable edition, note this is usually not the right choice in code. 
+ pub const CURRENT_FIXME: Edition = Edition::Edition2021; + + pub fn at_least_2024(self) -> bool { + self >= Edition::Edition2024 + } + + pub fn at_least_2021(self) -> bool { + self >= Edition::Edition2021 + } + + pub fn at_least_2018(self) -> bool { + self >= Edition::Edition2018 + } } #[derive(Debug)] diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs index 2930190cb33..7ae1e5f82e5 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs @@ -165,42 +165,6 @@ pub(crate) mod entry { } m.complete(p, ERROR); } - - pub(crate) fn eager_macro_input(p: &mut Parser<'_>) { - let m = p.start(); - - let closing_paren_kind = match p.current() { - T!['{'] => T!['}'], - T!['('] => T![')'], - T!['['] => T![']'], - _ => { - p.error("expected `{`, `[`, `(`"); - while !p.at(EOF) { - p.bump_any(); - } - m.complete(p, ERROR); - return; - } - }; - p.bump_any(); - while !p.at(EOF) && !p.at(closing_paren_kind) { - if expressions::expr(p).is_none() { - break; - } - if !p.at(EOF) && !p.at(closing_paren_kind) { - p.expect(T![,]); - } - } - p.expect(closing_paren_kind); - if p.at(EOF) { - m.complete(p, MACRO_EAGER_INPUT); - return; - } - while !p.at(EOF) { - p.bump_any(); - } - m.complete(p, ERROR); - } } } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs index 54ed5f0ba23..a678c1f3a70 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs @@ -51,6 +51,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T![const], T![continue], T![do], + T![gen], T![for], T![if], T![let], @@ -100,6 +101,8 @@ pub(super) fn atom_expr( } T![loop] => loop_expr(p, None), T![while] => while_expr(p, None), + // test try_macro_fallback 2015 + // fn foo() { try!(Ok(())); } T![try] => try_block_expr(p, None), T![match] => match_expr(p), T![return] => return_expr(p), @@ -138,15 +141,37 @@ pub(super) fn atom_expr( // fn f() { const { } } // fn f() { async { } } // fn f() { async move { } } - T![const] | T![unsafe] | T![async] if la == T!['{'] => { + T![const] | T![unsafe] | T![async] | T![gen] if la == T!['{'] => { + let m = p.start(); + p.bump_any(); + stmt_list(p); + m.complete(p, BLOCK_EXPR) + } + // test gen_blocks 2024 + // pub fn main() { + // gen { yield ""; }; + // async gen { yield ""; }; + // gen move { yield ""; }; + // async gen move { yield ""; }; + // } + T![async] if la == T![gen] && p.nth(2) == T!['{'] => { + let m = p.start(); + p.bump(T![async]); + p.eat(T![gen]); + stmt_list(p); + m.complete(p, BLOCK_EXPR) + } + T![async] | T![gen] if la == T![move] && p.nth(2) == T!['{'] => { let m = p.start(); p.bump_any(); + p.bump(T![move]); stmt_list(p); m.complete(p, BLOCK_EXPR) } - T![async] if la == T![move] && p.nth(2) == T!['{'] => { + T![async] if la == T![gen] && p.nth(2) == T![move] && p.nth(3) == T!['{'] => { let m = p.start(); p.bump(T![async]); + p.bump(T![gen]); p.bump(T![move]); stmt_list(p); m.complete(p, BLOCK_EXPR) @@ -355,6 +380,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker { p.eat(T![const]); p.eat(T![static]); p.eat(T![async]); + p.eat(T![gen]); p.eat(T![move]); if !p.at(T![|]) { @@ -743,24 +769,6 @@ fn break_expr(p: &mut Parser<'_>, r: Restrictions) -> CompletedMarker { fn try_block_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker { 
assert!(p.at(T![try])); let m = m.unwrap_or_else(|| p.start()); - // Special-case `try!` as macro. - // This is a hack until we do proper edition support - if p.nth_at(1, T![!]) { - // test try_macro_fallback - // fn foo() { try!(Ok(())); } - let macro_call = p.start(); - let path = p.start(); - let path_segment = p.start(); - let name_ref = p.start(); - p.bump_remap(IDENT); - name_ref.complete(p, NAME_REF); - path_segment.complete(p, PATH_SEGMENT); - path.complete(p, PATH); - let _block_like = items::macro_call_after_excl(p); - macro_call.complete(p, MACRO_CALL); - return m.complete(p, MACRO_EXPR); - } - p.bump(T![try]); if p.at(T!['{']) { stmt_list(p); diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs index 6c05abc0238..cf80a535ac5 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs @@ -144,6 +144,12 @@ fn type_bound(p: &mut Parser<'_>) -> bool { match p.current() { LIFETIME_IDENT => lifetime(p), T![for] => types::for_type(p, false), + // test precise_capturing + // fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T> {} + T![use] => { + p.bump_any(); + generic_param_list(p) + } T![?] if p.nth_at(1, T![for]) => { // test question_for_type_trait_bound // fn f<T>() where T: ?for<> Sized {} diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs index 99bbf47654b..4e2a50d7a1f 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs @@ -112,11 +112,22 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> { // test_err async_without_semicolon // fn foo() { let _ = async {} } - if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) { + if p.at(T![async]) + && (!matches!(p.nth(1), T!['{'] | T![gen] | T![move] | T![|]) + || matches!((p.nth(1), p.nth(2)), (T![gen], T![fn]))) + { p.eat(T![async]); has_mods = true; } + // test_err gen_fn + // gen fn gen_fn() {} + // async gen fn async_gen_fn() {} + if p.at(T![gen]) && p.nth(1) == T![fn] { + p.eat(T![gen]); + has_mods = true; + } + // test_err unsafe_block_in_mod // fn foo(){} unsafe { } fn bar(){} if p.at(T![unsafe]) && p.nth(1) != T!['{'] { @@ -173,13 +184,6 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> { } } - // test existential_type - // existential type Foo: Fn() -> usize; - if p.at_contextual_kw(T![existential]) && p.nth(1) == T![type] { - p.bump_remap(T![existential]); - has_mods = true; - } - // items match p.current() { T![fn] => fn_(p, m), @@ -201,7 +205,7 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> { _ if has_visibility || has_mods => { if has_mods { - p.error("expected existential, fn, trait or impl"); + p.error("expected fn, trait or impl"); } else { p.error("expected an item"); } @@ -226,13 +230,8 @@ fn opt_item_without_modifiers(p: &mut Parser<'_>, m: Marker) -> Result<(), Marke IDENT if p.at_contextual_kw(T![union]) && p.nth(1) == IDENT => adt::union(p, m), T![macro] => macro_def(p, m), - // check if current token is "macro_rules" followed by "!" followed by an identifier or "try" - // try is keyword since the 2018 edition and the parser is not edition aware (yet!) 
- IDENT - if p.at_contextual_kw(T![macro_rules]) - && p.nth_at(1, BANG) - && (p.nth_at(2, IDENT) || p.nth_at(2, T![try])) => - { + // check if current token is "macro_rules" followed by "!" followed by an identifier + IDENT if p.at_contextual_kw(T![macro_rules]) && p.nth_at(1, BANG) && p.nth_at(2, IDENT) => { macro_rules(p, m) } @@ -330,23 +329,14 @@ pub(crate) fn extern_item_list(p: &mut Parser<'_>) { m.complete(p, EXTERN_ITEM_LIST); } +// test try_macro_rules 2015 +// macro_rules! try { () => {} } fn macro_rules(p: &mut Parser<'_>, m: Marker) { assert!(p.at_contextual_kw(T![macro_rules])); p.bump_remap(T![macro_rules]); p.expect(T![!]); - // Special-case `macro_rules! try`. - // This is a hack until we do proper edition support - - // test try_macro_rules - // macro_rules! try { () => {} } - if p.at(T![try]) { - let m = p.start(); - p.bump_remap(IDENT); - m.complete(p, NAME); - } else { - name(p); - } + name(p); match p.current() { // test macro_rules_non_brace @@ -384,7 +374,7 @@ fn macro_def(p: &mut Parser<'_>, m: Marker) { m.complete(p, MACRO_DEF); } -// test fn +// test fn_ // fn foo() {} fn fn_(p: &mut Parser<'_>, m: Marker) { p.bump(T![fn]); diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs index 18ec570cd56..f4e57d3d6f3 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs @@ -49,6 +49,7 @@ fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) { T![dyn] => dyn_trait_type(p), // Some path types are not allowed to have bounds (no plus) T![<] => path_type_bounds(p, allow_bounds), + T![ident] if !p.edition().at_least_2018() && is_dyn_weak(p) => dyn_trait_type_weak(p), _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds), LIFETIME_IDENT if p.nth_at(1, T![+]) => bare_dyn_trait_type(p), _ => { @@ -57,6 +58,25 @@ fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) { } } +fn is_dyn_weak(p: &Parser<'_>) -> bool { + const WEAK_DYN_PATH_FIRST: TokenSet = TokenSet::new(&[ + IDENT, + T![self], + T![super], + T![crate], + T![Self], + T![lifetime_ident], + T![?], + T![for], + T!['('], + ]); + + p.at_contextual_kw(T![dyn]) && { + let la = p.nth(1); + WEAK_DYN_PATH_FIRST.contains(la) && (la != T![:] || la != T![<]) + } +} + pub(super) fn ascription(p: &mut Parser<'_>) { assert!(p.at(T![:])); p.bump(T![:]); @@ -169,7 +189,7 @@ fn array_or_slice_type(p: &mut Parser<'_>) { m.complete(p, kind); } -// test reference_type; +// test reference_type // type A = &(); // type B = &'static (); // type C = &mut (); @@ -279,6 +299,23 @@ fn dyn_trait_type(p: &mut Parser<'_>) { m.complete(p, DYN_TRAIT_TYPE); } +// test dyn_trait_type_weak 2015 +// type DynPlain = dyn Path; +// type DynRef = &dyn Path; +// type DynLt = dyn 'a + Path; +// type DynQuestion = dyn ?Path; +// type DynFor = dyn for<'a> Path; +// type DynParen = dyn(Path); +// type Path = dyn::Path; +// type Generic = dyn<Path>; +fn dyn_trait_type_weak(p: &mut Parser<'_>) { + assert!(p.at_contextual_kw(T![dyn])); + let m = p.start(); + p.bump_remap(T![dyn]); + generic_params::bounds_without_colon(p); + m.complete(p, DYN_TRAIT_TYPE); +} + // test bare_dyn_types_with_leading_lifetime // type A = 'static + Trait; // type B = S<'static + Trait>; diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index 52b24b73725..13fc61074d0 100644 --- 
a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -13,6 +13,7 @@ use std::ops; use rustc_lexer::unescape::{EscapeError, Mode}; use crate::{ + Edition, SyntaxKind::{self, *}, T, }; @@ -30,9 +31,9 @@ struct LexError { } impl<'a> LexedStr<'a> { - pub fn new(text: &'a str) -> LexedStr<'a> { + pub fn new(edition: Edition, text: &'a str) -> LexedStr<'a> { let _p = tracing::info_span!("LexedStr::new").entered(); - let mut conv = Converter::new(text); + let mut conv = Converter::new(edition, text); if let Some(shebang_len) = rustc_lexer::strip_shebang(text) { conv.res.push(SHEBANG, conv.offset); conv.offset = shebang_len; @@ -47,7 +48,7 @@ impl<'a> LexedStr<'a> { conv.finalize_with_eof() } - pub fn single_token(text: &'a str) -> Option<(SyntaxKind, Option<String>)> { + pub fn single_token(edition: Edition, text: &'a str) -> Option<(SyntaxKind, Option<String>)> { if text.is_empty() { return None; } @@ -57,7 +58,7 @@ impl<'a> LexedStr<'a> { return None; } - let mut conv = Converter::new(text); + let mut conv = Converter::new(edition, text); conv.extend_token(&token.kind, text); match &*conv.res.kind { [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))), @@ -129,13 +130,15 @@ impl<'a> LexedStr<'a> { struct Converter<'a> { res: LexedStr<'a>, offset: usize, + edition: Edition, } impl<'a> Converter<'a> { - fn new(text: &'a str) -> Self { + fn new(edition: Edition, text: &'a str) -> Self { Self { res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() }, offset: 0, + edition, } } @@ -175,6 +178,17 @@ impl<'a> Converter<'a> { rustc_lexer::TokenKind::Whitespace => WHITESPACE, rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE, + rustc_lexer::TokenKind::Ident + if ["async", "await", "dyn", "try"].contains(&token_text) + && !self.edition.at_least_2018() => + { + IDENT + } + rustc_lexer::TokenKind::Ident + if token_text == "gen" && !self.edition.at_least_2024() => + { + IDENT + } rustc_lexer::TokenKind::Ident => { SyntaxKind::from_keyword(token_text).unwrap_or(IDENT) } diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs index 738ed239a7c..679492066a3 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lib.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs @@ -82,8 +82,6 @@ pub enum TopEntryPoint { /// Edge case -- macros generally don't expand to attributes, with the /// exception of `cfg_attr` which does! 
MetaItem, - /// Edge case 2 -- eager macros expand their input to a delimited list of comma separated expressions - MacroEagerInput, } impl TopEntryPoint { @@ -97,7 +95,6 @@ impl TopEntryPoint { TopEntryPoint::Type => grammar::entry::top::type_, TopEntryPoint::Expr => grammar::entry::top::expr, TopEntryPoint::MetaItem => grammar::entry::top::meta_item, - TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input, }; let mut p = parser::Parser::new(input, edition); entry_point(&mut p); diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs index 5b901f911dc..7d3eb5de25f 100644 --- a/src/tools/rust-analyzer/crates/parser/src/parser.rs +++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs @@ -27,14 +27,14 @@ pub(crate) struct Parser<'t> { pos: usize, events: Vec<Event>, steps: Cell<u32>, - _edition: Edition, + edition: Edition, } static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000); impl<'t> Parser<'t> { pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> { - Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), _edition: edition } + Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), edition } } pub(crate) fn finish(self) -> Vec<Event> { @@ -277,6 +277,10 @@ impl<'t> Parser<'t> { fn push_event(&mut self, event: Event) { self.events.push(event); } + + pub(crate) fn edition(&self) -> Edition { + self.edition + } } /// See [`Parser::start`]. diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs index 7f49cc087ae..1cf81e79b03 100644 --- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs +++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs @@ -12,7 +12,7 @@ use std::mem; use crate::{ - LexedStr, Step, + Edition, LexedStr, Step, SyntaxKind::{self, *}, }; @@ -25,7 +25,7 @@ pub enum StrStep<'a> { } impl LexedStr<'_> { - pub fn to_input(&self) -> crate::Input { + pub fn to_input(&self, edition: Edition) -> crate::Input { let _p = tracing::info_span!("LexedStr::to_input").entered(); let mut res = crate::Input::default(); let mut was_joint = false; @@ -35,8 +35,11 @@ impl LexedStr<'_> { was_joint = false } else if kind == SyntaxKind::IDENT { let token_text = self.text(i); - let contextual_kw = - SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT); + let contextual_kw = if !edition.at_least_2018() && token_text == "dyn" { + SyntaxKind::DYN_KW + } else { + SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT) + }; res.push_ident(contextual_kw); } else { if was_joint { diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index ad3398453be..7bddf887401 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -9,6 +9,7 @@ pub enum SyntaxKind { TOMBSTONE, #[doc(hidden)] EOF, + DOLLAR, SEMICOLON, COMMA, L_PAREN, @@ -23,7 +24,6 @@ pub enum SyntaxKind { POUND, TILDE, QUESTION, - DOLLAR, AMP, PIPE, PLUS, @@ -61,6 +61,7 @@ pub enum SyntaxKind { SHR, SHLEQ, SHREQ, + SELF_TYPE_KW, ABSTRACT_KW, AS_KW, ASYNC_KW, @@ -80,6 +81,7 @@ pub enum SyntaxKind { FINAL_KW, FN_KW, FOR_KW, + GEN_KW, IF_KW, IMPL_KW, IN_KW, @@ -96,7 +98,6 @@ pub enum SyntaxKind { REF_KW, RETURN_KW, SELF_KW, - SELF_TYPE_KW, STATIC_KW, STRUCT_KW, SUPER_KW, @@ -112,173 +113,184 @@ pub enum SyntaxKind { WHERE_KW, 
WHILE_KW, YIELD_KW, + ASM_KW, AUTO_KW, BUILTIN_KW, DEFAULT_KW, - EXISTENTIAL_KW, - UNION_KW, - RAW_KW, + FORMAT_ARGS_KW, MACRO_RULES_KW, - YEET_KW, OFFSET_OF_KW, - ASM_KW, - FORMAT_ARGS_KW, - INT_NUMBER, - FLOAT_NUMBER, - CHAR, + RAW_KW, + UNION_KW, + YEET_KW, BYTE, - STRING, BYTE_STRING, + CHAR, C_STRING, + FLOAT_NUMBER, + INT_NUMBER, + RAW_BYTE_STRING, + RAW_C_STRING, + RAW_STRING, + STRING, + COMMENT, ERROR, IDENT, - WHITESPACE, LIFETIME_IDENT, - COMMENT, + NEWLINE, SHEBANG, - SOURCE_FILE, - STRUCT, - UNION, + WHITESPACE, + ABI, + ADT, + ARG_LIST, + ARRAY_EXPR, + ARRAY_TYPE, + ASM_EXPR, + ASSOC_ITEM, + ASSOC_ITEM_LIST, + ASSOC_TYPE_ARG, + ATTR, + AWAIT_EXPR, + BECOME_EXPR, + BIN_EXPR, + BLOCK_EXPR, + BOX_PAT, + BREAK_EXPR, + CALL_EXPR, + CAST_EXPR, + CLOSURE_BINDER, + CLOSURE_EXPR, + CONST, + CONST_ARG, + CONST_BLOCK_PAT, + CONST_PARAM, + CONTINUE_EXPR, + DYN_TRAIT_TYPE, ENUM, - FN, - RET_TYPE, + EXPR, + EXPR_STMT, + EXTERN_BLOCK, EXTERN_CRATE, - MODULE, - USE, - STATIC, - CONST, - TRAIT, - TRAIT_ALIAS, - IMPL, - TYPE_ALIAS, - MACRO_CALL, - MACRO_RULES, - MACRO_ARM, - TOKEN_TREE, - MACRO_DEF, - PAREN_TYPE, - TUPLE_TYPE, - MACRO_TYPE, - NEVER_TYPE, - PATH_TYPE, - PTR_TYPE, - ARRAY_TYPE, - SLICE_TYPE, - REF_TYPE, - INFER_TYPE, + EXTERN_ITEM, + EXTERN_ITEM_LIST, + FIELD_EXPR, + FIELD_LIST, + FN, FN_PTR_TYPE, + FORMAT_ARGS_ARG, + FORMAT_ARGS_EXPR, + FOR_EXPR, FOR_TYPE, - IMPL_TRAIT_TYPE, - DYN_TRAIT_TYPE, - OR_PAT, - PAREN_PAT, - REF_PAT, - BOX_PAT, + GENERIC_ARG, + GENERIC_ARG_LIST, + GENERIC_PARAM, + GENERIC_PARAM_LIST, IDENT_PAT, - WILDCARD_PAT, - REST_PAT, - PATH_PAT, - RECORD_PAT, - RECORD_PAT_FIELD_LIST, - RECORD_PAT_FIELD, - TUPLE_STRUCT_PAT, - TUPLE_PAT, - SLICE_PAT, - RANGE_PAT, - LITERAL_PAT, - MACRO_PAT, - CONST_BLOCK_PAT, - TUPLE_EXPR, - ARRAY_EXPR, - PAREN_EXPR, - PATH_EXPR, - CLOSURE_EXPR, IF_EXPR, - WHILE_EXPR, - LOOP_EXPR, - FOR_EXPR, - CONTINUE_EXPR, - BREAK_EXPR, + IMPL, + IMPL_TRAIT_TYPE, + INDEX_EXPR, + INFER_TYPE, + ITEM, + ITEM_LIST, LABEL, - BLOCK_EXPR, - STMT_LIST, - RETURN_EXPR, - BECOME_EXPR, - YIELD_EXPR, - YEET_EXPR, + LET_ELSE, LET_EXPR, - UNDERSCORE_EXPR, + LET_STMT, + LIFETIME, + LIFETIME_ARG, + LIFETIME_PARAM, + LITERAL, + LITERAL_PAT, + LOOP_EXPR, + MACRO_CALL, + MACRO_DEF, MACRO_EXPR, - MATCH_EXPR, - MATCH_ARM_LIST, + MACRO_ITEMS, + MACRO_PAT, + MACRO_RULES, + MACRO_STMTS, + MACRO_TYPE, MATCH_ARM, + MATCH_ARM_LIST, + MATCH_EXPR, MATCH_GUARD, - RECORD_EXPR, - RECORD_EXPR_FIELD_LIST, - RECORD_EXPR_FIELD, - OFFSET_OF_EXPR, - ASM_EXPR, - FORMAT_ARGS_EXPR, - FORMAT_ARGS_ARG, - CALL_EXPR, - INDEX_EXPR, + META, METHOD_CALL_EXPR, - FIELD_EXPR, - AWAIT_EXPR, - TRY_EXPR, - CAST_EXPR, - REF_EXPR, + MODULE, + NAME, + NAME_REF, + NEVER_TYPE, + OFFSET_OF_EXPR, + OR_PAT, + PARAM, + PARAM_LIST, + PAREN_EXPR, + PAREN_PAT, + PAREN_TYPE, + PAT, + PATH, + PATH_EXPR, + PATH_PAT, + PATH_SEGMENT, + PATH_TYPE, PREFIX_EXPR, + PTR_TYPE, RANGE_EXPR, - BIN_EXPR, - EXTERN_BLOCK, - EXTERN_ITEM_LIST, - VARIANT, - RECORD_FIELD_LIST, + RANGE_PAT, + RECORD_EXPR, + RECORD_EXPR_FIELD, + RECORD_EXPR_FIELD_LIST, RECORD_FIELD, - TUPLE_FIELD_LIST, + RECORD_FIELD_LIST, + RECORD_PAT, + RECORD_PAT_FIELD, + RECORD_PAT_FIELD_LIST, + REF_EXPR, + REF_PAT, + REF_TYPE, + RENAME, + REST_PAT, + RETURN_EXPR, + RET_TYPE, + SELF_PARAM, + SLICE_PAT, + SLICE_TYPE, + SOURCE_FILE, + STATIC, + STMT, + STMT_LIST, + STRUCT, + TOKEN_TREE, + TRAIT, + TRAIT_ALIAS, + TRY_EXPR, + TUPLE_EXPR, TUPLE_FIELD, - VARIANT_LIST, - ITEM_LIST, - ASSOC_ITEM_LIST, - ATTR, - META, + TUPLE_FIELD_LIST, + TUPLE_PAT, + TUPLE_STRUCT_PAT, + 
TUPLE_TYPE, + TYPE, + TYPE_ALIAS, + TYPE_ARG, + TYPE_BOUND, + TYPE_BOUND_LIST, + TYPE_PARAM, + UNDERSCORE_EXPR, + UNION, + USE, USE_TREE, USE_TREE_LIST, - PATH, - PATH_SEGMENT, - LITERAL, - RENAME, + VARIANT, + VARIANT_LIST, VISIBILITY, WHERE_CLAUSE, WHERE_PRED, - ABI, - NAME, - NAME_REF, - LET_STMT, - LET_ELSE, - EXPR_STMT, - GENERIC_PARAM_LIST, - GENERIC_PARAM, - LIFETIME_PARAM, - TYPE_PARAM, - RETURN_TYPE_ARG, - CONST_PARAM, - GENERIC_ARG_LIST, - LIFETIME, - LIFETIME_ARG, - TYPE_ARG, - ASSOC_TYPE_ARG, - CONST_ARG, - PARAM_LIST, - PARAM, - SELF_PARAM, - ARG_LIST, - TYPE_BOUND, - TYPE_BOUND_LIST, - MACRO_ITEMS, - MACRO_STMTS, - MACRO_EAGER_INPUT, + WHILE_EXPR, + WILDCARD_PAT, + YEET_EXPR, + YIELD_EXPR, #[doc(hidden)] __LAST, } @@ -287,7 +299,8 @@ impl SyntaxKind { pub fn is_keyword(self) -> bool { matches!( self, - ABSTRACT_KW + SELF_TYPE_KW + | ABSTRACT_KW | AS_KW | ASYNC_KW | AWAIT_KW @@ -306,6 +319,7 @@ impl SyntaxKind { | FINAL_KW | FN_KW | FOR_KW + | GEN_KW | IF_KW | IMPL_KW | IN_KW @@ -322,7 +336,6 @@ impl SyntaxKind { | REF_KW | RETURN_KW | SELF_KW - | SELF_TYPE_KW | STATIC_KW | STRUCT_KW | SUPER_KW @@ -338,23 +351,23 @@ impl SyntaxKind { | WHERE_KW | WHILE_KW | YIELD_KW + | ASM_KW | AUTO_KW | BUILTIN_KW | DEFAULT_KW - | EXISTENTIAL_KW - | UNION_KW - | RAW_KW + | FORMAT_ARGS_KW | MACRO_RULES_KW - | YEET_KW | OFFSET_OF_KW - | ASM_KW - | FORMAT_ARGS_KW + | RAW_KW + | UNION_KW + | YEET_KW ) } pub fn is_punct(self) -> bool { matches!( self, - SEMICOLON + DOLLAR + | SEMICOLON | COMMA | L_PAREN | R_PAREN @@ -368,7 +381,6 @@ impl SyntaxKind { | POUND | TILDE | QUESTION - | DOLLAR | AMP | PIPE | PLUS @@ -409,10 +421,22 @@ impl SyntaxKind { ) } pub fn is_literal(self) -> bool { - matches!(self, INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING | C_STRING) + matches!( + self, + BYTE | BYTE_STRING + | CHAR + | C_STRING + | FLOAT_NUMBER + | INT_NUMBER + | RAW_BYTE_STRING + | RAW_C_STRING + | RAW_STRING + | STRING + ) } pub fn from_keyword(ident: &str) -> Option<SyntaxKind> { let kw = match ident { + "Self" => SELF_TYPE_KW, "abstract" => ABSTRACT_KW, "as" => AS_KW, "async" => ASYNC_KW, @@ -432,6 +456,7 @@ impl SyntaxKind { "final" => FINAL_KW, "fn" => FN_KW, "for" => FOR_KW, + "gen" => GEN_KW, "if" => IF_KW, "impl" => IMPL_KW, "in" => IN_KW, @@ -448,7 +473,6 @@ impl SyntaxKind { "ref" => REF_KW, "return" => RETURN_KW, "self" => SELF_KW, - "Self" => SELF_TYPE_KW, "static" => STATIC_KW, "struct" => STRUCT_KW, "super" => SUPER_KW, @@ -470,23 +494,23 @@ impl SyntaxKind { } pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> { let kw = match ident { + "asm" => ASM_KW, "auto" => AUTO_KW, "builtin" => BUILTIN_KW, "default" => DEFAULT_KW, - "existential" => EXISTENTIAL_KW, - "union" => UNION_KW, - "raw" => RAW_KW, + "format_args" => FORMAT_ARGS_KW, "macro_rules" => MACRO_RULES_KW, - "yeet" => YEET_KW, "offset_of" => OFFSET_OF_KW, - "asm" => ASM_KW, - "format_args" => FORMAT_ARGS_KW, + "raw" => RAW_KW, + "union" => UNION_KW, + "yeet" => YEET_KW, _ => return None, }; Some(kw) } pub fn from_char(c: char) -> Option<SyntaxKind> { let tok = match c { + '$' => DOLLAR, ';' => SEMICOLON, ',' => COMMA, '(' => L_PAREN, @@ -501,7 +525,6 @@ impl SyntaxKind { '#' => POUND, '~' => TILDE, '?' => QUESTION, - '$' => DOLLAR, '&' => AMP, '|' => PIPE, '+' => PLUS, @@ -521,4 +544,4 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! 
T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] 
=> { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] 
=> { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: 
UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs index a38689791c4..e7bccb6685c 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests.rs +++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs @@ -9,7 +9,11 @@ use std::{ use expect_test::expect_file; -use crate::{LexedStr, TopEntryPoint}; +use crate::{Edition, LexedStr, TopEntryPoint}; + +#[rustfmt::skip] +#[path = "../test_data/generated/runner.rs"] +mod runner; #[test] fn lex_ok() { @@ -30,7 +34,7 @@ fn lex_err() { } fn lex(text: &str) -> String { - let lexed = LexedStr::new(text); + let lexed = LexedStr::new(Edition::CURRENT, text); let mut res = String::new(); for i in 0..lexed.len() { @@ -48,17 +52,7 @@ fn lex(text: &str) -> String { fn parse_ok() { for case in TestCase::list("parser/ok") { let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); - let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); - expect_file![case.rast].assert_eq(&actual); - } -} - -#[test] -fn parse_inline_ok() { - for case in TestCase::list("parser/inline/ok") { - let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); - let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); + let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text, Edition::CURRENT); assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual); } @@ -68,26 +62,16 @@ fn parse_inline_ok() { fn parse_err() { for case in TestCase::list("parser/err") { let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); - let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); + let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text, Edition::CURRENT); assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual) } } -#[test] -fn parse_inline_err() { - for case in TestCase::list("parser/inline/err") { - let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); - let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); - expect_file![case.rast].assert_eq(&actual) - } -} - -fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { - let lexed = LexedStr::new(text); - let input = 
lexed.to_input(); - let output = entry.parse(&input, crate::Edition::CURRENT); +fn parse(entry: TopEntryPoint, text: &str, edition: Edition) -> (String, bool) { + let lexed = LexedStr::new(edition, text); + let input = lexed.to_input(edition); + let output = entry.parse(&input, edition); let mut buf = String::new(); let mut errors = Vec::new(); @@ -167,3 +151,37 @@ impl TestCase { res } } + +#[track_caller] +fn run_and_expect_no_errors(path: &str) { + run_and_expect_no_errors_with_edition(path, Edition::CURRENT) +} + +#[track_caller] +fn run_and_expect_errors(path: &str) { + run_and_expect_errors_with_edition(path, Edition::CURRENT) +} + +#[track_caller] +fn run_and_expect_no_errors_with_edition(path: &str, edition: Edition) { + let path = PathBuf::from(path); + let text = std::fs::read_to_string(&path).unwrap(); + let (actual, errors) = parse(TopEntryPoint::SourceFile, &text, edition); + assert!(!errors, "errors in an OK file {}:\n{actual}", path.display()); + let mut p = PathBuf::from(".."); + p.push(path); + p.set_extension("rast"); + expect_file![p].assert_eq(&actual) +} + +#[track_caller] +fn run_and_expect_errors_with_edition(path: &str, edition: Edition) { + let path = PathBuf::from(path); + let text = std::fs::read_to_string(&path).unwrap(); + let (actual, errors) = parse(TopEntryPoint::SourceFile, &text, edition); + assert!(errors, "no errors in an ERR file {}:\n{actual}", path.display()); + let mut p = PathBuf::from(".."); + p.push(path); + p.set_extension("rast"); + expect_file![p].assert_eq(&actual) +} diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs index f92b39edb76..e2268eed60a 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs +++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs @@ -1,4 +1,4 @@ -use crate::{LexedStr, PrefixEntryPoint, Step}; +use crate::{Edition, LexedStr, PrefixEntryPoint, Step}; #[test] fn vis() { @@ -82,11 +82,11 @@ fn meta_item() { #[track_caller] fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) { - let lexed = LexedStr::new(input); - let input = lexed.to_input(); + let lexed = LexedStr::new(Edition::CURRENT, input); + let input = lexed.to_input(Edition::CURRENT); let mut n_tokens = 0; - for step in entry.parse(&input, crate::Edition::CURRENT).iter() { + for step in entry.parse(&input, Edition::CURRENT).iter() { match step { Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize, Step::FloatSplit { .. 
} => n_tokens += 1, diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs index 49dd9e293b8..c56bf0b6448 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs +++ b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs @@ -307,6 +307,6 @@ fn expr() { #[track_caller] fn check(entry: TopEntryPoint, input: &str, expect: expect_test::Expect) { - let (parsed, _errors) = super::parse(entry, input); + let (parsed, _errors) = super::parse(entry, input, crate::Edition::CURRENT); expect.assert_eq(&parsed) } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs new file mode 100644 index 00000000000..1907f03b44d --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -0,0 +1,832 @@ +mod ok { + use crate::tests::*; + #[test] + fn anonymous_const() { + run_and_expect_no_errors("test_data/parser/inline/ok/anonymous_const.rs"); + } + #[test] + fn arb_self_types() { + run_and_expect_no_errors("test_data/parser/inline/ok/arb_self_types.rs"); + } + #[test] + fn arg_with_attr() { run_and_expect_no_errors("test_data/parser/inline/ok/arg_with_attr.rs"); } + #[test] + fn array_attrs() { run_and_expect_no_errors("test_data/parser/inline/ok/array_attrs.rs"); } + #[test] + fn array_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/array_expr.rs"); } + #[test] + fn array_type() { run_and_expect_no_errors("test_data/parser/inline/ok/array_type.rs"); } + #[test] + fn as_precedence() { run_and_expect_no_errors("test_data/parser/inline/ok/as_precedence.rs"); } + #[test] + fn assoc_const_eq() { + run_and_expect_no_errors("test_data/parser/inline/ok/assoc_const_eq.rs"); + } + #[test] + fn assoc_item_list() { + run_and_expect_no_errors("test_data/parser/inline/ok/assoc_item_list.rs"); + } + #[test] + fn assoc_item_list_inner_attrs() { + run_and_expect_no_errors("test_data/parser/inline/ok/assoc_item_list_inner_attrs.rs"); + } + #[test] + fn assoc_type_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/assoc_type_bound.rs"); + } + #[test] + fn assoc_type_eq() { run_and_expect_no_errors("test_data/parser/inline/ok/assoc_type_eq.rs"); } + #[test] + fn associated_return_type_bounds() { + run_and_expect_no_errors("test_data/parser/inline/ok/associated_return_type_bounds.rs"); + } + #[test] + fn associated_type_bounds() { + run_and_expect_no_errors("test_data/parser/inline/ok/associated_type_bounds.rs"); + } + #[test] + fn async_trait_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/async_trait_bound.rs"); + } + #[test] + fn attr_on_expr_stmt() { + run_and_expect_no_errors("test_data/parser/inline/ok/attr_on_expr_stmt.rs"); + } + #[test] + fn await_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/await_expr.rs"); } + #[test] + fn bare_dyn_types_with_leading_lifetime() { + run_and_expect_no_errors( + "test_data/parser/inline/ok/bare_dyn_types_with_leading_lifetime.rs", + ); + } + #[test] + fn bare_dyn_types_with_paren_as_generic_args() { + run_and_expect_no_errors( + "test_data/parser/inline/ok/bare_dyn_types_with_paren_as_generic_args.rs", + ); + } + #[test] + fn become_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/become_expr.rs"); } + #[test] + fn bind_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/bind_pat.rs"); } + #[test] + fn binop_resets_statementness() { + 
run_and_expect_no_errors("test_data/parser/inline/ok/binop_resets_statementness.rs"); + } + #[test] + fn block() { run_and_expect_no_errors("test_data/parser/inline/ok/block.rs"); } + #[test] + fn block_items() { run_and_expect_no_errors("test_data/parser/inline/ok/block_items.rs"); } + #[test] + fn box_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/box_pat.rs"); } + #[test] + fn break_ambiguity() { + run_and_expect_no_errors("test_data/parser/inline/ok/break_ambiguity.rs"); + } + #[test] + fn break_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/break_expr.rs"); } + #[test] + fn builtin_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/builtin_expr.rs"); } + #[test] + fn call_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/call_expr.rs"); } + #[test] + fn cast_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/cast_expr.rs"); } + #[test] + fn closure_body_underscore_assignment() { + run_and_expect_no_errors( + "test_data/parser/inline/ok/closure_body_underscore_assignment.rs", + ); + } + #[test] + fn closure_params() { + run_and_expect_no_errors("test_data/parser/inline/ok/closure_params.rs"); + } + #[test] + fn closure_range_method_call() { + run_and_expect_no_errors("test_data/parser/inline/ok/closure_range_method_call.rs"); + } + #[test] + fn const_arg() { run_and_expect_no_errors("test_data/parser/inline/ok/const_arg.rs"); } + #[test] + fn const_arg_block() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_arg_block.rs"); + } + #[test] + fn const_arg_bool_literal() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_arg_bool_literal.rs"); + } + #[test] + fn const_arg_literal() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_arg_literal.rs"); + } + #[test] + fn const_arg_negative_number() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_arg_negative_number.rs"); + } + #[test] + fn const_block_pat() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_block_pat.rs"); + } + #[test] + fn const_closure() { run_and_expect_no_errors("test_data/parser/inline/ok/const_closure.rs"); } + #[test] + fn const_item() { run_and_expect_no_errors("test_data/parser/inline/ok/const_item.rs"); } + #[test] + fn const_param() { run_and_expect_no_errors("test_data/parser/inline/ok/const_param.rs"); } + #[test] + fn const_param_default_expression() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_param_default_expression.rs"); + } + #[test] + fn const_param_default_literal() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_param_default_literal.rs"); + } + #[test] + fn const_param_default_path() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_param_default_path.rs"); + } + #[test] + fn const_trait_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_trait_bound.rs"); + } + #[test] + fn continue_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/continue_expr.rs"); } + #[test] + fn crate_path() { run_and_expect_no_errors("test_data/parser/inline/ok/crate_path.rs"); } + #[test] + fn crate_visibility() { + run_and_expect_no_errors("test_data/parser/inline/ok/crate_visibility.rs"); + } + #[test] + fn crate_visibility_in() { + run_and_expect_no_errors("test_data/parser/inline/ok/crate_visibility_in.rs"); + } + #[test] + fn default_async_fn() { + run_and_expect_no_errors("test_data/parser/inline/ok/default_async_fn.rs"); + } + #[test] + fn default_async_unsafe_fn() { + 
run_and_expect_no_errors("test_data/parser/inline/ok/default_async_unsafe_fn.rs"); + } + #[test] + fn default_item() { run_and_expect_no_errors("test_data/parser/inline/ok/default_item.rs"); } + #[test] + fn default_unsafe_item() { + run_and_expect_no_errors("test_data/parser/inline/ok/default_unsafe_item.rs"); + } + #[test] + fn destructuring_assignment_struct_rest_pattern() { + run_and_expect_no_errors( + "test_data/parser/inline/ok/destructuring_assignment_struct_rest_pattern.rs", + ); + } + #[test] + fn destructuring_assignment_wildcard_pat() { + run_and_expect_no_errors( + "test_data/parser/inline/ok/destructuring_assignment_wildcard_pat.rs", + ); + } + #[test] + fn dot_dot_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/dot_dot_pat.rs"); } + #[test] + fn dyn_trait_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/dyn_trait_type.rs"); + } + #[test] + fn dyn_trait_type_weak() { + run_and_expect_no_errors_with_edition( + "test_data/parser/inline/ok/dyn_trait_type_weak.rs", + crate::Edition::Edition2015, + ); + } + #[test] + fn effect_blocks() { run_and_expect_no_errors("test_data/parser/inline/ok/effect_blocks.rs"); } + #[test] + fn exclusive_range_pat() { + run_and_expect_no_errors("test_data/parser/inline/ok/exclusive_range_pat.rs"); + } + #[test] + fn expr_literals() { run_and_expect_no_errors("test_data/parser/inline/ok/expr_literals.rs"); } + #[test] + fn expression_after_block() { + run_and_expect_no_errors("test_data/parser/inline/ok/expression_after_block.rs"); + } + #[test] + fn extern_block() { run_and_expect_no_errors("test_data/parser/inline/ok/extern_block.rs"); } + #[test] + fn extern_crate() { run_and_expect_no_errors("test_data/parser/inline/ok/extern_crate.rs"); } + #[test] + fn extern_crate_rename() { + run_and_expect_no_errors("test_data/parser/inline/ok/extern_crate_rename.rs"); + } + #[test] + fn extern_crate_self() { + run_and_expect_no_errors("test_data/parser/inline/ok/extern_crate_self.rs"); + } + #[test] + fn field_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/field_expr.rs"); } + #[test] + fn fn_() { run_and_expect_no_errors("test_data/parser/inline/ok/fn_.rs"); } + #[test] + fn fn_decl() { run_and_expect_no_errors("test_data/parser/inline/ok/fn_decl.rs"); } + #[test] + fn fn_def_param() { run_and_expect_no_errors("test_data/parser/inline/ok/fn_def_param.rs"); } + #[test] + fn fn_pointer_param_ident_path() { + run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_param_ident_path.rs"); + } + #[test] + fn fn_pointer_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_type.rs"); + } + #[test] + fn fn_pointer_type_with_ret() { + run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_type_with_ret.rs"); + } + #[test] + fn fn_pointer_unnamed_arg() { + run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_unnamed_arg.rs"); + } + #[test] + fn for_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/for_expr.rs"); } + #[test] + fn for_range_from() { + run_and_expect_no_errors("test_data/parser/inline/ok/for_range_from.rs"); + } + #[test] + fn for_type() { run_and_expect_no_errors("test_data/parser/inline/ok/for_type.rs"); } + #[test] + fn full_range_expr() { + run_and_expect_no_errors("test_data/parser/inline/ok/full_range_expr.rs"); + } + #[test] + fn function_ret_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/function_ret_type.rs"); + } + #[test] + fn function_type_params() { + 
run_and_expect_no_errors("test_data/parser/inline/ok/function_type_params.rs"); + } + #[test] + fn function_where_clause() { + run_and_expect_no_errors("test_data/parser/inline/ok/function_where_clause.rs"); + } + #[test] + fn gen_blocks() { + run_and_expect_no_errors_with_edition( + "test_data/parser/inline/ok/gen_blocks.rs", + crate::Edition::Edition2024, + ); + } + #[test] + fn generic_arg() { run_and_expect_no_errors("test_data/parser/inline/ok/generic_arg.rs"); } + #[test] + fn generic_param_attribute() { + run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_attribute.rs"); + } + #[test] + fn generic_param_list() { + run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_list.rs"); + } + #[test] + fn half_open_range_pat() { + run_and_expect_no_errors("test_data/parser/inline/ok/half_open_range_pat.rs"); + } + #[test] + fn if_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/if_expr.rs"); } + #[test] + fn impl_item() { run_and_expect_no_errors("test_data/parser/inline/ok/impl_item.rs"); } + #[test] + fn impl_item_const() { + run_and_expect_no_errors("test_data/parser/inline/ok/impl_item_const.rs"); + } + #[test] + fn impl_item_neg() { run_and_expect_no_errors("test_data/parser/inline/ok/impl_item_neg.rs"); } + #[test] + fn impl_trait_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/impl_trait_type.rs"); + } + #[test] + fn impl_type_params() { + run_and_expect_no_errors("test_data/parser/inline/ok/impl_type_params.rs"); + } + #[test] + fn index_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/index_expr.rs"); } + #[test] + fn label() { run_and_expect_no_errors("test_data/parser/inline/ok/label.rs"); } + #[test] + fn labeled_block() { run_and_expect_no_errors("test_data/parser/inline/ok/labeled_block.rs"); } + #[test] + fn lambda_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/lambda_expr.rs"); } + #[test] + fn lambda_ret_block() { + run_and_expect_no_errors("test_data/parser/inline/ok/lambda_ret_block.rs"); + } + #[test] + fn let_else() { run_and_expect_no_errors("test_data/parser/inline/ok/let_else.rs"); } + #[test] + fn let_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/let_expr.rs"); } + #[test] + fn let_stmt() { run_and_expect_no_errors("test_data/parser/inline/ok/let_stmt.rs"); } + #[test] + fn let_stmt_ascription() { + run_and_expect_no_errors("test_data/parser/inline/ok/let_stmt_ascription.rs"); + } + #[test] + fn let_stmt_init() { run_and_expect_no_errors("test_data/parser/inline/ok/let_stmt_init.rs"); } + #[test] + fn lifetime_arg() { run_and_expect_no_errors("test_data/parser/inline/ok/lifetime_arg.rs"); } + #[test] + fn lifetime_param() { + run_and_expect_no_errors("test_data/parser/inline/ok/lifetime_param.rs"); + } + #[test] + fn literal_pattern() { + run_and_expect_no_errors("test_data/parser/inline/ok/literal_pattern.rs"); + } + #[test] + fn loop_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/loop_expr.rs"); } + #[test] + fn macro_call_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/macro_call_type.rs"); + } + #[test] + fn macro_def() { run_and_expect_no_errors("test_data/parser/inline/ok/macro_def.rs"); } + #[test] + fn macro_def_curly() { + run_and_expect_no_errors("test_data/parser/inline/ok/macro_def_curly.rs"); + } + #[test] + fn macro_inside_generic_arg() { + run_and_expect_no_errors("test_data/parser/inline/ok/macro_inside_generic_arg.rs"); + } + #[test] + fn macro_rules_as_macro_name() { + 
run_and_expect_no_errors("test_data/parser/inline/ok/macro_rules_as_macro_name.rs"); + } + #[test] + fn macro_rules_non_brace() { + run_and_expect_no_errors("test_data/parser/inline/ok/macro_rules_non_brace.rs"); + } + #[test] + fn marco_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/marco_pat.rs"); } + #[test] + fn match_arm() { run_and_expect_no_errors("test_data/parser/inline/ok/match_arm.rs"); } + #[test] + fn match_arms_commas() { + run_and_expect_no_errors("test_data/parser/inline/ok/match_arms_commas.rs"); + } + #[test] + fn match_arms_inner_attribute() { + run_and_expect_no_errors("test_data/parser/inline/ok/match_arms_inner_attribute.rs"); + } + #[test] + fn match_arms_outer_attributes() { + run_and_expect_no_errors("test_data/parser/inline/ok/match_arms_outer_attributes.rs"); + } + #[test] + fn match_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/match_expr.rs"); } + #[test] + fn match_guard() { run_and_expect_no_errors("test_data/parser/inline/ok/match_guard.rs"); } + #[test] + fn metas() { run_and_expect_no_errors("test_data/parser/inline/ok/metas.rs"); } + #[test] + fn method_call_expr() { + run_and_expect_no_errors("test_data/parser/inline/ok/method_call_expr.rs"); + } + #[test] + fn mod_contents() { run_and_expect_no_errors("test_data/parser/inline/ok/mod_contents.rs"); } + #[test] + fn mod_item() { run_and_expect_no_errors("test_data/parser/inline/ok/mod_item.rs"); } + #[test] + fn mod_item_curly() { + run_and_expect_no_errors("test_data/parser/inline/ok/mod_item_curly.rs"); + } + #[test] + fn never_type() { run_and_expect_no_errors("test_data/parser/inline/ok/never_type.rs"); } + #[test] + fn no_dyn_trait_leading_for() { + run_and_expect_no_errors("test_data/parser/inline/ok/no_dyn_trait_leading_for.rs"); + } + #[test] + fn no_semi_after_block() { + run_and_expect_no_errors("test_data/parser/inline/ok/no_semi_after_block.rs"); + } + #[test] + fn nocontentexpr() { run_and_expect_no_errors("test_data/parser/inline/ok/nocontentexpr.rs"); } + #[test] + fn nocontentexpr_after_item() { + run_and_expect_no_errors("test_data/parser/inline/ok/nocontentexpr_after_item.rs"); + } + #[test] + fn or_pattern() { run_and_expect_no_errors("test_data/parser/inline/ok/or_pattern.rs"); } + #[test] + fn param_list() { run_and_expect_no_errors("test_data/parser/inline/ok/param_list.rs"); } + #[test] + fn param_list_opt_patterns() { + run_and_expect_no_errors("test_data/parser/inline/ok/param_list_opt_patterns.rs"); + } + #[test] + fn param_list_vararg() { + run_and_expect_no_errors("test_data/parser/inline/ok/param_list_vararg.rs"); + } + #[test] + fn param_outer_arg() { + run_and_expect_no_errors("test_data/parser/inline/ok/param_outer_arg.rs"); + } + #[test] + fn paren_type() { run_and_expect_no_errors("test_data/parser/inline/ok/paren_type.rs"); } + #[test] + fn path_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/path_expr.rs"); } + #[test] + fn path_fn_trait_args() { + run_and_expect_no_errors("test_data/parser/inline/ok/path_fn_trait_args.rs"); + } + #[test] + fn path_part() { run_and_expect_no_errors("test_data/parser/inline/ok/path_part.rs"); } + #[test] + fn path_type() { run_and_expect_no_errors("test_data/parser/inline/ok/path_type.rs"); } + #[test] + fn path_type_with_bounds() { + run_and_expect_no_errors("test_data/parser/inline/ok/path_type_with_bounds.rs"); + } + #[test] + fn placeholder_pat() { + run_and_expect_no_errors("test_data/parser/inline/ok/placeholder_pat.rs"); + } + #[test] + fn placeholder_type() { + 
run_and_expect_no_errors("test_data/parser/inline/ok/placeholder_type.rs"); + } + #[test] + fn pointer_type_mut() { + run_and_expect_no_errors("test_data/parser/inline/ok/pointer_type_mut.rs"); + } + #[test] + fn postfix_range() { run_and_expect_no_errors("test_data/parser/inline/ok/postfix_range.rs"); } + #[test] + fn precise_capturing() { + run_and_expect_no_errors("test_data/parser/inline/ok/precise_capturing.rs"); + } + #[test] + fn pub_parens_typepath() { + run_and_expect_no_errors("test_data/parser/inline/ok/pub_parens_typepath.rs"); + } + #[test] + fn pub_tuple_field() { + run_and_expect_no_errors("test_data/parser/inline/ok/pub_tuple_field.rs"); + } + #[test] + fn qual_paths() { run_and_expect_no_errors("test_data/parser/inline/ok/qual_paths.rs"); } + #[test] + fn question_for_type_trait_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/question_for_type_trait_bound.rs"); + } + #[test] + fn range_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/range_pat.rs"); } + #[test] + fn record_field_attrs() { + run_and_expect_no_errors("test_data/parser/inline/ok/record_field_attrs.rs"); + } + #[test] + fn record_field_list() { + run_and_expect_no_errors("test_data/parser/inline/ok/record_field_list.rs"); + } + #[test] + fn record_lit() { run_and_expect_no_errors("test_data/parser/inline/ok/record_lit.rs"); } + #[test] + fn record_literal_field_with_attr() { + run_and_expect_no_errors("test_data/parser/inline/ok/record_literal_field_with_attr.rs"); + } + #[test] + fn record_pat_field() { + run_and_expect_no_errors("test_data/parser/inline/ok/record_pat_field.rs"); + } + #[test] + fn record_pat_field_list() { + run_and_expect_no_errors("test_data/parser/inline/ok/record_pat_field_list.rs"); + } + #[test] + fn ref_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/ref_expr.rs"); } + #[test] + fn ref_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/ref_pat.rs"); } + #[test] + fn reference_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/reference_type.rs"); + } + #[test] + fn return_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/return_expr.rs"); } + #[test] + fn self_param() { run_and_expect_no_errors("test_data/parser/inline/ok/self_param.rs"); } + #[test] + fn self_param_outer_attr() { + run_and_expect_no_errors("test_data/parser/inline/ok/self_param_outer_attr.rs"); + } + #[test] + fn singleton_tuple_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/singleton_tuple_type.rs"); + } + #[test] + fn slice_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/slice_pat.rs"); } + #[test] + fn slice_type() { run_and_expect_no_errors("test_data/parser/inline/ok/slice_type.rs"); } + #[test] + fn stmt_bin_expr_ambiguity() { + run_and_expect_no_errors("test_data/parser/inline/ok/stmt_bin_expr_ambiguity.rs"); + } + #[test] + fn stmt_postfix_expr_ambiguity() { + run_and_expect_no_errors("test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rs"); + } + #[test] + fn struct_item() { run_and_expect_no_errors("test_data/parser/inline/ok/struct_item.rs"); } + #[test] + fn trait_alias() { run_and_expect_no_errors("test_data/parser/inline/ok/trait_alias.rs"); } + #[test] + fn trait_alias_where_clause() { + run_and_expect_no_errors("test_data/parser/inline/ok/trait_alias_where_clause.rs"); + } + #[test] + fn trait_item() { run_and_expect_no_errors("test_data/parser/inline/ok/trait_item.rs"); } + #[test] + fn trait_item_bounds() { + run_and_expect_no_errors("test_data/parser/inline/ok/trait_item_bounds.rs"); + } 
+ #[test] + fn trait_item_generic_params() { + run_and_expect_no_errors("test_data/parser/inline/ok/trait_item_generic_params.rs"); + } + #[test] + fn trait_item_where_clause() { + run_and_expect_no_errors("test_data/parser/inline/ok/trait_item_where_clause.rs"); + } + #[test] + fn try_block_expr() { + run_and_expect_no_errors("test_data/parser/inline/ok/try_block_expr.rs"); + } + #[test] + fn try_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/try_expr.rs"); } + #[test] + fn try_macro_fallback() { + run_and_expect_no_errors_with_edition( + "test_data/parser/inline/ok/try_macro_fallback.rs", + crate::Edition::Edition2015, + ); + } + #[test] + fn try_macro_rules() { + run_and_expect_no_errors_with_edition( + "test_data/parser/inline/ok/try_macro_rules.rs", + crate::Edition::Edition2015, + ); + } + #[test] + fn tuple_attrs() { run_and_expect_no_errors("test_data/parser/inline/ok/tuple_attrs.rs"); } + #[test] + fn tuple_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/tuple_expr.rs"); } + #[test] + fn tuple_field_attrs() { + run_and_expect_no_errors("test_data/parser/inline/ok/tuple_field_attrs.rs"); + } + #[test] + fn tuple_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/tuple_pat.rs"); } + #[test] + fn tuple_pat_fields() { + run_and_expect_no_errors("test_data/parser/inline/ok/tuple_pat_fields.rs"); + } + #[test] + fn tuple_struct() { run_and_expect_no_errors("test_data/parser/inline/ok/tuple_struct.rs"); } + #[test] + fn tuple_struct_where() { + run_and_expect_no_errors("test_data/parser/inline/ok/tuple_struct_where.rs"); + } + #[test] + fn type_alias() { run_and_expect_no_errors("test_data/parser/inline/ok/type_alias.rs"); } + #[test] + fn type_item_type_params() { + run_and_expect_no_errors("test_data/parser/inline/ok/type_item_type_params.rs"); + } + #[test] + fn type_item_where_clause() { + run_and_expect_no_errors("test_data/parser/inline/ok/type_item_where_clause.rs"); + } + #[test] + fn type_item_where_clause_deprecated() { + run_and_expect_no_errors("test_data/parser/inline/ok/type_item_where_clause_deprecated.rs"); + } + #[test] + fn type_param() { run_and_expect_no_errors("test_data/parser/inline/ok/type_param.rs"); } + #[test] + fn type_param_bounds() { + run_and_expect_no_errors("test_data/parser/inline/ok/type_param_bounds.rs"); + } + #[test] + fn type_param_default() { + run_and_expect_no_errors("test_data/parser/inline/ok/type_param_default.rs"); + } + #[test] + fn type_path_in_pattern() { + run_and_expect_no_errors("test_data/parser/inline/ok/type_path_in_pattern.rs"); + } + #[test] + fn typepathfn_with_coloncolon() { + run_and_expect_no_errors("test_data/parser/inline/ok/typepathfn_with_coloncolon.rs"); + } + #[test] + fn unary_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/unary_expr.rs"); } + #[test] + fn union_item() { run_and_expect_no_errors("test_data/parser/inline/ok/union_item.rs"); } + #[test] + fn unit_struct() { run_and_expect_no_errors("test_data/parser/inline/ok/unit_struct.rs"); } + #[test] + fn unit_type() { run_and_expect_no_errors("test_data/parser/inline/ok/unit_type.rs"); } + #[test] + fn use_item() { run_and_expect_no_errors("test_data/parser/inline/ok/use_item.rs"); } + #[test] + fn use_tree() { run_and_expect_no_errors("test_data/parser/inline/ok/use_tree.rs"); } + #[test] + fn use_tree_abs_star() { + run_and_expect_no_errors("test_data/parser/inline/ok/use_tree_abs_star.rs"); + } + #[test] + fn use_tree_alias() { + run_and_expect_no_errors("test_data/parser/inline/ok/use_tree_alias.rs"); + } + 
#[test] + fn use_tree_list() { run_and_expect_no_errors("test_data/parser/inline/ok/use_tree_list.rs"); } + #[test] + fn use_tree_path() { run_and_expect_no_errors("test_data/parser/inline/ok/use_tree_path.rs"); } + #[test] + fn use_tree_path_star() { + run_and_expect_no_errors("test_data/parser/inline/ok/use_tree_path_star.rs"); + } + #[test] + fn use_tree_path_use_tree() { + run_and_expect_no_errors("test_data/parser/inline/ok/use_tree_path_use_tree.rs"); + } + #[test] + fn use_tree_star() { run_and_expect_no_errors("test_data/parser/inline/ok/use_tree_star.rs"); } + #[test] + fn value_parameters_no_patterns() { + run_and_expect_no_errors("test_data/parser/inline/ok/value_parameters_no_patterns.rs"); + } + #[test] + fn variant_discriminant() { + run_and_expect_no_errors("test_data/parser/inline/ok/variant_discriminant.rs"); + } + #[test] + fn where_clause() { run_and_expect_no_errors("test_data/parser/inline/ok/where_clause.rs"); } + #[test] + fn where_pred_for() { + run_and_expect_no_errors("test_data/parser/inline/ok/where_pred_for.rs"); + } + #[test] + fn while_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/while_expr.rs"); } + #[test] + fn yeet_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/yeet_expr.rs"); } + #[test] + fn yield_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/yield_expr.rs"); } +} +mod err { + use crate::tests::*; + #[test] + fn angled_path_without_qual() { + run_and_expect_errors("test_data/parser/inline/err/angled_path_without_qual.rs"); + } + #[test] + fn anonymous_static() { + run_and_expect_errors("test_data/parser/inline/err/anonymous_static.rs"); + } + #[test] + fn arg_list_recovery() { + run_and_expect_errors("test_data/parser/inline/err/arg_list_recovery.rs"); + } + #[test] + fn array_type_missing_semi() { + run_and_expect_errors("test_data/parser/inline/err/array_type_missing_semi.rs"); + } + #[test] + fn async_without_semicolon() { + run_and_expect_errors("test_data/parser/inline/err/async_without_semicolon.rs"); + } + #[test] + fn comma_after_functional_update_syntax() { + run_and_expect_errors( + "test_data/parser/inline/err/comma_after_functional_update_syntax.rs", + ); + } + #[test] + fn crate_visibility_empty_recover() { + run_and_expect_errors("test_data/parser/inline/err/crate_visibility_empty_recover.rs"); + } + #[test] + fn empty_param_slot() { + run_and_expect_errors("test_data/parser/inline/err/empty_param_slot.rs"); + } + #[test] + fn empty_segment() { run_and_expect_errors("test_data/parser/inline/err/empty_segment.rs"); } + #[test] + fn fn_pointer_type_missing_fn() { + run_and_expect_errors("test_data/parser/inline/err/fn_pointer_type_missing_fn.rs"); + } + #[test] + fn gen_fn() { run_and_expect_errors("test_data/parser/inline/err/gen_fn.rs"); } + #[test] + fn generic_arg_list_recover() { + run_and_expect_errors("test_data/parser/inline/err/generic_arg_list_recover.rs"); + } + #[test] + fn generic_param_list_recover() { + run_and_expect_errors("test_data/parser/inline/err/generic_param_list_recover.rs"); + } + #[test] + fn impl_type() { run_and_expect_errors("test_data/parser/inline/err/impl_type.rs"); } + #[test] + fn let_else_right_curly_brace() { + run_and_expect_errors("test_data/parser/inline/err/let_else_right_curly_brace.rs"); + } + #[test] + fn macro_rules_as_macro_name() { + run_and_expect_errors("test_data/parser/inline/err/macro_rules_as_macro_name.rs"); + } + #[test] + fn match_arms_recovery() { + run_and_expect_errors("test_data/parser/inline/err/match_arms_recovery.rs"); + } + #[test] 
+ fn method_call_missing_argument_list() { + run_and_expect_errors("test_data/parser/inline/err/method_call_missing_argument_list.rs"); + } + #[test] + fn misplaced_label_err() { + run_and_expect_errors("test_data/parser/inline/err/misplaced_label_err.rs"); + } + #[test] + fn missing_fn_param_type() { + run_and_expect_errors("test_data/parser/inline/err/missing_fn_param_type.rs"); + } + #[test] + fn pointer_type_no_mutability() { + run_and_expect_errors("test_data/parser/inline/err/pointer_type_no_mutability.rs"); + } + #[test] + fn pub_expr() { run_and_expect_errors("test_data/parser/inline/err/pub_expr.rs"); } + #[test] + fn record_literal_before_ellipsis_recovery() { + run_and_expect_errors( + "test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs", + ); + } + #[test] + fn record_literal_field_eq_recovery() { + run_and_expect_errors("test_data/parser/inline/err/record_literal_field_eq_recovery.rs"); + } + #[test] + fn record_literal_missing_ellipsis_recovery() { + run_and_expect_errors( + "test_data/parser/inline/err/record_literal_missing_ellipsis_recovery.rs", + ); + } + #[test] + fn record_pat_field_eq_recovery() { + run_and_expect_errors("test_data/parser/inline/err/record_pat_field_eq_recovery.rs"); + } + #[test] + fn recover_from_missing_assoc_item_binding() { + run_and_expect_errors( + "test_data/parser/inline/err/recover_from_missing_assoc_item_binding.rs", + ); + } + #[test] + fn recover_from_missing_const_default() { + run_and_expect_errors("test_data/parser/inline/err/recover_from_missing_const_default.rs"); + } + #[test] + fn struct_field_recover() { + run_and_expect_errors("test_data/parser/inline/err/struct_field_recover.rs"); + } + #[test] + fn top_level_let() { run_and_expect_errors("test_data/parser/inline/err/top_level_let.rs"); } + #[test] + fn tuple_expr_leading_comma() { + run_and_expect_errors("test_data/parser/inline/err/tuple_expr_leading_comma.rs"); + } + #[test] + fn tuple_field_list_recovery() { + run_and_expect_errors("test_data/parser/inline/err/tuple_field_list_recovery.rs"); + } + #[test] + fn tuple_pat_leading_comma() { + run_and_expect_errors("test_data/parser/inline/err/tuple_pat_leading_comma.rs"); + } + #[test] + fn unsafe_block_in_mod() { + run_and_expect_errors("test_data/parser/inline/err/unsafe_block_in_mod.rs"); + } + #[test] + fn use_tree_list_err_recovery() { + run_and_expect_errors("test_data/parser/inline/err/use_tree_list_err_recovery.rs"); + } +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast index bd5ec4b7c29..249bfeeeeee 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast @@ -10,4 +10,4 @@ SOURCE_FILE IDENT "Foo" SEMICOLON ";" WHITESPACE "\n" -error 6: expected existential, fn, trait or impl +error 6: expected fn, trait or impl diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast index 1cdc6e6e719..d6d2e75cca6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast @@ -69,7 +69,7 @@ SOURCE_FILE WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 24: expected existential, fn, trait or impl -error 41: expected 
existential, fn, trait or impl +error 24: expected fn, trait or impl +error 41: expected fn, trait or impl error 56: expected a block error 75: expected a loop or block diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast index 96e471a69a7..76464bf7cc2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast @@ -39,7 +39,7 @@ SOURCE_FILE L_CURLY "{" R_CURLY "}" WHITESPACE "\n" -error 6: expected existential, fn, trait or impl +error 6: expected fn, trait or impl error 38: expected a name error 40: missing type for `const` or `static` error 40: expected SEMICOLON diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast index 4b2a740362e..a56d692335f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast @@ -11,5 +11,5 @@ SOURCE_FILE WHITESPACE " " STRING "\"C\"" WHITESPACE "\n" -error 10: expected existential, fn, trait or impl -error 21: expected existential, fn, trait or impl +error 10: expected fn, trait or impl +error 21: expected fn, trait or impl diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rast index 0529e9750e7..0529e9750e7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rs index 802d6cc14a4..802d6cc14a4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0016_angled_path_without_qual.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/anonymous_static.rast index 823db94f58a..823db94f58a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/anonymous_static.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/anonymous_static.rs index df8cecb4326..df8cecb4326 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/anonymous_static.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/arg_list_recovery.rast index cd5aa680c65..cd5aa680c65 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/arg_list_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/arg_list_recovery.rs index 175a31f8b58..175a31f8b58 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/arg_list_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/array_type_missing_semi.rast index ed739a7e398..ed739a7e398 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/array_type_missing_semi.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/array_type_missing_semi.rs index a9485144320..a9485144320 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/array_type_missing_semi.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/async_without_semicolon.rast index bf20d5fa4f6..bf20d5fa4f6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/async_without_semicolon.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/async_without_semicolon.rs index 9a423248c27..9a423248c27 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/async_without_semicolon.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_functional_update_syntax.rast index 0e2fe5988d6..0e2fe5988d6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_functional_update_syntax.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_functional_update_syntax.rs index 14cf96719b4..14cf96719b4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_functional_update_syntax.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0018_crate_visibility_empty_recover.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/crate_visibility_empty_recover.rast index 0fe4ca42d79..0fe4ca42d79 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0018_crate_visibility_empty_recover.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/crate_visibility_empty_recover.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0018_crate_visibility_empty_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/crate_visibility_empty_recover.rs index e8cf9e6696d..e8cf9e6696d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0018_crate_visibility_empty_recover.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/crate_visibility_empty_recover.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_param_slot.rast index 39e35a81ee2..39e35a81ee2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_param_slot.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_param_slot.rs index 0adf7b8d2f0..0adf7b8d2f0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_param_slot.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_segment.rast index b03f5ad9f7e..b03f5ad9f7e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_segment.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_segment.rs index 7510664e102..7510664e102 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/empty_segment.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/fn_pointer_type_missing_fn.rast index eb059529396..eb059529396 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/fn_pointer_type_missing_fn.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/fn_pointer_type_missing_fn.rs index f014914ff9f..f014914ff9f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/fn_pointer_type_missing_fn.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast new file mode 100644 index 00000000000..9609ece77df --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast @@ -0,0 +1,51 @@ +SOURCE_FILE + MACRO_CALL + PATH + PATH_SEGMENT + NAME_REF + IDENT "gen" + WHITESPACE " " + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "gen_fn" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" + ERROR + ASYNC_KW "async" + WHITESPACE " " + MACRO_CALL + PATH + PATH_SEGMENT + NAME_REF + IDENT "gen" + WHITESPACE " " + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "async_gen_fn" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" +error 3: expected BANG +error 3: expected `{`, `[`, `(` +error 3: expected SEMICOLON +error 24: expected fn, trait or impl +error 28: expected BANG +error 28: expected `{`, `[`, `(` +error 28: expected SEMICOLON diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs new file mode 100644 index 00000000000..80882e0a404 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs @@ -0,0 +1,2 @@ +gen fn gen_fn() {} +async gen fn async_gen_fn() {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_arg_list_recover.rast index 4cf5a3386b9..4cf5a3386b9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_arg_list_recover.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_arg_list_recover.rs index 7d849aa1bee..7d849aa1bee 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_arg_list_recover.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_param_list_recover.rast index 0a1ed01fbe6..0a1ed01fbe6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_param_list_recover.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_param_list_recover.rs index 2b5149bb0dc..2b5149bb0dc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_param_list_recover.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/impl_type.rast index dbeb878a2e8..dbeb878a2e8 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/impl_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/impl_type.rs index b8c7b65e31d..b8c7b65e31d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/impl_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/let_else_right_curly_brace.rast index 6ec580212b4..6ec580212b4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/let_else_right_curly_brace.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/let_else_right_curly_brace.rs index 30d52fea3b2..30d52fea3b2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0017_let_else_right_curly_brace.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/let_else_right_curly_brace.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_macro_rules_as_macro_name.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/macro_rules_as_macro_name.rast index 79d428a41c8..79d428a41c8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_macro_rules_as_macro_name.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/macro_rules_as_macro_name.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_macro_rules_as_macro_name.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/macro_rules_as_macro_name.rs index e8d402443d6..e8d402443d6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_macro_rules_as_macro_name.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/macro_rules_as_macro_name.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/match_arms_recovery.rast index 5b191945e45..5b191945e45 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/match_arms_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/match_arms_recovery.rs index 173103b2e37..173103b2e37 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0034_match_arms_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/match_arms_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/method_call_missing_argument_list.rast index 02544b5e532..02544b5e532 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/method_call_missing_argument_list.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/method_call_missing_argument_list.rs index 7c8baecaa9c..7c8baecaa9c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/method_call_missing_argument_list.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/misplaced_label_err.rast index ea5203fb96e..ea5203fb96e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/misplaced_label_err.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/misplaced_label_err.rs index a2164c5105c..a2164c5105c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/misplaced_label_err.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/missing_fn_param_type.rast index ea50ad35d74..ea50ad35d74 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/missing_fn_param_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/missing_fn_param_type.rs index 4a95b908435..4a95b908435 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/missing_fn_param_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pointer_type_no_mutability.rast index 354c4135a4d..354c4135a4d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pointer_type_no_mutability.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pointer_type_no_mutability.rs index fae70513133..fae70513133 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pointer_type_no_mutability.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pub_expr.rast index 0ae9f64e7e6..0ae9f64e7e6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pub_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pub_expr.rs index 2976f68625f..2976f68625f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/pub_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast index 741b7845e7f..741b7845e7f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs index a4e5b2f6933..a4e5b2f6933 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_field_eq_recovery.rast index ad4deeb0b67..ad4deeb0b67 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_field_eq_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_field_eq_recovery.rs index 1eb1aa9b926..1eb1aa9b926 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_field_eq_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_missing_ellipsis_recovery.rast index 0c5b618e6f0..0c5b618e6f0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_missing_ellipsis_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_missing_ellipsis_recovery.rs index 1b594e8ab96..1b594e8ab96 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_missing_ellipsis_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_pat_field_eq_recovery.rast index 6940a84b683..6940a84b683 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_pat_field_eq_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_pat_field_eq_recovery.rs index c4949d6e12e..c4949d6e12e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_pat_field_eq_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_assoc_item_binding.rast index fc59db84e77..fc59db84e77 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_assoc_item_binding.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_assoc_item_binding.rs index e484e433a09..e484e433a09 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_assoc_item_binding.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_const_default.rast index 49f163b164a..49f163b164a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_const_default.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_const_default.rs index 5bab13da92b..5bab13da92b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/recover_from_missing_const_default.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rast index 458d7f4e2fa..458d7f4e2fa 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rs index da32227adcd..da32227adcd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/top_level_let.rast index 5ddef5f3f03..5ddef5f3f03 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/top_level_let.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/top_level_let.rs index 3d3e7dd56c7..3d3e7dd56c7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/top_level_let.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0019_tuple_expr_leading_comma.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_expr_leading_comma.rast index 3fbc0da4002..3fbc0da4002 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0019_tuple_expr_leading_comma.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_expr_leading_comma.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0019_tuple_expr_leading_comma.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_expr_leading_comma.rs index 12fab59a776..12fab59a776 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0019_tuple_expr_leading_comma.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_expr_leading_comma.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_field_list_recovery.rast index 6b0bfa007e3..6b0bfa007e3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_field_list_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_field_list_recovery.rs index ecb4d8bda14..ecb4d8bda14 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_field_list_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0020_tuple_pat_leading_comma.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_pat_leading_comma.rast index 9c8837292d2..9c8837292d2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0020_tuple_pat_leading_comma.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_pat_leading_comma.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0020_tuple_pat_leading_comma.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_pat_leading_comma.rs index de168521e1d..de168521e1d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0020_tuple_pat_leading_comma.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/tuple_pat_leading_comma.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/unsafe_block_in_mod.rast index 77c2b56adf2..77c2b56adf2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/unsafe_block_in_mod.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/unsafe_block_in_mod.rs index 26141e90491..26141e90491 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/unsafe_block_in_mod.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/use_tree_list_err_recovery.rast index b576d872e13..b576d872e13 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/use_tree_list_err_recovery.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/use_tree_list_err_recovery.rs index 9885e6ab273..9885e6ab273 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/use_tree_list_err_recovery.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast deleted file mode 100644 index cdbc40fe0b2..00000000000 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast +++ /dev/null @@ -1,87 +0,0 @@ -SOURCE_FILE@0..106 - STRUCT@0..11 - STRUCT_KW@0..6 "struct" - WHITESPACE@6..7 " " - NAME@7..10 - IDENT@7..10 "Foo" - SEMICOLON@10..11 ";" - WHITESPACE@11..12 "\n" - STRUCT@12..25 - STRUCT_KW@12..18 "struct" - WHITESPACE@18..19 " " - NAME@19..22 - IDENT@19..22 "Foo" - WHITESPACE@22..23 " " - RECORD_FIELD_LIST@23..25 - L_CURLY@23..24 "{" - R_CURLY@24..25 "}" - WHITESPACE@25..26 "\n" - STRUCT@26..39 - STRUCT_KW@26..32 "struct" - WHITESPACE@32..33 " " - NAME@33..36 - IDENT@33..36 "Foo" - TUPLE_FIELD_LIST@36..38 - L_PAREN@36..37 "(" - R_PAREN@37..38 ")" - SEMICOLON@38..39 ";" - WHITESPACE@39..40 "\n" - STRUCT@40..66 - STRUCT_KW@40..46 "struct" - WHITESPACE@46..47 " " - NAME@47..50 - IDENT@47..50 "Foo" - TUPLE_FIELD_LIST@50..65 - L_PAREN@50..51 "(" - TUPLE_FIELD@51..57 - PATH_TYPE@51..57 - PATH@51..57 - PATH_SEGMENT@51..57 - 
NAME_REF@51..57 - IDENT@51..57 "String" - COMMA@57..58 "," - WHITESPACE@58..59 " " - TUPLE_FIELD@59..64 - PATH_TYPE@59..64 - PATH@59..64 - PATH_SEGMENT@59..64 - NAME_REF@59..64 - IDENT@59..64 "usize" - R_PAREN@64..65 ")" - SEMICOLON@65..66 ";" - WHITESPACE@66..67 "\n" - STRUCT@67..105 - STRUCT_KW@67..73 "struct" - WHITESPACE@73..74 " " - NAME@74..77 - IDENT@74..77 "Foo" - WHITESPACE@77..78 " " - RECORD_FIELD_LIST@78..105 - L_CURLY@78..79 "{" - WHITESPACE@79..84 "\n " - RECORD_FIELD@84..90 - NAME@84..85 - IDENT@84..85 "a" - COLON@85..86 ":" - WHITESPACE@86..87 " " - PATH_TYPE@87..90 - PATH@87..90 - PATH_SEGMENT@87..90 - NAME_REF@87..90 - IDENT@87..90 "i32" - COMMA@90..91 "," - WHITESPACE@91..96 "\n " - RECORD_FIELD@96..102 - NAME@96..97 - IDENT@96..97 "b" - COLON@97..98 ":" - WHITESPACE@98..99 " " - PATH_TYPE@99..102 - PATH@99..102 - PATH_SEGMENT@99..102 - NAME_REF@99..102 - IDENT@99..102 "f32" - COMMA@102..103 "," - WHITESPACE@103..104 "\n" - R_CURLY@104..105 "}" - WHITESPACE@105..106 "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast deleted file mode 100644 index b73780261ba..00000000000 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast +++ /dev/null @@ -1,31 +0,0 @@ -SOURCE_FILE - TYPE_ALIAS - EXISTENTIAL_KW "existential" - WHITESPACE " " - TYPE_KW "type" - WHITESPACE " " - NAME - IDENT "Foo" - COLON ":" - WHITESPACE " " - TYPE_BOUND_LIST - TYPE_BOUND - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Fn" - PARAM_LIST - L_PAREN "(" - R_PAREN ")" - WHITESPACE " " - RET_TYPE - THIN_ARROW "->" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "usize" - SEMICOLON ";" - WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs deleted file mode 100644 index 23baf7145cc..00000000000 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs +++ /dev/null @@ -1 +0,0 @@ -existential type Foo: Fn() -> usize; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/anonymous_const.rast index d81b4ff2670..d81b4ff2670 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/anonymous_const.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/anonymous_const.rs index c1d5cdfc621..c1d5cdfc621 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/anonymous_const.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arb_self_types.rast index a0b56262962..a0b56262962 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arb_self_types.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arb_self_types.rs index 6a170d5ac1d..6a170d5ac1d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arb_self_types.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arg_with_attr.rast index ae1074c3680..ae1074c3680 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arg_with_attr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arg_with_attr.rs index 5daf1d7b0be..5daf1d7b0be 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/arg_with_attr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_attrs.rast index 6eb8af33119..6eb8af33119 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_attrs.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_attrs.rs index 2ac310924fd..2ac310924fd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_attrs.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_expr.rast index 60395948c18..60395948c18 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_expr.rs index 4dc1999d140..4dc1999d140 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_type.rast index 0d50144b730..0d50144b730 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_type.rs index 27eb22f2238..27eb22f2238 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/array_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/as_precedence.rast index 4079d2a9915..4079d2a9915 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/as_precedence.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/as_precedence.rs index 70559c5ef5a..70559c5ef5a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/as_precedence.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_const_eq.rast index fa2733e7f96..fa2733e7f96 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_const_eq.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_const_eq.rs index b43c4e36acd..b43c4e36acd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_const_eq.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list.rast index def7373c9de..def7373c9de 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list.rs index f108514879d..f108514879d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list_inner_attrs.rast index 9cb3c8a5c3b..9cb3c8a5c3b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list_inner_attrs.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list_inner_attrs.rs index 915e2c93272..915e2c93272 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_item_list_inner_attrs.rs diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_bound.rast index f2e4e01069c..f2e4e01069c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_bound.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_bound.rs index daae97e4fd5..daae97e4fd5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_bound.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_eq.rast index 97043184036..97043184036 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_eq.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_eq.rs index 3591417473e..3591417473e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_eq.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rast index 2fa52068c9b..2fa52068c9b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rs index 42029ac5927..42029ac5927 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_type_bounds.rast index 8cbc98c51ca..8cbc98c51ca 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_type_bounds.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_type_bounds.rs index 0f7a2d16083..0f7a2d16083 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_type_bounds.rs diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/async_trait_bound.rast index ebf758286a7..ebf758286a7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/async_trait_bound.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/async_trait_bound.rs index 04d44175d77..04d44175d77 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/async_trait_bound.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/attr_on_expr_stmt.rast index 81b7f2b3cbb..81b7f2b3cbb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/attr_on_expr_stmt.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/attr_on_expr_stmt.rs index b28c078f935..b28c078f935 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/attr_on_expr_stmt.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/await_expr.rast index af713a22072..af713a22072 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/await_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/await_expr.rs index fe9a3211bb1..fe9a3211bb1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/await_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_bare_dyn_types_with_leading_lifetime.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_leading_lifetime.rast index d7e67fbcd15..d7e67fbcd15 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_bare_dyn_types_with_leading_lifetime.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_leading_lifetime.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_bare_dyn_types_with_leading_lifetime.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_leading_lifetime.rs index 3e9a9a29ddc..3e9a9a29ddc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_bare_dyn_types_with_leading_lifetime.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_leading_lifetime.rs diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_paren_as_generic_args.rast index d5f97bad898..d5f97bad898 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_paren_as_generic_args.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_paren_as_generic_args.rs index 800002b1b82..800002b1b82 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_paren_as_generic_args.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/become_expr.rast index c544cf4e5e3..c544cf4e5e3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/become_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/become_expr.rs index 918a83ca6e8..918a83ca6e8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/become_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bind_pat.rast index eb1c32474a2..eb1c32474a2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bind_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bind_pat.rs index 820a9e72ce5..820a9e72ce5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bind_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/binop_resets_statementness.rast index f667c1972dd..f667c1972dd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/binop_resets_statementness.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/binop_resets_statementness.rs index e325e46676f..e325e46676f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/binop_resets_statementness.rs diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block.rast index a23364d152c..a23364d152c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block.rs index 81f44c53307..81f44c53307 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block_items.rast index 2e4b515cab9..2e4b515cab9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block_items.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block_items.rs index d9868718c79..d9868718c79 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/block_items.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/box_pat.rast index df22decde12..df22decde12 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/box_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/box_pat.rs index 9d458aa1e6b..9d458aa1e6b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/box_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_ambiguity.rast index cbf5e84e8cd..cbf5e84e8cd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_ambiguity.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_ambiguity.rs index 560eb05b949..560eb05b949 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_ambiguity.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_expr.rast index 06c053d0f82..06c053d0f82 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_expr.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_expr.rs index 1b40946365f..1b40946365f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/break_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_builtin_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/builtin_expr.rast index 361900b6d3e..361900b6d3e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_builtin_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/builtin_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_builtin_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/builtin_expr.rs index 14431b0210e..14431b0210e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_builtin_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/builtin_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/call_expr.rast index 19cc8d5ac7c..19cc8d5ac7c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/call_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/call_expr.rs index ffbf46d6d1f..ffbf46d6d1f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/call_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/cast_expr.rast index d53dde53811..d53dde53811 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/cast_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/cast_expr.rs index bfe8e4b362a..bfe8e4b362a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/cast_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_body_underscore_assignment.rast index 8970922c977..8970922c977 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_body_underscore_assignment.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_body_underscore_assignment.rs index 9a34b63d29c..9a34b63d29c 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_body_underscore_assignment.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_params.rast index 318eb89deaa..318eb89deaa 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_params.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_params.rs index 6ca8dd2d6a9..6ca8dd2d6a9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_params.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_closure_range_method_call.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_range_method_call.rast index 542711339d1..542711339d1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_closure_range_method_call.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_range_method_call.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_closure_range_method_call.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_range_method_call.rs index a81d3c37133..a81d3c37133 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_closure_range_method_call.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_range_method_call.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg.rast index 1e030071723..1e030071723 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg.rs index 8b5e5dbe13c..8b5e5dbe13c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_block.rast index ea8866da277..ea8866da277 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_block.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_block.rs index 1c279db289c..1c279db289c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_block.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_bool_literal.rast index aea23e463da..aea23e463da 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_bool_literal.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_bool_literal.rs index 4b92e2d487a..4b92e2d487a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_bool_literal.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_literal.rast index 03d414e3366..03d414e3366 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_literal.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_literal.rs index 7eacada73ad..7eacada73ad 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_literal.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_negative_number.rast index e504badbd3d..e504badbd3d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_negative_number.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_negative_number.rs index d0a87bdc039..d0a87bdc039 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_arg_negative_number.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_block_pat.rast index 593867a7b12..593867a7b12 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_block_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_block_pat.rs index 6ecdee849b7..6ecdee849b7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_block_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_closure.rast index 06442a1d0f1..06442a1d0f1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_closure.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_closure.rs index 0c05cc70bd3..0c05cc70bd3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_closure.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_item.rast index 909983c9a2a..909983c9a2a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_item.rs index 6d5f5be65dc..6d5f5be65dc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param.rast index 24595a1a185..24595a1a185 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param.rs index 8cdb3b70367..8cdb3b70367 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_expression.rast index d6501137498..d6501137498 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_expression.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_expression.rs index 551bde0b008..551bde0b008 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_expression.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_literal.rast index 6de10353bf0..6de10353bf0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_literal.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_literal.rs index 879ecffa75d..879ecffa75d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_literal.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_path.rast index 3f5fb47d287..3f5fb47d287 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_path.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_path.rs index f3da43ca06c..f3da43ca06c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_path.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_trait_bound.rast index 646873881bc..646873881bc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_trait_bound.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_trait_bound.rs index 8eb8f84c91f..8eb8f84c91f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_trait_bound.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/continue_expr.rast index 5d80a57a2e4..5d80a57a2e4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/continue_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/continue_expr.rs index 474cc3f0e0c..474cc3f0e0c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/continue_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_path.rast 
index f71367ae1e4..f71367ae1e4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_path.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_path.rs index 1bbb5930bf6..1bbb5930bf6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_path.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility.rast index 8738292a9f7..8738292a9f7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility.rs index a790a485f9f..a790a485f9f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility_in.rast index ac45c569567..ac45c569567 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility_in.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility_in.rs index 2856dbd848f..2856dbd848f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/crate_visibility_in.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_fn.rast index 06b37e23938..06b37e23938 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_fn.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_fn.rs index 78c3b4d85fb..78c3b4d85fb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_fn.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_unsafe_fn.rast index 7c2f7b34c74..7c2f7b34c74 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_unsafe_fn.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_unsafe_fn.rs index 05c20a68fe8..05c20a68fe8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_async_unsafe_fn.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_item.rast index 7a8e8cf1dd2..7a8e8cf1dd2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_item.rs index a6836cbd577..a6836cbd577 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_unsafe_item.rast index b180d0b72cc..b180d0b72cc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_unsafe_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_unsafe_item.rs index 96340f84ab3..96340f84ab3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/default_unsafe_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_struct_rest_pattern.rast index fb8aa5accb5..fb8aa5accb5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_struct_rest_pattern.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_struct_rest_pattern.rs index 22a5b5f3e31..22a5b5f3e31 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_struct_rest_pattern.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_wildcard_pat.rast index 5f53d34510e..5f53d34510e 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_wildcard_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_wildcard_pat.rs index 91acfb3a0ae..91acfb3a0ae 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/destructuring_assignment_wildcard_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dot_dot_pat.rast index 4d4011e6b4a..4d4011e6b4a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dot_dot_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dot_dot_pat.rs index 3262f27e140..3262f27e140 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dot_dot_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type.rast index e37d43aac6c..e37d43aac6c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type.rs index c3ecabb992b..c3ecabb992b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast new file mode 100644 index 00000000000..dcc66dc1e2b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast @@ -0,0 +1,186 @@ +SOURCE_FILE + TYPE_ALIAS + COMMENT "// 2015" + WHITESPACE "\n" + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "DynPlain" + WHITESPACE " " + EQ "=" + WHITESPACE " " + DYN_TRAIT_TYPE + DYN_KW "dyn" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" + SEMICOLON ";" + WHITESPACE "\n" + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "DynRef" + WHITESPACE " " + EQ "=" + WHITESPACE " " + REF_TYPE + AMP "&" + DYN_TRAIT_TYPE + DYN_KW "dyn" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" + SEMICOLON ";" + WHITESPACE "\n" + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "DynLt" + WHITESPACE " " + EQ "=" + WHITESPACE " " + DYN_TRAIT_TYPE + DYN_KW "dyn" + WHITESPACE " " + TYPE_BOUND_LIST + 
TYPE_BOUND + LIFETIME + LIFETIME_IDENT "'a" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" + SEMICOLON ";" + WHITESPACE "\n" + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "DynQuestion" + WHITESPACE " " + EQ "=" + WHITESPACE " " + DYN_TRAIT_TYPE + DYN_KW "dyn" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + QUESTION "?" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" + SEMICOLON ";" + WHITESPACE "\n" + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "DynFor" + WHITESPACE " " + EQ "=" + WHITESPACE " " + DYN_TRAIT_TYPE + DYN_KW "dyn" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + FOR_TYPE + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" + SEMICOLON ";" + WHITESPACE "\n" + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "DynParen" + WHITESPACE " " + EQ "=" + WHITESPACE " " + DYN_TRAIT_TYPE + DYN_KW "dyn" + TYPE_BOUND_LIST + TYPE_BOUND + L_PAREN "(" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "Path" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_TYPE + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "dyn" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "Path" + SEMICOLON ";" + WHITESPACE "\n" + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "Generic" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "dyn" + GENERIC_ARG_LIST + L_ANGLE "<" + TYPE_ARG + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" + R_ANGLE ">" + SEMICOLON ";" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rs new file mode 100644 index 00000000000..c4ef1f2b7ac --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rs @@ -0,0 +1,9 @@ +// 2015 +type DynPlain = dyn Path; +type DynRef = &dyn Path; +type DynLt = dyn 'a + Path; +type DynQuestion = dyn ?Path; +type DynFor = dyn for<'a> Path; +type DynParen = dyn(Path); +type Path = dyn::Path; +type Generic = dyn<Path>; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/effect_blocks.rast index f14080c90ea..f14080c90ea 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/effect_blocks.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/effect_blocks.rs index c57d24b2f7b..c57d24b2f7b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/effect_blocks.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_exclusive_range_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/exclusive_range_pat.rast index fd2c422d0d1..fd2c422d0d1 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_exclusive_range_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/exclusive_range_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_exclusive_range_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/exclusive_range_pat.rs index e80505d8bd4..e80505d8bd4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0207_exclusive_range_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/exclusive_range_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expr_literals.rast index fe73d9dfe4a..fe73d9dfe4a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expr_literals.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expr_literals.rs index e7f235a83b9..e7f235a83b9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expr_literals.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expression_after_block.rast index 553ac356d73..553ac356d73 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expression_after_block.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expression_after_block.rs index 76007e3ee77..76007e3ee77 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/expression_after_block.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_block.rast index 7a3cd6a0dfd..7a3cd6a0dfd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_block.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_block.rs index bee5ac8453e..bee5ac8453e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_block.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate.rast index 0a660957d15..0a660957d15 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate.rs index 49af74e1b74..49af74e1b74 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_rename.rast index 5a5aca96f91..5a5aca96f91 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_rename.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_rename.rs index fc76e17dda4..fc76e17dda4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_rename.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_self.rast index edea4245f20..edea4245f20 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_self.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_self.rs index c969ed10936..c969ed10936 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/extern_crate_self.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/field_expr.rast index dd27dc48964..dd27dc48964 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/field_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/field_expr.rs index 98dbe45a7ec..98dbe45a7ec 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/field_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_.rast index a88b3393f29..a88b3393f29 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_.rs index 8f3b7ef112a..8f3b7ef112a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_decl.rast index f7fac807f49..f7fac807f49 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_decl.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_decl.rs index c9f74f7f5c8..c9f74f7f5c8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_decl.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_def_param.rast index ce425a1afde..ce425a1afde 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_def_param.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_def_param.rs index 7b4c6265829..7b4c6265829 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_def_param.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_param_ident_path.rast index ee8465e6ca9..ee8465e6ca9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_param_ident_path.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_param_ident_path.rs index 80a1701fd66..80a1701fd66 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_param_ident_path.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type.rast index 608b0be160a..608b0be160a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type.rs index 9493da83d56..9493da83d56 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type_with_ret.rast index 372c867ae6d..372c867ae6d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type_with_ret.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type_with_ret.rs index e3ba5e87f5d..e3ba5e87f5d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_type_with_ret.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_unnamed_arg.rast index 8a525c6e05e..8a525c6e05e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_unnamed_arg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_unnamed_arg.rs index 1ebbe5b0355..1ebbe5b0355 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_pointer_unnamed_arg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_expr.rast index 6bc3c0fb057..6bc3c0fb057 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_expr.rs index 972197d2a17..972197d2a17 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_range_from.rast index 90cf3101c13..90cf3101c13 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_range_from.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_range_from.rs index af0d40a7aa2..af0d40a7aa2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_range_from.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast index 7600457a9b8..7600457a9b8 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rs index 8ac7b9e1038..8ac7b9e1038 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/full_range_expr.rast index 9ffc0763041..9ffc0763041 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/full_range_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/full_range_expr.rs index ae21ad94cb3..ae21ad94cb3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/full_range_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_ret_type.rast index 6687c843fbb..6687c843fbb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_ret_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_ret_type.rs index d22d8cada69..d22d8cada69 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_ret_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_type_params.rast index 3858e3eed57..3858e3eed57 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_type_params.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_type_params.rs index 9df40ed396c..9df40ed396c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_type_params.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_where_clause.rast index a000d7e5928..a000d7e5928 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_where_clause.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_where_clause.rs index f0920b2a8ad..f0920b2a8ad 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/function_where_clause.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rast new file mode 100644 index 00000000000..6e8df9897e1 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rast @@ -0,0 +1,101 @@ +SOURCE_FILE + FN + COMMENT "// 2024" + WHITESPACE "\n" + VISIBILITY + PUB_KW "pub" + WHITESPACE " " + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + BLOCK_EXPR + GEN_KW "gen" + WHITESPACE " " + STMT_LIST + L_CURLY "{" + WHITESPACE " " + EXPR_STMT + YIELD_EXPR + YIELD_KW "yield" + WHITESPACE " " + LITERAL + STRING "\"\"" + SEMICOLON ";" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + BLOCK_EXPR + ASYNC_KW "async" + WHITESPACE " " + GEN_KW "gen" + WHITESPACE " " + STMT_LIST + L_CURLY "{" + WHITESPACE " " + EXPR_STMT + YIELD_EXPR + YIELD_KW "yield" + WHITESPACE " " + LITERAL + STRING "\"\"" + SEMICOLON ";" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + BLOCK_EXPR + GEN_KW "gen" + WHITESPACE " " + MOVE_KW "move" + WHITESPACE " " + STMT_LIST + L_CURLY "{" + WHITESPACE " " + EXPR_STMT + YIELD_EXPR + YIELD_KW "yield" + WHITESPACE " " + LITERAL + STRING "\"\"" + SEMICOLON ";" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + BLOCK_EXPR + ASYNC_KW "async" + WHITESPACE " " + GEN_KW "gen" + WHITESPACE " " + MOVE_KW "move" + WHITESPACE " " + STMT_LIST + L_CURLY "{" + WHITESPACE " " + EXPR_STMT + YIELD_EXPR + YIELD_KW "yield" + WHITESPACE " " + LITERAL + STRING "\"\"" + SEMICOLON ";" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rs new file mode 100644 index 00000000000..669b434973c --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rs @@ -0,0 +1,7 @@ +// 2024 +pub fn main() { + gen { yield ""; }; + async gen { yield ""; }; + gen move { yield ""; }; + async gen move { yield ""; }; +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_arg.rast index 5a01f154bad..5a01f154bad 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_arg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_arg.rs index f2ccc558bb5..f2ccc558bb5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_arg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_attribute.rast index 28a216e8730..28a216e8730 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_attribute.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_attribute.rs index 0509f81da7e..0509f81da7e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_attribute.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_list.rast index becb77e042f..becb77e042f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_list.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_list.rs index b250bc6bf07..b250bc6bf07 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_param_list.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/half_open_range_pat.rast index c85a6859911..c85a6859911 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/half_open_range_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/half_open_range_pat.rs index f7e2d07922e..f7e2d07922e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/half_open_range_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/if_expr.rast index e2e964e44d1..e2e964e44d1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/if_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/if_expr.rs index 40f227ba3a4..40f227ba3a4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/if_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item.rast index 879676309a2..879676309a2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item.rs index 647799d7c14..647799d7c14 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_const.rast index f7c7aaabc7c..f7c7aaabc7c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_const.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_const.rs index 3252d6f362a..3252d6f362a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_const.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_neg.rast index 805052fbcae..805052fbcae 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_neg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_neg.rs index a7bd4b048d6..a7bd4b048d6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_item_neg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_trait_type.rast index c7478da942b..c7478da942b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_trait_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_trait_type.rs index 54c5a7c4604..54c5a7c4604 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_trait_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_type_params.rast index 24977a22a5f..24977a22a5f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_type_params.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_type_params.rs index cb0a105c29f..cb0a105c29f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/impl_type_params.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/index_expr.rast index 6969259fc08..6969259fc08 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/index_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/index_expr.rs index b9ba78a6cbd..b9ba78a6cbd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/index_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/label.rast index 48d0bde845a..48d0bde845a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/label.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/label.rs index 48e83f263b9..48e83f263b9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/label.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/labeled_block.rast index 181251d4f4f..181251d4f4f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/labeled_block.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/labeled_block.rs index 18b4ff4b1ba..18b4ff4b1ba 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/labeled_block.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast index c25ad8430d0..c25ad8430d0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rs index 75516d25845..75516d25845 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_ret_block.rast index 93238bd8f17..93238bd8f17 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_ret_block.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_ret_block.rs index 061118d3aab..061118d3aab 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_ret_block.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_else.rast index ce7f1a35e8d..ce7f1a35e8d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_else.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_else.rs index 8303de06f1e..8303de06f1e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_else.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_expr.rast index dcffcb1ce2f..dcffcb1ce2f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_expr.rs index 0131d5e3382..0131d5e3382 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast index de9d0fc19ee..de9d0fc19ee 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs index 8003999fd08..8003999fd08 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_ascription.rast index 
ac8e1d93cb6..ac8e1d93cb6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_ascription.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_ascription.rs index a94161dffa2..a94161dffa2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_ascription.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_init.rast index 1b639915878..1b639915878 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_init.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_init.rs index 232c0db411e..232c0db411e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt_init.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_arg.rast index dbd7ff3061a..dbd7ff3061a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_arg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_arg.rs index 41715aa273f..41715aa273f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_arg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_param.rast index c595031f358..c595031f358 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_param.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_param.rs index 2bb38ece8c6..2bb38ece8c6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lifetime_param.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/literal_pattern.rast index c83ea7ade71..c83ea7ade71 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/literal_pattern.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/literal_pattern.rs index 6dfd67b4ce9..6dfd67b4ce9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/literal_pattern.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/loop_expr.rast index ab3b49b0d6c..ab3b49b0d6c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/loop_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/loop_expr.rs index 9f078fa4816..9f078fa4816 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/loop_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_call_type.rast index 8165cb7d9ba..8165cb7d9ba 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_call_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_call_type.rs index edb470c8999..edb470c8999 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_call_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def.rast index f73229b2e30..f73229b2e30 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def.rs index a014ae5464e..a014ae5464e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def_curly.rast index 5cf305d2658..5cf305d2658 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def_curly.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def_curly.rs index 5ed0c777dc9..5ed0c777dc9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_def_curly.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_inside_generic_arg.rast index 88f8a73450f..88f8a73450f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_inside_generic_arg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_inside_generic_arg.rs index 8d43a53d97f..8d43a53d97f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_inside_generic_arg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_macro_rules_as_macro_name.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_as_macro_name.rast index b997250ab4d..b997250ab4d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_macro_rules_as_macro_name.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_as_macro_name.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_macro_rules_as_macro_name.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_as_macro_name.rs index 4c2ea378cbf..4c2ea378cbf 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_macro_rules_as_macro_name.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_as_macro_name.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_non_brace.rast index 45cd4d2aa12..45cd4d2aa12 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_non_brace.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_non_brace.rs index 6033a28cd64..6033a28cd64 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/macro_rules_non_brace.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/marco_pat.rast index cedaa904533..cedaa904533 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/marco_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/marco_pat.rs index 811181d9ba8..811181d9ba8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/marco_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast index 8189cf0a8e5..8189cf0a8e5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rs index 9e009e24f4e..9e009e24f4e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_commas.rast index 94897c2d204..94897c2d204 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_commas.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_commas.rs index 1f25d577a95..1f25d577a95 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_commas.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_inner_attribute.rast index 6fd9f424676..6fd9f424676 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_inner_attribute.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_inner_attribute.rs index 54a67c9d7b7..54a67c9d7b7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_inner_attribute.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_outer_attributes.rast index 0f7580c1a33..0f7580c1a33 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_outer_attributes.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_outer_attributes.rs index 676db42d1a7..676db42d1a7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arms_outer_attributes.rs 
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_expr.rast index 0d6cd390ea0..0d6cd390ea0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_expr.rs index c4021dc1048..c4021dc1048 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_guard.rast index 96318b52195..96318b52195 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_guard.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_guard.rs index cfe05ce4e18..cfe05ce4e18 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_guard.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0213_metas.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/metas.rast index b1ac60b530e..b1ac60b530e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0213_metas.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/metas.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0213_metas.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/metas.rs index 57b7bb7170d..57b7bb7170d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0213_metas.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/metas.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/method_call_expr.rast index b28b8eb673a..b28b8eb673a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/method_call_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/method_call_expr.rs index 48bb6381e80..48bb6381e80 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/method_call_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_contents.rast index 5f60e03d446..5f60e03d446 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_contents.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_contents.rs index 24a15c5c5c9..24a15c5c5c9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_contents.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item.rast index 4d505916cf6..4d505916cf6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item.rs index f21af614da0..f21af614da0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item_curly.rast index d5e3f3493c3..d5e3f3493c3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item_curly.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item_curly.rs index 16b1b43e877..16b1b43e877 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/mod_item_curly.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/never_type.rast index 53dbf3999b2..53dbf3999b2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/never_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/never_type.rs index de399fcf4a2..de399fcf4a2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/never_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast index 30a2842e538..30a2842e538 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rs index 
47a71fd1915..47a71fd1915 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_semi_after_block.rast index f89cc15e721..f89cc15e721 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_semi_after_block.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_semi_after_block.rs index 4919665cb57..4919665cb57 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_semi_after_block.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr.rast index 8bd90a7f675..8bd90a7f675 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr.rs index bbf09e367cf..bbf09e367cf 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr_after_item.rast index f5ee12fe967..f5ee12fe967 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr_after_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr_after_item.rs index eadc7fffb13..eadc7fffb13 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/nocontentexpr_after_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/or_pattern.rast index 6a2046d9e99..6a2046d9e99 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/or_pattern.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/or_pattern.rs index a2631660550..a2631660550 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/or_pattern.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list.rast index d240a52f6ff..d240a52f6ff 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list.rs index 9d55bedbba7..9d55bedbba7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_opt_patterns.rast index e9d93a0d0a4..e9d93a0d0a4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_opt_patterns.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_opt_patterns.rs index 9b93442c0f2..9b93442c0f2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_opt_patterns.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_vararg.rast index 338d53995ae..338d53995ae 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_vararg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_vararg.rs index 533096cd5bb..533096cd5bb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_vararg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_outer_arg.rast index c63ea020a3f..c63ea020a3f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_outer_arg.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_outer_arg.rs index c238be791bd..c238be791bd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_outer_arg.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/paren_type.rast index 29995bb752f..29995bb752f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/paren_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/paren_type.rs index 6e1b2510124..6e1b2510124 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/paren_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_expr.rast index 48e123ab11b..48e123ab11b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_expr.rs index 333ebabef48..333ebabef48 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_fn_trait_args.rast index fd83daf841f..fd83daf841f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_fn_trait_args.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_fn_trait_args.rs index 17ed20e5b13..17ed20e5b13 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_fn_trait_args.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_part.rast index 4ccda19a8d7..4ccda19a8d7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_part.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_part.rs index f6e32c7c149..f6e32c7c149 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_part.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type.rast index d498d372189..d498d372189 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type.rs index bf94f32e196..bf94f32e196 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type_with_bounds.rast index a059e124ae4..a059e124ae4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type_with_bounds.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type_with_bounds.rs index 4bb0f63b739..4bb0f63b739 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_type_with_bounds.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_pat.rast index d39c3df2b7e..d39c3df2b7e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_pat.rs index 4d719c4335b..4d719c4335b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_type.rast index d9db1c34bf8..d9db1c34bf8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_type.rs index 7952dbd57d6..7952dbd57d6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/placeholder_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pointer_type_mut.rast index bfe7ed5b400..bfe7ed5b400 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pointer_type_mut.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pointer_type_mut.rs index 
04b2bb9ba5c..04b2bb9ba5c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pointer_type_mut.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/postfix_range.rast index 3a59cf7b832..3a59cf7b832 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/postfix_range.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/postfix_range.rs index e7b7cfc6b1a..e7b7cfc6b1a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/postfix_range.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rast new file mode 100644 index 00000000000..cf52f1e4799 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rast @@ -0,0 +1,69 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "captures" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + COLON ":" + WHITESPACE " " + LIFETIME + LIFETIME_IDENT "'a" + COMMA "," + WHITESPACE " " + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'b" + COLON ":" + WHITESPACE " " + LIFETIME + LIFETIME_IDENT "'b" + COMMA "," + WHITESPACE " " + TYPE_PARAM + NAME + IDENT "T" + R_ANGLE ">" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + RET_TYPE + THIN_ARROW "->" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Sized" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + TYPE_BOUND + USE_KW "use" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'b" + COMMA "," + WHITESPACE " " + TYPE_PARAM + NAME + IDENT "T" + R_ANGLE ">" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rs new file mode 100644 index 00000000000..ec208d5062b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rs @@ -0,0 +1 @@ +fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T> {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_parens_typepath.rast index 2dede835987..2dede835987 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_parens_typepath.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_parens_typepath.rs index d4c1638226e..d4c1638226e 100644 --- 
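The `precise_capturing` test added just above (new files `precise_capturing.rs` / `precise_capturing.rast`) exercises the `use<..>` precise-capturing bound on `impl Trait` return types, which the parser now records as an ordinary `TYPE_BOUND` containing `USE_KW` plus a `GENERIC_PARAM_LIST`. The test source itself is the single line `fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T> {}`. As a minimal sketch of what the syntax expresses, assuming a toolchain new enough to accept `use<..>` bounds without a feature gate (the feature was nightly-only, behind `precise_capturing`, at the time of this commit and stabilized in a later release); the function names and bodies below are illustrative, not part of the test data:

```rust
// Illustrative only: a `use<'a, T>` bound spells out exactly which generic
// parameters the returned opaque type is allowed to capture.
fn first_two<'a, T>(slice: &'a [T]) -> impl Iterator<Item = &'a T> + use<'a, T> {
    // The opaque type captures `'a` and `T`, both listed in `use<..>`.
    slice.iter().take(2)
}

fn main() {
    let xs = [1, 2, 3];
    let firsts: Vec<&i32> = first_two(&xs).collect();
    assert_eq!(firsts, [&1, &2]);
}
```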
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_parens_typepath.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_tuple_field.rast index c78d16f064c..c78d16f064c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_tuple_field.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_tuple_field.rs index 6f725fb7b98..6f725fb7b98 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/pub_tuple_field.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/qual_paths.rast index 8c66cfe599f..8c66cfe599f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/qual_paths.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/qual_paths.rs index d140692e210..d140692e210 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/qual_paths.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast index 56e2d1095d2..56e2d1095d2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs index f80dd90d446..f80dd90d446 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/range_pat.rast index d9981c50719..d9981c50719 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/range_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/range_pat.rs index b54354211d2..b54354211d2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/range_pat.rs diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_attrs.rast index 639ee0eb777..639ee0eb777 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_attrs.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_attrs.rs index d7f0b4382da..d7f0b4382da 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_attrs.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast index 065d7e7e81f..065d7e7e81f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs index a3bd7787db7..a3bd7787db7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rast index 00948c322f4..00948c322f4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rs index 86411fbb7dc..86411fbb7dc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_literal_field_with_attr.rast index a1df70841e8..a1df70841e8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_literal_field_with_attr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_literal_field_with_attr.rs index a6c7760c767..a6c7760c767 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_literal_field_with_attr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field.rast index f3d2fde4669..f3d2fde4669 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field.rs index 53cfdc22dd8..53cfdc22dd8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field_list.rast index f69ae1d6445..f69ae1d6445 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field_list.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field_list.rs index 0bfaae7c4d0..0bfaae7c4d0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_pat_field_list.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_expr.rast index 108b0802c33..108b0802c33 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_expr.rs index c5262f4469b..c5262f4469b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_pat.rast index 4516fd01132..4516fd01132 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_pat.rs index de41f5cae0f..de41f5cae0f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/reference_type.rast index b5c9d7a8dfb..b5c9d7a8dfb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/reference_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/reference_type.rs index 3ac0badabc5..3ac0badabc5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/reference_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_expr.rast index 62cff122026..62cff122026 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_expr.rs index 5733666b605..5733666b605 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param.rast index 67e28236397..67e28236397 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param.rs index 80c0a43f5de..80c0a43f5de 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param_outer_attr.rast index db583f7d526..db583f7d526 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param_outer_attr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param_outer_attr.rs index 35155057a68..35155057a68 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/self_param_outer_attr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/singleton_tuple_type.rast index 0129955d136..0129955d136 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/singleton_tuple_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/singleton_tuple_type.rs index cb66bad2470..cb66bad2470 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/singleton_tuple_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast index dff72ba886f..dff72ba886f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rs index 855ba89b1e9..855ba89b1e9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_type.rast index 0bcb3152481..0bcb3152481 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_type.rs index 4da1af82705..4da1af82705 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_bin_expr_ambiguity.rast index e387e14d14f..e387e14d14f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_bin_expr_ambiguity.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_bin_expr_ambiguity.rs index 7e8bd87bf1c..7e8bd87bf1c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_bin_expr_ambiguity.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rast index 950421feb70..950421feb70 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rs index 2edd578f9d7..2edd578f9d7 100644 --- 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_item.rast index 78f96820723..78f96820723 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_item.rs index 5f1a34f49b9..5f1a34f49b9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rast index c45f8708980..c45f8708980 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rs index 71d76789fae..71d76789fae 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rast index 8f678247731..8f678247731 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rs index a90d54b0109..a90d54b0109 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item.rast index dd7f76eb939..dd7f76eb939 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item.rs index dcd9a71144f..dcd9a71144f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item.rs diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_bounds.rast index bab83145622..bab83145622 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_bounds.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_bounds.rs index e6ad2b56af8..e6ad2b56af8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_bounds.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_generic_params.rast index 01f212e7184..01f212e7184 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_generic_params.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_generic_params.rs index 4a51926a6b5..4a51926a6b5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_generic_params.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_where_clause.rast index 46cd8ee6654..46cd8ee6654 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_where_clause.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_where_clause.rs index 52a6a806f35..52a6a806f35 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_item_where_clause.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_block_expr.rast index aec8fbf4775..aec8fbf4775 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_block_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_block_expr.rs index 0f1b41eb64b..0f1b41eb64b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_block_expr.rs diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_expr.rast index c3aa8c15ded..c3aa8c15ded 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_expr.rs index 8b74f7bc814..8b74f7bc814 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_fallback.rast index 0adb678fa65..38e21b845dc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_fallback.rast @@ -1,5 +1,7 @@ SOURCE_FILE FN + COMMENT "// 2015" + WHITESPACE "\n" FN_KW "fn" WHITESPACE " " NAME diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_fallback.rs index 61a6b46a0b3..6ad5ea4357c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_fallback.rs @@ -1 +1,2 @@ +// 2015 fn foo() { try!(Ok(())); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_rules.rast index e6916ae976e..e95fe762584 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_rules.rast @@ -1,5 +1,7 @@ SOURCE_FILE MACRO_RULES + COMMENT "// 2015" + WHITESPACE "\n" MACRO_RULES_KW "macro_rules" BANG "!" WHITESPACE " " diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_rules.rs index 2e2ab6e60b6..35694649ece 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/try_macro_rules.rs @@ -1 +1,2 @@ +// 2015 macro_rules! 
try { () => {} } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_attrs.rast index 39857b23c6e..39857b23c6e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_attrs.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_attrs.rs index f84b7ab31dc..f84b7ab31dc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_attrs.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_expr.rast index ac5a7170315..ac5a7170315 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_expr.rs index e4f774280ca..e4f774280ca 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_field_attrs.rast index 1699602f4fb..1699602f4fb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_field_attrs.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_field_attrs.rs index 648ffe56548..648ffe56548 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_field_attrs.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast index 1a01e0f6938..1a01e0f6938 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rs index fbd7f48f66b..fbd7f48f66b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast index 55baf2fdcb4..55baf2fdcb4 100644 --- 
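A few entries above, the `try_macro_fallback` and `try_macro_rules` tests gain a leading `// 2015` comment, presumably so the test harness parses them with the 2015 edition: only there is `try` an ordinary identifier, so `try!(Ok(()))` parses as a macro call and `macro_rules! try` as a macro definition, while on edition 2018 and later `try` is a reserved keyword. A minimal sketch of the edition-2015 behaviour those tests cover, assuming `rustc --edition 2015` (the `try!` macro is deprecated in favour of `?` but still compiles there, with a warning); the helper below is illustrative, not taken from the test data:

```rust
// Edition 2015 only: `try!` expands to an early `return Err(..)` on failure,
// the same job the `?` operator does today. On edition 2018+ this would need
// `r#try!`, because `try` became a reserved keyword.
use std::num::ParseIntError;

fn double(s: &str) -> Result<i32, ParseIntError> {
    let n = try!(s.parse::<i32>()); // equivalent to `s.parse::<i32>()?`
    Ok(n * 2)
}

fn main() {
    assert_eq!(double("21"), Ok(42));
    assert!(double("x").is_err());
}
```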
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rs index 8ec6f4ca93e..8ec6f4ca93e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct.rast index 6e5f6c2d2bb..6e5f6c2d2bb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct.rs index b4e05717ed3..b4e05717ed3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct_where.rast index aab77416570..aab77416570 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct_where.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct_where.rs index a602e001827..a602e001827 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_struct_where.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_alias.rast index c5da799741f..c5da799741f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_alias.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_alias.rs index 04c0344fa36..04c0344fa36 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_alias.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_type_params.rast index 60ac3b3c4af..60ac3b3c4af 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_type_params.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_type_params.rs index defd110c492..defd110c492 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_type_params.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause.rast index 31c87d1b309..31c87d1b309 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause.rs index 2d30e852187..2d30e852187 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause_deprecated.rast index 7210b738958..7210b738958 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause_deprecated.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause_deprecated.rs index a602d07f03b..a602d07f03b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause_deprecated.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param.rast index becb77e042f..becb77e042f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param.rs index b250bc6bf07..b250bc6bf07 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast index dee860c2418..dee860c2418 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs index 5da3083b9c5..5da3083b9c5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_default.rast index cf7236f62e6..cf7236f62e6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_default.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_default.rs index 540eacb0277..540eacb0277 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_param_default.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rast index 297f7575ca6..297f7575ca6 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rs index ebe26834d88..ebe26834d88 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/typepathfn_with_coloncolon.rast index 67277d0639a..67277d0639a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/typepathfn_with_coloncolon.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/typepathfn_with_coloncolon.rs index 8c54f6704b3..8c54f6704b3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/typepathfn_with_coloncolon.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unary_expr.rast index 525b2674587..525b2674587 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unary_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unary_expr.rs index f1c3f7118b2..f1c3f7118b2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unary_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/union_item.rast index af608fc4acb..af608fc4acb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/union_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/union_item.rs index 5edf50de3bd..5edf50de3bd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/union_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_struct.rast index 438dea6f408..438dea6f408 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_struct.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_struct.rs index 28377c2760b..28377c2760b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_struct.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_type.rast index b3df3153546..b3df3153546 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_type.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_type.rs index c039cf7d324..c039cf7d324 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/unit_type.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_item.rast index 25761ed8c77..25761ed8c77 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_item.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_item.rs index 48ac87b14a0..48ac87b14a0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_item.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree.rast index 98231cdc217..98231cdc217 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree.rs index 3cc39434820..3cc39434820 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_abs_star.rast index b4dc1f25d9a..b4dc1f25d9a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_abs_star.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_abs_star.rs index caae0ba026a..caae0ba026a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_abs_star.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_alias.rast index ef0dd6ba11c..ef0dd6ba11c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_alias.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_alias.rs index 19a6906a268..19a6906a268 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_alias.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_list.rast index f3b1129f277..f3b1129f277 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_list.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_list.rs index 6fa175f5429..6fa175f5429 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_list.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path.rast index ede22dbaf5d..ede22dbaf5d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast +++ 
b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path.rs index 5b22f88523b..5b22f88523b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_star.rast index d255adb5a47..d255adb5a47 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_star.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_star.rs index dd601cffe5d..dd601cffe5d 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_star.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_use_tree.rast index ed3cafae13b..ed3cafae13b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_use_tree.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_use_tree.rs index c3086f51a2e..c3086f51a2e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_path_use_tree.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_star.rast index 8662423f5d2..8662423f5d2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_star.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_star.rs index b8c613440d5..b8c613440d5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_star.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/value_parameters_no_patterns.rast index 902b06484c8..902b06484c8 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/value_parameters_no_patterns.rast diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/value_parameters_no_patterns.rs index 93636e926e1..93636e926e1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/value_parameters_no_patterns.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/variant_discriminant.rast index 9f0c5a76108..9f0c5a76108 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/variant_discriminant.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/variant_discriminant.rs index c8c5c0f174e..c8c5c0f174e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/variant_discriminant.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_clause.rast index a3cbe457e1a..a3cbe457e1a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_clause.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_clause.rs index 19d7e571b0c..19d7e571b0c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_clause.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast index 8407e99f614..8407e99f614 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rs index 423bc105bd7..423bc105bd7 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/while_expr.rast index 16c522414af..16c522414af 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/while_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/while_expr.rs index 
2f818816025..2f818816025 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/while_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yeet_expr.rast index 24931bfcd7c..24931bfcd7c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yeet_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yeet_expr.rs index 624f86c9dc0..624f86c9dc0 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yeet_expr.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yield_expr.rast index 31aa58de299..31aa58de299 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yield_expr.rast diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yield_expr.rs index 596e221f741..596e221f741 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yield_expr.rs diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml index 7f633d91ecc..345fb9f8ae9 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml @@ -28,6 +28,7 @@ span.workspace = true # InternIds for the syntax context base-db.workspace = true la-arena.workspace = true +intern.workspace = true [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 3a915e668bb..54c1475b8b1 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -13,7 +13,6 @@ use base_db::Env; use paths::{AbsPath, AbsPathBuf}; use span::Span; use std::{fmt, io, sync::Arc}; -use tt::SmolStr; use serde::{Deserialize, Serialize}; @@ -66,7 +65,7 @@ impl MacroDylib { pub struct ProcMacro { process: Arc<ProcMacroProcessSrv>, dylib_path: Arc<AbsPathBuf>, - name: SmolStr, + name: Box<str>, kind: ProcMacroKind, } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs index fa3ba9bbfcd..6a99b5ed1cc 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs @@ -19,8 +19,10 @@ pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; pub const HAS_GLOBAL_SPANS: u32 = 3; pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4; +/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field +pub const EXTENDED_LEAF_DATA: u32 = 5; -pub const 
CURRENT_API_VERSION: u32 = RUST_ANALYZER_SPAN_SUPPORT; +pub const CURRENT_API_VERSION: u32 = EXTENDED_LEAF_DATA; #[derive(Debug, Serialize, Deserialize)] pub enum Request { @@ -155,7 +157,7 @@ type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) #[cfg(test)] mod tests { - use base_db::FileId; + use intern::{sym, Symbol}; use la_arena::RawIdx; use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; use text_size::{TextRange, TextSize}; @@ -165,46 +167,52 @@ mod tests { fn fixture_token_tree() -> Subtree<Span> { let anchor = SpanAnchor { - file_id: FileId::from_raw(0), + file_id: span::EditionedFileId::new( + span::FileId::from_raw(0xe4e4e), + span::Edition::CURRENT, + ), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)), }; let token_trees = Box::new([ TokenTree::Leaf( Ident { - text: "struct".into(), + sym: Symbol::intern("struct"), span: Span { range: TextRange::at(TextSize::new(0), TextSize::of("struct")), anchor, ctx: SyntaxContextId::ROOT, }, + is_raw: tt::IdentIsRaw::No, } .into(), ), TokenTree::Leaf( Ident { - text: "Foo".into(), + sym: Symbol::intern("Foo"), span: Span { - range: TextRange::at(TextSize::new(5), TextSize::of("Foo")), + range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")), anchor, ctx: SyntaxContextId::ROOT, }, + is_raw: tt::IdentIsRaw::Yes, } .into(), ), TokenTree::Leaf(Leaf::Literal(Literal { - text: "Foo".into(), - + symbol: Symbol::intern("Foo"), span: Span { - range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), + range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")), anchor, ctx: SyntaxContextId::ROOT, }, + kind: tt::LitKind::Str, + suffix: None, })), TokenTree::Leaf(Leaf::Punct(Punct { char: '@', span: Span { - range: TextRange::at(TextSize::new(11), TextSize::of('@')), + range: TextRange::at(TextSize::new(13), TextSize::of('@')), anchor, ctx: SyntaxContextId::ROOT, }, @@ -213,18 +221,27 @@ mod tests { TokenTree::Subtree(Subtree { delimiter: Delimiter { open: Span { - range: TextRange::at(TextSize::new(12), TextSize::of('{')), + range: TextRange::at(TextSize::new(14), TextSize::of('{')), anchor, ctx: SyntaxContextId::ROOT, }, close: Span { - range: TextRange::at(TextSize::new(13), TextSize::of('}')), + range: TextRange::at(TextSize::new(19), TextSize::of('}')), anchor, ctx: SyntaxContextId::ROOT, }, kind: DelimiterKind::Brace, }, - token_trees: Box::new([]), + token_trees: Box::new([TokenTree::Leaf(Leaf::Literal(Literal { + symbol: sym::INTEGER_0.clone(), + span: Span { + range: TextRange::at(TextSize::new(15), TextSize::of("0u32")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + kind: tt::LitKind::Integer, + suffix: Some(sym::u32.clone()), + }))]), }), ]); @@ -236,7 +253,7 @@ mod tests { ctx: SyntaxContextId::ROOT, }, close: Span { - range: TextRange::empty(TextSize::new(13)), + range: TextRange::empty(TextSize::new(19)), anchor, ctx: SyntaxContextId::ROOT, }, @@ -249,32 +266,35 @@ mod tests { #[test] fn test_proc_macro_rpc_works() { let tt = fixture_token_tree(); - let mut span_data_table = Default::default(); - let task = ExpandMacro { - data: ExpandMacroData { - macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), - macro_name: Default::default(), - attributes: None, - has_global_spans: ExpnGlobals { - serialize: true, - def_site: 0, - call_site: 0, - mixed_site: 0, + for v in RUST_ANALYZER_SPAN_SUPPORT..=CURRENT_API_VERSION { + let mut span_data_table = Default::default(); + let task = ExpandMacro { + data: ExpandMacroData { + macro_body: FlatTree::new(&tt, v, &mut 
span_data_table), + macro_name: Default::default(), + attributes: None, + has_global_spans: ExpnGlobals { + serialize: true, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, + span_data_table: Vec::new(), }, - span_data_table: Vec::new(), - }, - lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(), - env: Default::default(), - current_dir: Default::default(), - }; + lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(), + env: Default::default(), + current_dir: Default::default(), + }; - let json = serde_json::to_string(&task).unwrap(); - // println!("{}", json); - let back: ExpandMacro = serde_json::from_str(&json).unwrap(); + let json = serde_json::to_string(&task).unwrap(); + // println!("{}", json); + let back: ExpandMacro = serde_json::from_str(&json).unwrap(); - assert_eq!( - tt, - back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table) - ); + assert_eq!( + tt, + back.data.macro_body.to_subtree_resolved(v, &span_data_table), + "version: {v}" + ); + } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs index 11fd7596f2b..a8661f59b28 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs @@ -37,13 +37,14 @@ use std::collections::VecDeque; +use intern::Symbol; use la_arena::RawIdx; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; -use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; +use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; use text_size::TextRange; -use crate::msg::ENCODE_CLOSE_SPAN_VERSION; +use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}; pub type SpanDataIndexMap = indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>; @@ -52,7 +53,7 @@ pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> { map.iter() .flat_map(|span| { [ - span.anchor.file_id.index(), + span.anchor.file_id.as_u32(), span.anchor.ast_id.into_raw().into_u32(), span.range.start().into(), span.range.end().into(), @@ -69,7 +70,7 @@ pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { let &[file_id, ast_id, start, end, e] = span else { unreachable!() }; Span { anchor: SpanAnchor { - file_id: FileId::from_raw(file_id), + file_id: EditionedFileId::from_raw(file_id), ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)), }, range: TextRange::new(start.into(), end.into()), @@ -108,6 +109,8 @@ struct SubtreeRepr { struct LiteralRepr { id: TokenId, text: u32, + suffix: u32, + kind: u16, } struct PunctRepr { @@ -119,6 +122,7 @@ struct PunctRepr { struct IdentRepr { id: TokenId, text: u32, + is_raw: bool, } impl FlatTree { @@ -138,6 +142,7 @@ impl FlatTree { ident: Vec::new(), token_tree: Vec::new(), text: Vec::new(), + version, }; w.write(subtree); @@ -147,9 +152,17 @@ impl FlatTree { } else { write_vec(w.subtree, SubtreeRepr::write) }, - literal: write_vec(w.literal, LiteralRepr::write), + literal: if version >= EXTENDED_LEAF_DATA { + write_vec(w.literal, LiteralRepr::write_with_kind) + } else { + write_vec(w.literal, LiteralRepr::write) + }, punct: write_vec(w.punct, PunctRepr::write), - ident: write_vec(w.ident, IdentRepr::write), + ident: if version >= EXTENDED_LEAF_DATA { + write_vec(w.ident, IdentRepr::write_with_rawness) + } else { + write_vec(w.ident, IdentRepr::write) + }, token_tree: w.token_tree, text: w.text, } @@ 
-167,6 +180,7 @@ impl FlatTree { ident: Vec::new(), token_tree: Vec::new(), text: Vec::new(), + version, }; w.write(subtree); @@ -176,9 +190,17 @@ impl FlatTree { } else { write_vec(w.subtree, SubtreeRepr::write) }, - literal: write_vec(w.literal, LiteralRepr::write), + literal: if version >= EXTENDED_LEAF_DATA { + write_vec(w.literal, LiteralRepr::write_with_kind) + } else { + write_vec(w.literal, LiteralRepr::write) + }, punct: write_vec(w.punct, PunctRepr::write), - ident: write_vec(w.ident, IdentRepr::write), + ident: if version >= EXTENDED_LEAF_DATA { + write_vec(w.ident, IdentRepr::write_with_rawness) + } else { + write_vec(w.ident, IdentRepr::write) + }, token_tree: w.token_tree, text: w.text, } @@ -195,12 +217,21 @@ impl FlatTree { } else { read_vec(self.subtree, SubtreeRepr::read) }, - literal: read_vec(self.literal, LiteralRepr::read), + literal: if version >= EXTENDED_LEAF_DATA { + read_vec(self.literal, LiteralRepr::read_with_kind) + } else { + read_vec(self.literal, LiteralRepr::read) + }, punct: read_vec(self.punct, PunctRepr::read), - ident: read_vec(self.ident, IdentRepr::read), + ident: if version >= EXTENDED_LEAF_DATA { + read_vec(self.ident, IdentRepr::read_with_rawness) + } else { + read_vec(self.ident, IdentRepr::read) + }, token_tree: self.token_tree, text: self.text, span_data_table, + version, } .read() } @@ -212,12 +243,21 @@ impl FlatTree { } else { read_vec(self.subtree, SubtreeRepr::read) }, - literal: read_vec(self.literal, LiteralRepr::read), + literal: if version >= EXTENDED_LEAF_DATA { + read_vec(self.literal, LiteralRepr::read_with_kind) + } else { + read_vec(self.literal, LiteralRepr::read) + }, punct: read_vec(self.punct, PunctRepr::read), - ident: read_vec(self.ident, IdentRepr::read), + ident: if version >= EXTENDED_LEAF_DATA { + read_vec(self.ident, IdentRepr::read_with_rawness) + } else { + read_vec(self.ident, IdentRepr::read) + }, token_tree: self.token_tree, text: self.text, span_data_table: &(), + version, } .read() } @@ -280,14 +320,20 @@ impl LiteralRepr { [self.id.0, self.text] } fn read([id, text]: [u32; 2]) -> LiteralRepr { - LiteralRepr { id: TokenId(id), text } + LiteralRepr { id: TokenId(id), text, kind: 0, suffix: !0 } + } + fn write_with_kind(self) -> [u32; 4] { + [self.id.0, self.text, self.kind as u32, self.suffix] + } + fn read_with_kind([id, text, kind, suffix]: [u32; 4]) -> LiteralRepr { + LiteralRepr { id: TokenId(id), text, kind: kind as u16, suffix } } } impl PunctRepr { fn write(self) -> [u32; 3] { let spacing = match self.spacing { - tt::Spacing::Alone => 0, + tt::Spacing::Alone | tt::Spacing::JointHidden => 0, tt::Spacing::Joint => 1, }; [self.id.0, self.char as u32, spacing] @@ -307,7 +353,13 @@ impl IdentRepr { [self.id.0, self.text] } fn read(data: [u32; 2]) -> IdentRepr { - IdentRepr { id: TokenId(data[0]), text: data[1] } + IdentRepr { id: TokenId(data[0]), text: data[1], is_raw: false } + } + fn write_with_rawness(self) -> [u32; 3] { + [self.id.0, self.text, self.is_raw as u32] + } + fn read_with_rawness([id, text, is_raw]: [u32; 3]) -> IdentRepr { + IdentRepr { id: TokenId(id), text, is_raw: is_raw == 1 } } } @@ -339,8 +391,9 @@ impl InternableSpan for Span { struct Writer<'a, 'span, S: InternableSpan> { work: VecDeque<(usize, &'a tt::Subtree<S>)>, - string_table: FxHashMap<&'a str, u32>, + string_table: FxHashMap<std::borrow::Cow<'a, str>, u32>, span_data_table: &'span mut S::Table, + version: u32, subtree: Vec<SubtreeRepr>, literal: Vec<LiteralRepr>, @@ -378,9 +431,33 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 
'span, S> { tt::TokenTree::Leaf(leaf) => match leaf { tt::Leaf::Literal(lit) => { let idx = self.literal.len() as u32; - let text = self.intern(&lit.text); let id = self.token_id_of(lit.span); - self.literal.push(LiteralRepr { id, text }); + let (text, suffix) = if self.version >= EXTENDED_LEAF_DATA { + ( + self.intern(lit.symbol.as_str()), + lit.suffix.as_ref().map(|s| self.intern(s.as_str())).unwrap_or(!0), + ) + } else { + (self.intern_owned(format!("{lit}")), !0) + }; + self.literal.push(LiteralRepr { + id, + text, + kind: u16::from_le_bytes(match lit.kind { + tt::LitKind::Err(_) => [0, 0], + tt::LitKind::Byte => [1, 0], + tt::LitKind::Char => [2, 0], + tt::LitKind::Integer => [3, 0], + tt::LitKind::Float => [4, 0], + tt::LitKind::Str => [5, 0], + tt::LitKind::StrRaw(r) => [6, r], + tt::LitKind::ByteStr => [7, 0], + tt::LitKind::ByteStrRaw(r) => [8, r], + tt::LitKind::CStr => [9, 0], + tt::LitKind::CStrRaw(r) => [10, r], + }), + suffix, + }); idx << 2 | 0b01 } tt::Leaf::Punct(punct) => { @@ -391,9 +468,15 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> { } tt::Leaf::Ident(ident) => { let idx = self.ident.len() as u32; - let text = self.intern(&ident.text); let id = self.token_id_of(ident.span); - self.ident.push(IdentRepr { id, text }); + let text = if self.version >= EXTENDED_LEAF_DATA { + self.intern(ident.sym.as_str()) + } else if ident.is_raw.yes() { + self.intern_owned(format!("r#{}", ident.sym.as_str(),)) + } else { + self.intern(ident.sym.as_str()) + }; + self.ident.push(IdentRepr { id, text, is_raw: ident.is_raw.yes() }); idx << 2 | 0b11 } }, @@ -415,15 +498,25 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> { pub(crate) fn intern(&mut self, text: &'a str) -> u32 { let table = &mut self.text; - *self.string_table.entry(text).or_insert_with(|| { + *self.string_table.entry(text.into()).or_insert_with(|| { let idx = table.len(); table.push(text.to_owned()); idx as u32 }) } + + pub(crate) fn intern_owned(&mut self, text: String) -> u32 { + let table = &mut self.text; + *self.string_table.entry(text.clone().into()).or_insert_with(|| { + let idx = table.len(); + table.push(text); + idx as u32 + }) + } } struct Reader<'span, S: InternableSpan> { + version: u32, subtree: Vec<SubtreeRepr>, literal: Vec<LiteralRepr>, punct: Vec<PunctRepr>, @@ -457,10 +550,38 @@ impl<'span, S: InternableSpan> Reader<'span, S> { // that this unwrap doesn't fire. 
0b00 => res[idx].take().unwrap().into(), 0b01 => { + use tt::LitKind::*; let repr = &self.literal[idx]; - tt::Leaf::Literal(tt::Literal { - text: self.text[repr.text as usize].as_str().into(), - span: read_span(repr.id), + let text = self.text[repr.text as usize].as_str(); + let span = read_span(repr.id); + tt::Leaf::Literal(if self.version >= EXTENDED_LEAF_DATA { + tt::Literal { + symbol: Symbol::intern(text), + span, + kind: match u16::to_le_bytes(repr.kind) { + [0, _] => Err(()), + [1, _] => Byte, + [2, _] => Char, + [3, _] => Integer, + [4, _] => Float, + [5, _] => Str, + [6, r] => StrRaw(r), + [7, _] => ByteStr, + [8, r] => ByteStrRaw(r), + [9, _] => CStr, + [10, r] => CStrRaw(r), + _ => unreachable!(), + }, + suffix: if repr.suffix != !0 { + Some(Symbol::intern( + self.text[repr.suffix as usize].as_str(), + )) + } else { + None + }, + } + } else { + tt::token_to_literal(text, span) }) .into() } @@ -475,9 +596,23 @@ impl<'span, S: InternableSpan> Reader<'span, S> { } 0b11 => { let repr = &self.ident[idx]; + let text = self.text[repr.text as usize].as_str(); + let (is_raw, text) = if self.version >= EXTENDED_LEAF_DATA { + ( + if repr.is_raw { + tt::IdentIsRaw::Yes + } else { + tt::IdentIsRaw::No + }, + text, + ) + } else { + tt::IdentIsRaw::split_from_symbol(text) + }; tt::Leaf::Ident(tt::Ident { - text: self.text[repr.text as usize].as_str().into(), + sym: Symbol::intern(text), span: read_span(repr.id), + is_raw, }) .into() } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 735f781c439..673b5bd78a8 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -25,6 +25,7 @@ base-db.workspace = true span.workspace = true proc-macro-api.workspace = true ra-ap-rustc_lexer.workspace = true +intern.workspace = true [dev-dependencies] expect-test = "1.4.0" @@ -34,7 +35,7 @@ proc-macro-test.path = "./proc-macro-test" [features] sysroot-abi = [] -in-rust-tree = ["mbe/in-rust-tree", "sysroot-abi"] +in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index a1707364f3c..749a7760592 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -1,6 +1,5 @@ //! Exports a few trivial procedural macros for testing. 
- #![feature(proc_macro_span, proc_macro_def_site)] #![allow(clippy::all)] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index e6281035e1a..f0aa6b3f93f 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -130,14 +130,13 @@ impl ProcMacroSrvSpan for TokenId { type Server = server_impl::token_id::TokenIdServer; fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { - Self::Server { interner: &server_impl::SYMBOL_INTERNER, call_site, def_site, mixed_site } + Self::Server { call_site, def_site, mixed_site } } } impl ProcMacroSrvSpan for Span { type Server = server_impl::rust_analyzer_span::RaSpanServer; fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { Self::Server { - interner: &server_impl::SYMBOL_INTERNER, call_site, def_site, mixed_site, diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs index e8b340a43d3..c9a86216905 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs @@ -14,9 +14,9 @@ mod token_stream; pub use token_stream::TokenStream; pub mod rust_analyzer_span; -mod symbol; +// mod symbol; pub mod token_id; -pub use symbol::*; +// pub use symbol::*; use tt::Spacing; fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> { @@ -49,58 +49,39 @@ fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing { #[allow(unused)] fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { match spacing { - Spacing::Alone => proc_macro::Spacing::Alone, + Spacing::Alone | Spacing::JointHidden => proc_macro::Spacing::Alone, Spacing::Joint => proc_macro::Spacing::Joint, } } -/// Invokes the callback with a `&[&str]` consisting of each part of the -/// literal's representation. This is done to allow the `ToString` and -/// `Display` implementations to borrow references to symbol values, and -/// both be optimized to reduce overhead. -fn literal_with_stringify_parts<S, R>( - literal: &bridge::Literal<S, Symbol>, - interner: SymbolInternerRef, - f: impl FnOnce(&[&str]) -> R, -) -> R { - /// Returns a string containing exactly `num` '#' characters. - /// Uses a 256-character source string literal which is always safe to - /// index with a `u8` index. 
- fn get_hashes_str(num: u8) -> &'static str { - const HASHES: &str = "\ - ################################################################\ - ################################################################\ - ################################################################\ - ################################################################\ - "; - const _: () = assert!(HASHES.len() == 256); - &HASHES[..num as usize] +fn literal_kind_to_external(kind: tt::LitKind) -> bridge::LitKind { + match kind { + tt::LitKind::Byte => bridge::LitKind::Byte, + tt::LitKind::Char => bridge::LitKind::Char, + tt::LitKind::Integer => bridge::LitKind::Integer, + tt::LitKind::Float => bridge::LitKind::Float, + tt::LitKind::Str => bridge::LitKind::Str, + tt::LitKind::StrRaw(r) => bridge::LitKind::StrRaw(r), + tt::LitKind::ByteStr => bridge::LitKind::ByteStr, + tt::LitKind::ByteStrRaw(r) => bridge::LitKind::ByteStrRaw(r), + tt::LitKind::CStr => bridge::LitKind::CStr, + tt::LitKind::CStrRaw(r) => bridge::LitKind::CStrRaw(r), + tt::LitKind::Err(_) => bridge::LitKind::ErrWithGuar, } +} - { - let symbol = &*literal.symbol.text(interner); - let suffix = &*literal.suffix.map(|s| s.text(interner)).unwrap_or_default(); - match literal.kind { - bridge::LitKind::Byte => f(&["b'", symbol, "'", suffix]), - bridge::LitKind::Char => f(&["'", symbol, "'", suffix]), - bridge::LitKind::Str => f(&["\"", symbol, "\"", suffix]), - bridge::LitKind::StrRaw(n) => { - let hashes = get_hashes_str(n); - f(&["r", hashes, "\"", symbol, "\"", hashes, suffix]) - } - bridge::LitKind::ByteStr => f(&["b\"", symbol, "\"", suffix]), - bridge::LitKind::ByteStrRaw(n) => { - let hashes = get_hashes_str(n); - f(&["br", hashes, "\"", symbol, "\"", hashes, suffix]) - } - bridge::LitKind::CStr => f(&["c\"", symbol, "\"", suffix]), - bridge::LitKind::CStrRaw(n) => { - let hashes = get_hashes_str(n); - f(&["cr", hashes, "\"", symbol, "\"", hashes, suffix]) - } - bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => { - f(&[symbol, suffix]) - } - } +fn literal_kind_to_internal(kind: bridge::LitKind) -> tt::LitKind { + match kind { + bridge::LitKind::Byte => tt::LitKind::Byte, + bridge::LitKind::Char => tt::LitKind::Char, + bridge::LitKind::Str => tt::LitKind::Str, + bridge::LitKind::StrRaw(r) => tt::LitKind::StrRaw(r), + bridge::LitKind::ByteStr => tt::LitKind::ByteStr, + bridge::LitKind::ByteStrRaw(r) => tt::LitKind::ByteStrRaw(r), + bridge::LitKind::CStr => tt::LitKind::CStr, + bridge::LitKind::CStrRaw(r) => tt::LitKind::CStrRaw(r), + bridge::LitKind::Integer => tt::LitKind::Integer, + bridge::LitKind::Float => tt::LitKind::Float, + bridge::LitKind::ErrWithGuar => tt::LitKind::Err(()), } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index bb174ba1b22..8b9eb3beb6e 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -10,13 +10,14 @@ use std::{ ops::{Bound, Range}, }; +use intern::Symbol; use proc_macro::bridge::{self, server}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; use tt::{TextRange, TextSize}; use crate::server_impl::{ - delim_to_external, delim_to_internal, literal_with_stringify_parts, - token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, literal_kind_to_external, 
literal_kind_to_internal, + token_stream::TokenStreamBuilder, }; mod tt { pub use tt::*; @@ -36,7 +37,6 @@ pub struct SourceFile; pub struct FreeFunctions; pub struct RaSpanServer { - pub(crate) interner: SymbolInternerRef, // FIXME: Report this back to the caller to track as dependencies pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>, // FIXME: Report this back to the caller to track as dependencies @@ -126,15 +126,10 @@ impl server::FreeFunctions for RaSpanServer { let lit = &lit[start_offset..lit.len() - end_offset]; let suffix = match suffix { "" | "_" => None, - suffix => Some(Symbol::intern(self.interner, suffix)), + suffix => Some(Symbol::intern(suffix)), }; - Ok(bridge::Literal { - kind, - symbol: Symbol::intern(self.interner, lit), - suffix, - span: self.call_site, - }) + Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site }) } fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) { @@ -170,21 +165,25 @@ impl server::TokenStream for RaSpanServer { } bridge::TokenTree::Ident(ident) => { - let text = ident.sym.text(self.interner); - let text = - if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; - let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let text = ident.sym; + let ident: tt::Ident = tt::Ident { + sym: text, + span: ident.span, + is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No }, + }; let leaf = tt::Leaf::from(ident); let tree = tt::TokenTree::from(leaf); Self::TokenStream::from_iter(iter::once(tree)) } bridge::TokenTree::Literal(literal) => { - let text = literal_with_stringify_parts(&literal, self.interner, |parts| { - ::tt::SmolStr::from_iter(parts.iter().copied()) - }); + let literal = tt::Literal { + symbol: literal.symbol, + suffix: literal.suffix, + span: literal.span, + kind: literal_kind_to_internal(literal.kind), + }; - let literal = tt::Literal { text, span: literal.span }; let leaf: tt::Leaf = tt::Leaf::from(literal); let tree = tt::TokenTree::from(leaf); Self::TokenStream::from_iter(iter::once(tree)) @@ -250,23 +249,18 @@ impl server::TokenStream for RaSpanServer { .into_iter() .map(|tree| match tree { tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(match ident.text.strip_prefix("r#") { - Some(text) => bridge::Ident { - sym: Symbol::intern(self.interner, text), - is_raw: true, - span: ident.span, - }, - None => bridge::Ident { - sym: Symbol::intern(self.interner, &ident.text), - is_raw: false, - span: ident.span, - }, + bridge::TokenTree::Ident(bridge::Ident { + sym: ident.sym, + is_raw: ident.is_raw.yes(), + span: ident.span, }) } tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { span: lit.span, - ..server::FreeFunctions::literal_from_str(self, &lit.text).unwrap() + kind: literal_kind_to_external(lit.kind), + symbol: lit.symbol, + suffix: lit.suffix, }) } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { @@ -465,12 +459,95 @@ impl server::Server for RaSpanServer { } fn intern_symbol(ident: &str) -> Self::Symbol { - // FIXME: should be `self.interner` once the proc-macro api allows it. - Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + Symbol::intern(ident) } fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { - // FIXME: should be `self.interner` once the proc-macro api allows it. 
- f(symbol.text(&SYMBOL_INTERNER).as_str()) + f(symbol.as_str()) + } +} + +#[cfg(test)] +mod tests { + use span::{EditionedFileId, FileId, SyntaxContextId}; + + use super::*; + + #[test] + fn test_ra_server_to_string() { + let span = Span { + range: TextRange::empty(TextSize::new(0)), + anchor: span::SpanAnchor { + file_id: EditionedFileId::current_edition(FileId::from_raw(0)), + ast_id: span::ErasedFileAstId::from_raw(0.into()), + }, + ctx: SyntaxContextId::ROOT, + }; + let s = TokenStream { + token_trees: vec![ + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + sym: Symbol::intern("struct"), + span, + is_raw: tt::IdentIsRaw::No, + })), + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + sym: Symbol::intern("T"), + span: span, + is_raw: tt::IdentIsRaw::No, + })), + tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: span, + close: span, + kind: tt::DelimiterKind::Brace, + }, + token_trees: Box::new([]), + }), + ], + }; + + assert_eq!(s.to_string(), "struct T {}"); + } + + #[test] + fn test_ra_server_from_str() { + let span = Span { + range: TextRange::empty(TextSize::new(0)), + anchor: span::SpanAnchor { + file_id: EditionedFileId::current_edition(FileId::from_raw(0)), + ast_id: span::ErasedFileAstId::from_raw(0.into()), + }, + ctx: SyntaxContextId::ROOT, + }; + let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: span, + close: span, + kind: tt::DelimiterKind::Parenthesis, + }, + token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + is_raw: tt::IdentIsRaw::No, + sym: Symbol::intern("a"), + span, + }))]), + }); + + let t1 = TokenStream::from_str("(a)", span).unwrap(); + assert_eq!(t1.token_trees.len(), 1); + assert_eq!(t1.token_trees[0], subtree_paren_a); + + let t2 = TokenStream::from_str("(a);", span).unwrap(); + assert_eq!(t2.token_trees.len(), 2); + assert_eq!(t2.token_trees[0], subtree_paren_a); + + let underscore = TokenStream::from_str("_", span).unwrap(); + assert_eq!( + underscore.token_trees[0], + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + sym: Symbol::intern("_"), + span, + is_raw: tt::IdentIsRaw::No, + })) + ); } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/symbol.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/symbol.rs index 540d06457f2..6863ce95997 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/symbol.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/symbol.rs @@ -1,7 +1,6 @@ //! Symbol interner for proc-macro-srv use std::{cell::RefCell, collections::HashMap, thread::LocalKey}; -use tt::SmolStr; thread_local! 
{ pub(crate) static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default(); diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs index 12edacbe39d..7720c6d83c3 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs @@ -5,11 +5,12 @@ use std::{ ops::{Bound, Range}, }; +use intern::Symbol; use proc_macro::bridge::{self, server}; use crate::server_impl::{ - delim_to_external, delim_to_internal, literal_with_stringify_parts, - token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal, + token_stream::TokenStreamBuilder, }; mod tt { pub use proc_macro_api::msg::TokenId; @@ -25,10 +26,8 @@ mod tt { } type Group = tt::Subtree; type TokenTree = tt::TokenTree; -#[allow(unused)] type Punct = tt::Punct; type Spacing = tt::Spacing; -#[allow(unused)] type Literal = tt::Literal; type Span = tt::TokenId; type TokenStream = crate::server_impl::TokenStream<Span>; @@ -38,7 +37,6 @@ pub struct SourceFile; pub struct FreeFunctions; pub struct TokenIdServer { - pub(crate) interner: SymbolInternerRef, pub call_site: Span, pub def_site: Span, pub mixed_site: Span, @@ -119,15 +117,10 @@ impl server::FreeFunctions for TokenIdServer { let lit = &lit[start_offset..lit.len() - end_offset]; let suffix = match suffix { "" | "_" => None, - suffix => Some(Symbol::intern(self.interner, suffix)), + suffix => Some(Symbol::intern(suffix)), }; - Ok(bridge::Literal { - kind, - symbol: Symbol::intern(self.interner, lit), - suffix, - span: self.call_site, - }) + Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site }) } fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {} @@ -161,21 +154,23 @@ impl server::TokenStream for TokenIdServer { } bridge::TokenTree::Ident(ident) => { - let text = ident.sym.text(self.interner); - let text = - if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; - let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let ident: tt::Ident = tt::Ident { + sym: ident.sym, + span: ident.span, + is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No }, + }; let leaf = tt::Leaf::from(ident); let tree = TokenTree::from(leaf); Self::TokenStream::from_iter(iter::once(tree)) } bridge::TokenTree::Literal(literal) => { - let text = literal_with_stringify_parts(&literal, self.interner, |parts| { - ::tt::SmolStr::from_iter(parts.iter().copied()) - }); - - let literal = tt::Literal { text, span: literal.span }; + let literal = Literal { + symbol: literal.symbol, + suffix: literal.suffix, + span: literal.span, + kind: literal_kind_to_internal(literal.kind), + }; let leaf = tt::Leaf::from(literal); let tree = TokenTree::from(leaf); @@ -183,7 +178,7 @@ impl server::TokenStream for TokenIdServer { } bridge::TokenTree::Punct(p) => { - let punct = tt::Punct { + let punct = Punct { char: p.ch as char, spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, span: p.span, @@ -238,16 +233,17 @@ impl server::TokenStream for TokenIdServer { .map(|tree| match tree { tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { bridge::TokenTree::Ident(bridge::Ident { - sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), - is_raw: ident.text.starts_with("r#"), + sym: ident.sym, + is_raw: 
ident.is_raw.yes(), span: ident.span, }) } tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { span: lit.span, - ..server::FreeFunctions::literal_from_str(self, &lit.text) - .unwrap_or_else(|_| panic!("`{}`", lit.text)) + kind: literal_kind_to_external(lit.kind), + symbol: lit.symbol, + suffix: lit.suffix, }) } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { @@ -364,11 +360,11 @@ impl server::Server for TokenIdServer { } fn intern_symbol(ident: &str) -> Self::Symbol { - Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + Symbol::intern(ident) } fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { - f(symbol.text(&SYMBOL_INTERNER).as_str()) + f(symbol.as_str()) } } @@ -381,12 +377,14 @@ mod tests { let s = TokenStream { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "struct".into(), + sym: Symbol::intern("struct"), span: tt::TokenId(0), + is_raw: tt::IdentIsRaw::No, })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "T".into(), + sym: Symbol::intern("T"), span: tt::TokenId(0), + is_raw: tt::IdentIsRaw::No, })), tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { @@ -411,7 +409,8 @@ mod tests { kind: tt::DelimiterKind::Parenthesis, }, token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "a".into(), + is_raw: tt::IdentIsRaw::No, + sym: Symbol::intern("a"), span: tt::TokenId(0), }))]), }); @@ -428,8 +427,9 @@ mod tests { assert_eq!( underscore.token_trees[0], tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "_".into(), + sym: Symbol::intern("_"), span: tt::TokenId(0), + is_raw: tt::IdentIsRaw::No, })) ); } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs index b1a448427c6..cdf93fa4251 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs @@ -127,7 +127,8 @@ pub(super) mod token_stream { impl<S: Copy + fmt::Debug> TokenStream<S> { pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> { let subtree = - mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?; + mbe::parse_to_token_tree_static_span(span::Edition::CURRENT_FIXME, call_site, src) + .ok_or("lexing error")?; Ok(TokenStream::with_subtree(subtree)) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs index 63342825380..dc6e71163b2 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs @@ -21,20 +21,20 @@ fn test_derive_error() { assert_expand( "DeriveError", r#"struct S;"#, - expect![[r##" + expect![[r#" SUBTREE $$ 1 1 IDENT compile_error 1 PUNCH ! [alone] 1 SUBTREE () 1 1 - LITERAL "#[derive(DeriveError)] struct S ;"1 - PUNCH ; [alone] 1"##]], - expect![[r##" + LITERAL Str #[derive(DeriveError)] struct S ; 1 + PUNCH ; [alone] 1"#]], + expect![[r#" SUBTREE $$ 42:2@0..100#0 42:2@0..100#0 IDENT compile_error 42:2@0..100#0 PUNCH ! 
[alone] 42:2@0..100#0 SUBTREE () 42:2@0..100#0 42:2@0..100#0 - LITERAL "#[derive(DeriveError)] struct S ;"42:2@0..100#0 - PUNCH ; [alone] 42:2@0..100#0"##]], + LITERAL Str #[derive(DeriveError)] struct S ; 42:2@0..100#0 + PUNCH ; [alone] 42:2@0..100#0"#]], ); } @@ -47,18 +47,18 @@ fn test_fn_like_macro_noop() { SUBTREE $$ 1 1 IDENT ident 1 PUNCH , [alone] 1 - LITERAL 01 + LITERAL Integer 0 1 PUNCH , [alone] 1 - LITERAL 11 + LITERAL Integer 1 1 PUNCH , [alone] 1 SUBTREE [] 1 1"#]], expect![[r#" SUBTREE $$ 42:2@0..100#0 42:2@0..100#0 IDENT ident 42:2@0..5#0 PUNCH , [alone] 42:2@5..6#0 - LITERAL 042:2@7..8#0 + LITERAL Integer 0 42:2@7..8#0 PUNCH , [alone] 42:2@8..9#0 - LITERAL 142:2@10..11#0 + LITERAL Integer 1 42:2@10..11#0 PUNCH , [alone] 42:2@11..12#0 SUBTREE [] 42:2@13..14#0 42:2@14..15#0"#]], ); @@ -135,22 +135,22 @@ fn test_fn_like_mk_literals() { r#""#, expect![[r#" SUBTREE $$ 1 1 - LITERAL b"byte_string"1 - LITERAL 'c'1 - LITERAL "string"1 - LITERAL 3.14f641 - LITERAL 3.141 - LITERAL 123i641 - LITERAL 1231"#]], + LITERAL ByteStr byte_string 1 + LITERAL Char c 1 + LITERAL Str string 1 + LITERAL Float 3.14f64 1 + LITERAL Float 3.14 1 + LITERAL Integer 123i64 1 + LITERAL Integer 123 1"#]], expect![[r#" SUBTREE $$ 42:2@0..100#0 42:2@0..100#0 - LITERAL b"byte_string"42:2@0..100#0 - LITERAL 'c'42:2@0..100#0 - LITERAL "string"42:2@0..100#0 - LITERAL 3.14f6442:2@0..100#0 - LITERAL 3.1442:2@0..100#0 - LITERAL 123i6442:2@0..100#0 - LITERAL 12342:2@0..100#0"#]], + LITERAL ByteStr byte_string 42:2@0..100#0 + LITERAL Char c 42:2@0..100#0 + LITERAL Str string 42:2@0..100#0 + LITERAL Float 3.14f64 42:2@0..100#0 + LITERAL Float 3.14 42:2@0..100#0 + LITERAL Integer 123i64 42:2@0..100#0 + LITERAL Integer 123 42:2@0..100#0"#]], ); } @@ -175,50 +175,50 @@ fn test_fn_like_macro_clone_literals() { assert_expand( "fn_like_clone_tokens", r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###, - expect![[r###" + expect![[r#" SUBTREE $$ 1 1 - LITERAL 1u161 + LITERAL Integer 1u16 1 PUNCH , [alone] 1 - LITERAL 2_u321 + LITERAL Integer 2_u32 1 PUNCH , [alone] 1 PUNCH - [alone] 1 - LITERAL 4i641 + LITERAL Integer 4i64 1 PUNCH , [alone] 1 - LITERAL 3.14f321 + LITERAL Float 3.14f32 1 PUNCH , [alone] 1 - LITERAL "hello bridge"1 + LITERAL Str hello bridge 1 PUNCH , [alone] 1 - LITERAL "suffixed"suffix1 + LITERAL Str suffixedsuffix 1 PUNCH , [alone] 1 - LITERAL r##"raw"##1 + LITERAL StrRaw(2) raw 1 PUNCH , [alone] 1 - LITERAL 'a'1 + LITERAL Char a 1 PUNCH , [alone] 1 - LITERAL b'b'1 + LITERAL Byte b 1 PUNCH , [alone] 1 - LITERAL c"null"1"###]], - expect![[r###" + LITERAL CStr null 1"#]], + expect![[r#" SUBTREE $$ 42:2@0..100#0 42:2@0..100#0 - LITERAL 1u1642:2@0..4#0 + LITERAL Integer 1u16 42:2@0..4#0 PUNCH , [alone] 42:2@4..5#0 - LITERAL 2_u3242:2@6..11#0 + LITERAL Integer 2_u32 42:2@6..11#0 PUNCH , [alone] 42:2@11..12#0 PUNCH - [alone] 42:2@13..14#0 - LITERAL 4i6442:2@14..18#0 + LITERAL Integer 4i64 42:2@14..18#0 PUNCH , [alone] 42:2@18..19#0 - LITERAL 3.14f3242:2@20..27#0 + LITERAL Float 3.14f32 42:2@20..27#0 PUNCH , [alone] 42:2@27..28#0 - LITERAL "hello bridge"42:2@29..43#0 + LITERAL Str hello bridge 42:2@29..43#0 PUNCH , [alone] 42:2@43..44#0 - LITERAL "suffixed"suffix42:2@45..61#0 + LITERAL Str suffixedsuffix 42:2@45..61#0 PUNCH , [alone] 42:2@61..62#0 - LITERAL r##"raw"##42:2@63..73#0 + LITERAL StrRaw(2) raw 42:2@63..73#0 PUNCH , [alone] 42:2@73..74#0 - LITERAL 'a'42:2@75..78#0 + LITERAL Char a 42:2@75..78#0 PUNCH , [alone] 42:2@78..79#0 - LITERAL b'b'42:2@80..84#0 
+ LITERAL Byte b 42:2@80..84#0 PUNCH , [alone] 42:2@84..85#0 - LITERAL c"null"42:2@86..93#0"###]], + LITERAL CStr null 42:2@86..93#0"#]], ); } @@ -231,20 +231,20 @@ fn test_attr_macro() { "attr_error", r#"mod m {}"#, r#"some arguments"#, - expect![[r##" + expect![[r#" SUBTREE $$ 1 1 IDENT compile_error 1 PUNCH ! [alone] 1 SUBTREE () 1 1 - LITERAL "#[attr_error(some arguments)] mod m {}"1 - PUNCH ; [alone] 1"##]], - expect![[r##" + LITERAL Str #[attr_error(some arguments)] mod m {} 1 + PUNCH ; [alone] 1"#]], + expect![[r#" SUBTREE $$ 42:2@0..100#0 42:2@0..100#0 IDENT compile_error 42:2@0..100#0 PUNCH ! [alone] 42:2@0..100#0 SUBTREE () 42:2@0..100#0 42:2@0..100#0 - LITERAL "#[attr_error(some arguments)] mod m {}"42:2@0..100#0 - PUNCH ; [alone] 42:2@0..100#0"##]], + LITERAL Str #[attr_error(some arguments)] mod m {} 42:2@0..100#0 + PUNCH ; [alone] 42:2@0..100#0"#]], ); } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 03b1117a5bd..70eff51cade 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -2,14 +2,14 @@ use expect_test::Expect; use proc_macro_api::msg::TokenId; -use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; +use span::{EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; use tt::TextRange; use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv}; fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree_static_span(call_site, src).unwrap(), + mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(), ) } @@ -19,7 +19,7 @@ fn parse_string_spanned( src: &str, ) -> crate::server_impl::TokenStream<Span> { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree(anchor, call_site, src).unwrap(), + mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(), ) } @@ -68,7 +68,7 @@ fn assert_expand_impl( let def_site = Span { range: TextRange::new(0.into(), 150.into()), anchor: SpanAnchor { - file_id: FileId::from_raw(41), + file_id: EditionedFileId::current_edition(FileId::from_raw(41)), ast_id: ErasedFileAstId::from_raw(From::from(1)), }, ctx: SyntaxContextId::ROOT, @@ -76,7 +76,7 @@ fn assert_expand_impl( let call_site = Span { range: TextRange::new(0.into(), 100.into()), anchor: SpanAnchor { - file_id: FileId::from_raw(42), + file_id: EditionedFileId::current_edition(FileId::from_raw(42)), ast_id: ErasedFileAstId::from_raw(From::from(2)), }, ctx: SyntaxContextId::ROOT, diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 097ee1f75cd..8b34bd3fad1 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -25,6 +25,7 @@ itertools.workspace = true # local deps base-db.workspace = true +intern.workspace = true span.workspace = true cfg.workspace = true paths = { workspace = true, features = ["serde1"] } diff --git a/src/tools/rust-analyzer/crates/project-model/src/cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/cfg.rs index b409bc1ce7a..e921e3de722 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cfg.rs @@ -4,6 +4,7 @@ 
use std::{fmt, str::FromStr}; use cfg::{CfgDiff, CfgOptions}; +use intern::Symbol; use rustc_hash::FxHashMap; use serde::Serialize; @@ -44,8 +45,10 @@ impl Extend<CfgFlag> for CfgOptions { fn extend<T: IntoIterator<Item = CfgFlag>>(&mut self, iter: T) { for cfg_flag in iter { match cfg_flag { - CfgFlag::Atom(it) => self.insert_atom(it.into()), - CfgFlag::KeyValue { key, value } => self.insert_key_value(key.into(), value.into()), + CfgFlag::Atom(it) => self.insert_atom(Symbol::intern(&it)), + CfgFlag::KeyValue { key, value } => { + self.insert_key_value(Symbol::intern(&key), Symbol::intern(&value)) + } } } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs index 88fb10a68c6..049acc290bb 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/env.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs @@ -75,14 +75,26 @@ pub(crate) fn cargo_config_env( } // if successful we receive `env.key.value = "value" per entry tracing::debug!("Discovering cargo config env by {:?}", cargo_config); - utf8_stdout(cargo_config).map(parse_output_cargo_config_env).unwrap_or_default() + utf8_stdout(cargo_config) + .map(parse_output_cargo_config_env) + .inspect(|env| { + tracing::debug!("Discovered cargo config env: {:?}", env); + }) + .inspect_err(|err| { + tracing::debug!("Failed to discover cargo config env: {:?}", err); + }) + .unwrap_or_default() } fn parse_output_cargo_config_env(stdout: String) -> FxHashMap<String, String> { stdout .lines() .filter_map(|l| l.strip_prefix("env.")) - .filter_map(|l| l.split_once(".value = ")) + .filter_map(|l| { + l.split_once(" = ") + // cargo used to report it with this, keep it for a couple releases around + .or_else(|| l.split_once(".value = ")) + }) .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned())) .collect() } diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index 4a916e570be..cf0a6ad4025 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -224,6 +224,7 @@ impl ProjectJson { Crate { display_name: crate_data .display_name + .as_deref() .map(CrateDisplayName::from_canonical_name), root_module, edition: crate_data.edition.into(), @@ -275,17 +276,32 @@ impl ProjectJson { self.manifest.as_ref() } + pub fn crate_by_buildfile(&self, path: &AbsPath) -> Option<Build> { + // this is fast enough for now, but it's unfortunate that this is O(crates). + let path: &std::path::Path = path.as_ref(); + self.crates + .iter() + .filter(|krate| krate.is_workspace_member) + .filter_map(|krate| krate.build.clone()) + .find(|build| build.build_file.as_std_path() == path) + } + /// Returns the path to the project's manifest or root folder, if no manifest exists. pub fn manifest_or_root(&self) -> &AbsPath { self.manifest.as_ref().map_or(&self.project_root, |manifest| manifest.as_ref()) } + /// Returns the path to the project's root folder. 
+ pub fn project_root(&self) -> &AbsPath { + &self.project_root + } + pub fn runnables(&self) -> &[Runnable] { &self.runnables } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] pub struct ProjectJsonData { sysroot: Option<Utf8PathBuf>, sysroot_src: Option<Utf8PathBuf>, @@ -294,7 +310,7 @@ pub struct ProjectJsonData { runnables: Vec<RunnableData>, } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] struct CrateData { display_name: Option<String>, root_module: Utf8PathBuf, @@ -318,7 +334,7 @@ struct CrateData { build: Option<BuildData>, } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] #[serde(rename = "edition")] enum EditionData { #[serde(rename = "2015")] @@ -331,7 +347,7 @@ enum EditionData { Edition2024, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] pub struct BuildData { label: String, build_file: Utf8PathBuf, @@ -418,7 +434,7 @@ pub(crate) struct Dep { pub(crate) name: CrateName, } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] struct CrateSource { include_dirs: Vec<Utf8PathBuf>, exclude_dirs: Vec<Utf8PathBuf>, diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 2762de5997a..8f5457bf99a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -1,12 +1,14 @@ use std::ops::Deref; -use base_db::{CrateGraph, FileId, ProcMacroPaths}; +use base_db::{CrateGraph, ProcMacroPaths}; use cargo_metadata::Metadata; use cfg::{CfgAtom, CfgDiff}; use expect_test::{expect_file, ExpectFile}; +use intern::sym; use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::de::DeserializeOwned; +use span::FileId; use triomphe::Arc; use crate::{ @@ -180,7 +182,7 @@ fn check_crate_graph(crate_graph: CrateGraph, expect: ExpectFile) { #[test] fn cargo_hello_world_project_model_with_wildcard_overrides() { let cfg_overrides = CfgOverrides { - global: CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(), + global: CfgDiff::new(Vec::new(), vec![CfgAtom::Flag(sym::test.clone())]).unwrap(), selective: Default::default(), }; let (crate_graph, _proc_macros) = @@ -199,7 +201,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { global: Default::default(), selective: std::iter::once(( "libc".to_owned(), - CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(), + CfgDiff::new(Vec::new(), vec![CfgAtom::Flag(sym::test.clone())]).unwrap(), )) .collect(), }; diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 5e27ce29873..31d1c77fd07 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -6,14 +6,15 @@ use std::{collections::VecDeque, fmt, fs, iter, sync}; use anyhow::Context; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileId, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgAtom, CfgDiff, CfgOptions}; +use intern::{sym, 
Symbol}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; -use span::Edition; +use span::{Edition, FileId}; use toolchain::Tool; use tracing::instrument; use triomphe::Arc; @@ -30,6 +31,7 @@ use crate::{ utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, }; +use tracing::{debug, error, info}; pub type FileLoader<'a> = &'a mut dyn for<'b> FnMut(&'b AbsPath) -> Option<FileId>; @@ -249,7 +251,7 @@ impl ProjectWorkspace { }; let rustc = rustc_dir.and_then(|rustc_dir| { - tracing::info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source"); + info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source"); match CargoWorkspace::fetch_metadata( &rustc_dir, cargo_toml.parent(), @@ -521,6 +523,14 @@ impl ProjectWorkspace { } } + pub fn workspace_root(&self) -> &AbsPath { + match &self.kind { + ProjectWorkspaceKind::Cargo { cargo, .. } => cargo.workspace_root(), + ProjectWorkspaceKind::Json(project) => project.project_root(), + ProjectWorkspaceKind::DetachedFile { file, .. } => file.parent(), + } + } + pub fn manifest(&self) -> Option<&ManifestPath> { match &self.kind { ProjectWorkspaceKind::Cargo { cargo, .. } => Some(cargo.manifest_path()), @@ -766,9 +776,9 @@ impl ProjectWorkspace { }; if matches!(sysroot.mode(), SysrootMode::Stitched(_)) && crate_graph.patch_cfg_if() { - tracing::debug!("Patched std to depend on cfg-if") + debug!("Patched std to depend on cfg-if") } else { - tracing::debug!("Did not patch std to depend on cfg-if") + debug!("Did not patch std to depend on cfg-if") } (crate_graph, proc_macros) } @@ -893,7 +903,10 @@ fn project_json_to_crate_graph( .collect(); override_cfg.apply( &mut cfg_options, - display_name.as_ref().map(|it| it.canonical_name()).unwrap_or_default(), + display_name + .as_ref() + .map(|it| it.canonical_name().as_str()) + .unwrap_or_default(), ); let crate_graph_crate_id = crate_graph.add_crate_root( file_id, @@ -913,10 +926,18 @@ fn project_json_to_crate_graph( CrateOrigin::Local { repo: None, name: None } }, ); + debug!( + ?crate_graph_crate_id, + crate = display_name.as_ref().map(|name| name.canonical_name().as_str()), + "added root to crate graph" + ); if *is_proc_macro { if let Some(path) = proc_macro_dylib_path.clone() { let node = Ok(( - display_name.as_ref().map(|it| it.canonical_name().to_owned()), + display_name + .as_ref() + .map(|it| it.canonical_name().as_str().to_owned()) + .unwrap_or_else(|| format!("crate{}", idx.0)), path, )); proc_macros.insert(crate_graph_crate_id, node); @@ -927,6 +948,7 @@ fn project_json_to_crate_graph( ) .collect(); + debug!(map = ?idx_to_crate_id); for (from_idx, krate) in project.crates() { if let Some(&from) = idx_to_crate_id.get(&from_idx) { public_deps.add_to_crate_graph(crate_graph, from); @@ -977,8 +999,8 @@ fn cargo_to_crate_graph( if cargo[pkg].is_local { // Add test cfg for local crates - cfg_options.insert_atom("test".into()); - cfg_options.insert_atom("rust_analyzer".into()); + cfg_options.insert_atom(sym::test.clone()); + cfg_options.insert_atom(sym::rust_analyzer.clone()); } override_cfg.apply(&mut cfg_options, &cargo[pkg].name); @@ -1013,12 +1035,12 @@ fn cargo_to_crate_graph( if pkg_data.is_local { CrateOrigin::Local { repo: pkg_data.repository.clone(), - name: Some(pkg_data.name.clone()), + name: Some(Symbol::intern(&pkg_data.name)), } } else { CrateOrigin::Library { repo: pkg_data.repository.clone(), - name: 
pkg_data.name.clone(), + name: Symbol::intern(&pkg_data.name), } }, ); @@ -1144,21 +1166,19 @@ fn detached_file_to_crate_graph( sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load); let mut cfg_options = CfgOptions::from_iter(rustc_cfg); - cfg_options.insert_atom("test".into()); - cfg_options.insert_atom("rust_analyzer".into()); + cfg_options.insert_atom(sym::test.clone()); + cfg_options.insert_atom(sym::rust_analyzer.clone()); override_cfg.apply(&mut cfg_options, ""); let cfg_options = Arc::new(cfg_options); let file_id = match load(detached_file) { Some(file_id) => file_id, None => { - tracing::error!("Failed to load detached file {:?}", detached_file); + error!("Failed to load detached file {:?}", detached_file); return (crate_graph, FxHashMap::default()); } }; - let display_name = detached_file - .file_stem() - .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned())); + let display_name = detached_file.file_stem().map(CrateDisplayName::from_canonical_name); let detached_file_crate = crate_graph.add_crate_root( file_id, Edition::CURRENT, @@ -1231,7 +1251,7 @@ fn handle_rustc_crates( file_id, &rustc_workspace[tgt].name, kind, - CrateOrigin::Rustc { name: rustc_workspace[pkg].name.clone() }, + CrateOrigin::Rustc { name: Symbol::intern(&rustc_workspace[pkg].name) }, ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -1307,7 +1327,7 @@ fn add_target_crate_root( let cfg_options = { let mut opts = cfg_options; for feature in pkg.active_features.iter() { - opts.insert_key_value("feature".into(), feature.into()); + opts.insert_key_value(sym::feature.clone(), Symbol::intern(feature)); } if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) { opts.extend(cfgs.iter().cloned()); @@ -1328,7 +1348,7 @@ fn add_target_crate_root( let crate_id = crate_graph.add_crate_root( file_id, edition, - Some(CrateDisplayName::from_canonical_name(cargo_name.to_owned())), + Some(CrateDisplayName::from_canonical_name(cargo_name)), Some(pkg.version.to_string()), Arc::new(cfg_options), potential_cfg_options.map(Arc::new), @@ -1338,8 +1358,8 @@ fn add_target_crate_root( ); if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { - Some(it) => it.cloned().map(|path| Ok((Some(cargo_name.to_owned()), path))), - None => Some(Err("crate has not yet been built".to_owned())), + Some(it) => it.cloned().map(|path| Ok((cargo_name.to_owned(), path))), + None => Some(Err("proc-macro crate is missing its build data".to_owned())), }; if let Some(proc_macro) = proc_macro { proc_macros.insert(crate_id, proc_macro); @@ -1349,7 +1369,7 @@ fn add_target_crate_root( crate_id } -#[derive(Default)] +#[derive(Default, Debug)] struct SysrootPublicDeps { deps: Vec<(CrateName, CrateId, bool)>, } @@ -1381,8 +1401,8 @@ fn sysroot_to_crate_graph( &CfgOverrides { global: CfgDiff::new( vec![ - CfgAtom::Flag("debug_assertions".into()), - CfgAtom::Flag("miri".into()), + CfgAtom::Flag(sym::debug_assertions.clone()), + CfgAtom::Flag(sym::miri.clone()), ], vec![], ) @@ -1394,14 +1414,14 @@ fn sysroot_to_crate_graph( let mut pub_deps = vec![]; let mut libproc_macro = None; - let diff = CfgDiff::new(vec![], vec![CfgAtom::Flag("test".into())]).unwrap(); + let diff = CfgDiff::new(vec![], vec![CfgAtom::Flag(sym::test.clone())]).unwrap(); for (cid, c) in cg.iter_mut() { // uninject `test` flag so `core` keeps working. 
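Several hunks above and below replace string-based cfg atoms (`"test".into()`) with interned symbols (`sym::test.clone()`, `Symbol::intern(..)`). The sketch below is a toy illustration of the interning idea only, not the actual `intern` crate: equal strings share one allocation, so handles are cheap to clone and to compare.

```rust
use std::collections::HashSet;
use std::sync::{Arc, Mutex, OnceLock};

// Toy interner: the first caller to intern a string stores it in a global
// pool; later callers get a clone of the same `Arc`, so equality checks can
// be done by pointer comparison instead of comparing string contents.
fn intern(s: &str) -> Arc<str> {
    static POOL: OnceLock<Mutex<HashSet<Arc<str>>>> = OnceLock::new();
    let pool = POOL.get_or_init(|| Mutex::new(HashSet::new()));
    let mut pool = pool.lock().unwrap();
    if let Some(existing) = pool.get(s) {
        return existing.clone();
    }
    let sym: Arc<str> = Arc::from(s);
    pool.insert(sym.clone());
    sym
}

fn main() {
    let a = intern("test");
    let b = intern("test");
    // Both handles point at the same allocation.
    assert!(Arc::ptr_eq(&a, &b));
}
```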
Arc::make_mut(&mut c.cfg_options).apply_diff(diff.clone()); // patch the origin if c.origin.is_local() { let lang_crate = LangCrateOrigin::from( - c.display_name.as_ref().map_or("", |it| it.canonical_name()), + c.display_name.as_ref().map_or("", |it| it.canonical_name().as_str()), ); c.origin = CrateOrigin::Lang(lang_crate); match lang_crate { @@ -1449,8 +1469,8 @@ fn sysroot_to_crate_graph( let cfg_options = Arc::new({ let mut cfg_options = CfgOptions::default(); cfg_options.extend(rustc_cfg); - cfg_options.insert_atom("debug_assertions".into()); - cfg_options.insert_atom("miri".into()); + cfg_options.insert_atom(sym::debug_assertions.clone()); + cfg_options.insert_atom(sym::miri.clone()); cfg_options }); let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = stitched @@ -1458,11 +1478,10 @@ fn sysroot_to_crate_graph( .filter_map(|krate| { let file_id = load(&stitched[krate].root)?; - let display_name = - CrateDisplayName::from_canonical_name(stitched[krate].name.clone()); + let display_name = CrateDisplayName::from_canonical_name(&stitched[krate].name); let crate_id = crate_graph.add_crate_root( file_id, - Edition::CURRENT, + Edition::CURRENT_FIXME, Some(display_name), None, cfg_options.clone(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index 93fb55ede8e..bc1b13a6497 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -54,6 +54,7 @@ hir-def.workspace = true hir-ty.workspace = true hir.workspace = true ide-db.workspace = true +intern.workspace = true # This should only be used in CLI ide-ssr.workspace = true ide.workspace = true diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 1985093bc5c..6a980a153c9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -175,6 +175,7 @@ fn run_server() -> anyhow::Result<()> { return Err(e.into()); } }; + tracing::info!("InitializeParams: {}", initialize_params); let lsp_types::InitializeParams { root_uri, @@ -264,7 +265,10 @@ fn run_server() -> anyhow::Result<()> { return Err(e.into()); } - if !config.has_linked_projects() && config.detached_files().is_empty() { + if config.discover_workspace_config().is_none() + && !config.has_linked_projects() + && config.detached_files().is_empty() + { config.rediscover_workspaces(); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs index 212294b5d32..9610808c27e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs @@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { code_action_provider: Some(config.caps().code_action_capabilities()), code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), document_formatting_provider: Some(OneOf::Left(true)), - document_range_formatting_provider: match config.rustfmt() { + document_range_formatting_provider: match config.rustfmt(None) { RustfmtConfig::Rustfmt { enable_range_formatting: true, .. 
} => Some(OneOf::Left(true)), _ => Some(OneOf::Left(false)), }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 90316f3b89d..380105d2c21 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -25,7 +25,7 @@ use ide_db::{ salsa::{self, debug::DebugQueryTable, ParallelDatabase}, SourceDatabase, SourceDatabaseExt, }, - LineIndexDatabase, SnippetCap, + EditionedFileId, LineIndexDatabase, SnippetCap, }; use itertools::Itertools; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; @@ -35,7 +35,7 @@ use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSourc use rayon::prelude::*; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{AstNode, SyntaxNode}; -use vfs::{AbsPathBuf, FileId, Vfs, VfsPath}; +use vfs::{AbsPathBuf, Vfs, VfsPath}; use crate::cli::{ flags::{self, OutputFormat}, @@ -120,7 +120,7 @@ impl flags::AnalysisStats { for file_id in source_root.iter() { if let Some(p) = source_root.path_for_file(&file_id) { if let Some((_, Some("rs"))) = p.name_and_extension() { - db.file_item_tree(file_id.into()); + db.file_item_tree(EditionedFileId::current_edition(file_id).into()); num_item_trees += 1; } } @@ -140,7 +140,7 @@ impl flags::AnalysisStats { let module = krate.root_module(); let file_id = module.definition_source_file_id(db); let file_id = file_id.original_file(db); - let source_root = db.file_source_root(file_id); + let source_root = db.file_source_root(file_id.into()); let source_root = db.source_root(source_root); if !source_root.is_library || self.with_deps { num_crates += 1; @@ -332,7 +332,7 @@ impl flags::AnalysisStats { ws: &ProjectWorkspace, db: &RootDatabase, vfs: &Vfs, - mut file_ids: Vec<FileId>, + mut file_ids: Vec<EditionedFileId>, verbosity: Verbosity, ) { let cargo_config = CargoConfig { @@ -367,11 +367,10 @@ impl flags::AnalysisStats { for &file_id in &file_ids { let sema = hir::Semantics::new(db); - let _ = db.parse(file_id); - let parse = sema.parse(file_id); - let file_txt = db.file_text(file_id); - let path = vfs.file_path(file_id).as_path().unwrap(); + let parse = sema.parse_guess_edition(file_id.into()); + let file_txt = db.file_text(file_id.into()); + let path = vfs.file_path(file_id.into()).as_path().unwrap(); for node in parse.syntax().descendants() { let expr = match syntax::ast::Expr::cast(node.clone()) { @@ -398,7 +397,7 @@ impl flags::AnalysisStats { let range = sema.original_range(expected_tail.syntax()).range; let original_text: String = db - .file_text(file_id) + .file_text(file_id.into()) .chars() .skip(usize::from(range.start())) .take(usize::from(range.end()) - usize::from(range.start())) @@ -423,7 +422,7 @@ impl flags::AnalysisStats { if found_terms.is_empty() { acc.tail_expr_no_term += 1; acc.total_tail_exprs += 1; - // println!("\n{}\n", &original_text); + // println!("\n{original_text}\n"); continue; }; @@ -621,7 +620,7 @@ impl flags::AnalysisStats { module .krate() .display_name(db) - .map(|it| it.canonical_name().to_owned()) + .map(|it| it.canonical_name().as_str().to_owned()) .into_iter() .chain( module @@ -650,7 +649,7 @@ impl flags::AnalysisStats { }; if let Some(src) = source { let original_file = src.file_id.original_file(db); - let path = vfs.file_path(original_file); + let path = vfs.file_path(original_file.into()); let syntax_range = src.text_range(); format!("processing: {} 
({} {:?})", full_name(), path, syntax_range) } else { @@ -664,8 +663,10 @@ impl flags::AnalysisStats { bar.println(msg()); } bar.set_message(msg); - let (body, sm) = db.body_with_source_map(body_id.into()); + let body = db.body(body_id.into()); let inference_result = db.infer(body_id.into()); + // This query is LRU'd, so actually calling it will skew the timing results. + let sm = || db.body_with_source_map(body_id.into()).1; // region:expressions let (previous_exprs, previous_unknown, previous_partially_unknown) = @@ -676,7 +677,8 @@ impl flags::AnalysisStats { let unknown_or_partial = if ty.is_unknown() { num_exprs_unknown += 1; if verbosity.is_spammy() { - if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm, expr_id) { + if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id) + { bar.println(format!( "{} {}:{}-{}:{}: Unknown type", path, @@ -700,7 +702,7 @@ impl flags::AnalysisStats { }; if self.only.is_some() && verbosity.is_spammy() { // in super-verbose mode for just one function, we print every single expression - if let Some((_, start, end)) = expr_syntax_range(db, vfs, &sm, expr_id) { + if let Some((_, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id) { bar.println(format!( "{}:{}-{}:{}: {}", start.line + 1, @@ -716,14 +718,15 @@ impl flags::AnalysisStats { if unknown_or_partial && self.output == Some(OutputFormat::Csv) { println!( r#"{},type,"{}""#, - location_csv_expr(db, vfs, &sm, expr_id), + location_csv_expr(db, vfs, &sm(), expr_id), ty.display(db) ); } if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { num_expr_type_mismatches += 1; if verbosity.is_verbose() { - if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm, expr_id) { + if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id) + { bar.println(format!( "{} {}:{}-{}:{}: Expected {}, got {}", path, @@ -746,7 +749,7 @@ impl flags::AnalysisStats { if self.output == Some(OutputFormat::Csv) { println!( r#"{},mismatch,"{}","{}""#, - location_csv_expr(db, vfs, &sm, expr_id), + location_csv_expr(db, vfs, &sm(), expr_id), mismatch.expected.display(db), mismatch.actual.display(db) ); @@ -773,7 +776,7 @@ impl flags::AnalysisStats { let unknown_or_partial = if ty.is_unknown() { num_pats_unknown += 1; if verbosity.is_spammy() { - if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm, pat_id) { + if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) { bar.println(format!( "{} {}:{}-{}:{}: Unknown type", path, @@ -797,7 +800,7 @@ impl flags::AnalysisStats { }; if self.only.is_some() && verbosity.is_spammy() { // in super-verbose mode for just one function, we print every single pattern - if let Some((_, start, end)) = pat_syntax_range(db, vfs, &sm, pat_id) { + if let Some((_, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) { bar.println(format!( "{}:{}-{}:{}: {}", start.line + 1, @@ -813,14 +816,14 @@ impl flags::AnalysisStats { if unknown_or_partial && self.output == Some(OutputFormat::Csv) { println!( r#"{},type,"{}""#, - location_csv_pat(db, vfs, &sm, pat_id), + location_csv_pat(db, vfs, &sm(), pat_id), ty.display(db) ); } if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) { num_pat_type_mismatches += 1; if verbosity.is_verbose() { - if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm, pat_id) { + if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) { bar.println(format!( "{} {}:{}-{}:{}: Expected {}, got {}", path, @@ -843,7 +846,7 @@ impl 
flags::AnalysisStats { if self.output == Some(OutputFormat::Csv) { println!( r#"{},mismatch,"{}","{}""#, - location_csv_pat(db, vfs, &sm, pat_id), + location_csv_pat(db, vfs, &sm(), pat_id), mismatch.expected.display(db), mismatch.actual.display(db) ); @@ -912,7 +915,7 @@ impl flags::AnalysisStats { module .krate() .display_name(db) - .map(|it| it.canonical_name().to_owned()) + .map(|it| it.canonical_name().as_str().to_owned()) .into_iter() .chain( module @@ -944,7 +947,7 @@ impl flags::AnalysisStats { }; if let Some(src) = source { let original_file = src.file_id.original_file(db); - let path = vfs.file_path(original_file); + let path = vfs.file_path(original_file.into()); let syntax_range = src.text_range(); format!("processing: {} ({} {:?})", full_name(), path, syntax_range) } else { @@ -958,7 +961,7 @@ impl flags::AnalysisStats { bar.println(msg()); } bar.set_message(msg); - db.body_with_source_map(body_id.into()); + db.body(body_id.into()); bar.inc(1); } @@ -968,7 +971,7 @@ impl flags::AnalysisStats { report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms"); } - fn run_ide_things(&self, analysis: Analysis, mut file_ids: Vec<FileId>) { + fn run_ide_things(&self, analysis: Analysis, mut file_ids: Vec<EditionedFileId>) { file_ids.sort(); file_ids.dedup(); let mut sw = self.stop_watch(); @@ -998,7 +1001,7 @@ impl flags::AnalysisStats { term_search_borrowck: true, }, ide::AssistResolveStrategy::All, - file_id, + file_id.into(), ); } for &file_id in &file_ids { @@ -1031,7 +1034,7 @@ impl flags::AnalysisStats { fields_to_resolve: InlayFieldsToResolve::empty(), range_exclusive_hints: true, }, - file_id, + file_id.into(), None, ); } @@ -1047,7 +1050,7 @@ impl flags::AnalysisStats { annotate_enum_variant_references: false, location: ide::AnnotationLocation::AboveName, }, - file_id, + file_id.into(), ) .unwrap() .into_iter() @@ -1072,8 +1075,8 @@ fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); let original_range = node.as_ref().original_file_range_rooted(db); - let path = vfs.file_path(original_range.file_id); - let line_index = db.line_index(original_range.file_id); + let path = vfs.file_path(original_range.file_id.into()); + let line_index = db.line_index(original_range.file_id.into()); let text_range = original_range.range; let (start, end) = (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); @@ -1088,8 +1091,8 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); let original_range = node.as_ref().original_file_range_rooted(db); - let path = vfs.file_path(original_range.file_id); - let line_index = db.line_index(original_range.file_id); + let path = vfs.file_path(original_range.file_id.into()); + let line_index = db.line_index(original_range.file_id.into()); let text_range = original_range.range; let (start, end) = (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); @@ -1107,8 +1110,8 @@ fn expr_syntax_range<'a>( let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); let original_range = node.as_ref().original_file_range_rooted(db); - let path = vfs.file_path(original_range.file_id); - let line_index = db.line_index(original_range.file_id); + let path = 
vfs.file_path(original_range.file_id.into()); + let line_index = db.line_index(original_range.file_id.into()); let text_range = original_range.range; let (start, end) = (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); @@ -1128,8 +1131,8 @@ fn pat_syntax_range<'a>( let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); let original_range = node.as_ref().original_file_range_rooted(db); - let path = vfs.file_path(original_range.file_id); - let line_index = db.line_index(original_range.file_id); + let path = vfs.file_path(original_range.file_id.into()); + let line_index = db.line_index(original_range.file_id.into()); let text_range = original_range.range; let (start, end) = (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs index d5eac49ad3a..4ddeb4ab1b0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs @@ -5,8 +5,8 @@ use project_model::{CargoConfig, RustLibSource}; use rustc_hash::FxHashSet; use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; -use ide::{AnalysisHost, AssistResolveStrategy, DiagnosticsConfig, Severity}; -use ide_db::base_db::SourceDatabaseExt; +use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity}; +use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase}; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use crate::cli::flags; @@ -48,7 +48,7 @@ impl flags::Diagnostics { let work = all_modules(db).into_iter().filter(|module| { let file_id = module.definition_source_file_id(db).original_file(db); - let source_root = db.file_source_root(file_id); + let source_root = db.file_source_root(file_id.into()); let source_root = db.source_root(source_root); !source_root.is_library }); @@ -58,12 +58,15 @@ impl flags::Diagnostics { if !visited_files.contains(&file_id) { let crate_name = module.krate().display_name(db).as_deref().unwrap_or("unknown").to_owned(); - println!("processing crate: {crate_name}, module: {}", _vfs.file_path(file_id)); + println!( + "processing crate: {crate_name}, module: {}", + _vfs.file_path(file_id.into()) + ); for diagnostic in analysis .diagnostics( &DiagnosticsConfig::test_sample(), AssistResolveStrategy::None, - file_id, + file_id.into(), ) .unwrap() { @@ -71,7 +74,11 @@ impl flags::Diagnostics { found_error = true; } - println!("{diagnostic:?}"); + let Diagnostic { code, message, range, severity, .. 
} = diagnostic; + let line_index = db.line_index(range.file_id); + let start = line_index.line_col(range.range.start()); + let end = line_index.line_col(range.range.end()); + println!("{severity:?} {code:?} from {start:?} to {end:?}: {message}"); } visited_files.insert(file_id); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index 8f60b17b594..ee134b6c507 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -342,7 +342,7 @@ mod test { let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ()"); let offset = range_or_offset.expect_offset(); - (host, FilePosition { file_id, offset }) + (host, FilePosition { file_id: file_id.into(), offset }) } /// If expected == "", then assert that there are no symbols (this is basically local symbol) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs index 28cbd1afd8c..7f24fa2835e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs @@ -1,6 +1,7 @@ //! Applies structured search replace rules from the command line. use anyhow::Context; +use ide_db::EditionedFileId; use ide_ssr::MatchFinder; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use project_model::{CargoConfig, RustLibSource}; @@ -67,7 +68,10 @@ impl flags::Search { for &root in db.local_roots().iter() { let sr = db.source_root(root); for file_id in sr.iter() { - for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) { + for debug_info in match_finder.debug_where_text_equal( + EditionedFileId::current_edition(file_id), + debug_snippet, + ) { println!("{debug_info:#?}"); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 3594cdda2e9..b9b8cfdfc9e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -8,6 +8,7 @@ use std::{fmt, iter, ops::Not, sync::OnceLock}; use cfg::{CfgAtom, CfgDiff}; use dirs::config_dir; use flycheck::{CargoOptions, FlycheckConfig}; +use hir::Symbol; use ide::{ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, GenericParameterHints, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, @@ -107,7 +108,7 @@ config_data! { /// targets and features, with the following base command line: /// /// ```bash - /// cargo check --quiet --workspace --message-format=json --all-targets + /// cargo check --quiet --workspace --message-format=json --all-targets --keep-going /// ``` /// . cargo_buildScripts_overrideCommand: Option<Vec<String>> = None, @@ -282,9 +283,9 @@ config_data! { linkedProjects: Vec<ManifestOrProjectJson> = vec![], /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128. - lru_capacity: Option<usize> = None, + lru_capacity: Option<u16> = None, /// Sets the LRU capacity of the specified queries. - lru_query_capacities: FxHashMap<Box<str>, usize> = FxHashMap::default(), + lru_query_capacities: FxHashMap<Box<str>, u16> = FxHashMap::default(), /// These proc-macros will be ignored when trying to expand them. /// @@ -327,6 +328,101 @@ config_data! { /// `textDocument/rangeFormatting` request. 
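The `cli/diagnostics.rs` change above stops `Debug`-printing whole `Diagnostic` values and instead resolves each diagnostic's text range to line/column positions via the line index before printing. As a rough stand-in for what such a lookup does (this is not rust-analyzer's `LineIndex`, just a simplified sketch):

```rust
/// A simplified line index: remembers the byte offset at which every line
/// starts, so a byte offset into the file can be mapped to (line, column).
struct LineIndex {
    line_starts: Vec<usize>,
}

impl LineIndex {
    fn new(text: &str) -> Self {
        let mut line_starts = vec![0];
        for (i, b) in text.bytes().enumerate() {
            if b == b'\n' {
                line_starts.push(i + 1);
            }
        }
        Self { line_starts }
    }

    /// Returns zero-based (line, column-in-bytes) for a byte offset.
    fn line_col(&self, offset: usize) -> (usize, usize) {
        let line = self.line_starts.partition_point(|&start| start <= offset) - 1;
        (line, offset - self.line_starts[line])
    }
}

fn main() {
    let index = LineIndex::new("fn main() {\n    let x = 1;\n}\n");
    assert_eq!(index.line_col(0), (0, 0)); // start of `fn`
    assert_eq!(index.line_col(16), (1, 4)); // the `l` of `let` on line 2
}
```

Precomputing the line starts once per file keeps each offset lookup to a single binary search.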
The rustfmt option is unstable and only /// available on a nightly build. rustfmt_rangeFormatting_enable: bool = false, + + /// Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`]. + /// + /// [`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`. + /// `progress_label` is used for the title in progress indicators, whereas `files_to_watch` + /// is used to determine which build system-specific files should be watched in order to + /// reload rust-analyzer. + /// + /// Below is an example of a valid configuration: + /// ```json + /// "rust-analyzer.workspace.discoverConfig": { + /// "command": [ + /// "rust-project", + /// "develop-json" + /// ], + /// "progressLabel": "rust-analyzer", + /// "filesToWatch": [ + /// "BUCK" + /// ] + /// } + /// ``` + /// + /// ## On `DiscoverWorkspaceConfig::command` + /// + /// **Warning**: This format is provisional and subject to change. + /// + /// [`DiscoverWorkspaceConfig::command`] *must* return a JSON object + /// corresponding to `DiscoverProjectData::Finished`: + /// + /// ```norun + /// #[derive(Debug, Clone, Deserialize, Serialize)] + /// #[serde(tag = "kind")] + /// #[serde(rename_all = "snake_case")] + /// enum DiscoverProjectData { + /// Finished { buildfile: Utf8PathBuf, project: ProjectJsonData }, + /// Error { error: String, source: Option<String> }, + /// Progress { message: String }, + /// } + /// ``` + /// + /// As JSON, `DiscoverProjectData::Finished` is: + /// + /// ```json + /// { + /// // the internally-tagged representation of the enum. + /// "kind": "finished", + /// // the file used by a non-Cargo build system to define + /// // a package or target. + /// "buildfile": "rust-analyzer/BUILD", + /// // the contents of a rust-project.json, elided for brevity + /// "project": { + /// "sysroot": "foo", + /// "crates": [] + /// } + /// } + /// ``` + /// + /// It is encouraged, but not required, to use the other variants on + /// `DiscoverProjectData` to provide a more polished end-user experience. + /// + /// `DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, + /// which will be substituted with the JSON-serialized form of the following + /// enum: + /// + /// ```norun + /// #[derive(PartialEq, Clone, Debug, Serialize)] + /// #[serde(rename_all = "camelCase")] + /// pub enum DiscoverArgument { + /// Path(AbsPathBuf), + /// Buildfile(AbsPathBuf), + /// } + /// ``` + /// + /// The JSON representation of `DiscoverArgument::Path` is: + /// + /// ```json + /// { + /// "path": "src/main.rs" + /// } + /// ``` + /// + /// Similarly, the JSON representation of `DiscoverArgument::Buildfile` is: + /// + /// ``` + /// { + /// "buildfile": "BUILD" + /// } + /// ``` + /// + /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, + /// and therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to + /// to update an existing workspace. As a reference for implementors, + /// buck2's `rust-project` will likely be useful: + /// https://github.com/facebook/buck2/tree/main/integrations/rust-project. + workspace_discoverConfig: Option<DiscoverWorkspaceConfig> = None, } } @@ -510,9 +606,9 @@ config_data! { /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = AdjustmentHintsModeDef::Prefix, /// Whether to show const generic parameter name inlay hints. 
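To make the `workspace.discoverConfig` protocol documented above a bit more concrete, here is a hedged sketch of a discovery tool emitting the internally tagged `DiscoverProjectData::Finished` message. It assumes only `serde` (with the `derive` feature) and `serde_json`; the `buildfile` and `project` values are placeholders rather than the output of any real build system:

```rust
use serde::Serialize;

// Message shape a discovery command prints on stdout, following the
// documentation above. `project` stands in for a full `rust-project.json`
// payload (`ProjectJsonData`); a `serde_json::Value` keeps the sketch small.
#[allow(dead_code)] // Error/Progress are part of the protocol but unused here.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "kind")]
#[serde(rename_all = "snake_case")]
enum DiscoverProjectData {
    Finished { buildfile: String, project: serde_json::Value },
    Error { error: String, source: Option<String> },
    Progress { message: String },
}

fn main() {
    let msg = DiscoverProjectData::Finished {
        buildfile: "rust-analyzer/BUILD".to_owned(), // placeholder build file
        project: serde_json::json!({ "sysroot": "foo", "crates": [] }),
    };
    // Prints: {"kind":"finished","buildfile":"rust-analyzer/BUILD","project":{...}}
    println!("{}", serde_json::to_string(&msg).unwrap());
}
```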
- inlayHints_genericParameterHints_const_enable: bool= false, + inlayHints_genericParameterHints_const_enable: bool= true, /// Whether to show generic lifetime parameter name inlay hints. - inlayHints_genericParameterHints_lifetime_enable: bool = true, + inlayHints_genericParameterHints_lifetime_enable: bool = false, /// Whether to show generic type parameter name inlay hints. inlayHints_genericParameterHints_type_enable: bool = false, /// Whether to show implicit drop hints. @@ -558,9 +654,6 @@ config_data! { lens_debug_enable: bool = true, /// Whether to show CodeLens in Rust files. lens_enable: bool = true, - /// Internal config: use custom client-side commands even when the - /// client doesn't set the corresponding capability. - lens_forceCustomCommands: bool = true, /// Whether to show `Implementations` lens. Only applies when /// `#rust-analyzer.lens.enable#` is set. lens_implementations_enable: bool = true, @@ -585,9 +678,6 @@ config_data! { /// Whether to show `can't find Cargo.toml` error message. notifications_cargoTomlNotFound: bool = true, - /// Whether to send an UnindexedProject notification to the client. - notifications_unindexedProject: bool = false, - /// How many worker threads in the main loop. The default `null` means to pick automatically. numThreads: Option<NumThreads> = None, @@ -660,6 +750,20 @@ config_data! { } } +#[derive(Debug)] +pub enum RatomlFileKind { + Workspace, + Crate, +} + +#[derive(Debug, Clone)] +// FIXME @alibektas : Seems like a clippy warning of this sort should tell that combining different ConfigInputs into one enum was not a good idea. +#[allow(clippy::large_enum_variant)] +enum RatomlFile { + Workspace(GlobalLocalConfigInput), + Crate(LocalConfigInput), +} + #[derive(Debug, Clone)] pub struct Config { discovered_projects: Vec<ProjectManifest>, @@ -685,22 +789,20 @@ pub struct Config { /// | Windows | `{FOLDERID_RoamingAppData}` | C:\Users\Alice\AppData\Roaming | user_config_path: VfsPath, - /// FIXME @alibektas : Change this to sth better. /// Config node whose values apply to **every** Rust project. user_config: Option<(GlobalLocalConfigInput, ConfigErrors)>, - /// A special file for this session whose path is set to `self.root_path.join("rust-analyzer.toml")` - root_ratoml_path: VfsPath, - - /// This file can be used to make global changes while having only a workspace-wide scope. - root_ratoml: Option<(GlobalLocalConfigInput, ConfigErrors)>, - - /// For every `SourceRoot` there can be at most one RATOML file. - ratoml_files: FxHashMap<SourceRootId, (LocalConfigInput, ConfigErrors)>, + ratoml_file: FxHashMap<SourceRootId, (RatomlFile, ConfigErrors)>, /// Clone of the value that is stored inside a `GlobalState`. source_root_parent_map: Arc<FxHashMap<SourceRootId, SourceRootId>>, + /// Use case : It is an error to have an empty value for `check_command`. + /// Since it is a `global` command at the moment, its final value can only be determined by + /// traversing through `global` configs and the `client` config. However the non-null value constraint + /// is config level agnostic, so this requires an independent error storage + validation_errors: ConfigErrors, + detached_files: Vec<AbsPathBuf>, } @@ -730,6 +832,7 @@ impl Config { /// The return tuple's bool component signals whether the `GlobalState` should call its `update_configuration()` method. 
fn apply_change_with_sink(&self, change: ConfigChange) -> (Config, bool) { let mut config = self.clone(); + config.validation_errors = ConfigErrors::default(); let mut should_update = false; @@ -758,9 +861,10 @@ impl Config { if let Some(mut json) = change.client_config_change { tracing::info!("updating config from JSON: {:#}", json); + if !(json.is_null() || json.as_object().map_or(false, |it| it.is_empty())) { let mut json_errors = vec![]; - let detached_files = get_field::<Vec<Utf8PathBuf>>( + let detached_files = get_field_json::<Vec<Utf8PathBuf>>( &mut json, &mut json_errors, "detachedFiles", @@ -773,6 +877,37 @@ impl Config { patch_old_style::patch_json_for_outdated_configs(&mut json); + // IMPORTANT : This holds as long as ` completion_snippets_custom` is declared `client`. + config.snippets.clear(); + + let snips = self.completion_snippets_custom().to_owned(); + + for (name, def) in snips.iter() { + if def.prefix.is_empty() && def.postfix.is_empty() { + continue; + } + let scope = match def.scope { + SnippetScopeDef::Expr => SnippetScope::Expr, + SnippetScopeDef::Type => SnippetScope::Type, + SnippetScopeDef::Item => SnippetScope::Item, + }; + match Snippet::new( + &def.prefix, + &def.postfix, + &def.body, + def.description.as_ref().unwrap_or(name), + &def.requires, + scope, + ) { + Some(snippet) => config.snippets.push(snippet), + None => json_errors.push(( + name.to_owned(), + <serde_json::Error as serde::de::Error>::custom(format!( + "snippet {name} is invalid or triggers are missing", + )), + )), + } + } config.client_config = ( FullConfigInput::from_json(json, &mut json_errors), ConfigErrors( @@ -788,68 +923,96 @@ impl Config { should_update = true; } - if let Some(change) = change.root_ratoml_change { - tracing::info!("updating root ra-toml config: {:#}", change); - #[allow(clippy::single_match)] - match toml::from_str(&change) { - Ok(table) => { - let mut toml_errors = vec![]; - validate_toml_table( - GlobalLocalConfigInput::FIELDS, - &table, - &mut String::new(), - &mut toml_errors, - ); - config.root_ratoml = Some(( - GlobalLocalConfigInput::from_toml(table, &mut toml_errors), - ConfigErrors( - toml_errors - .into_iter() - .map(|(a, b)| ConfigErrorInner::Toml { config_key: a, error: b }) - .map(Arc::new) - .collect(), - ), - )); - should_update = true; - } - // FIXME - Err(_) => (), - } - } - if let Some(change) = change.ratoml_file_change { - for (source_root_id, (_, text)) in change { - if let Some(text) = text { - let mut toml_errors = vec![]; - tracing::info!("updating ra-toml config: {:#}", text); - #[allow(clippy::single_match)] - match toml::from_str(&text) { - Ok(table) => { - validate_toml_table( - &[LocalConfigInput::FIELDS], - &table, - &mut String::new(), - &mut toml_errors, - ); - config.ratoml_files.insert( - source_root_id, - ( - LocalConfigInput::from_toml(&table, &mut toml_errors), - ConfigErrors( - toml_errors - .into_iter() - .map(|(a, b)| ConfigErrorInner::Toml { - config_key: a, - error: b, - }) - .map(Arc::new) - .collect(), - ), - ), - ); + for (source_root_id, (kind, _, text)) in change { + match kind { + RatomlFileKind::Crate => { + if let Some(text) = text { + let mut toml_errors = vec![]; + tracing::info!("updating ra-toml config: {:#}", text); + match toml::from_str(&text) { + Ok(table) => { + validate_toml_table( + &[LocalConfigInput::FIELDS], + &table, + &mut String::new(), + &mut toml_errors, + ); + config.ratoml_file.insert( + source_root_id, + ( + RatomlFile::Crate(LocalConfigInput::from_toml( + &table, + &mut toml_errors, + )), + 
ConfigErrors( + toml_errors + .into_iter() + .map(|(a, b)| ConfigErrorInner::Toml { + config_key: a, + error: b, + }) + .map(Arc::new) + .collect(), + ), + ), + ); + } + Err(e) => { + config.validation_errors.0.push( + ConfigErrorInner::ParseError { + reason: e.message().to_owned(), + } + .into(), + ); + } + } + } + } + RatomlFileKind::Workspace => { + if let Some(text) = text { + let mut toml_errors = vec![]; + match toml::from_str(&text) { + Ok(table) => { + validate_toml_table( + GlobalLocalConfigInput::FIELDS, + &table, + &mut String::new(), + &mut toml_errors, + ); + config.ratoml_file.insert( + source_root_id, + ( + RatomlFile::Workspace( + GlobalLocalConfigInput::from_toml( + table, + &mut toml_errors, + ), + ), + ConfigErrors( + toml_errors + .into_iter() + .map(|(a, b)| ConfigErrorInner::Toml { + config_key: a, + error: b, + }) + .map(Arc::new) + .collect(), + ), + ), + ); + should_update = true; + } + Err(e) => { + config.validation_errors.0.push( + ConfigErrorInner::ParseError { + reason: e.message().to_owned(), + } + .into(), + ); + } + } } - // FIXME - Err(_) => (), } } } @@ -859,48 +1022,13 @@ impl Config { config.source_root_parent_map = source_root_map; } - // IMPORTANT : This holds as long as ` completion_snippets_custom` is declared `client`. - config.snippets.clear(); - - let snips = self.completion_snippets_custom().to_owned(); - - for (name, def) in snips.iter() { - if def.prefix.is_empty() && def.postfix.is_empty() { - continue; - } - let scope = match def.scope { - SnippetScopeDef::Expr => SnippetScope::Expr, - SnippetScopeDef::Type => SnippetScope::Type, - SnippetScopeDef::Item => SnippetScope::Item, - }; - #[allow(clippy::single_match)] - match Snippet::new( - &def.prefix, - &def.postfix, - &def.body, - def.description.as_ref().unwrap_or(name), - &def.requires, - scope, - ) { - Some(snippet) => config.snippets.push(snippet), - // FIXME - // None => error_sink.0.push(ConfigErrorInner::Json { - // config_key: "".to_owned(), - // error: <serde_json::Error as serde::de::Error>::custom(format!( - // "snippet {name} is invalid or triggers are missing", - // )), - // }), - None => (), - } + if config.check_command(None).is_empty() { + config.validation_errors.0.push(Arc::new(ConfigErrorInner::Json { + config_key: "/check/command".to_owned(), + error: serde_json::Error::custom("expected a non-empty string"), + })); } - // FIXME: bring this back - // if config.check_command().is_empty() { - // error_sink.0.push(ConfigErrorInner::Json { - // config_key: "/check/command".to_owned(), - // error: serde_json::Error::custom("expected a non-empty string"), - // }); - // } (config, should_update) } @@ -915,22 +1043,37 @@ impl Config { .1 .0 .iter() - .chain(config.root_ratoml.as_ref().into_iter().flat_map(|it| it.1 .0.iter())) .chain(config.user_config.as_ref().into_iter().flat_map(|it| it.1 .0.iter())) - .chain(config.ratoml_files.values().flat_map(|it| it.1 .0.iter())) + .chain(config.ratoml_file.values().flat_map(|it| it.1 .0.iter())) + .chain(config.validation_errors.0.iter()) .cloned() .collect(), ); (config, e, should_update) } + + pub fn add_linked_projects(&mut self, data: ProjectJsonData, buildfile: AbsPathBuf) { + let linked_projects = &mut self.client_config.0.global.linkedProjects; + + let new_project = ManifestOrProjectJson::DiscoveredProjectJson { data, buildfile }; + match linked_projects { + Some(projects) => { + match projects.iter_mut().find(|p| p.manifest() == new_project.manifest()) { + Some(p) => *p = new_project, + None => projects.push(new_project), + } + } + 
None => *linked_projects = Some(vec![new_project]), + } + } } #[derive(Default, Debug)] pub struct ConfigChange { user_config_change: Option<Arc<str>>, - root_ratoml_change: Option<Arc<str>>, client_config_change: Option<serde_json::Value>, - ratoml_file_change: Option<FxHashMap<SourceRootId, (VfsPath, Option<Arc<str>>)>>, + ratoml_file_change: + Option<FxHashMap<SourceRootId, (RatomlFileKind, VfsPath, Option<Arc<str>>)>>, source_map_change: Option<Arc<FxHashMap<SourceRootId, SourceRootId>>>, } @@ -940,10 +1083,10 @@ impl ConfigChange { source_root: SourceRootId, vfs_path: VfsPath, content: Option<Arc<str>>, - ) -> Option<(VfsPath, Option<Arc<str>>)> { + ) -> Option<(RatomlFileKind, VfsPath, Option<Arc<str>>)> { self.ratoml_file_change .get_or_insert_with(Default::default) - .insert(source_root, (vfs_path, content)) + .insert(source_root, (RatomlFileKind::Crate, vfs_path, content)) } pub fn change_user_config(&mut self, content: Option<Arc<str>>) { @@ -951,9 +1094,15 @@ impl ConfigChange { self.user_config_change = content; } - pub fn change_root_ratoml(&mut self, content: Option<Arc<str>>) { - assert!(self.root_ratoml_change.is_none()); // Otherwise it is a double write. - self.root_ratoml_change = content; + pub fn change_workspace_ratoml( + &mut self, + source_root: SourceRootId, + vfs_path: VfsPath, + content: Option<Arc<str>>, + ) -> Option<(RatomlFileKind, VfsPath, Option<Arc<str>>)> { + self.ratoml_file_change + .get_or_insert_with(Default::default) + .insert(source_root, (RatomlFileKind::Workspace, vfs_path, content)) } pub fn change_client_config(&mut self, change: serde_json::Value) { @@ -987,6 +1136,14 @@ impl From<ProjectJson> for LinkedProject { } } +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DiscoverWorkspaceConfig { + pub command: Vec<String>, + pub progress_label: String, + pub files_to_watch: Vec<String>, +} + pub struct CallInfoConfig { pub params_only: bool, pub docs: bool, @@ -1098,7 +1255,6 @@ pub enum FilesWatcher { #[derive(Debug, Clone)] pub struct NotificationsConfig { pub cargo_toml_not_found: bool, - pub unindexed_project: bool, } #[derive(Debug, Clone)] @@ -1128,22 +1284,24 @@ pub struct WorkspaceSymbolConfig { /// How many items are returned at most. pub search_limit: usize, } - +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct ClientCommandsConfig { pub run_single: bool, pub debug_single: bool, pub show_reference: bool, pub goto_location: bool, pub trigger_parameter_hints: bool, + pub rename: bool, } #[derive(Debug)] pub enum ConfigErrorInner { Json { config_key: String, error: serde_json::Error }, Toml { config_key: String, error: toml::de::Error }, + ParseError { reason: String }, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default)] pub struct ConfigErrors(Vec<Arc<ConfigErrorInner>>); impl ConfigErrors { @@ -1165,6 +1323,7 @@ impl fmt::Display for ConfigErrors { f(&": ")?; f(e) } + ConfigErrorInner::ParseError { reason } => f(reason), }); write!(f, "invalid config value{}:\n{}", if self.0.len() == 1 { "" } else { "s" }, errors) } @@ -1196,11 +1355,6 @@ impl Config { // FIXME @alibektas : Temporary solution. I don't think this is right as at some point we may allow users to specify // custom USER_CONFIG_PATHs which may also be relative. 
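`add_linked_projects` above either overwrites an existing linked project that has the same manifest or appends a new one. The same replace-or-append pattern in isolation, with toy types in place of `ManifestOrProjectJson`:

```rust
// Replace-or-append, keyed by a manifest-like identifier: if an entry with the
// same key already exists it is overwritten in place, otherwise it is pushed.
fn upsert(projects: &mut Vec<(String, u32)>, new_entry: (String, u32)) {
    match projects.iter_mut().find(|(key, _)| *key == new_entry.0) {
        Some(existing) => *existing = new_entry,
        None => projects.push(new_entry),
    }
}

fn main() {
    let mut projects = vec![("a/BUILD".to_owned(), 1), ("b/BUILD".to_owned(), 2)];
    upsert(&mut projects, ("b/BUILD".to_owned(), 20)); // overwrites the existing entry
    upsert(&mut projects, ("c/BUILD".to_owned(), 3)); // appends a new entry
    assert_eq!(projects.len(), 3);
    assert_eq!(projects[1], ("b/BUILD".to_owned(), 20));
}
```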
let user_config_path = VfsPath::from(AbsPathBuf::assert(user_config_path)); - let root_ratoml_path = { - let mut p = root_path.clone(); - p.push("rust-analyzer.toml"); - VfsPath::new_real_path(p.to_string()) - }; Config { caps: ClientCapabilities::new(caps), @@ -1210,14 +1364,13 @@ impl Config { workspace_roots, visual_studio_code_version, client_config: (FullConfigInput::default(), ConfigErrors(vec![])), - ratoml_files: FxHashMap::default(), default_config: DEFAULT_CONFIG_DATA.get_or_init(|| Box::leak(Box::default())), source_root_parent_map: Arc::new(FxHashMap::default()), user_config: None, user_config_path, - root_ratoml: None, - root_ratoml_path, detached_files: Default::default(), + validation_errors: Default::default(), + ratoml_file: Default::default(), } } @@ -1260,10 +1413,6 @@ impl Config { &self.root_path } - pub fn root_ratoml_path(&self) -> &VfsPath { - &self.root_ratoml_path - } - pub fn caps(&self) -> &ClientCapabilities { &self.caps } @@ -1317,11 +1466,11 @@ impl Config { pub fn diagnostics(&self, source_root: Option<SourceRootId>) -> DiagnosticsConfig { DiagnosticsConfig { - enabled: *self.diagnostics_enable(), + enabled: *self.diagnostics_enable(source_root), proc_attr_macros_enabled: self.expand_proc_attr_macros(), proc_macros_enabled: *self.procMacro_enable(), - disable_experimental: !self.diagnostics_experimental_enable(), - disabled: self.diagnostics_disabled().clone(), + disable_experimental: !self.diagnostics_experimental_enable(source_root), + disabled: self.diagnostics_disabled(source_root).clone(), expr_fill_default: match self.assist_expressionFillDefault(source_root) { ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo, ExprFillDefaultDef::Default => ExprFillDefaultMode::Default, @@ -1331,7 +1480,7 @@ impl Config { prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), - style_lints: self.diagnostics_styleLints_enable().to_owned(), + style_lints: self.diagnostics_styleLints_enable(source_root).to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), } @@ -1524,22 +1673,34 @@ impl Config { } pub fn has_linked_projects(&self) -> bool { - !self.linkedProjects().is_empty() + !self.linkedProjects(None).is_empty() } + pub fn linked_manifests(&self) -> impl Iterator<Item = &Utf8Path> + '_ { - self.linkedProjects().iter().filter_map(|it| match it { + self.linkedProjects(None).iter().filter_map(|it| match it { ManifestOrProjectJson::Manifest(p) => Some(&**p), - ManifestOrProjectJson::ProjectJson(_) => None, + // despite having a buildfile, using this variant as a manifest + // will fail. + ManifestOrProjectJson::DiscoveredProjectJson { .. } => None, + ManifestOrProjectJson::ProjectJson { .. } => None, }) } + pub fn has_linked_project_jsons(&self) -> bool { - self.linkedProjects().iter().any(|it| matches!(it, ManifestOrProjectJson::ProjectJson(_))) + self.linkedProjects(None) + .iter() + .any(|it| matches!(it, ManifestOrProjectJson::ProjectJson { .. 
})) + } + + pub fn discover_workspace_config(&self) -> Option<&DiscoverWorkspaceConfig> { + self.workspace_discoverConfig(None).as_ref() } + pub fn linked_or_discovered_projects(&self) -> Vec<LinkedProject> { - match self.linkedProjects().as_slice() { + match self.linkedProjects(None).as_slice() { [] => { let exclude_dirs: Vec<_> = - self.files_excludeDirs().iter().map(|p| self.root_path.join(p)).collect(); + self.files_excludeDirs(None).iter().map(|p| self.root_path.join(p)).collect(); self.discovered_projects .iter() .filter(|project| { @@ -1559,6 +1720,12 @@ impl Config { .ok() .map(Into::into) } + ManifestOrProjectJson::DiscoveredProjectJson { data, buildfile } => { + let root_path = + buildfile.parent().expect("Unable to get parent of buildfile"); + + Some(ProjectJson::new(None, root_path, data.clone()).into()) + } ManifestOrProjectJson::ProjectJson(it) => { Some(ProjectJson::new(None, &self.root_path, it.clone()).into()) } @@ -1568,48 +1735,48 @@ impl Config { } pub fn prefill_caches(&self) -> bool { - self.cachePriming_enable().to_owned() + self.cachePriming_enable(None).to_owned() } pub fn publish_diagnostics(&self) -> bool { - self.diagnostics_enable().to_owned() + self.diagnostics_enable(None).to_owned() } pub fn diagnostics_map(&self) -> DiagnosticsMapConfig { DiagnosticsMapConfig { - remap_prefix: self.diagnostics_remapPrefix().clone(), - warnings_as_info: self.diagnostics_warningsAsInfo().clone(), - warnings_as_hint: self.diagnostics_warningsAsHint().clone(), - check_ignore: self.check_ignore().clone(), + remap_prefix: self.diagnostics_remapPrefix(None).clone(), + warnings_as_info: self.diagnostics_warningsAsInfo(None).clone(), + warnings_as_hint: self.diagnostics_warningsAsHint(None).clone(), + check_ignore: self.check_ignore(None).clone(), } } pub fn extra_args(&self) -> &Vec<String> { - self.cargo_extraArgs() + self.cargo_extraArgs(None) } pub fn extra_env(&self) -> &FxHashMap<String, String> { - self.cargo_extraEnv() + self.cargo_extraEnv(None) } pub fn check_extra_args(&self) -> Vec<String> { let mut extra_args = self.extra_args().clone(); - extra_args.extend_from_slice(self.check_extraArgs()); + extra_args.extend_from_slice(self.check_extraArgs(None)); extra_args } pub fn check_extra_env(&self) -> FxHashMap<String, String> { - let mut extra_env = self.cargo_extraEnv().clone(); - extra_env.extend(self.check_extraEnv().clone()); + let mut extra_env = self.cargo_extraEnv(None).clone(); + extra_env.extend(self.check_extraEnv(None).clone()); extra_env } - pub fn lru_parse_query_capacity(&self) -> Option<usize> { - self.lru_capacity().to_owned() + pub fn lru_parse_query_capacity(&self) -> Option<u16> { + self.lru_capacity(None).to_owned() } - pub fn lru_query_capacities_config(&self) -> Option<&FxHashMap<Box<str>, usize>> { - self.lru_query_capacities().is_empty().not().then(|| self.lru_query_capacities()) + pub fn lru_query_capacities_config(&self) -> Option<&FxHashMap<Box<str>, u16>> { + self.lru_query_capacities(None).is_empty().not().then(|| self.lru_query_capacities(None)) } pub fn proc_macro_srv(&self) -> Option<AbsPathBuf> { @@ -1618,7 +1785,7 @@ impl Config { } pub fn ignored_proc_macros(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> { - self.procMacro_ignored() + self.procMacro_ignored(None) } pub fn expand_proc_macros(&self) -> bool { @@ -1633,34 +1800,37 @@ impl Config { } _ => FilesWatcher::Server, }, - exclude: self.files_excludeDirs().iter().map(|it| self.root_path.join(it)).collect(), + exclude: self + .files_excludeDirs(None) + .iter() + .map(|it| 
self.root_path.join(it)) + .collect(), } } pub fn notifications(&self) -> NotificationsConfig { NotificationsConfig { cargo_toml_not_found: self.notifications_cargoTomlNotFound().to_owned(), - unindexed_project: self.notifications_unindexedProject().to_owned(), } } pub fn cargo_autoreload_config(&self) -> bool { - self.cargo_autoreload().to_owned() + self.cargo_autoreload(None).to_owned() } pub fn run_build_scripts(&self) -> bool { - self.cargo_buildScripts_enable().to_owned() || self.procMacro_enable().to_owned() + self.cargo_buildScripts_enable(None).to_owned() || self.procMacro_enable().to_owned() } pub fn cargo(&self) -> CargoConfig { - let rustc_source = self.rustc_source().as_ref().map(|rustc_src| { + let rustc_source = self.rustc_source(None).as_ref().map(|rustc_src| { if rustc_src == "discover" { RustLibSource::Discover } else { RustLibSource::Path(self.root_path.join(rustc_src)) } }); - let sysroot = self.cargo_sysroot().as_ref().map(|sysroot| { + let sysroot = self.cargo_sysroot(None).as_ref().map(|sysroot| { if sysroot == "discover" { RustLibSource::Discover } else { @@ -1668,30 +1838,33 @@ impl Config { } }); let sysroot_src = - self.cargo_sysrootSrc().as_ref().map(|sysroot| self.root_path.join(sysroot)); - let sysroot_query_metadata = self.cargo_sysrootQueryMetadata(); + self.cargo_sysrootSrc(None).as_ref().map(|sysroot| self.root_path.join(sysroot)); + let sysroot_query_metadata = self.cargo_sysrootQueryMetadata(None); CargoConfig { - all_targets: *self.cargo_allTargets(), - features: match &self.cargo_features() { + all_targets: *self.cargo_allTargets(None), + features: match &self.cargo_features(None) { CargoFeaturesDef::All => CargoFeatures::All, CargoFeaturesDef::Selected(features) => CargoFeatures::Selected { features: features.clone(), - no_default_features: self.cargo_noDefaultFeatures().to_owned(), + no_default_features: self.cargo_noDefaultFeatures(None).to_owned(), }, }, - target: self.cargo_target().clone(), + target: self.cargo_target(None).clone(), sysroot, sysroot_query_metadata: *sysroot_query_metadata, sysroot_src, rustc_source, cfg_overrides: project_model::CfgOverrides { global: CfgDiff::new( - self.cargo_cfgs() + self.cargo_cfgs(None) .iter() .map(|(key, val)| match val { - Some(val) => CfgAtom::KeyValue { key: key.into(), value: val.into() }, - None => CfgAtom::Flag(key.into()), + Some(val) => CfgAtom::KeyValue { + key: Symbol::intern(key), + value: Symbol::intern(val), + }, + None => CfgAtom::Flag(Symbol::intern(key)), }) .collect(), vec![], @@ -1699,49 +1872,49 @@ impl Config { .unwrap(), selective: Default::default(), }, - wrap_rustc_in_build_scripts: *self.cargo_buildScripts_useRustcWrapper(), - invocation_strategy: match self.cargo_buildScripts_invocationStrategy() { + wrap_rustc_in_build_scripts: *self.cargo_buildScripts_useRustcWrapper(None), + invocation_strategy: match self.cargo_buildScripts_invocationStrategy(None) { InvocationStrategy::Once => project_model::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => project_model::InvocationStrategy::PerWorkspace, }, - invocation_location: match self.cargo_buildScripts_invocationLocation() { + invocation_location: match self.cargo_buildScripts_invocationLocation(None) { InvocationLocation::Root => { project_model::InvocationLocation::Root(self.root_path.clone()) } InvocationLocation::Workspace => project_model::InvocationLocation::Workspace, }, - run_build_script_command: self.cargo_buildScripts_overrideCommand().clone(), - extra_args: self.cargo_extraArgs().clone(), - extra_env: 
self.cargo_extraEnv().clone(), + run_build_script_command: self.cargo_buildScripts_overrideCommand(None).clone(), + extra_args: self.cargo_extraArgs(None).clone(), + extra_env: self.cargo_extraEnv(None).clone(), target_dir: self.target_dir_from_config(), } } - pub fn rustfmt(&self) -> RustfmtConfig { - match &self.rustfmt_overrideCommand() { + pub fn rustfmt(&self, source_root_id: Option<SourceRootId>) -> RustfmtConfig { + match &self.rustfmt_overrideCommand(source_root_id) { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); RustfmtConfig::CustomCommand { command, args } } Some(_) | None => RustfmtConfig::Rustfmt { - extra_args: self.rustfmt_extraArgs().clone(), - enable_range_formatting: *self.rustfmt_rangeFormatting_enable(), + extra_args: self.rustfmt_extraArgs(source_root_id).clone(), + enable_range_formatting: *self.rustfmt_rangeFormatting_enable(source_root_id), }, } } pub fn flycheck_workspace(&self) -> bool { - *self.check_workspace() + *self.check_workspace(None) } pub fn cargo_test_options(&self) -> CargoOptions { CargoOptions { - target_triples: self.cargo_target().clone().into_iter().collect(), + target_triples: self.cargo_target(None).clone().into_iter().collect(), all_targets: false, - no_default_features: *self.cargo_noDefaultFeatures(), - all_features: matches!(self.cargo_features(), CargoFeaturesDef::All), - features: match self.cargo_features().clone() { + no_default_features: *self.cargo_noDefaultFeatures(None), + all_features: matches!(self.cargo_features(None), CargoFeaturesDef::All), + features: match self.cargo_features(None).clone() { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, }, @@ -1752,7 +1925,7 @@ impl Config { } pub fn flycheck(&self) -> FlycheckConfig { - match &self.check_overrideCommand() { + match &self.check_overrideCommand(None) { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); @@ -1760,13 +1933,13 @@ impl Config { command, args, extra_env: self.check_extra_env(), - invocation_strategy: match self.check_invocationStrategy() { + invocation_strategy: match self.check_invocationStrategy(None) { InvocationStrategy::Once => flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { flycheck::InvocationStrategy::PerWorkspace } }, - invocation_location: match self.check_invocationLocation() { + invocation_location: match self.check_invocationLocation(None) { InvocationLocation::Root => { flycheck::InvocationLocation::Root(self.root_path.clone()) } @@ -1775,28 +1948,30 @@ impl Config { } } Some(_) | None => FlycheckConfig::CargoCommand { - command: self.check_command().clone(), + command: self.check_command(None).clone(), options: CargoOptions { target_triples: self - .check_targets() + .check_targets(None) .clone() .and_then(|targets| match &targets.0[..] 
{ [] => None, targets => Some(targets.into()), }) - .unwrap_or_else(|| self.cargo_target().clone().into_iter().collect()), - all_targets: self.check_allTargets().unwrap_or(*self.cargo_allTargets()), + .unwrap_or_else(|| self.cargo_target(None).clone().into_iter().collect()), + all_targets: self + .check_allTargets(None) + .unwrap_or(*self.cargo_allTargets(None)), no_default_features: self - .check_noDefaultFeatures() - .unwrap_or(*self.cargo_noDefaultFeatures()), + .check_noDefaultFeatures(None) + .unwrap_or(*self.cargo_noDefaultFeatures(None)), all_features: matches!( - self.check_features().as_ref().unwrap_or(self.cargo_features()), + self.check_features(None).as_ref().unwrap_or(self.cargo_features(None)), CargoFeaturesDef::All ), features: match self - .check_features() + .check_features(None) .clone() - .unwrap_or_else(|| self.cargo_features().clone()) + .unwrap_or_else(|| self.cargo_features(None).clone()) { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, @@ -1811,7 +1986,7 @@ impl Config { } fn target_dir_from_config(&self) -> Option<Utf8PathBuf> { - self.cargo_targetDir().as_ref().and_then(|target_dir| match target_dir { + self.cargo_targetDir(None).as_ref().and_then(|target_dir| match target_dir { TargetDirectory::UseSubdirectory(true) => { Some(Utf8PathBuf::from("target/rust-analyzer")) } @@ -1822,18 +1997,18 @@ impl Config { } pub fn check_on_save(&self) -> bool { - *self.checkOnSave() + *self.checkOnSave(None) } pub fn script_rebuild_on_save(&self) -> bool { - *self.cargo_buildScripts_rebuildOnSave() + *self.cargo_buildScripts_rebuildOnSave(None) } pub fn runnables(&self) -> RunnablesConfig { RunnablesConfig { - override_cargo: self.runnables_command().clone(), - cargo_extra_args: self.runnables_extraArgs().clone(), - extra_test_binary_args: self.runnables_extraTestBinaryArgs().clone(), + override_cargo: self.runnables_command(None).clone(), + cargo_extra_args: self.runnables_extraArgs(None).clone(), + extra_test_binary_args: self.runnables_extraTestBinaryArgs(None).clone(), } } @@ -1889,23 +2064,22 @@ impl Config { } pub fn client_commands(&self) -> ClientCommandsConfig { - let commands = self.commands(); - let force = commands.is_none() && *self.lens_forceCustomCommands(); - let commands = commands.map(|it| it.commands).unwrap_or_default(); + let commands = self.commands().map(|it| it.commands).unwrap_or_default(); - let get = |name: &str| commands.iter().any(|it| it == name) || force; + let get = |name: &str| commands.iter().any(|it| it == name); ClientCommandsConfig { run_single: get("rust-analyzer.runSingle"), debug_single: get("rust-analyzer.debugSingle"), show_reference: get("rust-analyzer.showReferences"), goto_location: get("rust-analyzer.gotoLocation"), - trigger_parameter_hints: get("editor.action.triggerParameterHints"), + trigger_parameter_hints: get("rust-analyzer.triggerParameterHints"), + rename: get("rust-analyzer.rename"), } } pub fn prime_caches_num_threads(&self) -> usize { - match self.cachePriming_numThreads() { + match self.cachePriming_numThreads(None) { NumThreads::Concrete(0) | NumThreads::Physical => num_cpus::get_physical(), &NumThreads::Concrete(n) => n, NumThreads::Logical => num_cpus::get(), @@ -2095,11 +2269,47 @@ mod single_or_array { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(untagged)] enum ManifestOrProjectJson { Manifest(Utf8PathBuf), ProjectJson(ProjectJsonData), + DiscoveredProjectJson { + data: ProjectJsonData, + #[serde(serialize_with 
= "serialize_abs_pathbuf")] + #[serde(deserialize_with = "deserialize_abs_pathbuf")] + buildfile: AbsPathBuf, + }, +} + +fn deserialize_abs_pathbuf<'de, D>(de: D) -> std::result::Result<AbsPathBuf, D::Error> +where + D: serde::de::Deserializer<'de>, +{ + let path = String::deserialize(de)?; + + AbsPathBuf::try_from(path.as_ref()) + .map_err(|err| serde::de::Error::custom(format!("invalid path name: {err:?}"))) +} + +fn serialize_abs_pathbuf<S>(path: &AbsPathBuf, se: S) -> Result<S::Ok, S::Error> +where + S: serde::Serializer, +{ + let path: &Utf8Path = path.as_ref(); + se.serialize_str(path.as_str()) +} + +impl ManifestOrProjectJson { + fn manifest(&self) -> Option<&Utf8Path> { + match self { + ManifestOrProjectJson::Manifest(manifest) => Some(manifest), + ManifestOrProjectJson::DiscoveredProjectJson { buildfile, .. } => { + Some(buildfile.as_ref()) + } + ManifestOrProjectJson::ProjectJson(_) => None, + } + } } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -2341,20 +2551,23 @@ macro_rules! _impl_for_config_data { $($doc)* #[allow(non_snake_case)] $vis fn $field(&self, source_root: Option<SourceRootId>) -> &$ty { - let mut par: Option<SourceRootId> = source_root; - while let Some(source_root_id) = par { - par = self.source_root_parent_map.get(&source_root_id).copied(); - if let Some((config, _)) = self.ratoml_files.get(&source_root_id) { - if let Some(value) = config.$field.as_ref() { - return value; + let mut source_root = source_root.as_ref(); + while let Some(sr) = source_root { + if let Some((file, _)) = self.ratoml_file.get(&sr) { + match file { + RatomlFile::Workspace(config) => { + if let Some(v) = config.local.$field.as_ref() { + return &v; + } + }, + RatomlFile::Crate(config) => { + if let Some(value) = config.$field.as_ref() { + return value; + } + } } } - } - - if let Some((root_path_ratoml, _)) = self.root_ratoml.as_ref() { - if let Some(v) = root_path_ratoml.local.$field.as_ref() { - return &v; - } + source_root = self.source_root_parent_map.get(&sr); } if let Some(v) = self.client_config.0.local.$field.as_ref() { @@ -2381,12 +2594,16 @@ macro_rules! _impl_for_config_data { $( $($doc)* #[allow(non_snake_case)] - $vis fn $field(&self) -> &$ty { - - if let Some((root_path_ratoml, _)) = self.root_ratoml.as_ref() { - if let Some(v) = root_path_ratoml.global.$field.as_ref() { - return &v; + $vis fn $field(&self, source_root : Option<SourceRootId>) -> &$ty { + let mut source_root = source_root.as_ref(); + while let Some(sr) = source_root { + if let Some((RatomlFile::Workspace(config), _)) = self.ratoml_file.get(&sr) { + if let Some(v) = config.global.$field.as_ref() { + return &v; + } } + + source_root = self.source_root_parent_map.get(&sr); } if let Some(v) = self.client_config.0.global.$field.as_ref() { @@ -2399,6 +2616,7 @@ macro_rules! _impl_for_config_data { } } + &self.default_config.global.$field } )* @@ -2479,7 +2697,7 @@ macro_rules! _config_data { fn from_json(json: &mut serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> Self { Self {$( - $field: get_field( + $field: get_field_json( json, error_sink, stringify!($field), @@ -2597,7 +2815,7 @@ impl GlobalLocalConfigInput { } } -fn get_field<T: DeserializeOwned>( +fn get_field_json<T: DeserializeOwned>( json: &mut serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>, field: &'static str, @@ -2745,7 +2963,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "FxHashMap<String, String>" => set! { "type": "object", }, - "FxHashMap<Box<str>, usize>" => set! 
{ + "FxHashMap<Box<str>, u16>" => set! { "type": "object", }, "FxHashMap<String, Option<String>>" => set! { @@ -2755,6 +2973,11 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "type": ["null", "integer"], "minimum": 0, }, + "Option<u16>" => set! { + "type": ["null", "integer"], + "minimum": 0, + "maximum": 65535, + }, "Option<String>" => set! { "type": ["null", "string"], }, @@ -3078,6 +3301,29 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json }, ], }, + "Option<DiscoverWorkspaceConfig>" => set! { + "anyOf": [ + { + "type": "null" + }, + { + "type": "object", + "properties": { + "command": { + "type": "array", + "items": { "type": "string" } + }, + "progressLabel": { + "type": "string" + }, + "filesToWatch": { + "type": "array", + "items": { "type": "string" } + }, + } + } + ] + }, _ => panic!("missing entry for {ty}: {default} (field {field})"), } @@ -3100,7 +3346,7 @@ fn validate_toml_table( ptr.push_str(k); match v { - // This is a table config, any entry in it is therefor valid + // This is a table config, any entry in it is therefore valid toml::Value::Table(_) if verify(ptr) => (), toml::Value::Table(table) => validate_toml_table(known_ptrs, table, ptr, error_sink), _ if !verify(ptr) => error_sink @@ -3300,7 +3546,7 @@ mod tests { })); (config, _, _) = config.apply_change(change); - assert_eq!(config.cargo_targetDir(), &None); + assert_eq!(config.cargo_targetDir(None), &None); assert!( matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir.is_none()) ); @@ -3323,7 +3569,7 @@ mod tests { (config, _, _) = config.apply_change(change); - assert_eq!(config.cargo_targetDir(), &Some(TargetDirectory::UseSubdirectory(true))); + assert_eq!(config.cargo_targetDir(None), &Some(TargetDirectory::UseSubdirectory(true))); assert!( matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer"))) ); @@ -3347,7 +3593,7 @@ mod tests { (config, _, _) = config.apply_change(change); assert_eq!( - config.cargo_targetDir(), + config.cargo_targetDir(None), &Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder"))) ); assert!( @@ -3367,7 +3613,7 @@ mod tests { let mut change = ConfigChange::default(); - change.change_root_ratoml(Some( + change.change_user_config(Some( toml::toml! 
{ [cargo.cfgs] these = "these" @@ -3426,21 +3672,7 @@ mod tests { let (_, e, _) = config.apply_change(change); expect_test::expect![[r#" ConfigErrors( - [ - Toml { - config_key: "invalid/config/err", - error: Error { - inner: Error { - inner: TomlError { - message: "unexpected field", - raw: None, - keys: [], - span: None, - }, - }, - }, - }, - ], + [], ) "#]] .assert_debug_eq(&e); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index de4c9586dfd..f1dde104fce 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -6,10 +6,11 @@ use std::{ops::Not as _, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; -use flycheck::FlycheckHandle; +use flycheck::{project_json, FlycheckHandle}; use hir::ChangeWithProcMacros; use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId}; use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt}; +use itertools::Itertools; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; use nohash_hasher::IntMap; @@ -20,18 +21,15 @@ use parking_lot::{ use proc_macro_api::ProcMacroServer; use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts}; use rustc_hash::{FxHashMap, FxHashSet}; -use tracing::{span, Level}; +use tracing::{span, trace, Level}; use triomphe::Arc; -use vfs::{AnchoredPathBuf, ChangeKind, Vfs}; +use vfs::{AbsPathBuf, AnchoredPathBuf, ChangeKind, Vfs, VfsPath}; use crate::{ - config::{Config, ConfigChange, ConfigErrors}, + config::{Config, ConfigChange, ConfigErrors, RatomlFileKind}, diagnostics::{CheckFixes, DiagnosticCollection}, line_index::{LineEndings, LineIndex}, - lsp::{ - from_proto::{self}, - to_proto::url_from_abs_path, - }, + lsp::{from_proto, to_proto::url_from_abs_path}, lsp_ext, main_loop::Task, mem_docs::MemDocs, @@ -41,6 +39,11 @@ use crate::{ task_pool::{TaskPool, TaskQueue}, }; +pub(crate) struct FetchWorkspaceRequest { + pub(crate) path: Option<AbsPathBuf>, + pub(crate) force_crate_graph_reload: bool, +} + // Enforces drop order pub(crate) struct Handle<H, C> { pub(crate) handle: H, @@ -95,6 +98,11 @@ pub(crate) struct GlobalState { pub(crate) test_run_receiver: Receiver<flycheck::CargoTestMessage>, pub(crate) test_run_remaining_jobs: usize, + // Project loading + pub(crate) discover_handle: Option<project_json::DiscoverHandle>, + pub(crate) discover_sender: Sender<project_json::DiscoverProjectMessage>, + pub(crate) discover_receiver: Receiver<project_json::DiscoverProjectMessage>, + // VFS pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>, pub(crate) vfs: Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>, @@ -134,11 +142,12 @@ pub(crate) struct GlobalState { // op queues pub(crate) fetch_workspaces_queue: - OpQueue<bool, Option<(Vec<anyhow::Result<ProjectWorkspace>>, bool)>>, + OpQueue<FetchWorkspaceRequest, Option<(Vec<anyhow::Result<ProjectWorkspace>>, bool)>>, pub(crate) fetch_build_data_queue: OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>, pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>, pub(crate) prime_caches_queue: OpQueue, + pub(crate) discover_workspace_queue: OpQueue, /// A deferred task queue. 
/// @@ -146,7 +155,7 @@ pub(crate) struct GlobalState { /// handlers, as accessing the database may block latency-sensitive /// interactions and should be moved away from the main thread. /// - /// For certain features, such as [`lsp_ext::UnindexedProjectParams`], + /// For certain features, such as [`GlobalState::handle_discover_msg`], /// this queue should run only *after* [`GlobalState::process_changes`] has /// been called. pub(crate) deferred_task_queue: TaskQueue, @@ -202,6 +211,9 @@ impl GlobalState { } let (flycheck_sender, flycheck_receiver) = unbounded(); let (test_run_sender, test_run_receiver) = unbounded(); + + let (discover_sender, discover_receiver) = unbounded(); + let mut this = GlobalState { sender, req_queue: ReqQueue::default(), @@ -233,6 +245,10 @@ impl GlobalState { test_run_receiver, test_run_remaining_jobs: 0, + discover_handle: None, + discover_sender, + discover_receiver, + vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))), vfs_config_version: 0, vfs_progress_config_version: 0, @@ -247,6 +263,7 @@ impl GlobalState { fetch_proc_macros_queue: OpQueue::default(), prime_caches_queue: OpQueue::default(), + discover_workspace_queue: OpQueue::default(), deferred_task_queue: task_queue, }; @@ -296,11 +313,24 @@ impl GlobalState { modified_rust_files.push(file.file_id); } + let additional_files = self + .config + .discover_workspace_config() + .map(|cfg| { + cfg.files_to_watch.iter().map(String::as_str).collect::<Vec<&str>>() + }) + .unwrap_or_default(); + let path = path.to_path_buf(); if file.is_created_or_deleted() { workspace_structure_change.get_or_insert((path, false)).1 |= self.crate_graph_file_dependencies.contains(vfs_path); - } else if reload::should_refresh_for_change(&path, file.kind()) { + } else if reload::should_refresh_for_change( + &path, + file.kind(), + &additional_files, + ) { + trace!(?path, kind = ?file.kind(), "refreshing for a change"); workspace_structure_change.get_or_insert((path.clone(), false)); } } @@ -350,37 +380,62 @@ impl GlobalState { { let config_change = { let user_config_path = self.config.user_config_path(); - let root_ratoml_path = self.config.root_ratoml_path(); let mut change = ConfigChange::default(); let db = self.analysis_host.raw_database(); + // FIXME @alibektas : This is silly. There is no reason to use VfsPaths when there is SourceRoots. But how + // do I resolve a "workspace_root" to its corresponding id without having to rely on a cargo.toml's ( or project json etc.) file id? + let workspace_ratoml_paths = self + .workspaces + .iter() + .map(|ws| { + VfsPath::from({ + let mut p = ws.workspace_root().to_owned(); + p.push("rust-analyzer.toml"); + p + }) + }) + .collect_vec(); + for (file_id, (_change_kind, vfs_path)) in modified_ratoml_files { if vfs_path == *user_config_path { change.change_user_config(Some(db.file_text(file_id))); continue; } - if vfs_path == *root_ratoml_path { - change.change_root_ratoml(Some(db.file_text(file_id))); - continue; - } - // If change has been made to a ratoml file that // belongs to a non-local source root, we will ignore it. - // As it doesn't make sense a users to use external config files. 
let sr_id = db.file_source_root(file_id); let sr = db.source_root(sr_id); + if !sr.is_library { - if let Some((old_path, old_text)) = change.change_ratoml( - sr_id, - vfs_path.clone(), - Some(db.file_text(file_id)), - ) { + let entry = if workspace_ratoml_paths.contains(&vfs_path) { + change.change_workspace_ratoml( + sr_id, + vfs_path.clone(), + Some(db.file_text(file_id)), + ) + } else { + change.change_ratoml( + sr_id, + vfs_path.clone(), + Some(db.file_text(file_id)), + ) + }; + + if let Some((kind, old_path, old_text)) = entry { // SourceRoot has more than 1 RATOML files. In this case lexicographically smaller wins. if old_path < vfs_path { span!(Level::ERROR, "Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect."); // Put the old one back in. - change.change_ratoml(sr_id, old_path, old_text); + match kind { + RatomlFileKind::Crate => { + change.change_ratoml(sr_id, old_path, old_text); + } + RatomlFileKind::Workspace => { + change.change_workspace_ratoml(sr_id, old_path, old_text); + } + } } } } else { @@ -398,7 +453,7 @@ impl GlobalState { if should_update { self.update_configuration(config); } else { - // No global or client level config was changed. So we can just naively replace config. + // No global or client level config was changed. So we can naively replace config. self.config = Arc::new(config); } } @@ -419,7 +474,7 @@ impl GlobalState { self.fetch_workspaces_queue.request_op( format!("workspace vfs file change: {path}"), - force_crate_graph_reload, + FetchWorkspaceRequest { path: Some(path.to_owned()), force_crate_graph_reload }, ); } } @@ -579,6 +634,7 @@ impl GlobalStateSnapshot { target_kind: target_data.kind, required_features: target_data.required_features.clone(), features: package_data.features.keys().cloned().collect(), + sysroot_root: workspace.sysroot.root().map(ToOwned::to_owned), })); } ProjectWorkspaceKind::Json(project) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 095d7c941c1..4b14dcfc372 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -14,7 +14,7 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath}; use crate::{ config::{Config, ConfigChange}, - global_state::GlobalState, + global_state::{FetchWorkspaceRequest, GlobalState}, lsp::{from_proto, utils::apply_document_changes}, lsp_ext::{self, RunFlycheckParams}, mem_docs::DocumentData, @@ -73,7 +73,7 @@ pub(crate) fn handle_did_open_text_document( tracing::info!("New file content set {:?}", params.text_document.text); state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes())); - if state.config.notifications().unindexed_project { + if state.config.discover_workspace_config().is_some() { tracing::debug!("queuing task"); let _ = state .deferred_task_queue @@ -150,15 +150,29 @@ pub(crate) fn handle_did_save_text_document( if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { // Re-fetch workspaces if a workspace related file has changed - if let Some(abs_path) = vfs_path.as_path() { - if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) { - state - .fetch_workspaces_queue - .request_op(format!("workspace vfs file change saved {abs_path}"), false); - } else if state.detached_files.contains(abs_path) { - state - .fetch_workspaces_queue - .request_op(format!("detached file saved 
{abs_path}"), false); + if let Some(path) = vfs_path.as_path() { + let additional_files = &state + .config + .discover_workspace_config() + .map(|cfg| cfg.files_to_watch.iter().map(String::as_str).collect::<Vec<&str>>()) + .unwrap_or_default(); + + if reload::should_refresh_for_change(path, ChangeKind::Modify, additional_files) { + state.fetch_workspaces_queue.request_op( + format!("workspace vfs file change saved {path}"), + FetchWorkspaceRequest { + path: Some(path.to_owned()), + force_crate_graph_reload: false, + }, + ); + } else if state.detached_files.contains(path) { + state.fetch_workspaces_queue.request_op( + format!("detached file saved {path}"), + FetchWorkspaceRequest { + path: Some(path.to_owned()), + force_crate_graph_reload: false, + }, + ); } } @@ -240,7 +254,9 @@ pub(crate) fn handle_did_change_workspace_folders( if !config.has_linked_projects() && config.detached_files().is_empty() { config.rediscover_workspaces(); - state.fetch_workspaces_queue.request_op("client workspaces changed".to_owned(), false) + + let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }; + state.fetch_workspaces_queue.request_op("client workspaces changed".to_owned(), req); } Ok(()) @@ -274,7 +290,6 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { .into_iter() .flat_map(|id| world.analysis.transitive_rev_deps(id)) .flatten() - .sorted() .unique() .collect(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index e19f7a4898b..eca139d79ae 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -27,7 +27,7 @@ use lsp_types::{ SemanticTokensResult, SymbolInformation, SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit, }; use paths::Utf8PathBuf; -use project_model::{ManifestPath, ProjectWorkspaceKind, TargetKind}; +use project_model::{CargoWorkspace, ManifestPath, ProjectWorkspaceKind, TargetKind}; use serde_json::json; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; @@ -37,7 +37,7 @@ use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath}; use crate::{ config::{Config, RustfmtConfig, WorkspaceSymbolConfig}, diff::diff, - global_state::{GlobalState, GlobalStateSnapshot}, + global_state::{FetchWorkspaceRequest, GlobalState, GlobalStateSnapshot}, hack_recover_crate_name, line_index::LineEndings, lsp::{ @@ -50,14 +50,15 @@ use crate::{ self, CrateInfoResult, ExternalDocsPair, ExternalDocsResponse, FetchDependencyListParams, FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams, }, - target_spec::TargetSpec, + target_spec::{CargoTargetSpec, TargetSpec}, }; pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { state.proc_macro_clients = Arc::from_iter([]); state.build_deps_changed = false; - state.fetch_workspaces_queue.request_op("reload workspace request".to_owned(), false); + let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }; + state.fetch_workspaces_queue.request_op("reload workspace request".to_owned(), req); Ok(()) } @@ -110,6 +111,13 @@ pub(crate) fn handle_analyzer_status( .status(file_id) .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()), ); + + buf.push_str("\nVersion: \n"); + format_to!(buf, "{}", crate::version()); + + buf.push_str("\nConfiguration: \n"); + format_to!(buf, "{:?}", snap.config); 
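
The `find_package_name` helper introduced above undoes cargo's namespace normalization: test identifiers use underscores, while `cargo test -p` needs the real package name, which may contain hyphens. A standalone sketch of the same comparison, using a plain list of package names in place of a `CargoWorkspace`:

```rust
/// Illustrative lookup: map a crate-namespace root (underscores) back to the
/// real package name (possibly hyphenated).
fn find_package_name<'a>(namespace_root: &str, package_names: &'a [String]) -> Option<&'a str> {
    package_names
        .iter()
        .find(|name| name.replace('-', "_") == namespace_root)
        .map(String::as_str)
}

fn main() {
    let packages = vec!["my-crate".to_owned(), "other".to_owned()];
    // A test id like `my_crate::tests::smoke` has namespace root `my_crate`.
    assert_eq!(find_package_name("my_crate", &packages), Some("my-crate"));
    assert_eq!(find_package_name("missing", &packages), None);
}
```
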
+ Ok(buf) } @@ -126,11 +134,6 @@ pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Res Ok(out) } -pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { - state.analysis_host.shuffle_crate_graph(); - Ok(()) -} - pub(crate) fn handle_syntax_tree( snap: GlobalStateSnapshot, params: lsp_ext::SyntaxTreeParams, @@ -191,6 +194,20 @@ pub(crate) fn handle_view_item_tree( Ok(res) } +// cargo test requires the real package name which might contain hyphens but +// the test identifier passed to this function is the namespace form where hyphens +// are replaced with underscores so we have to reverse this and find the real package name +fn find_package_name(namespace_root: &str, cargo: &CargoWorkspace) -> Option<String> { + cargo.packages().find_map(|p| { + let package_name = &cargo[p].name; + if package_name.replace('-', "_") == namespace_root { + Some(package_name.clone()) + } else { + None + } + }) +} + pub(crate) fn handle_run_test( state: &mut GlobalState, params: lsp_ext::RunTestParams, @@ -198,7 +215,7 @@ pub(crate) fn handle_run_test( if let Some(_session) = state.test_run_session.take() { state.send_notification::<lsp_ext::EndRunTest>(()); } - // We detect the lowest common ansector of all included tests, and + // We detect the lowest common ancestor of all included tests, and // run it. We ignore excluded tests for now, the client will handle // it for us. let lca = match params.include { @@ -217,20 +234,31 @@ pub(crate) fn handle_run_test( .unwrap_or_default(), None => "".to_owned(), }; - let test_path = if lca.is_empty() { - None - } else if let Some((_, path)) = lca.split_once("::") { - Some(path) + let (namespace_root, test_path) = if lca.is_empty() { + (None, None) + } else if let Some((namespace_root, path)) = lca.split_once("::") { + (Some(namespace_root), Some(path)) } else { - None + (Some(lca.as_str()), None) }; let mut handles = vec![]; for ws in &*state.workspaces { if let ProjectWorkspaceKind::Cargo { cargo, .. } = &ws.kind { + let test_target = if let Some(namespace_root) = namespace_root { + if let Some(package_name) = find_package_name(namespace_root, cargo) { + flycheck::TestTarget::Package(package_name) + } else { + flycheck::TestTarget::Workspace + } + } else { + flycheck::TestTarget::Workspace + }; + let handle = flycheck::CargoTestHandle::new( test_path, state.config.cargo_test_options(), cargo.workspace_root(), + test_target, state.test_run_sender.clone(), )?; handles.push(handle); @@ -848,6 +876,14 @@ pub(crate) fn handle_runnables( if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args { runnable.label = format!("{} + expect", runnable.label); r.environment.insert("UPDATE_EXPECT".to_owned(), "1".to_owned()); + if let Some(TargetSpec::Cargo(CargoTargetSpec { + sysroot_root: Some(sysroot_root), + .. 
+ })) = &target_spec + { + r.environment + .insert("RUSTC_TOOLCHAIN".to_owned(), sysroot_root.to_string()); + } } } res.push(runnable); @@ -889,7 +925,12 @@ pub(crate) fn handle_runnables( override_cargo: config.override_cargo.clone(), cargo_args, executable_args: Vec::new(), - environment: Default::default(), + environment: spec + .sysroot_root + .as_ref() + .map(|root| ("RUSTC_TOOLCHAIN".to_owned(), root.to_string())) + .into_iter() + .collect(), }), }) } @@ -2069,8 +2110,9 @@ fn run_rustfmt( let edition = editions.iter().copied().max(); let line_index = snap.file_line_index(file_id)?; + let sr = snap.analysis.source_root_id(file_id)?; - let mut command = match snap.config.rustfmt() { + let mut command = match snap.config.rustfmt(Some(sr)) { RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => { // FIXME: Set RUSTUP_TOOLCHAIN let mut cmd = process::Command::new(toolchain::Tool::Rustfmt.path()); @@ -2259,7 +2301,7 @@ pub(crate) fn internal_testing_fetch_config( serde_json::to_value(match &*params.config { "local" => state.config.assist(source_root).assist_emit_must_use, "global" => matches!( - state.config.rustfmt(), + state.config.rustfmt(source_root), RustfmtConfig::Rustfmt { enable_range_formatting: true, .. } ), _ => return Err(anyhow::anyhow!("Unknown test config key: {}", params.config)), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index 9a852067f2e..1fcb636f856 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -75,14 +75,6 @@ impl Request for MemoryUsage { const METHOD: &'static str = "rust-analyzer/memoryUsage"; } -pub enum ShuffleCrateGraph {} - -impl Request for ShuffleCrateGraph { - type Params = (); - type Result = (); - const METHOD: &'static str = "rust-analyzer/shuffleCrateGraph"; -} - pub enum ReloadWorkspace {} impl Request for ReloadWorkspace { @@ -531,7 +523,7 @@ pub struct ServerStatusParams { pub message: Option<String>, } -#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] #[serde(rename_all = "camelCase")] pub enum Health { Ok, @@ -834,16 +826,3 @@ pub struct CompletionImport { pub struct ClientCommandOptions { pub commands: Vec<String>, } - -pub enum UnindexedProject {} - -impl Notification for UnindexedProject { - type Params = UnindexedProjectParams; - const METHOD: &'static str = "rust-analyzer/unindexedProject"; -} - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct UnindexedProjectParams { - pub text_documents: Vec<TextDocumentIdentifier>, -} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs index 60fe847bb7d..aea424298f8 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs @@ -1,10 +1,7 @@ //! Conversion lsp_types types to rust-analyzer specific ones. 
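
The runnable changes above thread an optional sysroot root into the spawned command's environment as `RUSTC_TOOLCHAIN`. The `Option` → iterator → `collect` idiom they rely on yields either an empty map or a single-entry map; a self-contained sketch of that shape (the toolchain path below is made up):

```rust
use std::collections::HashMap;

/// An `Option` maps to zero or one `(key, value)` pairs; collecting the
/// resulting iterator gives an empty map or one with a single entry.
fn runnable_environment(sysroot_root: Option<&str>) -> HashMap<String, String> {
    sysroot_root
        .map(|root| ("RUSTC_TOOLCHAIN".to_owned(), root.to_owned()))
        .into_iter()
        .collect()
}

fn main() {
    assert!(runnable_environment(None).is_empty());
    let env = runnable_environment(Some("/home/user/.rustup/toolchains/stable"));
    assert_eq!(
        env.get("RUSTC_TOOLCHAIN").map(String::as_str),
        Some("/home/user/.rustup/toolchains/stable"),
    );
}
```
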
use anyhow::format_err; use ide::{Annotation, AnnotationKind, AssistKind, LineCol}; -use ide_db::{ - base_db::{FileId, FilePosition, FileRange}, - line_index::WideLineCol, -}; +use ide_db::{line_index::WideLineCol, FileId, FilePosition, FileRange}; use syntax::{TextRange, TextSize}; use vfs::AbsPathBuf; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index de394d3d118..eb6bc2a9ce9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -13,7 +13,7 @@ use ide::{ NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp, SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize, }; -use ide_db::{rust_doc::format_docs, FxHasher}; +use ide_db::{assists, rust_doc::format_docs, FxHasher}; use itertools::Itertools; use paths::{Utf8Component, Utf8Prefix}; use semver::VersionReq; @@ -1336,9 +1336,14 @@ pub(crate) fn code_action( command: None, }; - if assist.trigger_signature_help && snap.config.client_commands().trigger_parameter_hints { - res.command = Some(command::trigger_parameter_hints()); - } + let commands = snap.config.client_commands(); + res.command = match assist.command { + Some(assists::Command::TriggerParameterHints) if commands.trigger_parameter_hints => { + Some(command::trigger_parameter_hints()) + } + Some(assists::Command::Rename) if commands.rename => Some(command::rename()), + _ => None, + }; match (assist.source_change, resolve_data) { (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?), @@ -1394,7 +1399,11 @@ pub(crate) fn runnable( cargo_args, cwd: cwd.into(), executable_args, - environment: Default::default(), + environment: spec + .sysroot_root + .map(|root| ("RUSTC_TOOLCHAIN".to_owned(), root.to_string())) + .into_iter() + .collect(), }), })) } @@ -1715,6 +1724,14 @@ pub(crate) mod command { arguments: None, } } + + pub(crate) fn rename() -> lsp_types::Command { + lsp_types::Command { + title: "rename".into(), + command: "rust-analyzer.rename".into(), + arguments: None, + } + } } pub(crate) fn implementation_title(count: usize) -> String { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs index 800c0eee53a..9a9e66be51c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs @@ -74,13 +74,12 @@ impl GlobalState { } } - /// Sends a notification to the client containing the error `message`. /// If `additional_info` is [`Some`], appends a note to the notification telling to check the logs. /// This will always log `message` + `additional_info` to the server's error log. 
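
The logging tweaks just below only change formatting style: positional `{}` arguments become Rust 2021 inline captures like `{message}`. Both spell the same output, as this small `format!`-based check shows (`tracing`'s macros accept the same syntax):

```rust
fn main() {
    let message = "failed to load workspace";
    let additional_info = "check the server logs";

    // Positional arguments, as in the old calls.
    let old = format!("{}:\n{}", message, additional_info);
    // Rust 2021 inline captures, as in the updated calls.
    let new = format!("{message}:\n{additional_info}");

    assert_eq!(old, new);
}
```
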
pub(crate) fn show_and_log_error(&mut self, message: String, additional_info: Option<String>) { match additional_info { Some(additional_info) => { - tracing::error!("{}:\n{}", &message, &additional_info); + tracing::error!("{message}:\n{additional_info}"); self.show_message( lsp_types::MessageType::ERROR, message, @@ -88,7 +87,7 @@ impl GlobalState { ); } None => { - tracing::error!("{}", &message); + tracing::error!("{message}"); self.send_notification::<lsp_types::notification::ShowMessage>( lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message }, ); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 07414a6e49c..9c820749ece 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -9,18 +9,19 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; +use flycheck::project_json; use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; use lsp_server::{Connection, Notification, Request}; use lsp_types::{notification::Notification as _, TextDocumentIdentifier}; use stdx::thread::ThreadIntent; -use tracing::{span, Level}; -use vfs::FileId; +use tracing::{error, span, Level}; +use vfs::{AbsPathBuf, FileId}; use crate::{ config::Config, diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration}, dispatch::{NotificationDispatcher, RequestDispatcher}, - global_state::{file_id_to_url, url_to_file_id, GlobalState}, + global_state::{file_id_to_url, url_to_file_id, FetchWorkspaceRequest, GlobalState}, hack_recover_crate_name, lsp::{ from_proto, to_proto, @@ -62,6 +63,7 @@ enum Event { Vfs(vfs::loader::Message), Flycheck(flycheck::Message), TestResult(flycheck::CargoTestMessage), + DiscoverProject(project_json::DiscoverProjectMessage), } impl fmt::Display for Event { @@ -73,6 +75,7 @@ impl fmt::Display for Event { Event::Flycheck(_) => write!(f, "Event::Flycheck"), Event::QueuedTask(_) => write!(f, "Event::QueuedTask"), Event::TestResult(_) => write!(f, "Event::TestResult"), + Event::DiscoverProject(_) => write!(f, "Event::DiscoverProject"), } } } @@ -86,7 +89,7 @@ pub(crate) enum QueuedTask { #[derive(Debug)] pub(crate) enum Task { Response(lsp_server::Response), - ClientNotification(lsp_ext::UnindexedProjectParams), + DiscoverLinkedProjects(DiscoverProjectParam), Retry(lsp_server::Request), Diagnostics(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>), DiscoverTest(lsp_ext::DiscoverTestResults), @@ -98,6 +101,12 @@ pub(crate) enum Task { } #[derive(Debug)] +pub(crate) enum DiscoverProjectParam { + Buildfile(AbsPathBuf), + Path(AbsPathBuf), +} + +#[derive(Debug)] pub(crate) enum PrimeCachesProgress { Begin, Report(ide::ParallelPrimeCachesProgress), @@ -134,6 +143,7 @@ impl fmt::Debug for Event { Event::Vfs(it) => fmt::Debug::fmt(it, f), Event::Flycheck(it) => fmt::Debug::fmt(it, f), Event::TestResult(it) => fmt::Debug::fmt(it, f), + Event::DiscoverProject(it) => fmt::Debug::fmt(it, f), } } } @@ -143,14 +153,24 @@ impl GlobalState { self.update_status_or_notify(); if self.config.did_save_text_document_dynamic_registration() { - self.register_did_save_capability(); + let additional_patterns = self + .config + .discover_workspace_config() + .map(|cfg| cfg.files_to_watch.clone().into_iter()) + .into_iter() + .flatten() + .map(|f| format!("**/{f}")); + self.register_did_save_capability(additional_patterns); } - 
self.fetch_workspaces_queue.request_op("startup".to_owned(), false); - if let Some((cause, force_crate_graph_reload)) = - self.fetch_workspaces_queue.should_start_op() - { - self.fetch_workspaces(cause, force_crate_graph_reload); + if self.config.discover_workspace_config().is_none() { + let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }; + self.fetch_workspaces_queue.request_op("startup".to_owned(), req); + if let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = + self.fetch_workspaces_queue.should_start_op() + { + self.fetch_workspaces(cause, path, force_crate_graph_reload); + } } while let Some(event) = self.next_event(&inbox) { @@ -167,32 +187,36 @@ impl GlobalState { anyhow::bail!("client exited without proper shutdown sequence") } - fn register_did_save_capability(&mut self) { + fn register_did_save_capability(&mut self, additional_patterns: impl Iterator<Item = String>) { + let additional_filters = additional_patterns.map(|pattern| lsp_types::DocumentFilter { + language: None, + scheme: None, + pattern: (Some(pattern)), + }); + + let mut selectors = vec![ + lsp_types::DocumentFilter { + language: None, + scheme: None, + pattern: Some("**/*.rs".into()), + }, + lsp_types::DocumentFilter { + language: None, + scheme: None, + pattern: Some("**/Cargo.toml".into()), + }, + lsp_types::DocumentFilter { + language: None, + scheme: None, + pattern: Some("**/Cargo.lock".into()), + }, + ]; + selectors.extend(additional_filters); + let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions { include_text: Some(false), text_document_registration_options: lsp_types::TextDocumentRegistrationOptions { - document_selector: Some(vec![ - lsp_types::DocumentFilter { - language: None, - scheme: None, - pattern: Some("**/*.rs".into()), - }, - lsp_types::DocumentFilter { - language: None, - scheme: None, - pattern: Some("**/Cargo.toml".into()), - }, - lsp_types::DocumentFilter { - language: None, - scheme: None, - pattern: Some("**/Cargo.lock".into()), - }, - lsp_types::DocumentFilter { - language: None, - scheme: None, - pattern: Some("**/rust-analyzer.toml".into()), - }, - ]), + document_selector: Some(selectors), }, }; @@ -230,6 +254,8 @@ impl GlobalState { recv(self.test_run_receiver) -> task => Some(Event::TestResult(task.unwrap())), + recv(self.discover_receiver) -> task => + Some(Event::DiscoverProject(task.unwrap())), } } @@ -340,6 +366,13 @@ impl GlobalState { self.handle_cargo_test_msg(message); } } + Event::DiscoverProject(message) => { + self.handle_discover_msg(message); + // Coalesce many project discovery events into a single loop turn. 
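
The new `Event::DiscoverProject` arm above drains any further discovery messages with `try_recv` so a burst of events is handled in a single loop turn. A minimal sketch of that coalescing pattern, assuming only the `crossbeam-channel` crate the server already uses:

```rust
// Sketch: after handling one message, drain whatever else is already queued so
// a burst of events costs a single loop turn.
use crossbeam_channel::unbounded;

fn main() {
    let (tx, rx) = unbounded::<String>();
    for i in 0..3 {
        tx.send(format!("progress {i}")).unwrap();
    }

    let mut handled = Vec::new();
    if let Ok(first) = rx.recv() {
        handled.push(first);
        // Non-blocking drain of everything already pending.
        while let Ok(next) = rx.try_recv() {
            handled.push(next);
        }
    }
    assert_eq!(handled.len(), 3);
}
```
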
+ while let Ok(message) = self.discover_receiver.try_recv() { + self.handle_discover_msg(message); + } + } } let event_handling_duration = loop_start.elapsed(); @@ -427,11 +460,13 @@ impl GlobalState { } } - if self.config.cargo_autoreload_config() { - if let Some((cause, force_crate_graph_reload)) = + if self.config.cargo_autoreload_config() + || self.config.discover_workspace_config().is_some() + { + if let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = self.fetch_workspaces_queue.should_start_op() { - self.fetch_workspaces(cause, force_crate_graph_reload); + self.fetch_workspaces(cause, path, force_crate_graph_reload); } } @@ -606,9 +641,6 @@ impl GlobalState { fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) { match task { Task::Response(response) => self.respond(response), - Task::ClientNotification(params) => { - self.send_notification::<lsp_ext::UnindexedProject>(params) - } // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work. Task::Retry(req) if !self.is_completed(&req) => self.on_request(req), Task::Retry(_) => (), @@ -638,7 +670,7 @@ impl GlobalState { self.fetch_workspaces_queue .op_completed(Some((workspaces, force_reload_crate_graph))); if let Err(e) = self.fetch_workspace_error() { - tracing::error!("FetchWorkspaceError:\n{e}"); + error!("FetchWorkspaceError:\n{e}"); } self.switch_workspaces("fetched workspace".to_owned()); (Progress::End, None) @@ -647,6 +679,35 @@ impl GlobalState { self.report_progress("Fetching", state, msg, None, None); } + Task::DiscoverLinkedProjects(arg) => { + if let Some(cfg) = self.config.discover_workspace_config() { + if !self.discover_workspace_queue.op_in_progress() { + // the clone is unfortunately necessary to avoid a borrowck error when + // `self.report_progress` is called later + let title = &cfg.progress_label.clone(); + let command = cfg.command.clone(); + let discover = + project_json::Discover::new(self.discover_sender.clone(), command); + + self.report_progress(title, Progress::Begin, None, None, None); + self.discover_workspace_queue + .request_op("Discovering workspace".to_owned(), ()); + let _ = self.discover_workspace_queue.should_start_op(); + + let arg = match arg { + DiscoverProjectParam::Buildfile(it) => { + project_json::DiscoverArgument::Buildfile(it) + } + DiscoverProjectParam::Path(it) => { + project_json::DiscoverArgument::Path(it) + } + }; + + let handle = discover.spawn(arg).unwrap(); + self.discover_handle = Some(handle); + } + } + } Task::FetchBuildData(progress) => { let (state, msg) = match progress { BuildDataProgress::Begin => (Some(Progress::Begin), None), @@ -654,7 +715,7 @@ impl GlobalState { BuildDataProgress::End(build_data_result) => { self.fetch_build_data_queue.op_completed(build_data_result); if let Err(e) = self.fetch_build_data_error() { - tracing::error!("FetchBuildDataError:\n{e}"); + error!("FetchBuildDataError:\n{e}"); } self.switch_workspaces("fetched build data".to_owned()); @@ -755,10 +816,12 @@ impl GlobalState { let id = from_proto::file_id(&snap, &uri).expect("unable to get FileId"); if let Ok(crates) = &snap.analysis.crates_for(id) { if crates.is_empty() { - let params = lsp_ext::UnindexedProjectParams { - text_documents: vec![lsp_types::TextDocumentIdentifier { uri }], - }; - sender.send(Task::ClientNotification(params)).unwrap(); + if snap.config.discover_workspace_config().is_some() { + let path = + from_proto::abs_path(&uri).expect("Unable to get AbsPath"); + let arg = 
DiscoverProjectParam::Path(path); + sender.send(Task::DiscoverLinkedProjects(arg)).unwrap(); + } } else { tracing::debug!(?uri, "is indexed"); } @@ -787,6 +850,33 @@ impl GlobalState { } } + fn handle_discover_msg(&mut self, message: project_json::DiscoverProjectMessage) { + let title = self + .config + .discover_workspace_config() + .map(|cfg| cfg.progress_label.clone()) + .expect("No title could be found; this is a bug"); + match message { + project_json::DiscoverProjectMessage::Finished { project, buildfile } => { + self.report_progress(&title, Progress::End, None, None, None); + self.discover_workspace_queue.op_completed(()); + + let mut config = Config::clone(&*self.config); + config.add_linked_projects(project, buildfile); + self.update_configuration(config); + } + project_json::DiscoverProjectMessage::Progress { message } => { + self.report_progress(&title, Progress::Report, Some(message), None, None) + } + project_json::DiscoverProjectMessage::Error { error, source } => { + let message = format!("Project discovery failed: {error}"); + self.discover_workspace_queue.op_completed(()); + self.show_and_log_error(message.clone(), source); + self.report_progress(&title, Progress::End, Some(message), None, None) + } + } + } + fn handle_cargo_test_msg(&mut self, message: flycheck::CargoTestMessage) { match message { flycheck::CargoTestMessage::Test { name, state } => { @@ -838,7 +928,7 @@ impl GlobalState { diag.fix, ), Err(err) => { - tracing::error!( + error!( "flycheck {id}: File with cargo diagnostic not found in VFS: {}", err ); @@ -928,7 +1018,6 @@ impl GlobalState { .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload) .on_sync_mut::<lsp_ext::RebuildProcMacros>(handlers::handle_proc_macros_rebuild) .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage) - .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph) .on_sync_mut::<lsp_ext::RunTest>(handlers::handle_run_test) // Request handlers which are related to the user typing // are run on the main thread to reduce latency: @@ -964,10 +1053,10 @@ impl GlobalState { .on::<NO_RETRY, lsp_request::GotoDeclaration>(handlers::handle_goto_declaration) .on::<NO_RETRY, lsp_request::GotoImplementation>(handlers::handle_goto_implementation) .on::<NO_RETRY, lsp_request::GotoTypeDefinition>(handlers::handle_goto_type_definition) - .on::<RETRY, lsp_request::InlayHintRequest>(handlers::handle_inlay_hints) - .on::<RETRY, lsp_request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve) + .on::<NO_RETRY, lsp_request::InlayHintRequest>(handlers::handle_inlay_hints) + .on::<NO_RETRY, lsp_request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve) .on::<NO_RETRY, lsp_request::CodeLensRequest>(handlers::handle_code_lens) - .on::<RETRY, lsp_request::CodeLensResolve>(handlers::handle_code_lens_resolve) + .on::<NO_RETRY, lsp_request::CodeLensResolve>(handlers::handle_code_lens_resolve) .on::<NO_RETRY, lsp_request::PrepareRenameRequest>(handlers::handle_prepare_rename) .on::<NO_RETRY, lsp_request::Rename>(handlers::handle_rename) .on::<NO_RETRY, lsp_request::References>(handlers::handle_references) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs index 932730fc234..99f9e9829c9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs @@ -3,6 +3,7 @@ pub(crate) type Cause = String; +#[derive(Debug)] pub(crate) 
struct OpQueue<Args = (), Output = ()> { op_requested: Option<(Cause, Args)>, op_in_progress: bool, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 1039daf850c..5c95ccd4b82 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -16,8 +16,7 @@ use std::{iter, mem}; use flycheck::{FlycheckConfig, FlycheckHandle}; -use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros}; -use ide::CrateId; +use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder}; use ide_db::{ base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version}, FxHashMap, @@ -33,11 +32,12 @@ use vfs::{AbsPath, AbsPathBuf, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, - global_state::GlobalState, + global_state::{FetchWorkspaceRequest, GlobalState}, lsp_ext, - main_loop::Task, + main_loop::{DiscoverProjectParam, Task}, op_queue::Cause, }; +use tracing::{debug, info}; #[derive(Debug)] pub(crate) enum ProjectWorkspaceProgress { @@ -66,6 +66,7 @@ impl GlobalState { || self.fetch_workspaces_queue.op_in_progress() || self.fetch_build_data_queue.op_in_progress() || self.fetch_proc_macros_queue.op_in_progress() + || self.discover_workspace_queue.op_in_progress() || self.vfs_progress_config_version < self.vfs_config_version || self.vfs_progress_n_done < self.vfs_progress_n_total) } @@ -81,9 +82,11 @@ impl GlobalState { &self.config.lru_query_capacities_config().cloned().unwrap_or_default(), ); } + if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects() { - self.fetch_workspaces_queue.request_op("discovered projects changed".to_owned(), false) + let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }; + self.fetch_workspaces_queue.request_op("discovered projects changed".to_owned(), req) } else if self.config.flycheck() != old_config.flycheck() { self.reload_flycheck(); } @@ -106,9 +109,10 @@ impl GlobalState { }; let mut message = String::new(); - if !self.config.cargo_autoreload() + if !self.config.cargo_autoreload(None) && self.is_quiescent() && self.fetch_workspaces_queue.op_requested() + && self.config.discover_workspace_config().is_none() { status.health |= lsp_ext::Health::Warning; message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n"); @@ -124,7 +128,6 @@ impl GlobalState { status.health |= lsp_ext::Health::Warning; message.push_str("Failed to run build scripts of some packages.\n\n"); } - if let Some(err) = &self.config_errors { status.health |= lsp_ext::Health::Warning; format_to!(message, "{err}\n"); @@ -217,8 +220,13 @@ impl GlobalState { status } - pub(crate) fn fetch_workspaces(&mut self, cause: Cause, force_crate_graph_reload: bool) { - tracing::info!(%cause, "will fetch workspaces"); + pub(crate) fn fetch_workspaces( + &mut self, + cause: Cause, + path: Option<AbsPathBuf>, + force_crate_graph_reload: bool, + ) { + info!(%cause, "will fetch workspaces"); self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, { let linked_projects = self.config.linked_or_discovered_projects(); @@ -231,6 +239,10 @@ impl GlobalState { .filter_map(Result::ok) .collect(); let cargo_config = self.config.cargo(); + let discover_command = self.config.discover_workspace_config().cloned(); + let is_quiescent = !(self.discover_workspace_queue.op_in_progress() + || 
self.vfs_progress_config_version < self.vfs_config_version + || self.vfs_progress_n_done < self.vfs_progress_n_total); move |sender| { let progress = { @@ -244,10 +256,28 @@ impl GlobalState { sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap(); + if let (Some(_command), Some(path)) = (&discover_command, &path) { + let build = linked_projects.iter().find_map(|project| match project { + LinkedProject::InlineJsonProject(it) => it.crate_by_buildfile(path), + _ => None, + }); + + if let Some(build) = build { + if is_quiescent { + let path = AbsPathBuf::try_from(build.build_file) + .expect("Unable to convert to an AbsPath"); + let arg = DiscoverProjectParam::Buildfile(path); + sender.send(Task::DiscoverLinkedProjects(arg)).unwrap(); + } + } + } + let mut workspaces = linked_projects .iter() .map(|project| match project { LinkedProject::ProjectManifest(manifest) => { + debug!(path = %manifest, "loading project from manifest"); + project_model::ProjectWorkspace::load( manifest.clone(), &cargo_config, @@ -255,12 +285,13 @@ impl GlobalState { ) } LinkedProject::InlineJsonProject(it) => { - Ok(project_model::ProjectWorkspace::load_inline( + let workspace = project_model::ProjectWorkspace::load_inline( it.clone(), cargo_config.target.as_deref(), &cargo_config.extra_env, &cargo_config.cfg_overrides, - )) + ); + Ok(workspace) } }) .collect::<Vec<_>>(); @@ -286,7 +317,7 @@ impl GlobalState { )); } - tracing::info!("did fetch workspaces {:?}", workspaces); + info!(?workspaces, "did fetch workspaces"); sender .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End( workspaces, @@ -298,7 +329,7 @@ impl GlobalState { } pub(crate) fn fetch_build_data(&mut self, cause: Cause) { - tracing::info!(%cause, "will fetch build data"); + info!(%cause, "will fetch build data"); let workspaces = Arc::clone(&self.workspaces); let config = self.config.cargo(); let root_path = self.config.root_path().clone(); @@ -324,7 +355,7 @@ impl GlobalState { } pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec<ProcMacroPaths>) { - tracing::info!(%cause, "will load proc macros"); + info!(%cause, "will load proc macros"); let ignored_proc_macros = self.config.ignored_proc_macros().clone(); let proc_macro_clients = self.proc_macro_clients.clone(); @@ -339,43 +370,44 @@ impl GlobalState { } }; - let mut res = FxHashMap::default(); + let mut builder = ProcMacrosBuilder::default(); let chain = proc_macro_clients .iter() .map(|res| res.as_ref().map_err(|e| e.to_string())) - .chain(iter::repeat_with(|| Err("Proc macros servers are not running".into()))); + .chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into()))); for (client, paths) in chain.zip(paths) { - res.extend(paths.into_iter().map(move |(crate_id, res)| { - ( - crate_id, - res.map_or_else( - |_| Err("proc macro crate is missing dylib".to_owned()), - |(crate_name, path)| { - progress(path.to_string()); - client.as_ref().map_err(Clone::clone).and_then(|client| { - load_proc_macro( - client, - &path, - crate_name - .as_deref() - .and_then(|crate_name| { - ignored_proc_macros.iter().find_map( - |(name, macros)| { - eq_ignore_underscore(name, crate_name) + paths + .into_iter() + .map(move |(crate_id, res)| { + ( + crate_id, + res.map_or_else( + |e| Err((e, true)), + |(crate_name, path)| { + progress(path.to_string()); + client.as_ref().map_err(|it| (it.clone(), true)).and_then( + |client| { + load_proc_macro( + client, + &path, + ignored_proc_macros + .iter() + .find_map(|(name, macros)| { + eq_ignore_underscore(name, 
&crate_name) .then_some(&**macros) - }, - ) - }) - .unwrap_or_default(), + }) + .unwrap_or_default(), + ) + }, ) - }) - }, - ), - ) - })); + }, + ), + ) + }) + .for_each(|(krate, res)| builder.insert(krate, res)); } - sender.send(Task::LoadProcMacros(ProcMacroProgress::End(res))).unwrap(); + sender.send(Task::LoadProcMacros(ProcMacroProgress::End(builder.build()))).unwrap(); }); } @@ -395,6 +427,7 @@ impl GlobalState { return; }; + info!(%cause, ?force_reload_crate_graph); if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() { if *force_reload_crate_graph { self.recreate_crate_graph(cause); @@ -416,7 +449,7 @@ impl GlobalState { if same_workspaces { let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result(); if Arc::ptr_eq(workspaces, &self.workspaces) { - tracing::debug!("set build scripts to workspaces"); + info!("set build scripts to workspaces"); let workspaces = workspaces .iter() @@ -428,9 +461,10 @@ impl GlobalState { }) .collect::<Vec<_>>(); // Workspaces are the same, but we've updated build data. + info!("same workspace, but new build data"); self.workspaces = Arc::new(workspaces); } else { - tracing::info!("build scripts do not match the version of the active workspace"); + info!("build scripts do not match the version of the active workspace"); if *force_reload_crate_graph { self.recreate_crate_graph(cause); } @@ -440,7 +474,7 @@ impl GlobalState { return; } } else { - tracing::debug!("abandon build scripts for workspaces"); + info!("abandon build scripts for workspaces"); // Here, we completely changed the workspace (Cargo.toml edit), so // we don't care about build-script results, they are stale. @@ -507,7 +541,6 @@ impl GlobalState { watchers.extend( iter::once(self.config.user_config_path().as_path()) - .chain(iter::once(self.config.root_ratoml_path().as_path())) .chain(self.workspaces.iter().map(|ws| ws.manifest().map(ManifestPath::as_ref))) .flatten() .map(|glob_pattern| lsp_types::FileSystemWatcher { @@ -535,7 +568,7 @@ impl GlobalState { if (self.proc_macro_clients.is_empty() || !same_workspaces) && self.config.expand_proc_macros() { - tracing::info!("Spawning proc-macro servers"); + info!("Spawning proc-macro servers"); self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| { let path = match self.config.proc_macro_srv() { @@ -562,7 +595,7 @@ impl GlobalState { _ => Default::default(), }; - tracing::info!("Using proc-macro server at {path}"); + info!("Using proc-macro server at {path}"); ProcMacroServer::spawn(&path, &env).map_err(|err| { tracing::error!( @@ -588,12 +621,14 @@ impl GlobalState { self.source_root_config = project_folders.source_root_config; self.local_roots_parent_map = Arc::new(self.source_root_config.source_root_parent_map()); + info!(?cause, "recreating the crate graph"); self.recreate_crate_graph(cause); - tracing::info!("did switch workspaces"); + info!("did switch workspaces"); } fn recreate_crate_graph(&mut self, cause: String) { + info!(?cause, "Building Crate Graph"); self.report_progress( "Building CrateGraph", crate::lsp::utils::Progress::Begin, @@ -631,10 +666,17 @@ impl GlobalState { change.set_proc_macros( crate_graph .iter() - .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true)))) .collect(), ); self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + } else { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), 
false)))) + .collect(), + ); } change.set_crate_graph(crate_graph); change.set_target_data_layouts(layouts); @@ -658,12 +700,19 @@ impl GlobalState { let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()); }; - if last_op_result.is_empty() { - stdx::format_to!(buf, "rust-analyzer failed to discover workspace"); - } else { - for ws in last_op_result { - if let Err(err) = ws { - stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err); + + if !self.discover_workspace_queue.op_in_progress() { + if last_op_result.is_empty() { + stdx::format_to!(buf, "rust-analyzer failed to discover workspace"); + } else { + for ws in last_op_result { + if let Err(err) = ws { + stdx::format_to!( + buf, + "rust-analyzer failed to load workspace: {:#}\n", + err + ); + } } } } @@ -766,12 +815,7 @@ pub fn ws_to_crate_graph( workspaces: &[ProjectWorkspace], extra_env: &FxHashMap<String, String>, mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>, -) -> ( - CrateGraph, - Vec<FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>>, - Vec<Result<Arc<str>, Arc<str>>>, - Vec<Option<Version>>, -) { +) -> (CrateGraph, Vec<ProcMacroPaths>, Vec<Result<Arc<str>, Arc<str>>>, Vec<Option<Version>>) { let mut crate_graph = CrateGraph::default(); let mut proc_macro_paths = Vec::default(); let mut layouts = Vec::default(); @@ -818,7 +862,11 @@ pub fn ws_to_crate_graph( (crate_graph, proc_macro_paths, layouts, toolchains) } -pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool { +pub(crate) fn should_refresh_for_change( + path: &AbsPath, + change_kind: ChangeKind, + additional_paths: &[&str], +) -> bool { const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"]; const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"]; @@ -830,6 +878,11 @@ pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) if let "Cargo.toml" | "Cargo.lock" = file_name { return true; } + + if additional_paths.contains(&file_name) { + return true; + } + if change_kind == ChangeKind::Modify { return false; } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs index 863ff064399..67e1bad5281 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs @@ -3,6 +3,7 @@ use std::mem; use cfg::{CfgAtom, CfgExpr}; +use hir::sym; use ide::{Cancellable, CrateId, FileId, RunnableKind, TestId}; use project_model::project_json::Runnable; use project_model::{CargoFeatures, ManifestPath, TargetKind}; @@ -56,6 +57,7 @@ pub(crate) struct CargoTargetSpec { pub(crate) crate_id: CrateId, pub(crate) required_features: Vec<String>, pub(crate) features: FxHashSet<String>, + pub(crate) sysroot_root: Option<vfs::AbsPathBuf>, } #[derive(Clone, Debug)] @@ -237,14 +239,14 @@ impl CargoTargetSpec { /// Fill minimal features needed fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) { match cfg_expr { - CfgExpr::Atom(CfgAtom::KeyValue { key, value }) if key == "feature" => { + CfgExpr::Atom(CfgAtom::KeyValue { key, value }) if *key == sym::feature => { features.push(value.to_string()) } CfgExpr::All(preds) => { preds.iter().for_each(|cfg| required_features(cfg, features)); } CfgExpr::Any(preds) => { - for cfg in preds { + for cfg in preds.iter() { let len_features = features.len(); required_features(cfg, features); 
if len_features != features.len() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs index fcdbf6c6949..f330754f19a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs @@ -48,7 +48,10 @@ where let writer = self.writer; - let ra_fmt_layer = tracing_subscriber::fmt::layer().with_writer(writer).with_filter(filter); + let ra_fmt_layer = tracing_subscriber::fmt::layer() + .with_target(false) + .with_writer(writer) + .with_filter(filter); let mut chalk_layer = None; if let Some(chalk_filter) = self.chalk_filter { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index 56f416a0b6e..b1ef4837717 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -27,8 +27,7 @@ use lsp_types::{ InlayHint, InlayHintLabel, InlayHintParams, PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, }; - -use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams, UnindexedProject}; +use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams}; use serde_json::json; use stdx::format_to_acc; @@ -813,66 +812,6 @@ fn main() {{}} } #[test] -fn test_opening_a_file_outside_of_indexed_workspace() { - if skip_slow_tests() { - return; - } - - let tmp_dir = TestDir::new(); - let path = tmp_dir.path(); - - let project = json!({ - "roots": [path], - "crates": [ { - "root_module": path.join("src/crate_one/lib.rs"), - "deps": [], - "edition": "2015", - "cfg": [ "cfg_atom_1", "feature=\"cfg_1\""], - } ] - }); - - let code = format!( - r#" -//- /rust-project.json -{project} - -//- /src/crate_one/lib.rs -mod bar; - -fn main() {{}} -"#, - ); - - let server = Project::with_fixture(&code) - .tmp_dir(tmp_dir) - .with_config(serde_json::json!({ - "notifications": { - "unindexedProject": true - }, - })) - .server() - .wait_until_workspace_is_loaded(); - - let uri = server.doc_id("src/crate_two/lib.rs").uri; - server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams { - text_document: TextDocumentItem { - uri: uri.clone(), - language_id: "rust".to_owned(), - version: 0, - text: "/// Docs\nfn foo() {}".to_owned(), - }, - }); - let expected = json!({ - "textDocuments": [ - { - "uri": uri - } - ] - }); - server.expect_notification::<UnindexedProject>(expected); -} - -#[test] fn diagnostics_dont_block_typing() { if skip_slow_tests() { return; @@ -970,7 +909,7 @@ version = \"0.0.0\" fn out_dirs_check_impl(root_contains_symlink: bool) { if skip_slow_tests() { - return; + // return; } let mut server = Project::with_fixture( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs index 218a9a32adb..c06ba9eee14 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs @@ -10,6 +10,7 @@ use paths::Utf8PathBuf; use rust_analyzer::lsp::ext::{InternalTestingFetchConfig, InternalTestingFetchConfigParams}; use serde_json::json; +use test_utils::skip_slow_tests; enum QueryType { Local, @@ -30,8 +31,6 @@ impl RatomlTest { const EMIT_MUST_USE: &'static str 
= r#"assist.emitMustUse = true"#; const EMIT_MUST_NOT_USE: &'static str = r#"assist.emitMustUse = false"#; - const GLOBAL_TRAIT_ASSOC_ITEMS_ZERO: &'static str = r#"hover.show.traitAssocItems = 0"#; - fn new( fixtures: Vec<&str>, roots: Vec<&str>, @@ -180,29 +179,14 @@ impl RatomlTest { } } -// /// Check if we are listening for changes in user's config file ( e.g on Linux `~/.config/rust-analyzer/.rust-analyzer.toml`) -// #[test] -// #[cfg(target_os = "windows")] -// fn listen_to_user_config_scenario_windows() { -// todo!() -// } - -// #[test] -// #[cfg(target_os = "linux")] -// fn listen_to_user_config_scenario_linux() { -// todo!() -// } - -// #[test] -// #[cfg(target_os = "macos")] -// fn listen_to_user_config_scenario_macos() { -// todo!() -// } - /// Check if made changes have had any effect on /// the client config. #[test] fn ratoml_client_config_basic() { + if skip_slow_tests() { + return; + } + let server = RatomlTest::new( vec![ r#" @@ -304,6 +288,10 @@ enum Value { #[test] #[ignore = "the user config is currently not being watched on startup, fix this"] fn ratoml_user_config_detected() { + if skip_slow_tests() { + return; + } + let server = RatomlTest::new( vec![ r#" @@ -333,6 +321,10 @@ enum Value { #[test] #[ignore = "the user config is currently not being watched on startup, fix this"] fn ratoml_create_user_config() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -364,6 +356,10 @@ enum Value { #[test] #[ignore = "the user config is currently not being watched on startup, fix this"] fn ratoml_modify_user_config() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -394,6 +390,10 @@ assist.emitMustUse = true"#, #[test] #[ignore = "the user config is currently not being watched on startup, fix this"] fn ratoml_delete_user_config() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -420,18 +420,13 @@ assist.emitMustUse = true"#, server.delete(2); assert!(!server.query(QueryType::Local, 1)); } -// #[test] -// fn delete_user_config() { -// todo!() -// } - -// #[test] -// fn modify_client_config() { -// todo!() -// } #[test] fn ratoml_inherit_config_from_ws_root() { + if skip_slow_tests() { + return; + } + let server = RatomlTest::new( vec![ r#" @@ -475,6 +470,10 @@ pub fn add(left: usize, right: usize) -> usize { #[test] fn ratoml_modify_ratoml_at_ws_root() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -520,6 +519,10 @@ pub fn add(left: usize, right: usize) -> usize { #[test] fn ratoml_delete_ratoml_at_ws_root() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -565,6 +568,10 @@ pub fn add(left: usize, right: usize) -> usize { #[test] fn ratoml_add_immediate_child_to_ws_root() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -609,7 +616,12 @@ pub fn add(left: usize, right: usize) -> usize { } #[test] +#[ignore = "Root ratomls are not being looked for on startup. 
Fix this."] fn ratoml_rm_ws_root_ratoml_child_has_client_as_parent_now() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -655,6 +667,10 @@ pub fn add(left: usize, right: usize) -> usize { #[test] fn ratoml_crates_both_roots() { + if skip_slow_tests() { + return; + } + let server = RatomlTest::new( vec![ r#" @@ -699,6 +715,10 @@ enum Value { #[test] fn ratoml_multiple_ratoml_in_single_source_root() { + if skip_slow_tests() { + return; + } + let server = RatomlTest::new( vec![ r#" @@ -729,37 +749,6 @@ fn ratoml_multiple_ratoml_in_single_source_root() { ); assert!(server.query(QueryType::Local, 3)); - - let server = RatomlTest::new( - vec![ - r#" -//- /p1/Cargo.toml -[package] -name = "p1" -version = "0.1.0" -edition = "2021" -"#, - r#" -//- /p1/src/rust-analyzer.toml -assist.emitMustUse = false -"#, - r#" -//- /p1/rust-analyzer.toml -assist.emitMustUse = true -"#, - r#" -//- /p1/src/lib.rs -enum Value { - Number(i32), - Text(String), -} -"#, - ], - vec!["p1"], - None, - ); - - assert!(server.query(QueryType::Local, 3)); } /// If a root is non-local, so we cannot find what its parent is @@ -838,6 +827,10 @@ enum Value { /// configuring global level configurations as well. #[test] fn ratoml_in_root_is_global() { + if skip_slow_tests() { + return; + } + let server = RatomlTest::new( vec![ r#" @@ -848,32 +841,28 @@ version = "0.1.0" edition = "2021" "#, r#" -//- /rust-analyzer.toml -hover.show.traitAssocItems = 4 +//- /p1/rust-analyzer.toml +rustfmt.rangeFormatting.enable = true "#, r#" //- /p1/src/lib.rs -trait RandomTrait { - type B; - fn abc() -> i32; - fn def() -> i64; -} - fn main() { - let a = RandomTrait; + todo!() }"#, ], - vec![], + vec!["p1"], None, ); - server.query(QueryType::Global, 2); + assert!(server.query(QueryType::Global, 2)); } -#[allow(unused)] -// #[test] -// FIXME: Re-enable this test when we have a global config we can check again +#[test] fn ratoml_root_is_updateable() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -884,34 +873,30 @@ version = "0.1.0" edition = "2021" "#, r#" -//- /rust-analyzer.toml -hover.show.traitAssocItems = 4 - "#, +//- /p1/rust-analyzer.toml +rustfmt.rangeFormatting.enable = true + "#, r#" //- /p1/src/lib.rs -trait RandomTrait { - type B; - fn abc() -> i32; - fn def() -> i64; -} - fn main() { - let a = RandomTrait; + todo!() }"#, ], - vec![], + vec!["p1"], None, ); assert!(server.query(QueryType::Global, 2)); - server.edit(1, RatomlTest::GLOBAL_TRAIT_ASSOC_ITEMS_ZERO.to_owned()); + server.edit(1, "rustfmt.rangeFormatting.enable = false".to_owned()); assert!(!server.query(QueryType::Global, 2)); } -#[allow(unused)] -// #[test] -// FIXME: Re-enable this test when we have a global config we can check again +#[test] fn ratoml_root_is_deletable() { + if skip_slow_tests() { + return; + } + let mut server = RatomlTest::new( vec![ r#" @@ -922,22 +907,16 @@ version = "0.1.0" edition = "2021" "#, r#" -//- /rust-analyzer.toml -hover.show.traitAssocItems = 4 - "#, +//- /p1/rust-analyzer.toml +rustfmt.rangeFormatting.enable = true + "#, r#" //- /p1/src/lib.rs -trait RandomTrait { - type B; - fn abc() -> i32; - fn def() -> i64; -} - fn main() { - let a = RandomTrait; + todo!() }"#, ], - vec![], + vec!["p1"], None, ); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index 66100971fbf..081ee5fa3e4 100644 --- 
a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -256,40 +256,6 @@ impl Server { self.send_notification(r) } - pub(crate) fn expect_notification<N>(&self, expected: Value) - where - N: lsp_types::notification::Notification, - N::Params: Serialize, - { - while let Some(Message::Notification(actual)) = - recv_timeout(&self.client.receiver).unwrap_or_else(|_| panic!("timed out")) - { - if actual.method == N::METHOD { - let actual = actual - .clone() - .extract::<Value>(N::METHOD) - .expect("was not able to extract notification"); - - tracing::debug!(?actual, "got notification"); - if let Some((expected_part, actual_part)) = find_mismatch(&expected, &actual) { - panic!( - "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", - to_string_pretty(&expected).unwrap(), - to_string_pretty(&actual).unwrap(), - to_string_pretty(expected_part).unwrap(), - to_string_pretty(actual_part).unwrap(), - ); - } else { - tracing::debug!("successfully matched notification"); - return; - } - } else { - continue; - } - } - panic!("never got expected notification"); - } - #[track_caller] pub(crate) fn request<R>(&self, params: R::Params, expected_resp: Value) where diff --git a/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs b/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs index 4e707412397..eeaf008a15c 100644 --- a/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs +++ b/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs @@ -53,7 +53,11 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream num_storages += 1; } "dependencies" => { - storage = QueryStorage::Dependencies; + storage = QueryStorage::LruDependencies; + num_storages += 1; + } + "lru" => { + storage = QueryStorage::LruMemoized; num_storages += 1; } "input" => { @@ -235,7 +239,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream queries_with_storage.push(fn_name); - let tracing = if let QueryStorage::Memoized = query.storage { + let tracing = if let QueryStorage::Memoized | QueryStorage::LruMemoized = query.storage { let s = format!("{trait_name}::{fn_name}"); Some(quote! { let _p = tracing::debug_span!(#s, #(#key_names = tracing::field::debug(&#key_names)),*).entered(); @@ -376,8 +380,9 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream let storage = match &query.storage { QueryStorage::Memoized => quote!(salsa::plumbing::MemoizedStorage<Self>), - QueryStorage::Dependencies => { - quote!(salsa::plumbing::DependencyStorage<Self>) + QueryStorage::LruMemoized => quote!(salsa::plumbing::LruMemoizedStorage<Self>), + QueryStorage::LruDependencies => { + quote!(salsa::plumbing::LruDependencyStorage<Self>) } QueryStorage::Input if query.keys.is_empty() => { quote!(salsa::plumbing::UnitInputStorage<Self>) @@ -724,7 +729,8 @@ impl Query { #[derive(Debug, Clone, PartialEq, Eq)] enum QueryStorage { Memoized, - Dependencies, + LruDependencies, + LruMemoized, Input, Interned, InternedLookup { intern_query_type: Ident }, @@ -739,7 +745,9 @@ impl QueryStorage { | QueryStorage::Interned | QueryStorage::InternedLookup { .. 
} | QueryStorage::Transparent => false, - QueryStorage::Memoized | QueryStorage::Dependencies => true, + QueryStorage::Memoized | QueryStorage::LruMemoized | QueryStorage::LruDependencies => { + true + } } } } diff --git a/src/tools/rust-analyzer/crates/salsa/src/derived.rs b/src/tools/rust-analyzer/crates/salsa/src/derived.rs index fd31ab20416..8b2fdd6b19c 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/derived.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/derived.rs @@ -1,9 +1,7 @@ use crate::debug::TableEntry; use crate::durability::Durability; use crate::hash::FxIndexMap; -use crate::lru::Lru; use crate::plumbing::DerivedQueryStorageOps; -use crate::plumbing::LruQueryStorageOps; use crate::plumbing::QueryFunction; use crate::plumbing::QueryStorageMassOps; use crate::plumbing::QueryStorageOps; @@ -13,7 +11,6 @@ use crate::{Database, DatabaseKeyIndex, QueryDb, Revision}; use parking_lot::RwLock; use std::borrow::Borrow; use std::hash::Hash; -use std::marker::PhantomData; use triomphe::Arc; mod slot; @@ -22,79 +19,33 @@ use slot::Slot; /// Memoized queries store the result plus a list of the other queries /// that they invoked. This means we can avoid recomputing them when /// none of those inputs have changed. -pub type MemoizedStorage<Q> = DerivedStorage<Q, AlwaysMemoizeValue>; - -/// "Dependency" queries just track their dependencies and not the -/// actual value (which they produce on demand). This lessens the -/// storage requirements. -pub type DependencyStorage<Q> = DerivedStorage<Q, NeverMemoizeValue>; +pub type MemoizedStorage<Q> = DerivedStorage<Q>; /// Handles storage where the value is 'derived' by executing a /// function (in contrast to "inputs"). -pub struct DerivedStorage<Q, MP> +pub struct DerivedStorage<Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, { group_index: u16, - lru_list: Lru<Slot<Q, MP>>, - slot_map: RwLock<FxIndexMap<Q::Key, Arc<Slot<Q, MP>>>>, - policy: PhantomData<MP>, + slot_map: RwLock<FxIndexMap<Q::Key, Arc<Slot<Q>>>>, } -impl<Q, MP> std::panic::RefUnwindSafe for DerivedStorage<Q, MP> +impl<Q> std::panic::RefUnwindSafe for DerivedStorage<Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, + Q::Key: std::panic::RefUnwindSafe, Q::Value: std::panic::RefUnwindSafe, { } -pub trait MemoizationPolicy<Q>: Send + Sync -where - Q: QueryFunction, -{ - fn should_memoize_value(key: &Q::Key) -> bool; - - fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool; -} - -pub enum AlwaysMemoizeValue {} -impl<Q> MemoizationPolicy<Q> for AlwaysMemoizeValue +impl<Q> DerivedStorage<Q> where Q: QueryFunction, Q::Value: Eq, { - fn should_memoize_value(_key: &Q::Key) -> bool { - true - } - - fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool { - old_value == new_value - } -} - -pub enum NeverMemoizeValue {} -impl<Q> MemoizationPolicy<Q> for NeverMemoizeValue -where - Q: QueryFunction, -{ - fn should_memoize_value(_key: &Q::Key) -> bool { - false - } - - fn memoized_value_eq(_old_value: &Q::Value, _new_value: &Q::Value) -> bool { - panic!("cannot reach since we never memoize") - } -} - -impl<Q, MP> DerivedStorage<Q, MP> -where - Q: QueryFunction, - MP: MemoizationPolicy<Q>, -{ - fn slot(&self, key: &Q::Key) -> Arc<Slot<Q, MP>> { + fn slot(&self, key: &Q::Key) -> Arc<Slot<Q>> { if let Some(v) = self.slot_map.read().get(key) { return v.clone(); } @@ -111,20 +62,15 @@ where } } -impl<Q, MP> QueryStorageOps<Q> for DerivedStorage<Q, MP> +impl<Q> QueryStorageOps<Q> for DerivedStorage<Q> where Q: QueryFunction, - MP: 
MemoizationPolicy<Q>, + Q::Value: Eq, { const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = Q::CYCLE_STRATEGY; fn new(group_index: u16) -> Self { - DerivedStorage { - group_index, - slot_map: RwLock::new(FxIndexMap::default()), - lru_list: Default::default(), - policy: PhantomData, - } + DerivedStorage { group_index, slot_map: RwLock::new(FxIndexMap::default()) } } fn fmt_index( @@ -161,10 +107,6 @@ where let slot = self.slot(key); let StampedValue { value, durability, changed_at } = slot.read(db, key); - if let Some(evicted) = self.lru_list.record_use(&slot) { - evicted.evict(); - } - db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( slot.database_key_index(), durability, @@ -175,7 +117,7 @@ where } fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Durability { - self.slot(key).durability(db) + self.slot_map.read().get(key).map_or(Durability::LOW, |slot| slot.durability(db)) } fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C @@ -187,31 +129,19 @@ where } } -impl<Q, MP> QueryStorageMassOps for DerivedStorage<Q, MP> +impl<Q> QueryStorageMassOps for DerivedStorage<Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, { fn purge(&self) { - self.lru_list.purge(); *self.slot_map.write() = Default::default(); } } -impl<Q, MP> LruQueryStorageOps for DerivedStorage<Q, MP> -where - Q: QueryFunction, - MP: MemoizationPolicy<Q>, -{ - fn set_lru_capacity(&self, new_capacity: usize) { - self.lru_list.set_lru_capacity(new_capacity); - } -} - -impl<Q, MP> DerivedQueryStorageOps<Q> for DerivedStorage<Q, MP> +impl<Q> DerivedQueryStorageOps<Q> for DerivedStorage<Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, + Q::Value: Eq, { fn invalidate<S>(&self, runtime: &mut Runtime, key: &S) where diff --git a/src/tools/rust-analyzer/crates/salsa/src/derived/slot.rs b/src/tools/rust-analyzer/crates/salsa/src/derived/slot.rs index cfafa40ce33..de7a3976074 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/derived/slot.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/derived/slot.rs @@ -1,12 +1,8 @@ use crate::debug::TableEntry; -use crate::derived::MemoizationPolicy; use crate::durability::Durability; -use crate::lru::LruIndex; -use crate::lru::LruNode; use crate::plumbing::{DatabaseOps, QueryFunction}; use crate::revision::Revision; use crate::runtime::local_state::ActiveQueryGuard; -use crate::runtime::local_state::QueryInputs; use crate::runtime::local_state::QueryRevisions; use crate::runtime::Runtime; use crate::runtime::RuntimeId; @@ -15,21 +11,18 @@ use crate::runtime::WaitResult; use crate::Cycle; use crate::{Database, DatabaseKeyIndex, Event, EventKind, QueryDb}; use parking_lot::{RawRwLock, RwLock}; -use std::marker::PhantomData; use std::ops::Deref; use std::sync::atomic::{AtomicBool, Ordering}; use tracing::{debug, info}; -pub(super) struct Slot<Q, MP> +pub(super) struct Slot<Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, { key_index: u32, + // FIXME: Yeet this group_index: u16, state: RwLock<QueryState<Q>>, - policy: PhantomData<MP>, - lru_index: LruIndex, } /// Defines the "current state" of query's memoized results. @@ -55,7 +48,7 @@ where struct Memo<V> { /// The result of the query, if we decide to memoize it. - value: Option<V>, + value: V, /// Last revision when this memo was verified; this begins /// as the current revision. @@ -78,12 +71,6 @@ enum ProbeState<V, G> { /// verified in this revision. Stale(G), - /// There is an entry, and it has been verified - /// in this revision, but it has no cached - /// value. 
The `Revision` is the revision where the - /// value last changed (if we were to recompute it). - NoValue(G, Revision), - /// There is an entry which has been verified, /// and it has the following value-- or, we blocked /// on another thread, and that resulted in a cycle. @@ -104,18 +91,16 @@ enum MaybeChangedSinceProbeState<G> { Stale(G), } -impl<Q, MP> Slot<Q, MP> +impl<Q> Slot<Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, + Q::Value: Eq, { pub(super) fn new(database_key_index: DatabaseKeyIndex) -> Self { Self { key_index: database_key_index.key_index, group_index: database_key_index.group_index, state: RwLock::new(QueryState::NotComputed), - lru_index: LruIndex::default(), - policy: PhantomData, } } @@ -147,9 +132,7 @@ where loop { match self.probe(db, self.state.read(), runtime, revision_now) { ProbeState::UpToDate(v) => return v, - ProbeState::Stale(..) | ProbeState::NoValue(..) | ProbeState::NotComputed(..) => { - break - } + ProbeState::Stale(..) | ProbeState::NotComputed(..) => break, ProbeState::Retry => continue, } } @@ -177,9 +160,7 @@ where let mut old_memo = loop { match self.probe(db, self.state.upgradable_read(), runtime, revision_now) { ProbeState::UpToDate(v) => return v, - ProbeState::Stale(state) - | ProbeState::NotComputed(state) - | ProbeState::NoValue(state, _) => { + ProbeState::Stale(state) | ProbeState::NotComputed(state) => { type RwLockUpgradableReadGuard<'a, T> = lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>; @@ -227,7 +208,7 @@ where runtime: &Runtime, revision_now: Revision, active_query: ActiveQueryGuard<'_>, - panic_guard: PanicGuard<'_, Q, MP>, + panic_guard: PanicGuard<'_, Q>, old_memo: Option<Memo<Q::Value>>, key: &Q::Key, ) -> StampedValue<Q::Value> { @@ -286,22 +267,18 @@ where // "backdate" its `changed_at` revision to be the same as the // old value. if let Some(old_memo) = &old_memo { - if let Some(old_value) = &old_memo.value { - // Careful: if the value became less durable than it - // used to be, that is a "breaking change" that our - // consumers must be aware of. Becoming *more* durable - // is not. See the test `constant_to_non_constant`. - if revisions.durability >= old_memo.revisions.durability - && MP::memoized_value_eq(old_value, &value) - { - debug!( - "read_upgrade({:?}): value is equal, back-dating to {:?}", - self, old_memo.revisions.changed_at, - ); - - assert!(old_memo.revisions.changed_at <= revisions.changed_at); - revisions.changed_at = old_memo.revisions.changed_at; - } + // Careful: if the value became less durable than it + // used to be, that is a "breaking change" that our + // consumers must be aware of. Becoming *more* durable + // is not. See the test `constant_to_non_constant`. 
+ if revisions.durability >= old_memo.revisions.durability && old_memo.value == value { + debug!( + "read_upgrade({:?}): value is equal, back-dating to {:?}", + self, old_memo.revisions.changed_at, + ); + + assert!(old_memo.revisions.changed_at <= revisions.changed_at); + revisions.changed_at = old_memo.revisions.changed_at; } } @@ -311,8 +288,7 @@ where changed_at: revisions.changed_at, }; - let memo_value = - if self.should_memoize_value(key) { Some(new_value.value.clone()) } else { None }; + let memo_value = new_value.value.clone(); debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,); @@ -372,20 +348,16 @@ where return ProbeState::Stale(state); } - if let Some(value) = &memo.value { - let value = StampedValue { - durability: memo.revisions.durability, - changed_at: memo.revisions.changed_at, - value: value.clone(), - }; + let value = &memo.value; + let value = StampedValue { + durability: memo.revisions.durability, + changed_at: memo.revisions.changed_at, + value: value.clone(), + }; - info!("{:?}: returning memoized value changed at {:?}", self, value.changed_at); + info!("{:?}: returning memoized value changed at {:?}", self, value.changed_at); - ProbeState::UpToDate(value) - } else { - let changed_at = memo.revisions.changed_at; - ProbeState::NoValue(state, changed_at) - } + ProbeState::UpToDate(value) } } } @@ -408,21 +380,9 @@ where match &*self.state.read() { QueryState::NotComputed => None, QueryState::InProgress { .. } => Some(TableEntry::new(key.clone(), None)), - QueryState::Memoized(memo) => Some(TableEntry::new(key.clone(), memo.value.clone())), - } - } - - pub(super) fn evict(&self) { - let mut state = self.state.write(); - if let QueryState::Memoized(memo) = &mut *state { - // Evicting a value with an untracked input could - // lead to inconsistencies. Note that we can't check - // `has_untracked_input` when we add the value to the cache, - // because inputs can become untracked in the next revision. - if memo.has_untracked_input() { - return; + QueryState::Memoized(memo) => { + Some(TableEntry::new(key.clone(), Some(memo.value.clone()))) } - memo.value = None; } } @@ -430,7 +390,8 @@ where tracing::debug!("Slot::invalidate(new_revision = {:?})", new_revision); match &mut *self.state.write() { QueryState::Memoized(memo) => { - memo.revisions.inputs = QueryInputs::Untracked; + memo.revisions.untracked = true; + memo.revisions.inputs = None; memo.revisions.changed_at = new_revision; Some(memo.revisions.durability) } @@ -489,8 +450,7 @@ where // If we know when value last changed, we can return right away. // Note that we don't need the actual value to be available. - ProbeState::NoValue(_, changed_at) - | ProbeState::UpToDate(StampedValue { value: _, durability: _, changed_at }) => { + ProbeState::UpToDate(StampedValue { value: _, durability: _, changed_at }) => { MaybeChangedSinceProbeState::ChangedAt(changed_at) } @@ -545,7 +505,7 @@ where let maybe_changed = old_memo.revisions.changed_at > revision; panic_guard.proceed(Some(old_memo)); maybe_changed - } else if old_memo.value.is_some() { + } else { // We found that this memoized value may have changed // but we have an old value. We can re-run the code and // actually *check* if it has changed. @@ -559,12 +519,6 @@ where key, ); changed_at > revision - } else { - // We found that inputs to this memoized value may have chanced - // but we don't have an old value to compare against or re-use. - // No choice but to drop the memo and say that its value may have changed. 
- panic_guard.proceed(None); - true } } @@ -583,10 +537,6 @@ where mutex_guard, ) } - - fn should_memoize_value(&self, key: &Q::Key) -> bool { - MP::should_memoize_value(key) - } } impl<Q> QueryState<Q> @@ -598,21 +548,21 @@ where } } -struct PanicGuard<'me, Q, MP> +struct PanicGuard<'me, Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, + Q::Value: Eq, { - slot: &'me Slot<Q, MP>, + slot: &'me Slot<Q>, runtime: &'me Runtime, } -impl<'me, Q, MP> PanicGuard<'me, Q, MP> +impl<'me, Q> PanicGuard<'me, Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, + Q::Value: Eq, { - fn new(slot: &'me Slot<Q, MP>, runtime: &'me Runtime) -> Self { + fn new(slot: &'me Slot<Q>, runtime: &'me Runtime) -> Self { Self { slot, runtime } } @@ -666,10 +616,10 @@ Please report this bug to https://github.com/salsa-rs/salsa/issues." } } -impl<'me, Q, MP> Drop for PanicGuard<'me, Q, MP> +impl<'me, Q> Drop for PanicGuard<'me, Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, + Q::Value: Eq, { fn drop(&mut self) { if std::thread::panicking() { @@ -702,15 +652,11 @@ where revision_now: Revision, active_query: &ActiveQueryGuard<'_>, ) -> Option<StampedValue<V>> { - // If we don't have a memoized value, nothing to validate. - if self.value.is_none() { - return None; - } if self.verify_revisions(db, revision_now, active_query) { - self.value.clone().map(|value| StampedValue { + Some(StampedValue { durability: self.revisions.durability, changed_at: self.revisions.changed_at, - value, + value: self.value.clone(), }) } else { None @@ -746,11 +692,8 @@ where match &self.revisions.inputs { // We can't validate values that had untracked inputs; just have to // re-execute. - QueryInputs::Untracked => { - return false; - } - - QueryInputs::NoInputs => {} + None if self.revisions.untracked => return false, + None => {} // Check whether any of our inputs changed since the // **last point where we were verified** (not since we @@ -761,7 +704,7 @@ where // R1. But our *verification* date will be R2, and we // are only interested in finding out whether the // input changed *again*. - QueryInputs::Tracked { inputs } => { + Some(inputs) => { let changed_input = inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); if let Some(input) = changed_input { @@ -791,58 +734,42 @@ where self.verified_at = revision_now; true } - - fn has_untracked_input(&self) -> bool { - matches!(self.revisions.inputs, QueryInputs::Untracked) - } } -impl<Q, MP> std::fmt::Debug for Slot<Q, MP> +impl<Q> std::fmt::Debug for Slot<Q> where Q: QueryFunction, - MP: MemoizationPolicy<Q>, { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(fmt, "{:?}", Q::default()) } } -impl<Q, MP> LruNode for Slot<Q, MP> -where - Q: QueryFunction, - MP: MemoizationPolicy<Q>, -{ - fn lru_index(&self) -> &LruIndex { - &self.lru_index - } -} - -/// Check that `Slot<Q, MP>: Send + Sync` as long as +/// Check that `Slot<Q, >: Send + Sync` as long as /// `DB::DatabaseData: Send + Sync`, which in turn implies that /// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`. 
#[allow(dead_code)] -fn check_send_sync<Q, MP>() +fn check_send_sync<Q>() where Q: QueryFunction, - MP: MemoizationPolicy<Q>, + Q::Key: Send + Sync, Q::Value: Send + Sync, { fn is_send_sync<T: Send + Sync>() {} - is_send_sync::<Slot<Q, MP>>(); + is_send_sync::<Slot<Q>>(); } -/// Check that `Slot<Q, MP>: 'static` as long as +/// Check that `Slot<Q, >: 'static` as long as /// `DB::DatabaseData: 'static`, which in turn implies that /// `Q::Key: 'static`, `Q::Value: 'static`. #[allow(dead_code)] -fn check_static<Q, MP>() +fn check_static<Q>() where Q: QueryFunction + 'static, - MP: MemoizationPolicy<Q> + 'static, Q::Key: 'static, Q::Value: 'static, { fn is_static<T: 'static>() {} - is_static::<Slot<Q, MP>>(); + is_static::<Slot<Q>>(); } diff --git a/src/tools/rust-analyzer/crates/salsa/src/derived_lru.rs b/src/tools/rust-analyzer/crates/salsa/src/derived_lru.rs new file mode 100644 index 00000000000..bdb448e2412 --- /dev/null +++ b/src/tools/rust-analyzer/crates/salsa/src/derived_lru.rs @@ -0,0 +1,233 @@ +use crate::debug::TableEntry; +use crate::durability::Durability; +use crate::hash::FxIndexMap; +use crate::lru::Lru; +use crate::plumbing::DerivedQueryStorageOps; +use crate::plumbing::LruQueryStorageOps; +use crate::plumbing::QueryFunction; +use crate::plumbing::QueryStorageMassOps; +use crate::plumbing::QueryStorageOps; +use crate::runtime::StampedValue; +use crate::Runtime; +use crate::{Database, DatabaseKeyIndex, QueryDb, Revision}; +use parking_lot::RwLock; +use std::borrow::Borrow; +use std::hash::Hash; +use std::marker::PhantomData; +use triomphe::Arc; + +mod slot; +use slot::Slot; + +/// Memoized queries store the result plus a list of the other queries +/// that they invoked. This means we can avoid recomputing them when +/// none of those inputs have changed. +pub type MemoizedStorage<Q> = DerivedStorage<Q, AlwaysMemoizeValue>; + +/// "Dependency" queries just track their dependencies and not the +/// actual value (which they produce on demand). This lessens the +/// storage requirements. +pub type DependencyStorage<Q> = DerivedStorage<Q, NeverMemoizeValue>; + +/// Handles storage where the value is 'derived' by executing a +/// function (in contrast to "inputs"). 
+pub struct DerivedStorage<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + group_index: u16, + lru_list: Lru<Slot<Q, MP>>, + slot_map: RwLock<FxIndexMap<Q::Key, Arc<Slot<Q, MP>>>>, + policy: PhantomData<MP>, +} + +impl<Q, MP> std::panic::RefUnwindSafe for DerivedStorage<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, + Q::Key: std::panic::RefUnwindSafe, + Q::Value: std::panic::RefUnwindSafe, +{ +} + +pub trait MemoizationPolicy<Q>: Send + Sync +where + Q: QueryFunction, +{ + fn should_memoize_value(key: &Q::Key) -> bool; + + fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool; +} + +pub enum AlwaysMemoizeValue {} +impl<Q> MemoizationPolicy<Q> for AlwaysMemoizeValue +where + Q: QueryFunction, + Q::Value: Eq, +{ + fn should_memoize_value(_key: &Q::Key) -> bool { + true + } + + fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool { + old_value == new_value + } +} + +pub enum NeverMemoizeValue {} +impl<Q> MemoizationPolicy<Q> for NeverMemoizeValue +where + Q: QueryFunction, +{ + fn should_memoize_value(_key: &Q::Key) -> bool { + false + } + + fn memoized_value_eq(_old_value: &Q::Value, _new_value: &Q::Value) -> bool { + panic!("cannot reach since we never memoize") + } +} + +impl<Q, MP> DerivedStorage<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn slot(&self, key: &Q::Key) -> Arc<Slot<Q, MP>> { + if let Some(v) = self.slot_map.read().get(key) { + return v.clone(); + } + + let mut write = self.slot_map.write(); + let entry = write.entry(key.clone()); + let key_index = entry.index() as u32; + let database_key_index = DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index, + }; + entry.or_insert_with(|| Arc::new(Slot::new(database_key_index))).clone() + } +} + +impl<Q, MP> QueryStorageOps<Q> for DerivedStorage<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = Q::CYCLE_STRATEGY; + + fn new(group_index: u16) -> Self { + DerivedStorage { + group_index, + slot_map: RwLock::new(FxIndexMap::default()), + lru_list: Default::default(), + policy: PhantomData, + } + } + + fn fmt_index( + &self, + _db: &<Q as QueryDb<'_>>::DynDb, + index: u32, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + let slot_map = self.slot_map.read(); + let key = slot_map.get_index(index as usize).unwrap().0; + write!(fmt, "{}::{}({:?})", std::any::type_name::<Q>(), Q::QUERY_NAME, key) + } + + fn maybe_changed_after( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + index: u32, + revision: Revision, + ) -> bool { + debug_assert!(revision < db.salsa_runtime().current_revision()); + let (key, slot) = { + let read = self.slot_map.read(); + let Some((key, slot)) = read.get_index(index as usize) else { + return false; + }; + (key.clone(), slot.clone()) + }; + slot.maybe_changed_after(db, revision, &key) + } + + fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value { + db.unwind_if_cancelled(); + + let slot = self.slot(key); + let StampedValue { value, durability, changed_at } = slot.read(db, key); + + if let Some(evicted) = self.lru_list.record_use(&slot) { + evicted.evict(); + } + + db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( + slot.database_key_index(), + durability, + changed_at, + ); + + value + } + + fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Durability { + self.slot(key).durability(db) + } + + fn entries<C>(&self, _db: &<Q as 
QueryDb<'_>>::DynDb) -> C + where + C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>, + { + let slot_map = self.slot_map.read(); + slot_map.iter().filter_map(|(key, slot)| slot.as_table_entry(key)).collect() + } +} + +impl<Q, MP> QueryStorageMassOps for DerivedStorage<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn purge(&self) { + self.lru_list.purge(); + *self.slot_map.write() = Default::default(); + } +} + +impl<Q, MP> LruQueryStorageOps for DerivedStorage<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn set_lru_capacity(&self, new_capacity: u16) { + self.lru_list.set_lru_capacity(new_capacity); + } +} + +impl<Q, MP> DerivedQueryStorageOps<Q> for DerivedStorage<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn invalidate<S>(&self, runtime: &mut Runtime, key: &S) + where + S: Eq + Hash, + Q::Key: Borrow<S>, + { + runtime.with_incremented_revision(|new_revision| { + let map_read = self.slot_map.read(); + + if let Some(slot) = map_read.get(key) { + if let Some(durability) = slot.invalidate(new_revision) { + return Some(durability); + } + } + + None + }) + } +} diff --git a/src/tools/rust-analyzer/crates/salsa/src/derived_lru/slot.rs b/src/tools/rust-analyzer/crates/salsa/src/derived_lru/slot.rs new file mode 100644 index 00000000000..d0e4b5422b5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/salsa/src/derived_lru/slot.rs @@ -0,0 +1,845 @@ +use crate::debug::TableEntry; +use crate::derived_lru::MemoizationPolicy; +use crate::durability::Durability; +use crate::lru::LruIndex; +use crate::lru::LruNode; +use crate::plumbing::{DatabaseOps, QueryFunction}; +use crate::revision::Revision; +use crate::runtime::local_state::ActiveQueryGuard; +use crate::runtime::local_state::QueryRevisions; +use crate::runtime::Runtime; +use crate::runtime::RuntimeId; +use crate::runtime::StampedValue; +use crate::runtime::WaitResult; +use crate::Cycle; +use crate::{Database, DatabaseKeyIndex, Event, EventKind, QueryDb}; +use parking_lot::{RawRwLock, RwLock}; +use std::marker::PhantomData; +use std::ops::Deref; +use std::sync::atomic::{AtomicBool, Ordering}; +use tracing::{debug, info}; + +pub(super) struct Slot<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + key_index: u32, + group_index: u16, + state: RwLock<QueryState<Q>>, + lru_index: LruIndex, + policy: PhantomData<MP>, +} + +/// Defines the "current state" of query's memoized results. +enum QueryState<Q> +where + Q: QueryFunction, +{ + NotComputed, + + /// The runtime with the given id is currently computing the + /// result of this query. + InProgress { + id: RuntimeId, + + /// Set to true if any other queries are blocked, + /// waiting for this query to complete. + anyone_waiting: AtomicBool, + }, + + /// We have computed the query already, and here is the result. + Memoized(Memo<Q::Value>), +} + +struct Memo<V> { + /// The result of the query, if we decide to memoize it. + value: Option<V>, + + /// Last revision when this memo was verified; this begins + /// as the current revision. + pub(crate) verified_at: Revision, + + /// Revision information + revisions: QueryRevisions, +} + +/// Return value of `probe` helper. +enum ProbeState<V, G> { + /// Another thread was active but has completed. + /// Try again! + Retry, + + /// No entry for this key at all. + NotComputed(G), + + /// There is an entry, but its contents have not been + /// verified in this revision. 
+ Stale(G), + + /// There is an entry, and it has been verified + /// in this revision, but it has no cached + /// value. The `Revision` is the revision where the + /// value last changed (if we were to recompute it). + NoValue(G, Revision), + + /// There is an entry which has been verified, + /// and it has the following value-- or, we blocked + /// on another thread, and that resulted in a cycle. + UpToDate(V), +} + +/// Return value of `maybe_changed_after_probe` helper. +enum MaybeChangedSinceProbeState<G> { + /// Another thread was active but has completed. + /// Try again! + Retry, + + /// Value may have changed in the given revision. + ChangedAt(Revision), + + /// There is a stale cache entry that has not been + /// verified in this revision, so we can't say. + Stale(G), +} + +impl<Q, MP> Slot<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + pub(super) fn new(database_key_index: DatabaseKeyIndex) -> Self { + Self { + key_index: database_key_index.key_index, + group_index: database_key_index.group_index, + state: RwLock::new(QueryState::NotComputed), + lru_index: LruIndex::default(), + policy: PhantomData, + } + } + + pub(super) fn database_key_index(&self) -> DatabaseKeyIndex { + DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index: self.key_index, + } + } + + pub(super) fn read( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + key: &Q::Key, + ) -> StampedValue<Q::Value> { + let runtime = db.salsa_runtime(); + + // NB: We don't need to worry about people modifying the + // revision out from under our feet. Either `db` is a frozen + // database, in which case there is a lock, or the mutator + // thread is the current thread, and it will be prevented from + // doing any `set` invocations while the query function runs. + let revision_now = runtime.current_revision(); + + info!("{:?}: invoked at {:?}", self, revision_now,); + + // First, do a check with a read-lock. + loop { + match self.probe(db, self.state.read(), runtime, revision_now) { + ProbeState::UpToDate(v) => return v, + ProbeState::Stale(..) | ProbeState::NoValue(..) | ProbeState::NotComputed(..) => { + break + } + ProbeState::Retry => continue, + } + } + + self.read_upgrade(db, key, revision_now) + } + + /// Second phase of a read operation: acquires an upgradable-read + /// and -- if needed -- validates whether inputs have changed, + /// recomputes value, etc. This is invoked after our initial probe + /// shows a potentially out of date value. + fn read_upgrade( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + key: &Q::Key, + revision_now: Revision, + ) -> StampedValue<Q::Value> { + let runtime = db.salsa_runtime(); + + debug!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,); + + // Check with an upgradable read to see if there is a value + // already. (This permits other readers but prevents anyone + // else from running `read_upgrade` at the same time.) + let mut old_memo = loop { + match self.probe(db, self.state.upgradable_read(), runtime, revision_now) { + ProbeState::UpToDate(v) => return v, + ProbeState::Stale(state) + | ProbeState::NotComputed(state) + | ProbeState::NoValue(state, _) => { + type RwLockUpgradableReadGuard<'a, T> = + lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>; + + let mut state = RwLockUpgradableReadGuard::upgrade(state); + match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) { + QueryState::Memoized(old_memo) => break Some(old_memo), + QueryState::InProgress { .. 
} => unreachable!(), + QueryState::NotComputed => break None, + } + } + ProbeState::Retry => continue, + } + }; + + let panic_guard = PanicGuard::new(self, runtime); + let active_query = runtime.push_query(self.database_key_index()); + + // If we have an old-value, it *may* now be stale, since there + // has been a new revision since the last time we checked. So, + // first things first, let's walk over each of our previous + // inputs and check whether they are out of date. + if let Some(memo) = &mut old_memo { + if let Some(value) = memo.verify_value(db.ops_database(), revision_now, &active_query) { + info!("{:?}: validated old memoized value", self,); + + db.salsa_event(Event { + runtime_id: runtime.id(), + kind: EventKind::DidValidateMemoizedValue { + database_key: self.database_key_index(), + }, + }); + + panic_guard.proceed(old_memo); + + return value; + } + } + + self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo, key) + } + + fn execute( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + runtime: &Runtime, + revision_now: Revision, + active_query: ActiveQueryGuard<'_>, + panic_guard: PanicGuard<'_, Q, MP>, + old_memo: Option<Memo<Q::Value>>, + key: &Q::Key, + ) -> StampedValue<Q::Value> { + tracing::info!("{:?}: executing query", self.database_key_index().debug(db)); + + db.salsa_event(Event { + runtime_id: db.salsa_runtime().id(), + kind: EventKind::WillExecute { database_key: self.database_key_index() }, + }); + + // Query was not previously executed, or value is potentially + // stale, or value is absent. Let's execute! + let value = match Cycle::catch(|| Q::execute(db, key.clone())) { + Ok(v) => v, + Err(cycle) => { + tracing::debug!( + "{:?}: caught cycle {:?}, have strategy {:?}", + self.database_key_index().debug(db), + cycle, + Q::CYCLE_STRATEGY, + ); + match Q::CYCLE_STRATEGY { + crate::plumbing::CycleRecoveryStrategy::Panic => { + panic_guard.proceed(None); + cycle.throw() + } + crate::plumbing::CycleRecoveryStrategy::Fallback => { + if let Some(c) = active_query.take_cycle() { + assert!(c.is(&cycle)); + Q::cycle_fallback(db, &cycle, key) + } else { + // we are not a participant in this cycle + debug_assert!(!cycle + .participant_keys() + .any(|k| k == self.database_key_index())); + cycle.throw() + } + } + } + } + }; + + let mut revisions = active_query.pop(); + + // We assume that query is side-effect free -- that is, does + // not mutate the "inputs" to the query system. Sanity check + // that assumption here, at least to the best of our ability. + assert_eq!( + runtime.current_revision(), + revision_now, + "revision altered during query execution", + ); + + // If the new value is equal to the old one, then it didn't + // really change, even if some of its inputs have. So we can + // "backdate" its `changed_at` revision to be the same as the + // old value. + if let Some(old_memo) = &old_memo { + if let Some(old_value) = &old_memo.value { + // Careful: if the value became less durable than it + // used to be, that is a "breaking change" that our + // consumers must be aware of. Becoming *more* durable + // is not. See the test `constant_to_non_constant`. 
+ if revisions.durability >= old_memo.revisions.durability + && MP::memoized_value_eq(old_value, &value) + { + debug!( + "read_upgrade({:?}): value is equal, back-dating to {:?}", + self, old_memo.revisions.changed_at, + ); + + assert!(old_memo.revisions.changed_at <= revisions.changed_at); + revisions.changed_at = old_memo.revisions.changed_at; + } + } + } + + let new_value = StampedValue { + value, + durability: revisions.durability, + changed_at: revisions.changed_at, + }; + + let memo_value = + if self.should_memoize_value(key) { Some(new_value.value.clone()) } else { None }; + + debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,); + + panic_guard.proceed(Some(Memo { value: memo_value, verified_at: revision_now, revisions })); + + new_value + } + + /// Helper for `read` that does a shallow check (not recursive) if we have an up-to-date value. + /// + /// Invoked with the guard `state` corresponding to the `QueryState` of some `Slot` (the guard + /// can be either read or write). Returns a suitable `ProbeState`: + /// + /// - `ProbeState::UpToDate(r)` if the table has an up-to-date value (or we blocked on another + /// thread that produced such a value). + /// - `ProbeState::StaleOrAbsent(g)` if either (a) there is no memo for this key, (b) the memo + /// has no value; or (c) the memo has not been verified at the current revision. + /// + /// Note that in case `ProbeState::UpToDate`, the lock will have been released. + fn probe<StateGuard>( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + state: StateGuard, + runtime: &Runtime, + revision_now: Revision, + ) -> ProbeState<StampedValue<Q::Value>, StateGuard> + where + StateGuard: Deref<Target = QueryState<Q>>, + { + match &*state { + QueryState::NotComputed => ProbeState::NotComputed(state), + + QueryState::InProgress { id, anyone_waiting } => { + let other_id = *id; + + // NB: `Ordering::Relaxed` is sufficient here, + // as there are no loads that are "gated" on this + // value. Everything that is written is also protected + // by a lock that must be acquired. The role of this + // boolean is to decide *whether* to acquire the lock, + // not to gate future atomic reads. + anyone_waiting.store(true, Ordering::Relaxed); + + self.block_on_or_unwind(db, runtime, other_id, state); + + // Other thread completely normally, so our value may be available now. + ProbeState::Retry + } + + QueryState::Memoized(memo) => { + debug!( + "{:?}: found memoized value, verified_at={:?}, changed_at={:?}", + self, memo.verified_at, memo.revisions.changed_at, + ); + + if memo.verified_at < revision_now { + return ProbeState::Stale(state); + } + + if let Some(value) = &memo.value { + let value = StampedValue { + durability: memo.revisions.durability, + changed_at: memo.revisions.changed_at, + value: value.clone(), + }; + + info!("{:?}: returning memoized value changed at {:?}", self, value.changed_at); + + ProbeState::UpToDate(value) + } else { + let changed_at = memo.revisions.changed_at; + ProbeState::NoValue(state, changed_at) + } + } + } + } + + pub(super) fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb) -> Durability { + match &*self.state.read() { + QueryState::NotComputed => Durability::LOW, + QueryState::InProgress { .. 
} => panic!("query in progress"), + QueryState::Memoized(memo) => { + if memo.check_durability(db.salsa_runtime()) { + memo.revisions.durability + } else { + Durability::LOW + } + } + } + } + + pub(super) fn as_table_entry(&self, key: &Q::Key) -> Option<TableEntry<Q::Key, Q::Value>> { + match &*self.state.read() { + QueryState::NotComputed => None, + QueryState::InProgress { .. } => Some(TableEntry::new(key.clone(), None)), + QueryState::Memoized(memo) => Some(TableEntry::new(key.clone(), memo.value.clone())), + } + } + + pub(super) fn evict(&self) { + let mut state = self.state.write(); + if let QueryState::Memoized(memo) = &mut *state { + // Evicting a value with an untracked input could + // lead to inconsistencies. Note that we can't check + // `has_untracked_input` when we add the value to the cache, + // because inputs can become untracked in the next revision. + if memo.has_untracked_input() { + return; + } + memo.value = None; + } + } + + pub(super) fn invalidate(&self, new_revision: Revision) -> Option<Durability> { + tracing::debug!("Slot::invalidate(new_revision = {:?})", new_revision); + match &mut *self.state.write() { + QueryState::Memoized(memo) => { + memo.revisions.untracked = true; + memo.revisions.inputs = None; + memo.revisions.changed_at = new_revision; + Some(memo.revisions.durability) + } + QueryState::NotComputed => None, + QueryState::InProgress { .. } => unreachable!(), + } + } + + pub(super) fn maybe_changed_after( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + revision: Revision, + key: &Q::Key, + ) -> bool { + let runtime = db.salsa_runtime(); + let revision_now = runtime.current_revision(); + + db.unwind_if_cancelled(); + + debug!( + "maybe_changed_after({:?}) called with revision={:?}, revision_now={:?}", + self, revision, revision_now, + ); + + // Do an initial probe with just the read-lock. + // + // If we find that a cache entry for the value is present + // but hasn't been verified in this revision, we'll have to + // do more. + loop { + match self.maybe_changed_after_probe(db, self.state.read(), runtime, revision_now) { + MaybeChangedSinceProbeState::Retry => continue, + MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision, + MaybeChangedSinceProbeState::Stale(state) => { + drop(state); + return self.maybe_changed_after_upgrade(db, revision, key); + } + } + } + } + + fn maybe_changed_after_probe<StateGuard>( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + state: StateGuard, + runtime: &Runtime, + revision_now: Revision, + ) -> MaybeChangedSinceProbeState<StateGuard> + where + StateGuard: Deref<Target = QueryState<Q>>, + { + match self.probe(db, state, runtime, revision_now) { + ProbeState::Retry => MaybeChangedSinceProbeState::Retry, + + ProbeState::Stale(state) => MaybeChangedSinceProbeState::Stale(state), + + // If we know when value last changed, we can return right away. + // Note that we don't need the actual value to be available. + ProbeState::NoValue(_, changed_at) + | ProbeState::UpToDate(StampedValue { value: _, durability: _, changed_at }) => { + MaybeChangedSinceProbeState::ChangedAt(changed_at) + } + + // If we have nothing cached, then value may have changed. 
+ ProbeState::NotComputed(_) => MaybeChangedSinceProbeState::ChangedAt(revision_now), + } + } + + fn maybe_changed_after_upgrade( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + revision: Revision, + key: &Q::Key, + ) -> bool { + let runtime = db.salsa_runtime(); + let revision_now = runtime.current_revision(); + + // Get an upgradable read lock, which permits other reads but no writers. + // Probe again. If the value is stale (needs to be verified), then upgrade + // to a write lock and swap it with InProgress while we work. + let mut old_memo = match self.maybe_changed_after_probe( + db, + self.state.upgradable_read(), + runtime, + revision_now, + ) { + MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision, + + // If another thread was active, then the cache line is going to be + // either verified or cleared out. Just recurse to figure out which. + // Note that we don't need an upgradable read. + MaybeChangedSinceProbeState::Retry => { + return self.maybe_changed_after(db, revision, key) + } + + MaybeChangedSinceProbeState::Stale(state) => { + type RwLockUpgradableReadGuard<'a, T> = + lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>; + + let mut state = RwLockUpgradableReadGuard::upgrade(state); + match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) { + QueryState::Memoized(old_memo) => old_memo, + QueryState::NotComputed | QueryState::InProgress { .. } => unreachable!(), + } + } + }; + + let panic_guard = PanicGuard::new(self, runtime); + let active_query = runtime.push_query(self.database_key_index()); + + if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) { + let maybe_changed = old_memo.revisions.changed_at > revision; + panic_guard.proceed(Some(old_memo)); + maybe_changed + } else if old_memo.value.is_some() { + // We found that this memoized value may have changed + // but we have an old value. We can re-run the code and + // actually *check* if it has changed. + let StampedValue { changed_at, .. } = self.execute( + db, + runtime, + revision_now, + active_query, + panic_guard, + Some(old_memo), + key, + ); + changed_at > revision + } else { + // We found that inputs to this memoized value may have chanced + // but we don't have an old value to compare against or re-use. + // No choice but to drop the memo and say that its value may have changed. + panic_guard.proceed(None); + true + } + } + + /// Helper: see [`Runtime::try_block_on_or_unwind`]. + fn block_on_or_unwind<MutexGuard>( + &self, + db: &<Q as QueryDb<'_>>::DynDb, + runtime: &Runtime, + other_id: RuntimeId, + mutex_guard: MutexGuard, + ) { + runtime.block_on_or_unwind( + db.ops_database(), + self.database_key_index(), + other_id, + mutex_guard, + ) + } + + fn should_memoize_value(&self, key: &Q::Key) -> bool { + MP::should_memoize_value(key) + } +} + +impl<Q> QueryState<Q> +where + Q: QueryFunction, +{ + fn in_progress(id: RuntimeId) -> Self { + QueryState::InProgress { id, anyone_waiting: Default::default() } + } +} + +struct PanicGuard<'me, Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + slot: &'me Slot<Q, MP>, + runtime: &'me Runtime, +} + +impl<'me, Q, MP> PanicGuard<'me, Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn new(slot: &'me Slot<Q, MP>, runtime: &'me Runtime) -> Self { + Self { slot, runtime } + } + + /// Indicates that we have concluded normally (without panicking). + /// If `opt_memo` is some, then this memo is installed as the new + /// memoized value. 
If `opt_memo` is `None`, then the slot is cleared + /// and has no value. + fn proceed(mut self, opt_memo: Option<Memo<Q::Value>>) { + self.overwrite_placeholder(WaitResult::Completed, opt_memo); + std::mem::forget(self) + } + + /// Overwrites the `InProgress` placeholder for `key` that we + /// inserted; if others were blocked, waiting for us to finish, + /// then notify them. + fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option<Memo<Q::Value>>) { + let old_value = { + let mut write = self.slot.state.write(); + match opt_memo { + // Replace the `InProgress` marker that we installed with the new + // memo, thus releasing our unique access to this key. + Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)), + + // We had installed an `InProgress` marker, but we panicked before + // it could be removed. At this point, we therefore "own" unique + // access to our slot, so we can just remove the key. + None => std::mem::replace(&mut *write, QueryState::NotComputed), + } + }; + + match old_value { + QueryState::InProgress { id, anyone_waiting } => { + assert_eq!(id, self.runtime.id()); + + // NB: As noted on the `store`, `Ordering::Relaxed` is + // sufficient here. This boolean signals us on whether to + // acquire a mutex; the mutex will guarantee that all writes + // we are interested in are visible. + if anyone_waiting.load(Ordering::Relaxed) { + self.runtime + .unblock_queries_blocked_on(self.slot.database_key_index(), wait_result); + } + } + _ => panic!( + "\ +Unexpected panic during query evaluation, aborting the process. + +Please report this bug to https://github.com/salsa-rs/salsa/issues." + ), + } + } +} + +impl<'me, Q, MP> Drop for PanicGuard<'me, Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn drop(&mut self) { + if std::thread::panicking() { + // We panicked before we could proceed and need to remove `key`. + self.overwrite_placeholder(WaitResult::Panicked, None) + } else { + // If no panic occurred, then panic guard ought to be + // "forgotten" and so this Drop code should never run. + panic!(".forget() was not called") + } + } +} + +impl<V> Memo<V> +where + V: Clone, +{ + /// Determines whether the value stored in this memo (if any) is still + /// valid in the current revision. If so, returns a stamped value. + /// + /// If needed, this will walk each dependency and + /// recursively invoke `maybe_changed_after`, which may in turn + /// re-execute the dependency. This can cause cycles to occur, + /// so the current query must be pushed onto the + /// stack to permit cycle detection and recovery: therefore, + /// takes the `active_query` argument as evidence. + fn verify_value( + &mut self, + db: &dyn Database, + revision_now: Revision, + active_query: &ActiveQueryGuard<'_>, + ) -> Option<StampedValue<V>> { + // If we don't have a memoized value, nothing to validate. + if self.value.is_none() { + return None; + } + if self.verify_revisions(db, revision_now, active_query) { + self.value.clone().map(|value| StampedValue { + durability: self.revisions.durability, + changed_at: self.revisions.changed_at, + value, + }) + } else { + None + } + } + + /// Determines whether the value represented by this memo is still + /// valid in the current revision; note that the value itself is + /// not needed for this check. If needed, this will walk each + /// dependency and recursively invoke `maybe_changed_after`, which + /// may in turn re-execute the dependency. 
This can cause cycles to occur, + /// so the current query must be pushed onto the + /// stack to permit cycle detection and recovery: therefore, + /// takes the `active_query` argument as evidence. + fn verify_revisions( + &mut self, + db: &dyn Database, + revision_now: Revision, + _active_query: &ActiveQueryGuard<'_>, + ) -> bool { + assert!(self.verified_at != revision_now); + let verified_at = self.verified_at; + + debug!( + "verify_revisions: verified_at={:?}, revision_now={:?}, inputs={:#?}", + verified_at, revision_now, self.revisions.inputs + ); + + if self.check_durability(db.salsa_runtime()) { + return self.mark_value_as_verified(revision_now); + } + + match &self.revisions.inputs { + // We can't validate values that had untracked inputs; just have to + // re-execute. + None if self.revisions.untracked => return false, + None => {} + + // Check whether any of our inputs changed since the + // **last point where we were verified** (not since we + // last changed). This is important: if we have + // memoized values, then an input may have changed in + // revision R2, but we found that *our* value was the + // same regardless, so our change date is still + // R1. But our *verification* date will be R2, and we + // are only interested in finding out whether the + // input changed *again*. + Some(inputs) => { + let changed_input = + inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); + if let Some(input) = changed_input { + debug!("validate_memoized_value: `{:?}` may have changed", input); + + return false; + } + } + }; + + self.mark_value_as_verified(revision_now) + } + + /// True if this memo is known not to have changed based on its durability. + fn check_durability(&self, runtime: &Runtime) -> bool { + let last_changed = runtime.last_changed_revision(self.revisions.durability); + debug!( + "check_durability(last_changed={:?} <= verified_at={:?}) = {:?}", + last_changed, + self.verified_at, + last_changed <= self.verified_at, + ); + last_changed <= self.verified_at + } + + fn mark_value_as_verified(&mut self, revision_now: Revision) -> bool { + self.verified_at = revision_now; + true + } + + fn has_untracked_input(&self) -> bool { + self.revisions.untracked + } +} + +impl<Q, MP> std::fmt::Debug for Slot<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(fmt, "{:?}", Q::default()) + } +} + +impl<Q, MP> LruNode for Slot<Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, +{ + fn lru_index(&self) -> &LruIndex { + &self.lru_index + } +} + +/// Check that `Slot<Q, MP>: Send + Sync` as long as +/// `DB::DatabaseData: Send + Sync`, which in turn implies that +/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`. +#[allow(dead_code)] +fn check_send_sync<Q, MP>() +where + Q: QueryFunction, + MP: MemoizationPolicy<Q>, + Q::Key: Send + Sync, + Q::Value: Send + Sync, +{ + fn is_send_sync<T: Send + Sync>() {} + is_send_sync::<Slot<Q, MP>>(); +} + +/// Check that `Slot<Q, MP>: 'static` as long as +/// `DB::DatabaseData: 'static`, which in turn implies that +/// `Q::Key: 'static`, `Q::Value: 'static`. 
+#[allow(dead_code)] +fn check_static<Q, MP>() +where + Q: QueryFunction + 'static, + MP: MemoizationPolicy<Q> + 'static, + Q::Key: 'static, + Q::Value: 'static, +{ + fn is_static<T: 'static>() {} + is_static::<Slot<Q, MP>>(); +} diff --git a/src/tools/rust-analyzer/crates/salsa/src/intern_id.rs b/src/tools/rust-analyzer/crates/salsa/src/intern_id.rs index b060d8aab68..8e74c100aca 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/intern_id.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/intern_id.rs @@ -63,7 +63,8 @@ impl InternId { /// `value` must be less than `MAX` pub const unsafe fn new_unchecked(value: u32) -> Self { debug_assert!(value < InternId::MAX); - InternId { value: NonZeroU32::new_unchecked(value + 1) } + let value = unsafe { NonZeroU32::new_unchecked(value + 1) }; + InternId { value } } /// Convert this raw-id into a u32 value. diff --git a/src/tools/rust-analyzer/crates/salsa/src/interned.rs b/src/tools/rust-analyzer/crates/salsa/src/interned.rs index eef8bcc814f..359662ec6b2 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/interned.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/interned.rs @@ -121,8 +121,8 @@ impl InternValueTrivial for String {} #[derive(Debug)] struct Slot<V> { - /// DatabaseKeyIndex for this slot. - database_key_index: DatabaseKeyIndex, + /// key index for this slot. + key_index: u32, /// Value that was interned. value: V, @@ -199,13 +199,8 @@ where }; let create_slot = |index: InternId| { - let database_key_index = DatabaseKeyIndex { - group_index: self.group_index, - query_index: Q::QUERY_INDEX, - key_index: index.as_u32(), - }; Arc::new(Slot { - database_key_index, + key_index: index.as_u32(), value: insert(Q::Value::from_intern_id(index)), interned_at: revision_now, }) @@ -242,7 +237,11 @@ where }; let changed_at = slot.interned_at; db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( - slot.database_key_index, + DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index: slot.key_index, + }, INTERN_DURABILITY, changed_at, ); @@ -294,7 +293,11 @@ where }; let changed_at = slot.interned_at; db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( - slot.database_key_index, + DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index: slot.key_index, + }, INTERN_DURABILITY, changed_at, ); @@ -414,7 +417,11 @@ where let value = slot.value.clone(); let interned_at = slot.interned_at; db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( - slot.database_key_index, + DatabaseKeyIndex { + group_index: interned_storage.group_index, + query_index: Q::QUERY_INDEX, + key_index: slot.key_index, + }, INTERN_DURABILITY, interned_at, ); diff --git a/src/tools/rust-analyzer/crates/salsa/src/lib.rs b/src/tools/rust-analyzer/crates/salsa/src/lib.rs index e11e6e2e19f..48d6dc2e387 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/lib.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/lib.rs @@ -10,6 +10,7 @@ //! from previous invocations as appropriate. mod derived; +mod derived_lru; mod durability; mod hash; mod input; @@ -577,7 +578,7 @@ where /// cost of potential extra recalculations of evicted values. /// /// If `cap` is zero, all values are preserved, this is the default. 
- pub fn set_lru_capacity(&self, cap: usize) + pub fn set_lru_capacity(&self, cap: u16) where Q::Storage: plumbing::LruQueryStorageOps, { diff --git a/src/tools/rust-analyzer/crates/salsa/src/lru.rs b/src/tools/rust-analyzer/crates/salsa/src/lru.rs index f63f4c1e986..a6f96beeab1 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/lru.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/lru.rs @@ -1,7 +1,7 @@ use oorandom::Rand64; use parking_lot::Mutex; use std::fmt::Debug; -use std::sync::atomic::AtomicUsize; +use std::sync::atomic::AtomicU16; use std::sync::atomic::Ordering; use triomphe::Arc; @@ -20,15 +20,15 @@ pub(crate) struct Lru<Node> where Node: LruNode, { - green_zone: AtomicUsize, + green_zone: AtomicU16, data: Mutex<LruData<Node>>, } #[derive(Debug)] struct LruData<Node> { - end_red_zone: usize, - end_yellow_zone: usize, - end_green_zone: usize, + end_red_zone: u16, + end_yellow_zone: u16, + end_green_zone: u16, rng: Rand64, entries: Vec<Arc<Node>>, } @@ -39,9 +39,9 @@ pub(crate) trait LruNode: Sized + Debug { #[derive(Debug)] pub(crate) struct LruIndex { - /// Index in the appropriate LRU list, or std::usize::MAX if not a + /// Index in the appropriate LRU list, or std::u16::MAX if not a /// member. - index: AtomicUsize, + index: AtomicU16, } impl<Node> Default for Lru<Node> @@ -68,12 +68,12 @@ where #[cfg_attr(not(test), allow(dead_code))] fn with_seed(seed: &str) -> Self { - Lru { green_zone: AtomicUsize::new(0), data: Mutex::new(LruData::with_seed(seed)) } + Lru { green_zone: AtomicU16::new(0), data: Mutex::new(LruData::with_seed(seed)) } } /// Adjust the total number of nodes permitted to have a value at /// once. If `len` is zero, this disables LRU caching completely. - pub(crate) fn set_lru_capacity(&self, len: usize) { + pub(crate) fn set_lru_capacity(&self, len: u16) { let mut data = self.data.lock(); // We require each zone to have at least 1 slot. Therefore, @@ -143,23 +143,24 @@ where LruData { end_yellow_zone: 0, end_green_zone: 0, end_red_zone: 0, entries: Vec::new(), rng } } - fn green_zone(&self) -> std::ops::Range<usize> { + fn green_zone(&self) -> std::ops::Range<u16> { 0..self.end_green_zone } - fn yellow_zone(&self) -> std::ops::Range<usize> { + fn yellow_zone(&self) -> std::ops::Range<u16> { self.end_green_zone..self.end_yellow_zone } - fn red_zone(&self) -> std::ops::Range<usize> { + fn red_zone(&self) -> std::ops::Range<u16> { self.end_yellow_zone..self.end_red_zone } - fn resize(&mut self, len_green_zone: usize, len_yellow_zone: usize, len_red_zone: usize) { + fn resize(&mut self, len_green_zone: u16, len_yellow_zone: u16, len_red_zone: u16) { self.end_green_zone = len_green_zone; self.end_yellow_zone = self.end_green_zone + len_yellow_zone; self.end_red_zone = self.end_yellow_zone + len_red_zone; - let entries = std::mem::replace(&mut self.entries, Vec::with_capacity(self.end_red_zone)); + let entries = + std::mem::replace(&mut self.entries, Vec::with_capacity(self.end_red_zone as usize)); tracing::debug!("green_zone = {:?}", self.green_zone()); tracing::debug!("yellow_zone = {:?}", self.yellow_zone()); @@ -207,7 +208,7 @@ where // Easy case: we still have capacity. Push it, and then promote // it up to the appropriate zone. - let len = self.entries.len(); + let len = self.entries.len() as u16; if len < self.end_red_zone { self.entries.push(node.clone()); node.lru_index().store(len); @@ -218,7 +219,7 @@ where // Harder case: no capacity. Create some by evicting somebody from red // zone and then promoting. 
let victim_index = self.pick_index(self.red_zone()); - let victim_node = std::mem::replace(&mut self.entries[victim_index], node.clone()); + let victim_node = std::mem::replace(&mut self.entries[victim_index as usize], node.clone()); tracing::debug!("evicting red node {:?} from {}", victim_node, victim_index); victim_node.lru_index().clear(); self.promote_red_to_green(node, victim_index); @@ -231,7 +232,7 @@ where /// /// NB: It is not required that `node.lru_index()` is up-to-date /// when entering this method. - fn promote_red_to_green(&mut self, node: &Arc<Node>, red_index: usize) { + fn promote_red_to_green(&mut self, node: &Arc<Node>, red_index: u16) { debug_assert!(self.red_zone().contains(&red_index)); // Pick a yellow at random and switch places with it. @@ -242,12 +243,12 @@ where let yellow_index = self.pick_index(self.yellow_zone()); tracing::debug!( "demoting yellow node {:?} from {} to red at {}", - self.entries[yellow_index], + self.entries[yellow_index as usize], yellow_index, red_index, ); - self.entries.swap(yellow_index, red_index); - self.entries[red_index].lru_index().store(red_index); + self.entries.swap(yellow_index as usize, red_index as usize); + self.entries[red_index as usize].lru_index().store(red_index); // Now move ourselves up into the green zone. self.promote_yellow_to_green(node, yellow_index); @@ -259,51 +260,51 @@ where /// /// NB: It is not required that `node.lru_index()` is up-to-date /// when entering this method. - fn promote_yellow_to_green(&mut self, node: &Arc<Node>, yellow_index: usize) { + fn promote_yellow_to_green(&mut self, node: &Arc<Node>, yellow_index: u16) { debug_assert!(self.yellow_zone().contains(&yellow_index)); // Pick a yellow at random and switch places with it. let green_index = self.pick_index(self.green_zone()); tracing::debug!( "demoting green node {:?} from {} to yellow at {}", - self.entries[green_index], + self.entries[green_index as usize], green_index, yellow_index ); - self.entries.swap(green_index, yellow_index); - self.entries[yellow_index].lru_index().store(yellow_index); + self.entries.swap(green_index as usize, yellow_index as usize); + self.entries[yellow_index as usize].lru_index().store(yellow_index); node.lru_index().store(green_index); tracing::debug!("promoted {:?} to green index {}", node, green_index); } - fn pick_index(&mut self, zone: std::ops::Range<usize>) -> usize { - let end_index = std::cmp::min(zone.end, self.entries.len()); - self.rng.rand_range(zone.start as u64..end_index as u64) as usize + fn pick_index(&mut self, zone: std::ops::Range<u16>) -> u16 { + let end_index = std::cmp::min(zone.end, self.entries.len() as u16); + self.rng.rand_range(zone.start as u64..end_index as u64) as u16 } } impl Default for LruIndex { fn default() -> Self { - Self { index: AtomicUsize::new(usize::MAX) } + Self { index: AtomicU16::new(u16::MAX) } } } impl LruIndex { - fn load(&self) -> usize { + fn load(&self) -> u16 { self.index.load(Ordering::Acquire) // see note on ordering below } - fn store(&self, value: usize) { + fn store(&self, value: u16) { self.index.store(value, Ordering::Release) // see note on ordering below } fn clear(&self) { - self.store(usize::MAX); + self.store(u16::MAX); } fn is_in_lru(&self) -> bool { - self.load() != usize::MAX + self.load() != u16::MAX } } diff --git a/src/tools/rust-analyzer/crates/salsa/src/plumbing.rs b/src/tools/rust-analyzer/crates/salsa/src/plumbing.rs index 1dfde639869..e96b9daa979 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/plumbing.rs +++ 
b/src/tools/rust-analyzer/crates/salsa/src/plumbing.rs @@ -12,8 +12,9 @@ use std::fmt::Debug; use std::hash::Hash; use triomphe::Arc; -pub use crate::derived::DependencyStorage; pub use crate::derived::MemoizedStorage; +pub use crate::derived_lru::DependencyStorage as LruDependencyStorage; +pub use crate::derived_lru::MemoizedStorage as LruMemoizedStorage; pub use crate::input::{InputStorage, UnitInputStorage}; pub use crate::interned::InternedStorage; pub use crate::interned::LookupInternedStorage; @@ -228,7 +229,7 @@ where /// that is, storage whose value is not derived from other storage but /// is set independently. pub trait LruQueryStorageOps { - fn set_lru_capacity(&self, new_capacity: usize); + fn set_lru_capacity(&self, new_capacity: u16); } pub trait DerivedQueryStorageOps<Q> diff --git a/src/tools/rust-analyzer/crates/salsa/src/runtime.rs b/src/tools/rust-analyzer/crates/salsa/src/runtime.rs index 4f3341f5150..5fe5f4b46d3 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/runtime.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/runtime.rs @@ -18,7 +18,7 @@ use dependency_graph::DependencyGraph; pub(crate) mod local_state; use local_state::LocalState; -use self::local_state::{ActiveQueryGuard, QueryInputs, QueryRevisions}; +use self::local_state::{ActiveQueryGuard, QueryRevisions}; /// The salsa runtime stores the storage for all queries as well as /// tracking the query stack and dependencies between cycles. @@ -558,21 +558,25 @@ impl ActiveQuery { } pub(crate) fn revisions(&self) -> QueryRevisions { - let inputs = match &self.dependencies { - None => QueryInputs::Untracked, + let (inputs, untracked) = match &self.dependencies { + None => (None, true), - Some(dependencies) => { + Some(dependencies) => ( if dependencies.is_empty() { - QueryInputs::NoInputs + None } else { - QueryInputs::Tracked { - inputs: ThinArc::from_header_and_iter((), dependencies.iter().copied()), - } - } - } + Some(ThinArc::from_header_and_iter((), dependencies.iter().copied())) + }, + false, + ), }; - QueryRevisions { changed_at: self.changed_at, inputs, durability: self.durability } + QueryRevisions { + changed_at: self.changed_at, + inputs, + untracked, + durability: self.durability, + } } /// Adds any dependencies from `other` into `self`. diff --git a/src/tools/rust-analyzer/crates/salsa/src/runtime/local_state.rs b/src/tools/rust-analyzer/crates/salsa/src/runtime/local_state.rs index 0dbea1d563e..73869671886 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/runtime/local_state.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/runtime/local_state.rs @@ -34,21 +34,13 @@ pub(crate) struct QueryRevisions { /// Minimum durability of the inputs to this query. pub(crate) durability: Durability, - /// The inputs that went into our query, if we are tracking them. - pub(crate) inputs: QueryInputs, -} - -/// Every input. -#[derive(Debug, Clone)] -pub(crate) enum QueryInputs { - /// Non-empty set of inputs, fully known - Tracked { inputs: ThinArc<(), DatabaseKeyIndex> }, - - /// Empty set of inputs, fully known. - NoInputs, + /// Whether the input is untracked. + /// Invariant: if `untracked`, `inputs` is `None`. + /// Why is this encoded like this and not a proper enum? Struct size, this saves us 8 bytes. + pub(crate) untracked: bool, - /// Unknown quantity of inputs - Untracked, + /// The inputs that went into our query, if we are tracking them. 
+ pub(crate) inputs: Option<ThinArc<(), DatabaseKeyIndex>>, } impl Default for LocalState { diff --git a/src/tools/rust-analyzer/crates/salsa/tests/lru.rs b/src/tools/rust-analyzer/crates/salsa/tests/lru.rs index 3da8519b081..ef98a2c32b4 100644 --- a/src/tools/rust-analyzer/crates/salsa/tests/lru.rs +++ b/src/tools/rust-analyzer/crates/salsa/tests/lru.rs @@ -24,7 +24,9 @@ impl Drop for HotPotato { #[salsa::query_group(QueryGroupStorage)] trait QueryGroup: salsa::Database { + #[salsa::lru] fn get(&self, x: u32) -> Arc<HotPotato>; + #[salsa::lru] fn get_volatile(&self, x: u32) -> usize; } diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs index 332745aae6e..b61baa22446 100644 --- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs +++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs @@ -80,13 +80,11 @@ macro_rules! register_ast_id_node { } register_ast_id_node! { impl AstIdNode for - Item, + Item, AnyHasGenericParams, Adt, Enum, Variant, Struct, - RecordField, - TupleField, Union, AssocItem, Const, @@ -104,7 +102,7 @@ register_ast_id_node! { Trait, TraitAlias, Use, - BlockExpr, ConstArg, Param, SelfParam + BlockExpr, ConstArg } /// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back. diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs index bbaf1b2a6d5..b4e21d64f81 100644 --- a/src/tools/rust-analyzer/crates/span/src/lib.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -17,18 +17,6 @@ pub use syntax::Edition; pub use text_size::{TextRange, TextSize}; pub use vfs::FileId; -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct FilePosition { - pub file_id: FileId, - pub offset: TextSize, -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct FileRange { - pub file_id: FileId, - pub range: TextRange, -} - // The first index is always the root node's AstId /// The root ast id always points to the encompassing file, using this in spans is discouraged as /// any range relative to it will be effectively absolute, ruining the entire point of anchored @@ -45,6 +33,16 @@ pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId = pub type Span = SpanData<SyntaxContextId>; +impl Span { + pub fn cover(self, other: Span) -> Span { + if self.anchor != other.anchor { + return self; + } + let range = self.range.cover(other.range); + Span { range, ..self } + } +} + /// Spans represent a region of code, used by the IDE to be able link macro inputs and outputs /// together. Positions in spans are relative to some [`SpanAnchor`] to make them more incremental /// friendly. 
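The `Span::cover` method added in the span/lib.rs hunk above merges two spans only when they share the same `SpanAnchor`; since ranges are relative to their anchor, a union across different anchors would not be meaningful, so the left-hand span is returned unchanged in that case. A minimal, hedged sketch of that behaviour on stand-in types (the real `Span`, `SpanAnchor`, and `TextRange` are the ones defined in this crate; `Anchor`, `Range`, and `MiniSpan` below are illustrative names only):

// Stand-in types for illustration; not the crate's real definitions.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Anchor(u32);

#[derive(Clone, Copy, PartialEq, Debug)]
struct Range { start: u32, end: u32 }

impl Range {
    // Smallest range containing both `self` and `other`.
    fn cover(self, other: Range) -> Range {
        Range { start: self.start.min(other.start), end: self.end.max(other.end) }
    }
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct MiniSpan { anchor: Anchor, range: Range }

impl MiniSpan {
    // Mirrors the new `Span::cover`: ranges are merged only when both
    // spans are relative to the same anchor; otherwise `self` is returned.
    fn cover(self, other: MiniSpan) -> MiniSpan {
        if self.anchor != other.anchor {
            return self;
        }
        MiniSpan { range: self.range.cover(other.range), ..self }
    }
}

fn main() {
    let a = MiniSpan { anchor: Anchor(0), range: Range { start: 4, end: 10 } };
    let b = MiniSpan { anchor: Anchor(0), range: Range { start: 8, end: 20 } };
    let c = MiniSpan { anchor: Anchor(1), range: Range { start: 0, end: 2 } };

    // Same anchor: ranges are unioned.
    assert_eq!(a.cover(b).range, Range { start: 4, end: 20 });
    // Different anchors: the left-hand span comes back unchanged.
    assert_eq!(a.cover(c).range, a.range);
}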
@@ -63,7 +61,7 @@ pub struct SpanData<Ctx> { impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if f.alternate() { - fmt::Debug::fmt(&self.anchor.file_id.index(), f)?; + fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?; f.write_char(':')?; fmt::Debug::fmt(&self.anchor.ast_id.into_raw(), f)?; f.write_char('@')?; @@ -88,7 +86,7 @@ impl<Ctx: Copy> SpanData<Ctx> { impl fmt::Display for Span { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&self.anchor.file_id.index(), f)?; + fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?; f.write_char(':')?; fmt::Debug::fmt(&self.anchor.ast_id.into_raw(), f)?; f.write_char('@')?; @@ -100,7 +98,7 @@ impl fmt::Display for Span { #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct SpanAnchor { - pub file_id: FileId, + pub file_id: EditionedFileId, pub ast_id: ErasedFileAstId, } @@ -110,6 +108,81 @@ impl fmt::Debug for SpanAnchor { } } +/// A [`FileId`] and [`Edition`] bundled up together. +/// The MSB is reserved for `HirFileId` encoding, more upper bits are used to then encode the edition. +#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct EditionedFileId(u32); + +impl fmt::Debug for EditionedFileId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("EditionedFileId").field(&self.file_id()).field(&self.edition()).finish() + } +} + +impl From<EditionedFileId> for FileId { + fn from(value: EditionedFileId) -> Self { + value.file_id() + } +} + +const _: () = assert!( + EditionedFileId::RESERVED_HIGH_BITS + + EditionedFileId::EDITION_BITS + + EditionedFileId::FILE_ID_BITS + == u32::BITS +); +const _: () = assert!( + EditionedFileId::RESERVED_MASK ^ EditionedFileId::EDITION_MASK ^ EditionedFileId::FILE_ID_MASK + == 0xFFFF_FFFF +); + +impl EditionedFileId { + pub const RESERVED_MASK: u32 = 0x8000_0000; + pub const EDITION_MASK: u32 = 0x7F80_0000; + pub const FILE_ID_MASK: u32 = 0x007F_FFFF; + + pub const MAX_FILE_ID: u32 = Self::FILE_ID_MASK; + + pub const RESERVED_HIGH_BITS: u32 = Self::RESERVED_MASK.count_ones(); + pub const FILE_ID_BITS: u32 = Self::FILE_ID_MASK.count_ones(); + pub const EDITION_BITS: u32 = Self::EDITION_MASK.count_ones(); + + pub const fn current_edition(file_id: FileId) -> Self { + Self::new(file_id, Edition::CURRENT) + } + + pub const fn new(file_id: FileId, edition: Edition) -> Self { + let file_id = file_id.index(); + let edition = edition as u32; + assert!(file_id <= Self::MAX_FILE_ID); + Self(file_id | (edition << Self::FILE_ID_BITS)) + } + + pub fn from_raw(u32: u32) -> Self { + assert!(u32 & Self::RESERVED_MASK == 0); + assert!((u32 & Self::EDITION_MASK) >> Self::FILE_ID_BITS <= Edition::LATEST as u32); + Self(u32) + } + + pub const fn as_u32(self) -> u32 { + self.0 + } + + pub const fn file_id(self) -> FileId { + FileId::from_raw(self.0 & Self::FILE_ID_MASK) + } + + pub const fn unpack(self) -> (FileId, Edition) { + (self.file_id(), self.edition()) + } + + pub const fn edition(self) -> Edition { + let edition = (self.0 & Self::EDITION_MASK) >> Self::FILE_ID_BITS; + debug_assert!(edition <= Edition::LATEST as u32); + unsafe { std::mem::transmute(edition as u8) } + } +} + /// Input to the analyzer is a set of files, where each file is identified by /// `FileId` and contains source code. 
However, another source of source code in /// Rust are macros: each macro can be thought of as producing a "temporary @@ -149,6 +222,38 @@ impl fmt::Debug for HirFileId { } } +impl PartialEq<FileId> for HirFileId { + fn eq(&self, &other: &FileId) -> bool { + self.file_id().map(EditionedFileId::file_id) == Some(other) + } +} +impl PartialEq<HirFileId> for FileId { + fn eq(&self, other: &HirFileId) -> bool { + other.file_id().map(EditionedFileId::file_id) == Some(*self) + } +} + +impl PartialEq<EditionedFileId> for HirFileId { + fn eq(&self, &other: &EditionedFileId) -> bool { + *self == HirFileId::from(other) + } +} +impl PartialEq<HirFileId> for EditionedFileId { + fn eq(&self, &other: &HirFileId) -> bool { + other == HirFileId::from(*self) + } +} +impl PartialEq<EditionedFileId> for FileId { + fn eq(&self, &other: &EditionedFileId) -> bool { + *self == FileId::from(other) + } +} +impl PartialEq<FileId> for EditionedFileId { + fn eq(&self, &other: &FileId) -> bool { + other == FileId::from(*self) + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroFileId { pub macro_call_id: MacroCallId, @@ -182,14 +287,14 @@ impl MacroCallId { #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub enum HirFileIdRepr { - FileId(FileId), + FileId(EditionedFileId), MacroFile(MacroFileId), } impl fmt::Debug for HirFileIdRepr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(), + Self::FileId(arg0) => arg0.fmt(f), Self::MacroFile(arg0) => { f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish() } @@ -197,19 +302,17 @@ impl fmt::Debug for HirFileIdRepr { } } -impl From<FileId> for HirFileId { +impl From<EditionedFileId> for HirFileId { #[allow(clippy::let_unit_value)] - fn from(id: FileId) -> Self { - _ = Self::ASSERT_MAX_FILE_ID_IS_SAME; - assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index()); - HirFileId(id.index()) + fn from(id: EditionedFileId) -> Self { + assert!(id.as_u32() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.as_u32()); + HirFileId(id.as_u32()) } } impl From<MacroFileId> for HirFileId { #[allow(clippy::let_unit_value)] fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self { - _ = Self::ASSERT_MAX_FILE_ID_IS_SAME; let id = id.as_u32(); assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {id} is too large"); HirFileId(id | Self::MACRO_FILE_TAG_MASK) @@ -217,9 +320,6 @@ impl From<MacroFileId> for HirFileId { } impl HirFileId { - const ASSERT_MAX_FILE_ID_IS_SAME: () = - [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize]; - const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; const MACRO_FILE_TAG_MASK: u32 = 1 << 31; @@ -239,9 +339,9 @@ impl HirFileId { } #[inline] - pub fn file_id(self) -> Option<FileId> { + pub fn file_id(self) -> Option<EditionedFileId> { match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => Some(FileId::from_raw(self.0)), + 0 => Some(EditionedFileId(self.0)), _ => None, } } @@ -249,7 +349,7 @@ impl HirFileId { #[inline] pub fn repr(self) -> HirFileIdRepr { match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)), + 0 => HirFileIdRepr::FileId(EditionedFileId(self.0)), _ => HirFileIdRepr::MacroFile(MacroFileId { macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), }), diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs index 
81fc56c961e..6269f4c30c7 100644 --- a/src/tools/rust-analyzer/crates/span/src/map.rs +++ b/src/tools/rust-analyzer/crates/span/src/map.rs @@ -4,11 +4,10 @@ use std::{fmt, hash::Hash}; use stdx::{always, itertools::Itertools}; -use vfs::FileId; use crate::{ - ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, TextRange, TextSize, - ROOT_ERASED_FILE_AST_ID, + EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, TextRange, + TextSize, ROOT_ERASED_FILE_AST_ID, }; /// Maps absolute text ranges for the corresponding file to the relevant span data. @@ -109,7 +108,7 @@ where #[derive(PartialEq, Eq, Hash, Debug)] pub struct RealSpanMap { - file_id: FileId, + file_id: EditionedFileId, /// Invariant: Sorted vec over TextSize // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? pairs: Box<[(TextSize, ErasedFileAstId)]>, @@ -128,7 +127,7 @@ impl fmt::Display for RealSpanMap { impl RealSpanMap { /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id). - pub fn absolute(file_id: FileId) -> Self { + pub fn absolute(file_id: EditionedFileId) -> Self { RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]), @@ -137,7 +136,7 @@ impl RealSpanMap { } pub fn from_file( - file_id: FileId, + file_id: EditionedFileId, pairs: Box<[(TextSize, ErasedFileAstId)]>, end: TextSize, ) -> Self { diff --git a/src/tools/rust-analyzer/crates/stdx/src/anymap.rs b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs index 4eafcfb060f..91fab8e9238 100644 --- a/src/tools/rust-analyzer/crates/stdx/src/anymap.rs +++ b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs @@ -271,12 +271,12 @@ macro_rules! implement { #[inline] unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T { - &*(self as *const Self as *const T) + unsafe { &*(self as *const Self as *const T) } } #[inline] unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T { - &mut *(self as *mut Self as *mut T) + unsafe { &mut *(self as *mut Self as *mut T) } } } diff --git a/src/tools/rust-analyzer/crates/stdx/src/process.rs b/src/tools/rust-analyzer/crates/stdx/src/process.rs index c54d850d7b5..75ae064db9a 100644 --- a/src/tools/rust-analyzer/crates/stdx/src/process.rs +++ b/src/tools/rust-analyzer/crates/stdx/src/process.rs @@ -212,17 +212,13 @@ mod imp { impl<'a> Pipe<'a> { unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> { - Pipe { - dst, - pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), - overlapped: Overlapped::zero(), - done: false, - } + let pipe = unsafe { NamedPipe::from_raw_handle(p.into_raw_handle()) }; + Pipe { dst, pipe, overlapped: Overlapped::zero(), done: false } } unsafe fn read(&mut self) -> io::Result<()> { - let dst = slice_to_end(self.dst); - match self.pipe.read_overlapped(dst, self.overlapped.raw()) { + let dst = unsafe { slice_to_end(self.dst) }; + match unsafe { self.pipe.read_overlapped(dst, self.overlapped.raw()) } { Ok(_) => Ok(()), Err(e) => { if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { @@ -237,7 +233,7 @@ mod imp { unsafe fn complete(&mut self, status: &CompletionStatus) { let prev = self.dst.len(); - self.dst.set_len(prev + status.bytes_transferred() as usize); + unsafe { self.dst.set_len(prev + status.bytes_transferred() as usize) }; if status.bytes_transferred() == 0 { self.done = true; } @@ -251,7 +247,9 @@ mod imp { if v.capacity() == v.len() { v.reserve(1); } - slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) + let data = unsafe { 
v.as_mut_ptr().add(v.len()) }; + let len = v.capacity() - v.len(); + unsafe { slice::from_raw_parts_mut(data, len) } } } diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 8c772b9c7a2..c23bcd69149 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -8,7 +8,11 @@ // // // -- comment // Name = -- non-terminal definition -// 'ident' -- token (terminal) +// 'ident' -- keyword or punct token (terminal) +// '?ident' -- contextual keyword (terminal) +// too) +// '#ident' -- generic token (terminal) +// '@ident' -- literal token (terminal) // A B -- sequence // A | B -- alternation // A* -- zero or more repetition @@ -17,17 +21,17 @@ // label:A -- suggested name for field of AST node //*************************// -// Names, Paths and Macros // +// Paths // //*************************// Name = - 'ident' | 'self' + '#ident' | 'self' NameRef = - 'ident' | 'int_number' | 'self' | 'super' | 'crate' | 'Self' + '#ident' | '@int_number' | 'self' | 'super' | 'crate' | 'Self' Lifetime = - 'lifetime_ident' + '#lifetime_ident' Path = (qualifier:Path '::')? segment:PathSegment @@ -38,6 +42,11 @@ PathSegment = | NameRef ParamList RetType? | '<' Type ('as' PathType)? '>' + +//*************************// +// Generics // +//*************************// + GenericArgList = '::'? '<' (GenericArg (',' GenericArg)* ','?)? '>' @@ -61,6 +70,36 @@ LifetimeArg = ConstArg = Expr +GenericParamList = + '<' (GenericParam (',' GenericParam)* ','?)? '>' + +GenericParam = + ConstParam +| LifetimeParam +| TypeParam + +TypeParam = + Attr* Name (':' TypeBoundList?)? + ('=' default_type:Type)? + +ConstParam = + Attr* 'const' Name ':' Type + ('=' default_val:ConstArg)? + +LifetimeParam = + Attr* Lifetime (':' TypeBoundList?)? + +WhereClause = + 'where' predicates:(WherePred (',' WherePred)* ','?) + +WherePred = + ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList? + + +//*************************// +// Macro // +//*************************// + MacroCall = Attr* Path '!' TokenTree ';'? @@ -72,22 +111,23 @@ TokenTree = MacroItems = Item* -MacroEagerInput = - '(' (Expr (',' Expr)* ','?)? ')' -| '{' (Expr (',' Expr)* ','?)? '}' -| '[' (Expr (',' Expr)* ','?)? ']' - - MacroStmts = statements:Stmt* Expr? +Attr = + '#' '!'? '[' Meta ']' + +Meta = + 'unsafe' '(' Path ('=' Expr | TokenTree)? ')' +| Path ('=' Expr | TokenTree)? + //*************************// // Items // //*************************// SourceFile = - 'shebang'? + '#shebang'? Attr* Item* @@ -112,7 +152,7 @@ Item = MacroRules = Attr* Visibility? - 'macro_rules' '!' Name + '?macro_rules' '!' Name TokenTree MacroDef = @@ -148,12 +188,12 @@ UseTreeList = Fn = Attr* Visibility? - 'default'? 'const'? 'async'? 'unsafe'? Abi? + '?default'? 'const'? 'async'? 'unsafe'? Abi? 'fn' Name GenericParamList? ParamList RetType? WhereClause? (body:BlockExpr | ';') Abi = - 'extern' 'string'? + 'extern' '@string'? ParamList = '('( @@ -180,7 +220,7 @@ RetType = TypeAlias = Attr* Visibility? - 'default'? + '?default'? 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause? ('=' Type)? ';' @@ -223,7 +263,7 @@ Variant = Union = Attr* Visibility? - 'union' Name GenericParamList? WhereClause? + '?union' Name GenericParamList? WhereClause? RecordFieldList // A Data Type. @@ -236,7 +276,7 @@ Adt = Const = Attr* Visibility? - 'default'? + '?default'? 'const' (Name | '_') ':' Type ('=' body:Expr)? ';' @@ -247,7 +287,7 @@ Static = Trait = Attr* Visibility? 
- 'unsafe'? 'auto'? + 'unsafe'? '?auto'? 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause? AssocItemList @@ -266,7 +306,7 @@ AssocItem = Impl = Attr* Visibility? - 'default'? 'unsafe'? + '?default'? 'unsafe'? 'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause? AssocItemList @@ -282,41 +322,9 @@ ExternItem = | Static | TypeAlias -GenericParamList = - '<' (GenericParam (',' GenericParam)* ','?)? '>' - -GenericParam = - ConstParam -| LifetimeParam -| TypeParam - -TypeParam = - Attr* Name (':' TypeBoundList?)? - ('=' default_type:Type)? - -ConstParam = - Attr* 'const' Name ':' Type - ('=' default_val:ConstArg)? - -LifetimeParam = - Attr* Lifetime (':' TypeBoundList?)? - -WhereClause = - 'where' predicates:(WherePred (',' WherePred)* ','?) - -WherePred = - ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList? - Visibility = 'pub' ('(' 'in'? Path ')')? -Attr = - '#' '!'? '[' Meta ']' - -Meta = - 'unsafe' '(' Path ('=' Expr | TokenTree)? ')' -| Path ('=' Expr | TokenTree)? - //****************************// // Statements and Expressions // @@ -379,13 +387,13 @@ Expr = | UnderscoreExpr OffsetOfExpr = - Attr* 'builtin' '#' 'offset_of' '(' Type ',' fields:(NameRef ('.' NameRef)* ) ')' + Attr* '?builtin' '#' '?offset_of' '(' Type ',' fields:(NameRef ('.' NameRef)* ) ')' AsmExpr = - Attr* 'builtin' '#' 'asm' '(' Expr ')' + Attr* '?builtin' '#' '?asm' '(' Expr ')' FormatArgsExpr = - Attr* 'builtin' '#' 'format_args' '(' + Attr* '?builtin' '#' '?format_args' '(' template:Expr (',' args:(FormatArgsArg (',' FormatArgsArg)* ','?)? )? ')' @@ -398,11 +406,12 @@ MacroExpr = Literal = Attr* value:( - 'int_number' | 'float_number' - | 'string' | 'raw_string' - | 'byte_string' | 'raw_byte_string' + '@int_number' | '@float_number' + | '@string' | '@raw_string' + | '@byte_string' | '@raw_byte_string' + | '@c_string' | '@raw_c_string' + | '@char' | '@byte' | 'true' | 'false' - | 'char' | 'byte' ) PathExpr = @@ -416,13 +425,13 @@ StmtList = '}' RefExpr = - Attr* '&' (('raw' 'const'?)| ('raw'? 'mut') ) Expr + Attr* '&' (('?raw' 'const'?)| ('?raw'? 'mut') ) Expr TryExpr = Attr* Expr '?' BlockExpr = - Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList + Attr* Label? ('try' | 'unsafe' | ('async' 'move'?) | ('gen' 'move'?) | 'const') StmtList PrefixExpr = Attr* op:('-' | '!' | '*') Expr @@ -482,9 +491,12 @@ FieldExpr = Attr* Expr '.' NameRef ClosureExpr = - Attr* ('for' GenericParamList)? 'const'? 'static'? 'async'? 'move'? ParamList RetType? + Attr* ClosureBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType? body:Expr +ClosureBinder = + 'for' GenericParamList + IfExpr = Attr* 'if' condition:Expr then_branch:BlockExpr ('else' else_branch:(IfExpr | BlockExpr))? @@ -538,7 +550,7 @@ YieldExpr = Attr* 'yield' Expr? YeetExpr = - Attr* 'do' 'yeet' Expr? + Attr* 'do' '?yeet' Expr? LetExpr = Attr* 'let' Pat '=' Expr @@ -617,6 +629,7 @@ TypeBoundList = TypeBound = Lifetime | ('~' 'const' | 'const')? 'async'? '?'? 
Type +| 'use' GenericParamList //************************// // Patterns // diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs index 9b43da83418..8dc6d36a7e7 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/algo.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs @@ -643,7 +643,7 @@ fn main() { let deletions = diff .deletions .iter() - .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v)))); + .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), fmt_syntax(v)))); let actual = format!( "insertions:\n\n{insertions}\n\nreplacements:\n\n{replacements}\n\ndeletions:\n\n{deletions}\n" diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs index b0ee9dfd507..6ed205e2856 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs @@ -352,13 +352,22 @@ pub enum BlockModifier { Unsafe(SyntaxToken), Try(SyntaxToken), Const(SyntaxToken), + AsyncGen(SyntaxToken), + Gen(SyntaxToken), Label(ast::Label), } impl ast::BlockExpr { pub fn modifier(&self) -> Option<BlockModifier> { - self.async_token() - .map(BlockModifier::Async) + self.gen_token() + .map(|v| { + if self.async_token().is_some() { + BlockModifier::AsyncGen(v) + } else { + BlockModifier::Gen(v) + } + }) + .or_else(|| self.async_token().map(BlockModifier::Async)) .or_else(|| self.unsafe_token().map(BlockModifier::Unsafe)) .or_else(|| self.try_token().map(BlockModifier::Try)) .or_else(|| self.const_token().map(BlockModifier::Const)) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 0373e7c5529..01886d119d6 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -14,6 +14,8 @@ pub struct Abi { impl Abi { #[inline] pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) } + #[inline] + pub fn string_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![string]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -182,6 +184,10 @@ impl BlockExpr { #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } #[inline] + pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) } + #[inline] + pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) } + #[inline] pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) } #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } @@ -238,13 +244,24 @@ impl CastExpr { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ClosureBinder { + pub(crate) syntax: SyntaxNode, +} +impl ClosureBinder { + #[inline] + pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + #[inline] + pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ClosureExpr { pub(crate) syntax: SyntaxNode, } impl ast::HasAttrs for ClosureExpr {} impl ClosureExpr { #[inline] - pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + pub fn closure_binder(&self) -> 
Option<ClosureBinder> { support::child(&self.syntax) } #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } #[inline] @@ -254,7 +271,7 @@ impl ClosureExpr { #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } #[inline] - pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } + pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) } #[inline] pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) } #[inline] @@ -834,27 +851,6 @@ impl MacroDef { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct MacroEagerInput { - pub(crate) syntax: SyntaxNode, -} -impl MacroEagerInput { - #[inline] - pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) } - #[inline] - pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } - #[inline] - pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } - #[inline] - pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } - #[inline] - pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } - #[inline] - pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } - #[inline] - pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroExpr { pub(crate) syntax: SyntaxNode, } @@ -1050,6 +1046,10 @@ impl NameRef { #[inline] pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) } #[inline] + pub fn int_number_token(&self) -> Option<SyntaxToken> { + support::token(&self.syntax, T![int_number]) + } + #[inline] pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) } #[inline] pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) } @@ -1789,6 +1789,8 @@ pub struct TypeBound { } impl TypeBound { #[inline] + pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } @@ -1799,6 +1801,8 @@ impl TypeBound { #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } #[inline] + pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) } + #[inline] pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) } } @@ -2461,6 +2465,20 @@ impl AstNode for CastExpr { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for ClosureBinder { + #[inline] + fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER } + #[inline] + fn cast(syntax: SyntaxNode) -> Option<Self> { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + #[inline] + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for ClosureExpr { #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR } @@ -3021,20 +3039,6 @@ impl AstNode for MacroDef { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } -impl AstNode for MacroEagerInput { - #[inline] - fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EAGER_INPUT } - #[inline] - fn 
cast(syntax: SyntaxNode) -> Option<Self> { - if Self::can_cast(syntax.kind()) { - Some(Self { syntax }) - } else { - None - } - } - #[inline] - fn syntax(&self) -> &SyntaxNode { &self.syntax } -} impl AstNode for MacroExpr { #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR } @@ -5106,6 +5110,14 @@ impl AstNode for AnyHasArgList { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<CallExpr> for AnyHasArgList { + #[inline] + fn from(node: CallExpr) -> AnyHasArgList { AnyHasArgList { syntax: node.syntax } } +} +impl From<MethodCallExpr> for AnyHasArgList { + #[inline] + fn from(node: MethodCallExpr) -> AnyHasArgList { AnyHasArgList { syntax: node.syntax } } +} impl AnyHasAttrs { #[inline] pub fn new<T: ast::HasAttrs>(node: T) -> AnyHasAttrs { @@ -5198,6 +5210,294 @@ impl AstNode for AnyHasAttrs { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<ArrayExpr> for AnyHasAttrs { + #[inline] + fn from(node: ArrayExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<AsmExpr> for AnyHasAttrs { + #[inline] + fn from(node: AsmExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<AssocItemList> for AnyHasAttrs { + #[inline] + fn from(node: AssocItemList) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<AwaitExpr> for AnyHasAttrs { + #[inline] + fn from(node: AwaitExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<BecomeExpr> for AnyHasAttrs { + #[inline] + fn from(node: BecomeExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<BinExpr> for AnyHasAttrs { + #[inline] + fn from(node: BinExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<BlockExpr> for AnyHasAttrs { + #[inline] + fn from(node: BlockExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<BreakExpr> for AnyHasAttrs { + #[inline] + fn from(node: BreakExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<CallExpr> for AnyHasAttrs { + #[inline] + fn from(node: CallExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<CastExpr> for AnyHasAttrs { + #[inline] + fn from(node: CastExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ClosureExpr> for AnyHasAttrs { + #[inline] + fn from(node: ClosureExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Const> for AnyHasAttrs { + #[inline] + fn from(node: Const) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ConstParam> for AnyHasAttrs { + #[inline] + fn from(node: ConstParam) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ContinueExpr> for AnyHasAttrs { + #[inline] + fn from(node: ContinueExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Enum> for AnyHasAttrs { + #[inline] + fn from(node: Enum) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ExternBlock> for AnyHasAttrs { + #[inline] + fn from(node: ExternBlock) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ExternCrate> for AnyHasAttrs { + #[inline] + fn from(node: ExternCrate) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ExternItemList> for AnyHasAttrs { + #[inline] + fn from(node: ExternItemList) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<FieldExpr> for AnyHasAttrs { + #[inline] + fn from(node: FieldExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Fn> for AnyHasAttrs { + 
#[inline] + fn from(node: Fn) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ForExpr> for AnyHasAttrs { + #[inline] + fn from(node: ForExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<FormatArgsExpr> for AnyHasAttrs { + #[inline] + fn from(node: FormatArgsExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<IdentPat> for AnyHasAttrs { + #[inline] + fn from(node: IdentPat) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<IfExpr> for AnyHasAttrs { + #[inline] + fn from(node: IfExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Impl> for AnyHasAttrs { + #[inline] + fn from(node: Impl) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<IndexExpr> for AnyHasAttrs { + #[inline] + fn from(node: IndexExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ItemList> for AnyHasAttrs { + #[inline] + fn from(node: ItemList) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<LetExpr> for AnyHasAttrs { + #[inline] + fn from(node: LetExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<LetStmt> for AnyHasAttrs { + #[inline] + fn from(node: LetStmt) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<LifetimeParam> for AnyHasAttrs { + #[inline] + fn from(node: LifetimeParam) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Literal> for AnyHasAttrs { + #[inline] + fn from(node: Literal) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<LoopExpr> for AnyHasAttrs { + #[inline] + fn from(node: LoopExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<MacroCall> for AnyHasAttrs { + #[inline] + fn from(node: MacroCall) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<MacroDef> for AnyHasAttrs { + #[inline] + fn from(node: MacroDef) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<MacroRules> for AnyHasAttrs { + #[inline] + fn from(node: MacroRules) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<MatchArm> for AnyHasAttrs { + #[inline] + fn from(node: MatchArm) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<MatchArmList> for AnyHasAttrs { + #[inline] + fn from(node: MatchArmList) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<MatchExpr> for AnyHasAttrs { + #[inline] + fn from(node: MatchExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<MethodCallExpr> for AnyHasAttrs { + #[inline] + fn from(node: MethodCallExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Module> for AnyHasAttrs { + #[inline] + fn from(node: Module) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<OffsetOfExpr> for AnyHasAttrs { + #[inline] + fn from(node: OffsetOfExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Param> for AnyHasAttrs { + #[inline] + fn from(node: Param) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ParenExpr> for AnyHasAttrs { + #[inline] + fn from(node: ParenExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<PathExpr> for AnyHasAttrs { + #[inline] + fn from(node: PathExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<PrefixExpr> for AnyHasAttrs { + #[inline] + fn from(node: PrefixExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<RangeExpr> for AnyHasAttrs { + #[inline] + 
fn from(node: RangeExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<RecordExprField> for AnyHasAttrs { + #[inline] + fn from(node: RecordExprField) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<RecordExprFieldList> for AnyHasAttrs { + #[inline] + fn from(node: RecordExprFieldList) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<RecordField> for AnyHasAttrs { + #[inline] + fn from(node: RecordField) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<RecordPatField> for AnyHasAttrs { + #[inline] + fn from(node: RecordPatField) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<RefExpr> for AnyHasAttrs { + #[inline] + fn from(node: RefExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<RestPat> for AnyHasAttrs { + #[inline] + fn from(node: RestPat) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<ReturnExpr> for AnyHasAttrs { + #[inline] + fn from(node: ReturnExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<SelfParam> for AnyHasAttrs { + #[inline] + fn from(node: SelfParam) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<SourceFile> for AnyHasAttrs { + #[inline] + fn from(node: SourceFile) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Static> for AnyHasAttrs { + #[inline] + fn from(node: Static) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<StmtList> for AnyHasAttrs { + #[inline] + fn from(node: StmtList) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Struct> for AnyHasAttrs { + #[inline] + fn from(node: Struct) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Trait> for AnyHasAttrs { + #[inline] + fn from(node: Trait) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<TraitAlias> for AnyHasAttrs { + #[inline] + fn from(node: TraitAlias) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<TryExpr> for AnyHasAttrs { + #[inline] + fn from(node: TryExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<TupleExpr> for AnyHasAttrs { + #[inline] + fn from(node: TupleExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<TupleField> for AnyHasAttrs { + #[inline] + fn from(node: TupleField) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<TypeAlias> for AnyHasAttrs { + #[inline] + fn from(node: TypeAlias) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<TypeParam> for AnyHasAttrs { + #[inline] + fn from(node: TypeParam) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<UnderscoreExpr> for AnyHasAttrs { + #[inline] + fn from(node: UnderscoreExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Union> for AnyHasAttrs { + #[inline] + fn from(node: Union) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Use> for AnyHasAttrs { + #[inline] + fn from(node: Use) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<Variant> for AnyHasAttrs { + #[inline] + fn from(node: Variant) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<WhileExpr> for AnyHasAttrs { + #[inline] + fn from(node: WhileExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<YeetExpr> for AnyHasAttrs { + #[inline] + fn from(node: YeetExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} +impl From<YieldExpr> for AnyHasAttrs { + 
#[inline] + fn from(node: YieldExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } +} impl AnyHasDocComments { #[inline] pub fn new<T: ast::HasDocComments>(node: T) -> AnyHasDocComments { @@ -5239,6 +5539,90 @@ impl AstNode for AnyHasDocComments { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<Const> for AnyHasDocComments { + #[inline] + fn from(node: Const) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Enum> for AnyHasDocComments { + #[inline] + fn from(node: Enum) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<ExternBlock> for AnyHasDocComments { + #[inline] + fn from(node: ExternBlock) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<ExternCrate> for AnyHasDocComments { + #[inline] + fn from(node: ExternCrate) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Fn> for AnyHasDocComments { + #[inline] + fn from(node: Fn) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Impl> for AnyHasDocComments { + #[inline] + fn from(node: Impl) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<MacroCall> for AnyHasDocComments { + #[inline] + fn from(node: MacroCall) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<MacroDef> for AnyHasDocComments { + #[inline] + fn from(node: MacroDef) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<MacroRules> for AnyHasDocComments { + #[inline] + fn from(node: MacroRules) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Module> for AnyHasDocComments { + #[inline] + fn from(node: Module) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<RecordField> for AnyHasDocComments { + #[inline] + fn from(node: RecordField) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<SourceFile> for AnyHasDocComments { + #[inline] + fn from(node: SourceFile) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Static> for AnyHasDocComments { + #[inline] + fn from(node: Static) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Struct> for AnyHasDocComments { + #[inline] + fn from(node: Struct) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Trait> for AnyHasDocComments { + #[inline] + fn from(node: Trait) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<TraitAlias> for AnyHasDocComments { + #[inline] + fn from(node: TraitAlias) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<TupleField> for AnyHasDocComments { + #[inline] + fn from(node: TupleField) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<TypeAlias> for AnyHasDocComments { + #[inline] + fn from(node: TypeAlias) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Union> for AnyHasDocComments { + #[inline] + fn from(node: Union) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Use> for AnyHasDocComments { + #[inline] + fn from(node: Use) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} +impl From<Variant> for AnyHasDocComments { + #[inline] + fn from(node: Variant) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } +} impl AnyHasGenericArgs { #[inline] pub fn new<T: 
ast::HasGenericArgs>(node: T) -> AnyHasGenericArgs { @@ -5257,6 +5641,18 @@ impl AstNode for AnyHasGenericArgs { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<AssocTypeArg> for AnyHasGenericArgs { + #[inline] + fn from(node: AssocTypeArg) -> AnyHasGenericArgs { AnyHasGenericArgs { syntax: node.syntax } } +} +impl From<MethodCallExpr> for AnyHasGenericArgs { + #[inline] + fn from(node: MethodCallExpr) -> AnyHasGenericArgs { AnyHasGenericArgs { syntax: node.syntax } } +} +impl From<PathSegment> for AnyHasGenericArgs { + #[inline] + fn from(node: PathSegment) -> AnyHasGenericArgs { AnyHasGenericArgs { syntax: node.syntax } } +} impl AnyHasGenericParams { #[inline] pub fn new<T: ast::HasGenericParams>(node: T) -> AnyHasGenericParams { @@ -5275,6 +5671,38 @@ impl AstNode for AnyHasGenericParams { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<Enum> for AnyHasGenericParams { + #[inline] + fn from(node: Enum) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} +impl From<Fn> for AnyHasGenericParams { + #[inline] + fn from(node: Fn) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} +impl From<Impl> for AnyHasGenericParams { + #[inline] + fn from(node: Impl) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} +impl From<Struct> for AnyHasGenericParams { + #[inline] + fn from(node: Struct) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} +impl From<Trait> for AnyHasGenericParams { + #[inline] + fn from(node: Trait) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} +impl From<TraitAlias> for AnyHasGenericParams { + #[inline] + fn from(node: TraitAlias) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} +impl From<TypeAlias> for AnyHasGenericParams { + #[inline] + fn from(node: TypeAlias) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} +impl From<Union> for AnyHasGenericParams { + #[inline] + fn from(node: Union) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } +} impl AnyHasLoopBody { #[inline] pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody { @@ -5291,6 +5719,18 @@ impl AstNode for AnyHasLoopBody { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<ForExpr> for AnyHasLoopBody { + #[inline] + fn from(node: ForExpr) -> AnyHasLoopBody { AnyHasLoopBody { syntax: node.syntax } } +} +impl From<LoopExpr> for AnyHasLoopBody { + #[inline] + fn from(node: LoopExpr) -> AnyHasLoopBody { AnyHasLoopBody { syntax: node.syntax } } +} +impl From<WhileExpr> for AnyHasLoopBody { + #[inline] + fn from(node: WhileExpr) -> AnyHasLoopBody { AnyHasLoopBody { syntax: node.syntax } } +} impl AnyHasModuleItem { #[inline] pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem { @@ -5307,6 +5747,18 @@ impl AstNode for AnyHasModuleItem { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<ItemList> for AnyHasModuleItem { + #[inline] + fn from(node: ItemList) -> AnyHasModuleItem { AnyHasModuleItem { syntax: node.syntax } } +} +impl From<MacroItems> for AnyHasModuleItem { + #[inline] + fn from(node: MacroItems) -> AnyHasModuleItem { AnyHasModuleItem { syntax: node.syntax } } +} +impl From<SourceFile> for AnyHasModuleItem { + #[inline] + fn from(node: SourceFile) -> AnyHasModuleItem { AnyHasModuleItem { syntax: node.syntax } } +} impl AnyHasName { #[inline] pub fn new<T: ast::HasName>(node: T) -> AnyHasName { @@ -5347,6 +5799,86 @@ impl 
AstNode for AnyHasName { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<Const> for AnyHasName { + #[inline] + fn from(node: Const) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<ConstParam> for AnyHasName { + #[inline] + fn from(node: ConstParam) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Enum> for AnyHasName { + #[inline] + fn from(node: Enum) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Fn> for AnyHasName { + #[inline] + fn from(node: Fn) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<FormatArgsArg> for AnyHasName { + #[inline] + fn from(node: FormatArgsArg) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<IdentPat> for AnyHasName { + #[inline] + fn from(node: IdentPat) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<MacroDef> for AnyHasName { + #[inline] + fn from(node: MacroDef) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<MacroRules> for AnyHasName { + #[inline] + fn from(node: MacroRules) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Module> for AnyHasName { + #[inline] + fn from(node: Module) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<RecordField> for AnyHasName { + #[inline] + fn from(node: RecordField) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Rename> for AnyHasName { + #[inline] + fn from(node: Rename) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<SelfParam> for AnyHasName { + #[inline] + fn from(node: SelfParam) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Static> for AnyHasName { + #[inline] + fn from(node: Static) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Struct> for AnyHasName { + #[inline] + fn from(node: Struct) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Trait> for AnyHasName { + #[inline] + fn from(node: Trait) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<TraitAlias> for AnyHasName { + #[inline] + fn from(node: TraitAlias) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<TypeAlias> for AnyHasName { + #[inline] + fn from(node: TypeAlias) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<TypeParam> for AnyHasName { + #[inline] + fn from(node: TypeParam) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Union> for AnyHasName { + #[inline] + fn from(node: Union) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} +impl From<Variant> for AnyHasName { + #[inline] + fn from(node: Variant) -> AnyHasName { AnyHasName { syntax: node.syntax } } +} impl AnyHasTypeBounds { #[inline] pub fn new<T: ast::HasTypeBounds>(node: T) -> AnyHasTypeBounds { @@ -5368,6 +5900,30 @@ impl AstNode for AnyHasTypeBounds { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<AssocTypeArg> for AnyHasTypeBounds { + #[inline] + fn from(node: AssocTypeArg) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } } +} +impl From<LifetimeParam> for AnyHasTypeBounds { + #[inline] + fn from(node: LifetimeParam) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } } +} +impl From<Trait> for AnyHasTypeBounds { + #[inline] + fn from(node: Trait) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } } +} +impl From<TypeAlias> for AnyHasTypeBounds { + #[inline] + fn from(node: TypeAlias) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } } +} +impl 
From<TypeParam> for AnyHasTypeBounds { + #[inline] + fn from(node: TypeParam) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } } +} +impl From<WherePred> for AnyHasTypeBounds { + #[inline] + fn from(node: WherePred) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } } +} impl AnyHasVisibility { #[inline] pub fn new<T: ast::HasVisibility>(node: T) -> AnyHasVisibility { @@ -5406,6 +5962,78 @@ impl AstNode for AnyHasVisibility { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl From<Const> for AnyHasVisibility { + #[inline] + fn from(node: Const) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Enum> for AnyHasVisibility { + #[inline] + fn from(node: Enum) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<ExternCrate> for AnyHasVisibility { + #[inline] + fn from(node: ExternCrate) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Fn> for AnyHasVisibility { + #[inline] + fn from(node: Fn) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Impl> for AnyHasVisibility { + #[inline] + fn from(node: Impl) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<MacroDef> for AnyHasVisibility { + #[inline] + fn from(node: MacroDef) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<MacroRules> for AnyHasVisibility { + #[inline] + fn from(node: MacroRules) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Module> for AnyHasVisibility { + #[inline] + fn from(node: Module) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<RecordField> for AnyHasVisibility { + #[inline] + fn from(node: RecordField) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Static> for AnyHasVisibility { + #[inline] + fn from(node: Static) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Struct> for AnyHasVisibility { + #[inline] + fn from(node: Struct) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Trait> for AnyHasVisibility { + #[inline] + fn from(node: Trait) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<TraitAlias> for AnyHasVisibility { + #[inline] + fn from(node: TraitAlias) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<TupleField> for AnyHasVisibility { + #[inline] + fn from(node: TupleField) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<TypeAlias> for AnyHasVisibility { + #[inline] + fn from(node: TypeAlias) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Union> for AnyHasVisibility { + #[inline] + fn from(node: Union) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Use> for AnyHasVisibility { + #[inline] + fn from(node: Use) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} +impl From<Variant> for AnyHasVisibility { + #[inline] + fn from(node: Variant) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } +} impl std::fmt::Display for Adt { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) @@ -5541,6 +6169,11 @@ impl std::fmt::Display for CastExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for ClosureBinder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + 
std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for ClosureExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) @@ -5741,11 +6374,6 @@ impl std::fmt::Display for MacroDef { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for MacroEagerInput { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self.syntax(), f) - } -} impl std::fmt::Display for MacroExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 35ec9b1013d..0228d9dd713 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -179,18 +179,18 @@ pub fn ty_alias( } if let Some(list) = type_param_bounds { - s.push_str(&format!(" : {}", &list)); + s.push_str(&format!(" : {list}")); } if let Some(cl) = where_clause { - s.push_str(&format!(" {}", &cl.to_string())); + s.push_str(&format!(" {cl}")); } if let Some(exp) = assignment { if let Some(cl) = exp.1 { - s.push_str(&format!(" = {} {}", &exp.0.to_string(), &cl.to_string())); + s.push_str(&format!(" = {} {cl}", exp.0)); } else { - s.push_str(&format!(" = {}", &exp.0.to_string())); + s.push_str(&format!(" = {}", exp.0)); } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index 911e3d823de..5447906206c 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -794,6 +794,8 @@ pub enum TypeBoundKind { PathType(ast::PathType), /// for<'a> ... ForType(ast::ForType), + /// use + Use(ast::GenericParamList), /// 'a Lifetime(ast::Lifetime), } @@ -804,6 +806,8 @@ impl ast::TypeBound { TypeBoundKind::PathType(path_type) } else if let Some(for_type) = support::children(self.syntax()).next() { TypeBoundKind::ForType(for_type) + } else if let Some(generic_param_list) = self.generic_param_list() { + TypeBoundKind::Use(generic_param_list) } else if let Some(lifetime) = self.lifetime() { TypeBoundKind::Lifetime(lifetime) } else { @@ -1127,21 +1131,3 @@ impl From<ast::AssocItem> for ast::AnyHasAttrs { Self::new(node) } } - -impl From<ast::Variant> for ast::AnyHasAttrs { - fn from(node: ast::Variant) -> Self { - Self::new(node) - } -} - -impl From<ast::RecordField> for ast::AnyHasAttrs { - fn from(node: ast::RecordField) -> Self { - Self::new(node) - } -} - -impl From<ast::TupleField> for ast::AnyHasAttrs { - fn from(node: ast::TupleField) -> Self { - Self::new(node) - } -} diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs index 177f48b986a..b68374848b9 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs @@ -65,7 +65,7 @@ pub use rowan::{ TokenAtOffset, WalkEvent, }; pub use rustc_lexer::unescape; -pub use smol_str::{format_smolstr, SmolStr}; +pub use smol_str::{format_smolstr, SmolStr, ToSmolStr}; /// `Parse` is the result of the parsing: a syntax tree and a collection of /// errors. 
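(Context for the `TypeBoundKind::Use(ast::GenericParamList)` variant added in the node_ext.rs hunk above: it classifies a precise-capturing `use<..>` bound, carrying its generic parameter list. A minimal, purely illustrative sketch of the surface syntax such a bound comes from, assuming a toolchain where precise capturing is available, might look like this; the function and names below are not from the patch.)

```rust
// Illustrative only: the `+ use<'a>` bound below is the kind of bound that
// `TypeBound::kind()` would now report as `TypeBoundKind::Use(..)`,
// holding the `<'a>` generic parameter list.
fn first_word<'a>(s: &'a str) -> impl Iterator<Item = char> + use<'a> {
    s.chars().take_while(|c| !c.is_whitespace())
}
```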
@@ -150,15 +150,17 @@ impl Parse<SourceFile> { } pub fn reparse(&self, indel: &Indel, edition: Edition) -> Parse<SourceFile> { - self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel, edition)) + self.incremental_reparse(indel, edition) + .unwrap_or_else(|| self.full_reparse(indel, edition)) } - fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> { + fn incremental_reparse(&self, indel: &Indel, edition: Edition) -> Option<Parse<SourceFile>> { // FIXME: validation errors are not handled here parsing::incremental_reparse( self.tree().syntax(), indel, self.errors.as_deref().unwrap_or_default().iter().cloned(), + edition, ) .map(|(green_node, errors, _reparsed_range)| Parse { green: green_node, @@ -211,115 +213,6 @@ impl SourceFile { } } -impl ast::TokenTree { - pub fn reparse_as_comma_separated_expr( - self, - edition: parser::Edition, - ) -> Parse<ast::MacroEagerInput> { - let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); - - let mut parser_input = parser::Input::default(); - let mut was_joint = false; - for t in tokens { - let kind = t.kind(); - if kind.is_trivia() { - was_joint = false - } else if kind == SyntaxKind::IDENT { - let token_text = t.text(); - let contextual_kw = - SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT); - parser_input.push_ident(contextual_kw); - } else { - if was_joint { - parser_input.was_joint(); - } - parser_input.push(kind); - // Tag the token as joint if it is float with a fractional part - // we use this jointness to inform the parser about what token split - // event to emit when we encounter a float literal in a field access - if kind == SyntaxKind::FLOAT_NUMBER { - if !t.text().ends_with('.') { - parser_input.was_joint(); - } else { - was_joint = false; - } - } else { - was_joint = true; - } - } - } - - let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input, edition); - - let mut tokens = - self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); - let mut text = String::new(); - let mut pos = TextSize::from(0); - let mut builder = SyntaxTreeBuilder::default(); - for event in parser_output.iter() { - match event { - parser::Step::Token { kind, n_input_tokens } => { - let mut token = tokens.next().unwrap(); - while token.kind().is_trivia() { - let text = token.text(); - pos += TextSize::from(text.len() as u32); - builder.token(token.kind(), text); - - token = tokens.next().unwrap(); - } - text.push_str(token.text()); - for _ in 1..n_input_tokens { - let token = tokens.next().unwrap(); - text.push_str(token.text()); - } - - pos += TextSize::from(text.len() as u32); - builder.token(kind, &text); - text.clear(); - } - parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => { - let token = tokens.next().unwrap(); - let text = token.text(); - - match text.split_once('.') { - Some((left, right)) => { - assert!(!left.is_empty()); - builder.start_node(SyntaxKind::NAME_REF); - builder.token(SyntaxKind::INT_NUMBER, left); - builder.finish_node(); - - // here we move the exit up, the original exit has been deleted in process - builder.finish_node(); - - builder.token(SyntaxKind::DOT, "."); - - if has_pseudo_dot { - assert!(right.is_empty(), "{left}.{right}"); - } else { - assert!(!right.is_empty(), "{left}.{right}"); - builder.start_node(SyntaxKind::NAME_REF); - builder.token(SyntaxKind::INT_NUMBER, right); - builder.finish_node(); - - // the parser creates an unbalanced start node, we are required to close it 
here - builder.finish_node(); - } - } - None => unreachable!(), - } - pos += TextSize::from(text.len() as u32); - } - parser::Step::Enter { kind } => builder.start_node(kind), - parser::Step::Exit => builder.finish_node(), - parser::Step::Error { msg } => builder.error(msg.to_owned(), pos), - } - } - - let (green, errors) = builder.finish_raw(); - Parse::new(green, errors) - } -} - /// Matches a `SyntaxNode` against an `ast` type. /// /// # Example: diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs index 4bf2a032791..2c7828c0524 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs @@ -11,8 +11,8 @@ pub(crate) use crate::parsing::reparsing::incremental_reparse; pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) { let _p = tracing::info_span!("parse_text").entered(); - let lexed = parser::LexedStr::new(text); - let parser_input = lexed.to_input(); + let lexed = parser::LexedStr::new(edition, text); + let parser_input = lexed.to_input(edition); let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition); let (node, errors, _eof) = build_tree(lexed, parser_output); (node, errors) @@ -24,8 +24,8 @@ pub(crate) fn parse_text_at( edition: parser::Edition, ) -> (GreenNode, Vec<SyntaxError>) { let _p = tracing::info_span!("parse_text_at").entered(); - let lexed = parser::LexedStr::new(text); - let parser_input = lexed.to_input(); + let lexed = parser::LexedStr::new(edition, text); + let parser_input = lexed.to_input(edition); let parser_output = entry.parse(&parser_input, edition); let (node, errors, _eof) = build_tree(lexed, parser_output); (node, errors) diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs index 354b89fd490..a5cc4e90dfb 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs @@ -6,7 +6,7 @@ //! - otherwise, we search for the nearest `{}` block which contains the edit //! and try to parse only this block. 
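(The lib.rs and reparsing.rs hunks above thread an explicit `Edition` through `Parse::reparse` and `incremental_reparse`, which first try to relex the single token covering the edit and only then fall back to reparsing the nearest enclosing `{}` block. A rough usage sketch of the updated entry point, assuming the `Indel::replace` constructor from the `text_edit` crate and these import paths, could be:)

```rust
// Rough sketch, not taken from the patch: exercising the new `reparse`
// signature. `Indel::replace` and the import paths are assumptions based on
// the surrounding crates; `Edition::CURRENT` matches the reparsing.rs tests.
use parser::Edition;
use syntax::SourceFile;
use text_edit::Indel;
use text_size::{TextRange, TextSize};

fn main() {
    let before = SourceFile::parse("fn foo() {}", Edition::CURRENT);
    // Edit confined to the `foo` identifier: the cheap single-token path in
    // `reparse_token` can handle it before falling back to block reparsing.
    let edit = Indel::replace(
        TextRange::new(TextSize::from(3), TextSize::from(6)),
        "bar".to_owned(),
    );
    let after = before.reparse(&edit, Edition::CURRENT);
    assert!(after.errors().is_empty());
}
```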
-use parser::Reparser; +use parser::{Edition, Reparser}; use text_edit::Indel; use crate::{ @@ -21,14 +21,13 @@ pub(crate) fn incremental_reparse( node: &SyntaxNode, edit: &Indel, errors: impl IntoIterator<Item = SyntaxError>, + edition: Edition, ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { - if let Some((green, new_errors, old_range)) = reparse_token(node, edit) { + if let Some((green, new_errors, old_range)) = reparse_token(node, edit, edition) { return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); } - if let Some((green, new_errors, old_range)) = - reparse_block(node, edit, parser::Edition::CURRENT) - { + if let Some((green, new_errors, old_range)) = reparse_block(node, edit, edition) { return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); } None @@ -37,6 +36,7 @@ pub(crate) fn incremental_reparse( fn reparse_token( root: &SyntaxNode, edit: &Indel, + edition: Edition, ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { let prev_token = root.covering_element(edit.delete).as_token()?.clone(); let prev_token_kind = prev_token.kind(); @@ -51,7 +51,7 @@ fn reparse_token( } let mut new_text = get_text_after_edit(prev_token.clone().into(), edit); - let (new_token_kind, new_err) = parser::LexedStr::single_token(&new_text)?; + let (new_token_kind, new_err) = parser::LexedStr::single_token(edition, &new_text)?; if new_token_kind != prev_token_kind || (new_token_kind == IDENT && is_contextual_kw(&new_text)) @@ -64,7 +64,7 @@ fn reparse_token( // `b` no longer remains an identifier, but becomes a part of byte string literal if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) { new_text.push(next_char); - let token_with_next_char = parser::LexedStr::single_token(&new_text); + let token_with_next_char = parser::LexedStr::single_token(edition, &new_text); if let Some((_kind, _error)) = token_with_next_char { return None; } @@ -91,8 +91,8 @@ fn reparse_block( let (node, reparser) = find_reparsable_node(root, edit.delete)?; let text = get_text_after_edit(node.clone().into(), edit); - let lexed = parser::LexedStr::new(text.as_str()); - let parser_input = lexed.to_input(); + let lexed = parser::LexedStr::new(edition, text.as_str()); + let parser_input = lexed.to_input(edition); if !is_balanced(&lexed) { return None; } @@ -199,6 +199,7 @@ mod tests { before.tree().syntax(), &edit, before.errors.as_deref().unwrap_or_default().iter().cloned(), + Edition::CURRENT, ) .unwrap(); assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); diff --git a/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml b/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml index 35e39229894..f9565721dd5 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml +++ b/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml @@ -16,6 +16,7 @@ base-db.workspace = true rustc-hash.workspace = true span.workspace = true stdx.workspace = true +intern.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index e65186d3771..e1f40f5da01 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -1,5 +1,5 @@ //! A set of high-level utility fixture methods to use in tests. 
-use std::{iter, mem, ops::Not, str::FromStr, sync}; +use std::{iter, mem, str::FromStr, sync}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange, @@ -9,12 +9,15 @@ use cfg::CfgOptions; use hir_expand::{ change::ChangeWithProcMacros, db::ExpandDatabase, + files::FilePosition, proc_macro::{ - ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacros, + ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacrosBuilder, }, + FileRange, }; +use intern::Symbol; use rustc_hash::FxHashMap; -use span::{Edition, FileId, FilePosition, FileRange, Span}; +use span::{Edition, EditionedFileId, FileId, Span}; use test_utils::{ extract_range_or_offset, Fixture, FixtureWithProjectMeta, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER, @@ -25,7 +28,7 @@ pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0); pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { #[track_caller] - fn with_single_file(ra_fixture: &str) -> (Self, FileId) { + fn with_single_file(ra_fixture: &str) -> (Self, EditionedFileId) { let fixture = ChangeFixture::parse(ra_fixture); let mut db = Self::default(); fixture.change.apply(&mut db); @@ -34,7 +37,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { } #[track_caller] - fn with_many_files(ra_fixture: &str) -> (Self, Vec<FileId>) { + fn with_many_files(ra_fixture: &str) -> (Self, Vec<EditionedFileId>) { let fixture = ChangeFixture::parse(ra_fixture); let mut db = Self::default(); fixture.change.apply(&mut db); @@ -78,7 +81,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { } #[track_caller] - fn with_range_or_offset(ra_fixture: &str) -> (Self, FileId, RangeOrOffset) { + fn with_range_or_offset(ra_fixture: &str) -> (Self, EditionedFileId, RangeOrOffset) { let fixture = ChangeFixture::parse(ra_fixture); let mut db = Self::default(); fixture.change.apply(&mut db); @@ -101,8 +104,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { impl<DB: ExpandDatabase + SourceDatabaseExt + Default + 'static> WithFixture for DB {} pub struct ChangeFixture { - pub file_position: Option<(FileId, RangeOrOffset)>, - pub files: Vec<FileId>, + pub file_position: Option<(EditionedFileId, RangeOrOffset)>, + pub files: Vec<EditionedFileId>, pub change: ChangeWithProcMacros, } @@ -150,13 +153,14 @@ impl ChangeFixture { let mut file_position = None; for entry in fixture { + let mut range_or_offset = None; let text = if entry.text.contains(CURSOR_MARKER) { if entry.text.contains(ESCAPED_CURSOR_MARKER) { entry.text.replace(ESCAPED_CURSOR_MARKER, CURSOR_MARKER) } else { - let (range_or_offset, text) = extract_range_or_offset(&entry.text); + let (roo, text) = extract_range_or_offset(&entry.text); assert!(file_position.is_none()); - file_position = Some((file_id, range_or_offset)); + range_or_offset = Some(roo); text } } else { @@ -164,6 +168,11 @@ impl ChangeFixture { }; let meta = FileMeta::from_fixture(entry, current_source_root_kind); + if let Some(range_or_offset) = range_or_offset { + file_position = + Some((EditionedFileId::new(file_id, meta.edition), range_or_offset)); + } + assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX)); if !meta.deps.is_empty() { assert!(meta.krate.is_some(), "can't specify deps without naming the crate") @@ -215,7 +224,7 @@ impl ChangeFixture { source_change.change_file(file_id, Some(text)); let path = VfsPath::new_virtual_path(meta.path); 
file_set.insert(file_id, path); - files.push(file_id); + files.push(EditionedFileId::new(file_id, meta.edition)); file_id = FileId::from_raw(file_id.index() + 1); } @@ -237,6 +246,7 @@ impl ChangeFixture { for (from, to, prelude) in crate_deps { let from_id = crates[&from]; let to_id = crates[&to]; + let sysroot = crate_graph[to_id].origin.is_lang(); crate_graph .add_dep( from_id, @@ -244,7 +254,7 @@ impl ChangeFixture { CrateName::new(&to).unwrap(), to_id, prelude, - false, + sysroot, ), ) .unwrap(); @@ -266,7 +276,7 @@ impl ChangeFixture { let core_crate = crate_graph.add_crate_root( core_file, Edition::CURRENT, - Some(CrateDisplayName::from_canonical_name("core".to_owned())), + Some(CrateDisplayName::from_canonical_name("core")), None, Default::default(), Default::default(), @@ -293,7 +303,7 @@ impl ChangeFixture { } } - let mut proc_macros = ProcMacros::default(); + let mut proc_macros = ProcMacrosBuilder::default(); if !proc_macro_names.is_empty() { let proc_lib_file = file_id; @@ -313,7 +323,7 @@ impl ChangeFixture { let proc_macros_crate = crate_graph.add_crate_root( proc_lib_file, Edition::CURRENT, - Some(CrateDisplayName::from_canonical_name("proc_macros".to_owned())), + Some(CrateDisplayName::from_canonical_name("proc_macros")), None, Default::default(), Default::default(), @@ -344,7 +354,7 @@ impl ChangeFixture { let mut change = ChangeWithProcMacros { source_change, - proc_macros: proc_macros.is_empty().not().then_some(proc_macros), + proc_macros: Some(proc_macros.build()), toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()), target_data_layouts: Some( iter::repeat(target_data_layout).take(crate_graph.len()).collect(), @@ -369,7 +379,7 @@ pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream { "# .into(), ProcMacro { - name: "identity".into(), + name: Symbol::intern("identity"), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityProcMacroExpander), disabled: false, @@ -384,7 +394,7 @@ pub fn derive_identity(item: TokenStream) -> TokenStream { "# .into(), ProcMacro { - name: "DeriveIdentity".into(), + name: Symbol::intern("DeriveIdentity"), kind: ProcMacroKind::CustomDerive, expander: sync::Arc::new(IdentityProcMacroExpander), disabled: false, @@ -399,7 +409,7 @@ pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream { "# .into(), ProcMacro { - name: "input_replace".into(), + name: Symbol::intern("input_replace"), kind: ProcMacroKind::Attr, expander: sync::Arc::new(AttributeInputReplaceProcMacroExpander), disabled: false, @@ -414,7 +424,7 @@ pub fn mirror(input: TokenStream) -> TokenStream { "# .into(), ProcMacro { - name: "mirror".into(), + name: Symbol::intern("mirror"), kind: ProcMacroKind::Bang, expander: sync::Arc::new(MirrorProcMacroExpander), disabled: false, @@ -429,7 +439,7 @@ pub fn shorten(input: TokenStream) -> TokenStream { "# .into(), ProcMacro { - name: "shorten".into(), + name: Symbol::intern("shorten"), kind: ProcMacroKind::Bang, expander: sync::Arc::new(ShortenProcMacroExpander), disabled: false, @@ -447,7 +457,8 @@ fn filter_test_proc_macros( let mut proc_macros = Vec::new(); for (c, p) in proc_macro_defs { - if !proc_macro_names.iter().any(|name| name == &stdx::to_lower_snake_case(&p.name)) { + if !proc_macro_names.iter().any(|name| name == &stdx::to_lower_snake_case(p.name.as_str())) + { continue; } proc_macros.push(p); @@ -480,9 +491,9 @@ impl FileMeta { let mut cfg = CfgOptions::default(); for (k, v) in f.cfgs { if let Some(v) = v { - cfg.insert_key_value(k.into(), v.into()); + 
cfg.insert_key_value(Symbol::intern(&k), Symbol::intern(&v)); } else { - cfg.insert_atom(k.into()); + cfg.insert_atom(Symbol::intern(&k)); } } @@ -529,7 +540,7 @@ fn parse_crate( let origin = match LangCrateOrigin::from(&*name) { LangCrateOrigin::Other => { - let name = name.clone(); + let name = Symbol::intern(&name); if non_workspace_member { CrateOrigin::Library { repo, name } } else { @@ -640,11 +651,11 @@ impl ProcMacroExpander for ShortenProcMacroExpander { Leaf::Literal(it) => { // XXX Currently replaces any literals with an empty string, but supporting // "shortening" other literals would be nice. - it.text = "\"\"".into(); + it.symbol = Symbol::empty(); } Leaf::Punct(_) => {} Leaf::Ident(it) => { - it.text = it.text.chars().take(1).collect(); + it.sym = Symbol::intern(&it.sym.as_str().chars().take(1).collect::<String>()); } } leaf diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index d1862f7d738..2d615c34a35 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -55,6 +55,7 @@ //! size_of: sized //! sized: //! slice: +//! str: //! sync: sized //! transmute: //! try: infallible @@ -1368,6 +1369,14 @@ pub mod iter { } // endregion:iterator +// region:str +pub mod str { + pub const unsafe fn from_utf8_unchecked(v: &[u8]) -> &str { + "" + } +} +// endregion:str + // region:panic mod panic { pub macro panic_2021 { diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml index 1311e2ddf89..cea1519c2dd 100644 --- a/src/tools/rust-analyzer/crates/tt/Cargo.toml +++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml @@ -13,10 +13,14 @@ doctest = false [dependencies] arrayvec.workspace = true -smol_str.workspace = true text-size.workspace = true stdx.workspace = true +intern.workspace = true +ra-ap-rustc_lexer.workspace = true + +[features] +in-rust-tree = [] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs index cd41af03c61..1319739371f 100644 --- a/src/tools/rust-analyzer/crates/tt/src/buffer.rs +++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs @@ -134,6 +134,15 @@ pub enum TokenTreeRef<'a, Span> { Leaf(&'a Leaf<Span>, &'a TokenTree<Span>), } +impl<'a, Span: Copy> TokenTreeRef<'a, Span> { + pub fn span(&self) -> Span { + match self { + TokenTreeRef::Subtree(subtree, _) => subtree.delimiter.open, + TokenTreeRef::Leaf(leaf, _) => *leaf.span(), + } + } +} + impl<Span: Clone> TokenTreeRef<'_, Span> { pub fn cloned(&self) -> TokenTree<Span> { match self { diff --git a/src/tools/rust-analyzer/crates/tt/src/iter.rs b/src/tools/rust-analyzer/crates/tt/src/iter.rs index 175259a3e47..e96bed0319e 100644 --- a/src/tools/rust-analyzer/crates/tt/src/iter.rs +++ b/src/tools/rust-analyzer/crates/tt/src/iter.rs @@ -2,6 +2,7 @@ //! macro definition into a list of patterns and templates. use arrayvec::ArrayVec; +use intern::sym; use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree}; @@ -58,7 +59,7 @@ impl<'a, S: Copy> TtIter<'a, S> { pub fn expect_ident(&mut self) -> Result<&'a Ident<S>, ()> { match self.expect_leaf()? 
{ - Leaf::Ident(it) if it.text != "_" => Ok(it), + Leaf::Ident(it) if it.sym != sym::underscore => Ok(it), _ => Err(()), } } @@ -74,7 +75,7 @@ impl<'a, S: Copy> TtIter<'a, S> { let it = self.expect_leaf()?; match it { Leaf::Literal(_) => Ok(it), - Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), + Leaf::Ident(ident) if ident.sym == sym::true_ || ident.sym == sym::false_ => Ok(it), _ => Err(()), } } @@ -142,6 +143,10 @@ impl<'a, S: Copy> TtIter<'a, S> { self.inner.as_slice().get(n) } + pub fn next_span(&self) -> Option<S> { + Some(self.inner.as_slice().first()?.first_span()) + } + pub fn as_slice(&self) -> &'a [TokenTree<S>] { self.inner.as_slice() } diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs index 369744d0e96..7b72f9ff108 100644 --- a/src/tools/rust-analyzer/crates/tt/src/lib.rs +++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs @@ -2,18 +2,74 @@ //! input and output) of macros. It closely mirrors `proc_macro` crate's //! `TokenTree`. +#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] + +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_lexer as rustc_lexer; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_lexer; + pub mod buffer; pub mod iter; use std::fmt; -use stdx::impl_from; +use intern::Symbol; +use stdx::{impl_from, itertools::Itertools as _}; -pub use smol_str::SmolStr; pub use text_size::{TextRange, TextSize}; +#[derive(Clone, PartialEq, Debug)] +pub struct Lit { + pub kind: LitKind, + pub symbol: Symbol, + pub suffix: Option<Symbol>, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum IdentIsRaw { + No, + Yes, +} +impl IdentIsRaw { + pub fn yes(self) -> bool { + matches!(self, IdentIsRaw::Yes) + } + pub fn no(&self) -> bool { + matches!(self, IdentIsRaw::No) + } + pub fn as_str(self) -> &'static str { + match self { + IdentIsRaw::No => "", + IdentIsRaw::Yes => "r#", + } + } + pub fn split_from_symbol(sym: &str) -> (Self, &str) { + if let Some(sym) = sym.strip_prefix("r#") { + (IdentIsRaw::Yes, sym) + } else { + (IdentIsRaw::No, sym) + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] +pub enum LitKind { + Byte, + Char, + Integer, // e.g. `1`, `1u8`, `1f32` + Float, // e.g. `1.`, `1.0`, `1e3f32` + Str, + StrRaw(u8), // raw string delimited by `n` hash symbols + ByteStr, + ByteStrRaw(u8), // raw byte string delimited by `n` hash symbols + CStr, + CStrRaw(u8), + Err(()), +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TokenTree<S> { +pub enum TokenTree<S = u32> { Leaf(Leaf<S>), Subtree(Subtree<S>), } @@ -103,6 +159,15 @@ pub struct DelimSpan<S> { pub close: S, } +impl<Span: Copy> DelimSpan<Span> { + pub fn from_single(sp: Span) -> Self { + DelimSpan { open: sp, close: sp } + } + + pub fn from_pair(open: Span, close: Span) -> Self { + DelimSpan { open, close } + } +} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct Delimiter<S> { pub open: S, @@ -134,8 +199,66 @@ pub enum DelimiterKind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Literal<S> { - pub text: SmolStr, + // escaped + pub symbol: Symbol, pub span: S, + pub kind: LitKind, + pub suffix: Option<Symbol>, +} + +pub fn token_to_literal<S>(text: &str, span: S) -> Literal<S> +where + S: Copy, +{ + use rustc_lexer::LiteralKind; + + let token = rustc_lexer::tokenize(text).next_tuple(); + let Some((rustc_lexer::Token { + kind: rustc_lexer::TokenKind::Literal { kind, suffix_start }, + .. 
+ },)) = token + else { + return Literal { + span, + symbol: Symbol::intern(text), + kind: LitKind::Err(()), + suffix: None, + }; + }; + + let (kind, start_offset, end_offset) = match kind { + LiteralKind::Int { .. } => (LitKind::Integer, 0, 0), + LiteralKind::Float { .. } => (LitKind::Float, 0, 0), + LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize), + LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize), + LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize), + LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize), + LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize), + LiteralKind::RawStr { n_hashes } => ( + LitKind::StrRaw(n_hashes.unwrap_or_default()), + 2 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawByteStr { n_hashes } => ( + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawCStr { n_hashes } => ( + LitKind::CStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + }; + + let (lit, suffix) = text.split_at(suffix_start as usize); + let lit = &lit[start_offset..lit.len() - end_offset]; + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(suffix)), + }; + + Literal { span, symbol: Symbol::intern(lit), kind, suffix } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -145,23 +268,79 @@ pub struct Punct<S> { pub span: S, } +/// Indicates whether a token can join with the following token to form a +/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to +/// guide pretty-printing, which is where the `JointHidden` value (which isn't +/// part of `proc_macro::Spacing`) comes in useful. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Spacing { + /// The token cannot join with the following token to form a compound + /// token. + /// + /// In token streams parsed from source code, the compiler will use `Alone` + /// for any token immediately followed by whitespace, a non-doc comment, or + /// EOF. + /// + /// When constructing token streams within the compiler, use this for each + /// token that (a) should be pretty-printed with a space after it, or (b) + /// is the last token in the stream. (In the latter case the choice of + /// spacing doesn't matter because it is never used for the last token. We + /// arbitrarily use `Alone`.) + /// + /// Converts to `proc_macro::Spacing::Alone`, and + /// `proc_macro::Spacing::Alone` converts back to this. Alone, - /// Whether the following token is joint to this one. + + /// The token can join with the following token to form a compound token. + /// + /// In token streams parsed from source code, the compiler will use `Joint` + /// for any token immediately followed by punctuation (as determined by + /// `Token::is_punct`). + /// + /// When constructing token streams within the compiler, use this for each + /// token that (a) should be pretty-printed without a space after it, and + /// (b) is followed by a punctuation token. + /// + /// Converts to `proc_macro::Spacing::Joint`, and + /// `proc_macro::Spacing::Joint` converts back to this. Joint, + + /// The token can join with the following token to form a compound token, + /// but this will not be visible at the proc macro level. 
(This is what the + /// `Hidden` means; see below.) + /// + /// In token streams parsed from source code, the compiler will use + /// `JointHidden` for any token immediately followed by anything not + /// covered by the `Alone` and `Joint` cases: an identifier, lifetime, + /// literal, delimiter, doc comment. + /// + /// When constructing token streams, use this for each token that (a) + /// should be pretty-printed without a space after it, and (b) is followed + /// by a non-punctuation token. + /// + /// Converts to `proc_macro::Spacing::Alone`, but + /// `proc_macro::Spacing::Alone` converts back to `token::Spacing::Alone`. + /// Because of that, pretty-printing of `TokenStream`s produced by proc + /// macros is unavoidably uglier (with more whitespace between tokens) than + /// pretty-printing of `TokenStream`'s produced by other means (i.e. parsed + /// source code, internally constructed token streams, and token streams + /// produced by declarative macros). + JointHidden, } +/// Identifier or keyword. #[derive(Debug, Clone, PartialEq, Eq, Hash)] -/// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier. pub struct Ident<S> { - pub text: SmolStr, + pub sym: Symbol, pub span: S, + pub is_raw: IdentIsRaw, } impl<S> Ident<S> { - pub fn new(text: impl Into<SmolStr>, span: S) -> Self { - Ident { text: text.into(), span } + pub fn new(text: &str, span: S) -> Self { + // let raw_stripped = IdentIsRaw::split_from_symbol(text.as_ref()); + let (is_raw, text) = IdentIsRaw::split_from_symbol(text); + Ident { sym: Symbol::intern(text), span, is_raw } } } @@ -207,22 +386,35 @@ fn print_debug_token<S: fmt::Debug>( match tkn { TokenTree::Leaf(leaf) => match leaf { Leaf::Literal(lit) => { - write!(f, "{}LITERAL {}", align, lit.text)?; - fmt::Debug::fmt(&lit.span, f)?; + write!( + f, + "{}LITERAL {:?} {}{} {:#?}", + align, + lit.kind, + lit.symbol, + lit.suffix.as_ref().map(|it| it.as_str()).unwrap_or(""), + lit.span + )?; } Leaf::Punct(punct) => { write!( f, - "{}PUNCH {} [{}] ", + "{}PUNCH {} [{}] {:#?}", align, punct.char, if punct.spacing == Spacing::Alone { "alone" } else { "joint" }, + punct.span )?; - fmt::Debug::fmt(&punct.span, f)?; } Leaf::Ident(ident) => { - write!(f, "{}IDENT {} ", align, ident.text)?; - fmt::Debug::fmt(&ident.span, f)?; + write!( + f, + "{}IDENT {}{} {:#?}", + align, + ident.is_raw.as_str(), + ident.sym, + ident.span + )?; } }, TokenTree::Subtree(subtree) => { @@ -288,13 +480,52 @@ impl<S> fmt::Display for Leaf<S> { impl<S> fmt::Display for Ident<S> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.text, f) + fmt::Display::fmt(&self.is_raw.as_str(), f)?; + fmt::Display::fmt(&self.sym, f) } } impl<S> fmt::Display for Literal<S> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.text, f) + match self.kind { + LitKind::Byte => write!(f, "b'{}'", self.symbol), + LitKind::Char => write!(f, "'{}'", self.symbol), + LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{}", self.symbol), + LitKind::Str => write!(f, "\"{}\"", self.symbol), + LitKind::ByteStr => write!(f, "b\"{}\"", self.symbol), + LitKind::CStr => write!(f, "c\"{}\"", self.symbol), + LitKind::StrRaw(num_of_hashes) => { + let num_of_hashes = num_of_hashes as usize; + write!( + f, + r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, + "", + text = self.symbol + ) + } + LitKind::ByteStrRaw(num_of_hashes) => { + let num_of_hashes = num_of_hashes as usize; + write!( + f, + 
r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, + "", + text = self.symbol + ) + } + LitKind::CStrRaw(num_of_hashes) => { + let num_of_hashes = num_of_hashes as usize; + write!( + f, + r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, + "", + text = self.symbol + ) + } + }?; + if let Some(suffix) = &self.suffix { + write!(f, "{}", suffix)?; + } + Ok(()) } } @@ -337,9 +568,9 @@ impl<S> Subtree<S> { let s = match child { TokenTree::Leaf(it) => { let s = match it { - Leaf::Literal(it) => it.text.to_string(), + Leaf::Literal(it) => it.symbol.to_string(), Leaf::Punct(it) => it.char.to_string(), - Leaf::Ident(it) => it.text.to_string(), + Leaf::Ident(it) => format!("{}{}", it.is_raw.as_str(), it.sym), }; match (it, last) { (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => { @@ -369,8 +600,10 @@ impl<S> Subtree<S> { pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String { fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String { match tkn { - TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(), - TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(), + TokenTree::Leaf(Leaf::Ident(ident)) => { + format!("{}{}", ident.is_raw.as_str(), ident.sym) + } + TokenTree::Leaf(Leaf::Literal(literal)) => literal.symbol.as_str().to_owned(), TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char), TokenTree::Subtree(subtree) => { let content = pretty(&subtree.token_trees); diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs index b3aa6e2fe11..77f890fd7e0 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs @@ -67,16 +67,16 @@ pub struct FileId(u32); // pub struct FileId(NonMaxU32); impl FileId { - pub const MAX_FILE_ID: u32 = 0x7fff_ffff; + pub const MAX: u32 = 0x7fff_ffff; #[inline] pub const fn from_raw(raw: u32) -> FileId { - assert!(raw <= Self::MAX_FILE_ID); + assert!(raw <= Self::MAX); FileId(raw) } #[inline] - pub fn index(self) -> u32 { + pub const fn index(self) -> u32 { self.0 } } diff --git a/src/tools/rust-analyzer/docs/dev/architecture.md b/src/tools/rust-analyzer/docs/dev/architecture.md index f4e7263868c..4f8723a9368 100644 --- a/src/tools/rust-analyzer/docs/dev/architecture.md +++ b/src/tools/rust-analyzer/docs/dev/architecture.md @@ -408,7 +408,7 @@ It has a much richer vocabulary of types than `ide`, but the basic testing setup For comparisons, we use the `expect` crate for snapshot testing. To test various analysis corner cases and avoid forgetting about old tests, we use so-called marks. -See the `marks` module in the `test_utils` crate for more. +See the [cov_mark](https://docs.rs/cov-mark/latest/cov_mark/) crate documentation for more. **Architecture Invariant:** rust-analyzer tests do not use libcore or libstd. All required library code must be a part of the tests. diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index 74acb6f9940..e559f88e233 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ <!--- -lsp/ext.rs hash: 39b47906286ad9c +lsp/ext.rs hash: e92e1f12229b0071 If you need to change the above hash to make the test pass, please check if you need to adjust this doc as well and ping this issue: @@ -616,25 +616,6 @@ Reloads project information (that is, re-executes `cargo metadata`). 
Rebuilds build scripts and proc-macros, and runs the build scripts to reseed the build data. -## Unindexed Project - -**Experimental Client Capability:** `{ "unindexedProject": boolean }` - -**Method:** `rust-analyzer/unindexedProject` - -**Notification:** - -```typescript -interface UnindexedProjectParams { - /// A list of documents that rust-analyzer has determined are not indexed. - textDocuments: lc.TextDocumentIdentifier[] -} -``` - -This notification is sent from the server to the client. The client is expected -to determine the appropriate owners of `textDocuments` and update `linkedProjects` -if an owner can be determined successfully. - ## Server Status **Experimental Client Capability:** `{ "serverStatusNotification": boolean }` @@ -808,14 +789,6 @@ Renders rust-analyzer's crate graph as an SVG image. If `full` is `true`, the graph includes non-workspace crates (crates.io dependencies as well as sysroot crates). -## Shuffle Crate Graph - -**Method:** `rust-analyzer/shuffleCrateGraph` - -**Request:** `null` - -Shuffles the crate IDs in the crate graph, for debugging purposes. - ## Expand Macro **Method:** `rust-analyzer/expandMacro` diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index edb95abdb8e..ac95767ea5b 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -82,7 +82,7 @@ By default, a cargo invocation will be constructed for the configured targets and features, with the following base command line: ```bash -cargo check --quiet --workspace --message-format=json --all-targets +cargo check --quiet --workspace --message-format=json --all-targets --keep-going ``` . -- @@ -655,12 +655,12 @@ Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. -- Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). -- -[[rust-analyzer.inlayHints.genericParameterHints.const.enable]]rust-analyzer.inlayHints.genericParameterHints.const.enable (default: `false`):: +[[rust-analyzer.inlayHints.genericParameterHints.const.enable]]rust-analyzer.inlayHints.genericParameterHints.const.enable (default: `true`):: + -- Whether to show const generic parameter name inlay hints. -- -[[rust-analyzer.inlayHints.genericParameterHints.lifetime.enable]]rust-analyzer.inlayHints.genericParameterHints.lifetime.enable (default: `true`):: +[[rust-analyzer.inlayHints.genericParameterHints.lifetime.enable]]rust-analyzer.inlayHints.genericParameterHints.lifetime.enable (default: `false`):: + -- Whether to show generic lifetime parameter name inlay hints. @@ -764,12 +764,6 @@ Whether to show `Debug` lens. Only applies when -- Whether to show CodeLens in Rust files. -- -[[rust-analyzer.lens.forceCustomCommands]]rust-analyzer.lens.forceCustomCommands (default: `true`):: -+ --- -Internal config: use custom client-side commands even when the -client doesn't set the corresponding capability. --- [[rust-analyzer.lens.implementations.enable]]rust-analyzer.lens.implementations.enable (default: `true`):: + -- @@ -836,11 +830,6 @@ Sets the LRU capacity of the specified queries. -- Whether to show `can't find Cargo.toml` error message. -- -[[rust-analyzer.notifications.unindexedProject]]rust-analyzer.notifications.unindexedProject (default: `false`):: -+ --- -Whether to send an UnindexedProject notification to the client. 
--- [[rust-analyzer.numThreads]]rust-analyzer.numThreads (default: `null`):: + -- @@ -1015,6 +1004,103 @@ Show documentation. -- Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list. -- +[[rust-analyzer.workspace.discoverConfig]]rust-analyzer.workspace.discoverConfig (default: `null`):: ++ +-- +Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`]. + +[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`. +`progress_label` is used for the title in progress indicators, whereas `files_to_watch` +is used to determine which build system-specific files should be watched in order to +reload rust-analyzer. + +Below is an example of a valid configuration: +```json +"rust-analyzer.workspace.discoverConfig": { + "command": [ + "rust-project", + "develop-json" + ], + "progressLabel": "rust-analyzer", + "filesToWatch": [ + "BUCK" + ] +} +``` + +## On `DiscoverWorkspaceConfig::command` + +**Warning**: This format is provisional and subject to change. + +[`DiscoverWorkspaceConfig::command`] *must* return a JSON object +corresponding to `DiscoverProjectData::Finished`: + +```norun +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "kind")] +#[serde(rename_all = "snake_case")] +enum DiscoverProjectData { + Finished { buildfile: Utf8PathBuf, project: ProjectJsonData }, + Error { error: String, source: Option<String> }, + Progress { message: String }, +} +``` + +As JSON, `DiscoverProjectData::Finished` is: + +```json +{ + // the internally-tagged representation of the enum. + "kind": "finished", + // the file used by a non-Cargo build system to define + // a package or target. + "buildfile": "rust-analyzer/BUILD", + // the contents of a rust-project.json, elided for brevity + "project": { + "sysroot": "foo", + "crates": [] + } +} +``` + +It is encouraged, but not required, to use the other variants on +`DiscoverProjectData` to provide a more polished end-user experience. + +`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, +which will be substituted with the JSON-serialized form of the following +enum: + +```norun +#[derive(PartialEq, Clone, Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum DiscoverArgument { + Path(AbsPathBuf), + Buildfile(AbsPathBuf), +} +``` + +The JSON representation of `DiscoverArgument::Path` is: + +```json +{ + "path": "src/main.rs" +} +``` + +Similarly, the JSON representation of `DiscoverArgument::Buildfile` is: + +``` +{ + "buildfile": "BUILD" +} +``` + +`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, +and therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to +to update an existing workspace. As a reference for implementors, +buck2's `rust-project` will likely be useful: +https://github.com/facebook/buck2/tree/main/integrations/rust-project. +-- [[rust-analyzer.workspace.symbol.search.kind]]rust-analyzer.workspace.symbol.search.kind (default: `"only_types"`):: + -- diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc index e1c1c54ec41..a1777209087 100644 --- a/src/tools/rust-analyzer/docs/user/manual.adoc +++ b/src/tools/rust-analyzer/docs/user/manual.adoc @@ -623,7 +623,7 @@ https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev[dev docs]! 
**Source:** https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs[config.rs] -The <<_installation,Installation>> section contains details on configuration for some of the editors. +The <<installation,Installation>> section contains details on configuration for some of the editors. In general `rust-analyzer` is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files. Some clients, such as <<vs-code,VS Code>> or <<coc-rust-analyzer,COC plugin in Vim>> provide `rust-analyzer` specific configuration UIs. Others may require you to know a bit more about the interaction with `rust-analyzer`. diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 7e77c7e52fa..4b594129a36 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -137,11 +137,6 @@ "category": "rust-analyzer (debug command)" }, { - "command": "rust-analyzer.shuffleCrateGraph", - "title": "Shuffle Crate Graph", - "category": "rust-analyzer (debug command)" - }, - { "command": "rust-analyzer.memoryUsage", "title": "Memory Usage (Clears Database)", "category": "rust-analyzer (debug command)" @@ -310,11 +305,6 @@ "command": "rust-analyzer.openWalkthrough", "title": "Open Walkthrough", "category": "rust-analyzer" - }, - { - "command": "rust-analyzer.openFAQ", - "title": "Open FAQ", - "category": "rust-analyzer" } ], "keybindings": [ @@ -488,6 +478,7 @@ }, "rust-analyzer.trace.extension": { "description": "Enable logging of VS Code extensions itself.", + "markdownDeprecationMessage": "Log level is now controlled by the [Developer: Set Log Level...](command:workbench.action.setLogLevel) command.You can set the log level for the current session and also the default log level from there. This is also available by clicking the gear icon on the OUTPUT tab when Rust Analyzer Client is visible or by passing the --log rust-lang.rust-analyzer:debug parameter to VS Code.", "type": "boolean", "default": false } @@ -718,7 +709,7 @@ "title": "cargo", "properties": { "rust-analyzer.cargo.buildScripts.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and\n`#rust-analyzer.cargo.buildScripts.invocationLocation#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). 
This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and\n`#rust-analyzer.cargo.buildScripts.invocationLocation#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n.", "default": null, "type": [ "null", @@ -1914,7 +1905,7 @@ "properties": { "rust-analyzer.inlayHints.genericParameterHints.const.enable": { "markdownDescription": "Whether to show const generic parameter name inlay hints.", - "default": false, + "default": true, "type": "boolean" } } @@ -1924,7 +1915,7 @@ "properties": { "rust-analyzer.inlayHints.genericParameterHints.lifetime.enable": { "markdownDescription": "Whether to show generic lifetime parameter name inlay hints.", - "default": true, + "default": false, "type": "boolean" } } @@ -2146,16 +2137,6 @@ { "title": "lens", "properties": { - "rust-analyzer.lens.forceCustomCommands": { - "markdownDescription": "Internal config: use custom client-side commands even when the\nclient doesn't set the corresponding capability.", - "default": true, - "type": "boolean" - } - } - }, - { - "title": "lens", - "properties": { "rust-analyzer.lens.implementations.enable": { "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", "default": true, @@ -2257,7 +2238,8 @@ "null", "integer" ], - "minimum": 0 + "minimum": 0, + "maximum": 65535 } } }, @@ -2282,16 +2264,6 @@ } }, { - "title": "notifications", - "properties": { - "rust-analyzer.notifications.unindexedProject": { - "markdownDescription": "Whether to send an UnindexedProject notification to the client.", - "default": false, - "type": "boolean" - } - } - }, - { "title": "general", "properties": { "rust-analyzer.numThreads": { @@ -2598,6 +2570,40 @@ { "title": "workspace", "properties": { + "rust-analyzer.workspace.discoverConfig": { + "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\"\n ],\n \"progressLabel\": \"rust-analyzer\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object\ncorresponding to `DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option<String> },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n 
\"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on\n`DiscoverProjectData` to provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`,\nwhich will be substituted with the JSON-serialized form of the following\nenum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`,\nand therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to\nto update an existing workspace. As a reference for implementors,\nbuck2's `rust-project` will likely be useful:\nhttps://github.com/facebook/buck2/tree/main/integrations/rust-project.", + "default": null, + "anyOf": [ + { + "type": "null" + }, + { + "type": "object", + "properties": { + "command": { + "type": "array", + "items": { + "type": "string" + } + }, + "progressLabel": { + "type": "string" + }, + "filesToWatch": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + ] + } + } + }, + { + "title": "workspace", + "properties": { "rust-analyzer.workspace.symbol.search.kind": { "markdownDescription": "Workspace symbol search kind.", "default": "only_types", @@ -3187,9 +3193,6 @@ }, { "command": "rust-analyzer.openWalkthrough" - }, - { - "command": "rust-analyzer.openFAQ" } ], "editor/context": [ diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts index f2884ad0b05..42dd0760d62 100644 --- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts +++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts @@ -42,6 +42,7 @@ async function getServer( enableProposedApi: boolean | undefined; } = context.extension.packageJSON; + // check if the server path is configured explicitly const explicitPath = process.env["__RA_LSP_SERVER_DEBUG"] ?? config.serverPath; if (explicitPath) { if (explicitPath.startsWith("~/")) { @@ -51,12 +52,29 @@ async function getServer( } if (packageJson.releaseTag === null) return "rust-analyzer"; + if (vscode.workspace.workspaceFolders?.length === 1) { + // otherwise check if there is a toolchain override for the current vscode workspace + // and if the toolchain of this override has a rust-analyzer component + // if so, use the rust-analyzer component + const toolchainTomlExists = await fileExists( + vscode.Uri.joinPath(vscode.workspace.workspaceFolders[0]!.uri, "rust-toolchain.toml"), + ); + if (toolchainTomlExists) { + const res = spawnSync("rustup", ["which", "rust-analyzer"], { + encoding: "utf8", + env: { ...process.env }, + cwd: vscode.workspace.workspaceFolders[0]!.uri.fsPath, + }); + if (!res.error && res.status === 0) { + return res.stdout.trim(); + } + } + } + + // finally, use the bundled one const ext = process.platform === "win32" ? 
".exe" : ""; const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`); - const bundledExists = await vscode.workspace.fs.stat(bundled).then( - () => true, - () => false, - ); + const bundledExists = await fileExists(bundled); if (bundledExists) { let server = bundled; if (await isNixOs()) { @@ -84,6 +102,13 @@ async function getServer( return undefined; } +async function fileExists(uri: vscode.Uri) { + return await vscode.workspace.fs.stat(uri).then( + () => true, + () => false, + ); +} + export function isValidExecutable(path: string, extraEnv: Env): boolean { log.debug("Checking availability of a binary at", path); @@ -92,9 +117,11 @@ export function isValidExecutable(path: string, extraEnv: Env): boolean { env: { ...process.env, ...extraEnv }, }); - const printOutput = res.error ? log.warn : log.info; - printOutput(path, "--version:", res); - + if (res.error) { + log.warn(path, "--version:", res); + } else { + log.info(path, "--version:", res); + } return res.status === 0; } diff --git a/src/tools/rust-analyzer/editors/code/src/client.ts b/src/tools/rust-analyzer/editors/code/src/client.ts index 542233e7b91..bf58112916b 100644 --- a/src/tools/rust-analyzer/editors/code/src/client.ts +++ b/src/tools/rust-analyzer/editors/code/src/client.ts @@ -42,16 +42,7 @@ export async function createClient( const resp = await next(params, token); if (resp && Array.isArray(resp)) { return resp.map((val) => { - return prepareVSCodeConfig(val, (key, cfg) => { - // we only want to set discovered workspaces on the right key - // and if a workspace has been discovered. - if ( - key === "linkedProjects" && - config.discoveredWorkspaces.length > 0 - ) { - cfg[key] = config.discoveredWorkspaces; - } - }); + return prepareVSCodeConfig(val); }); } else { return resp; @@ -347,7 +338,8 @@ class ExperimentalFeatures implements lc.StaticFeature { "rust-analyzer.debugSingle", "rust-analyzer.showReferences", "rust-analyzer.gotoLocation", - "editor.action.triggerParameterHints", + "rust-analyzer.triggerParameterHints", + "rust-analyzer.rename", ], }, ...capabilities.experimental, @@ -400,14 +392,18 @@ function isCodeActionWithoutEditsAndCommands(value: any): boolean { // to proxy around that. We store the last hover's reference command link // here, as only one hover can be active at a time, and we don't need to // keep a history of these. 
-export let HOVER_REFERENCE_COMMAND: ra.CommandLink | undefined = undefined; +export let HOVER_REFERENCE_COMMAND: ra.CommandLink[] = []; function renderCommand(cmd: ra.CommandLink): string { - HOVER_REFERENCE_COMMAND = cmd; - return `[${cmd.title}](command:rust-analyzer.hoverRefCommandProxy '${cmd.tooltip}')`; + HOVER_REFERENCE_COMMAND.push(cmd); + return `[${cmd.title}](command:rust-analyzer.hoverRefCommandProxy?${ + HOVER_REFERENCE_COMMAND.length - 1 + } '${cmd.tooltip}')`; } function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownString { + // clean up the previous hover ref command + HOVER_REFERENCE_COMMAND = []; const text = actions .map( (group) => diff --git a/src/tools/rust-analyzer/editors/code/src/commands.ts b/src/tools/rust-analyzer/editors/code/src/commands.ts index 2b0b3001062..7ebc186a3ea 100644 --- a/src/tools/rust-analyzer/editors/code/src/commands.ts +++ b/src/tools/rust-analyzer/editors/code/src/commands.ts @@ -100,12 +100,6 @@ export function memoryUsage(ctx: CtxInit): Cmd { }; } -export function shuffleCrateGraph(ctx: CtxInit): Cmd { - return async () => { - return ctx.client.sendRequest(ra.shuffleCrateGraph); - }; -} - export function triggerParameterHints(_: CtxInit): Cmd { return async () => { const parameterHintsEnabled = vscode.workspace @@ -118,6 +112,12 @@ export function triggerParameterHints(_: CtxInit): Cmd { }; } +export function rename(_: CtxInit): Cmd { + return async () => { + await vscode.commands.executeCommand("editor.action.rename"); + }; +} + export function openLogs(ctx: CtxInit): Cmd { return async () => { if (ctx.client.outputChannel) { @@ -1197,9 +1197,10 @@ export function newDebugConfig(ctx: CtxInit): Cmd { } export function hoverRefCommandProxy(_: Ctx): Cmd { - return async () => { - if (HOVER_REFERENCE_COMMAND) { - const { command, arguments: args = [] } = HOVER_REFERENCE_COMMAND; + return async (index: number) => { + const link = HOVER_REFERENCE_COMMAND[index]; + if (link) { + const { command, arguments: args = [] } = link; await vscode.commands.executeCommand(command, ...args); } }; @@ -1512,13 +1513,3 @@ export function openWalkthrough(_: Ctx): Cmd { ); }; } - -export function openFAQ(_: Ctx): Cmd { - return async () => { - await vscode.commands.executeCommand( - "workbench.action.openWalkthrough", - "rust-lang.rust-analyzer#faq", - true, - ); - }; -} diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index ca77215004d..dc0165df71e 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -2,9 +2,9 @@ import * as Is from "vscode-languageclient/lib/common/utils/is"; import * as os from "os"; import * as path from "path"; import * as vscode from "vscode"; -import { type Env, log, unwrapUndefinable, expectNotUndefined } from "./util"; -import type { JsonProject } from "./rust_project"; -import type { Disposable } from "./ctx"; +import { expectNotUndefined, log, unwrapUndefinable } from "./util"; +import type { Env } from "./util"; +import type { Disposable } from "vscode"; export type RunnableEnvCfgItem = { mask?: string; @@ -31,7 +31,6 @@ export class Config { ); constructor(disposables: Disposable[]) { - this.discoveredWorkspaces = []; vscode.workspace.onDidChangeConfiguration(this.onDidChangeConfiguration, this, disposables); this.refreshLogging(); this.configureLanguage(); @@ -42,7 +41,6 @@ export class Config { } private refreshLogging() { - log.setEnabled(this.traceExtension ?? 
false); log.info( "Extension version:", vscode.extensions.getExtension(this.extensionId)!.packageJSON.version, @@ -52,8 +50,6 @@ export class Config { log.info("Using configuration", Object.fromEntries(cfg)); } - public discoveredWorkspaces: JsonProject[]; - private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) { this.refreshLogging(); @@ -256,10 +252,6 @@ export class Config { await this.cfg.update("checkOnSave", !(value || false), target || null, overrideInLanguage); } - get traceExtension() { - return this.get<boolean>("trace.extension"); - } - get discoverProjectRunner(): string | undefined { return this.get<string | undefined>("discoverProjectRunner"); } @@ -342,18 +334,7 @@ export class Config { } } -// the optional `cb?` parameter is meant to be used to add additional -// key/value pairs to the VS Code configuration. This needed for, e.g., -// including a `rust-project.json` into the `linkedProjects` key as part -// of the configuration/InitializationParams _without_ causing VS Code -// configuration to be written out to workspace-level settings. This is -// undesirable behavior because rust-project.json files can be tens of -// thousands of lines of JSON, most of which is not meant for humans -// to interact with. -export function prepareVSCodeConfig<T>( - resp: T, - cb?: (key: Extract<keyof T, string>, res: { [key: string]: any }) => void, -): T { +export function prepareVSCodeConfig<T>(resp: T): T { if (Is.string(resp)) { return substituteVSCodeVariableInString(resp) as T; } else if (resp && Is.array<any>(resp)) { @@ -365,9 +346,6 @@ export function prepareVSCodeConfig<T>( for (const key in resp) { const val = resp[key]; res[key] = prepareVSCodeConfig(val); - if (cb) { - cb(key, res); - } } return res as T; } diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index caa99d76194..0f2a758db42 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -1,5 +1,5 @@ import * as vscode from "vscode"; -import * as lc from "vscode-languageclient/node"; +import type * as lc from "vscode-languageclient/node"; import * as ra from "./lsp_ext"; import { Config, prepareVSCodeConfig } from "./config"; @@ -22,11 +22,10 @@ import { import { execRevealDependency } from "./commands"; import { PersistentState } from "./persistent_state"; import { bootstrap } from "./bootstrap"; -import type { RustAnalyzerExtensionApi } from "./main"; -import type { JsonProject } from "./rust_project"; import { prepareTestExplorer } from "./test_explorer"; import { spawn } from "node:child_process"; import { text } from "node:stream/consumers"; +import type { RustAnalyzerExtensionApi } from "./main"; // We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if // only those are in use. We use "Empty" to represent these scenarios @@ -71,7 +70,7 @@ export type CtxInit = Ctx & { export class Ctx implements RustAnalyzerExtensionApi { readonly statusBar: vscode.StatusBarItem; - config: Config; + readonly config: Config; readonly workspace: Workspace; readonly version: string; @@ -187,19 +186,7 @@ export class Ctx implements RustAnalyzerExtensionApi { } if (!this._client) { - this._serverPath = await bootstrap(this.extCtx, this.config, this.state).catch( - (err) => { - let message = "bootstrap error. "; - - message += - 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). 
'; - message += - 'To enable verbose logs use { "rust-analyzer.trace.extension": true }'; - - log.error("Bootstrap error", err); - throw new Error(message); - }, - ); + this._serverPath = await this.bootstrap(); text(spawn(this._serverPath, ["--version"]).stdout.setEncoding("utf-8")).then( (data) => { const prefix = `rust-analyzer `; @@ -224,15 +211,6 @@ export class Ctx implements RustAnalyzerExtensionApi { }; let rawInitializationOptions = vscode.workspace.getConfiguration("rust-analyzer"); - if (this.config.discoverProjectRunner) { - const command = `${this.config.discoverProjectRunner}.discoverWorkspaceCommand`; - log.info(`running command: ${command}`); - const uris = vscode.workspace.textDocuments - .filter(isRustDocument) - .map((document) => document.uri); - const projects: JsonProject[] = await vscode.commands.executeCommand(command, uris); - this.setWorkspaces(projects); - } if (this.workspace.kind === "Detached Files") { rawInitializationOptions = { @@ -241,16 +219,7 @@ export class Ctx implements RustAnalyzerExtensionApi { }; } - const initializationOptions = prepareVSCodeConfig( - rawInitializationOptions, - (key, obj) => { - // we only want to set discovered workspaces on the right key - // and if a workspace has been discovered. - if (key === "linkedProjects" && this.config.discoveredWorkspaces.length > 0) { - obj["linkedProjects"] = this.config.discoveredWorkspaces; - } - }, - ); + const initializationOptions = prepareVSCodeConfig(rawInitializationOptions); this._client = await createClient( this.traceOutputChannel, @@ -270,27 +239,24 @@ export class Ctx implements RustAnalyzerExtensionApi { this.outputChannel!.show(); }), ); - this.pushClientCleanup( - this._client.onNotification(ra.unindexedProject, async (params) => { - if (this.config.discoverProjectRunner) { - const command = `${this.config.discoverProjectRunner}.discoverWorkspaceCommand`; - log.info(`running command: ${command}`); - const uris = params.textDocuments.map((doc) => - vscode.Uri.parse(doc.uri, true), - ); - const projects: JsonProject[] = await vscode.commands.executeCommand( - command, - uris, - ); - this.setWorkspaces(projects); - await this.notifyRustAnalyzer(); - } - }), - ); } return this._client; } + private async bootstrap(): Promise<string> { + return bootstrap(this.extCtx, this.config, this.state).catch((err) => { + let message = "bootstrap error. "; + + message += + 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). '; + message += + 'To enable verbose logs, click the gear icon in the "OUTPUT" tab and select "Debug".'; + + log.error("Bootstrap error", err); + throw new Error(message); + }); + } + async start() { log.info("Starting language client"); const client = await this.getOrCreateClient(); @@ -399,19 +365,6 @@ export class Ctx implements RustAnalyzerExtensionApi { return this.extCtx.subscriptions; } - setWorkspaces(workspaces: JsonProject[]) { - this.config.discoveredWorkspaces = workspaces; - } - - async notifyRustAnalyzer(): Promise<void> { - // this is a workaround to avoid needing writing the `rust-project.json` into - // a workspace-level VS Code-specific settings folder. We'd like to keep the - // `rust-project.json` entirely in-memory. 
- await this.client?.sendNotification(lc.DidChangeConfigurationNotification.type, { - settings: "", - }); - } - private updateCommands(forceDisable?: "disable") { this.commandDisposables.forEach((disposable) => disposable.dispose()); this.commandDisposables = []; @@ -501,7 +454,7 @@ export class Ctx implements RustAnalyzerExtensionApi { const toggleCheckOnSave = this.config.checkOnSave ? "Disable" : "Enable"; statusBar.tooltip.appendMarkdown( - `[Extension Info](command:analyzer.serverVersion "Show version and server binary info"): Version ${this.version}, Server Version ${this._serverVersion}` + + `[Extension Info](command:rust-analyzer.serverVersion "Show version and server binary info"): Version ${this.version}, Server Version ${this._serverVersion}` + "\n\n---\n\n" + '[$(terminal) Open Logs](command:rust-analyzer.openLogs "Open the server logs")' + "\n\n" + diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts index f23e3680933..d9622b4a0d2 100644 --- a/src/tools/rust-analyzer/editors/code/src/debug.ts +++ b/src/tools/rust-analyzer/editors/code/src/debug.ts @@ -3,10 +3,10 @@ import * as vscode from "vscode"; import * as path from "path"; import type * as ra from "./lsp_ext"; -import { Cargo, getRustcId, getSysroot } from "./toolchain"; +import { Cargo } from "./toolchain"; import type { Ctx } from "./ctx"; import { prepareEnv } from "./run"; -import { isCargoRunnableArgs, unwrapUndefinable } from "./util"; +import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util"; const debugOutput = vscode.window.createOutputChannel("Debug"); type DebugConfigProvider = ( @@ -142,18 +142,29 @@ async function getDebugConfiguration( const executable = await getDebugExecutable(runnableArgs, env); let sourceFileMap = debugOptions.sourceFileMap; if (sourceFileMap === "auto") { - // let's try to use the default toolchain - const [commitHash, sysroot] = await Promise.all([ - getRustcId(wsFolder), - getSysroot(wsFolder), - ]); - const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust"); sourceFileMap = {}; - sourceFileMap[`/rustc/${commitHash}/`] = rustlib; + const sysroot = env["RUSTC_TOOLCHAIN"]; + if (sysroot) { + // let's try to use the default toolchain + const data = await execute(`rustc -V -v`, { cwd: wsFolder, env }); + const rx = /commit-hash:\s(.*)$/m; + + const commitHash = rx.exec(data)?.[1]; + if (commitHash) { + const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust"); + sourceFileMap[`/rustc/${commitHash}/`] = rustlib; + } + } } const provider = unwrapUndefinable(knownEngines[debugEngine.id]); - const debugConfig = provider(runnable, runnableArgs, simplifyPath(executable), env); + const debugConfig = provider( + runnable, + runnableArgs, + simplifyPath(executable), + env, + sourceFileMap, + ); if (debugConfig.type in debugOptions.engineSettings) { const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type]; for (var key in settingsMap) { diff --git a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts index e24893b2509..d52e314e219 100644 --- a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts +++ b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts @@ -45,7 +45,6 @@ export const rebuildProcMacros = new lc.RequestType0<null, void>("rust-analyzer/ export const runFlycheck = new lc.NotificationType<{ textDocument: lc.TextDocumentIdentifier | null; }>("rust-analyzer/runFlycheck"); -export const shuffleCrateGraph = new 
lc.RequestType0<null, void>("rust-analyzer/shuffleCrateGraph"); export const syntaxTree = new lc.RequestType<SyntaxTreeParams, string, void>( "rust-analyzer/syntaxTree", ); @@ -303,9 +302,3 @@ export type RecursiveMemoryLayoutNode = { export type RecursiveMemoryLayout = { nodes: RecursiveMemoryLayoutNode[]; }; - -export const unindexedProject = new lc.NotificationType<UnindexedProjectParams>( - "rust-analyzer/unindexedProject", -); - -export type UnindexedProjectParams = { textDocuments: lc.TextDocumentIdentifier[] }; diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index c96f2ae869e..4769fdd864a 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -6,16 +6,12 @@ import { type CommandFactory, Ctx, fetchWorkspace } from "./ctx"; import * as diagnostics from "./diagnostics"; import { activateTaskProvider } from "./tasks"; import { setContextValue } from "./util"; -import type { JsonProject } from "./rust_project"; const RUST_PROJECT_CONTEXT_NAME = "inRustProject"; -// This API is not stable and may break in between minor releases. export interface RustAnalyzerExtensionApi { + // FIXME: this should be non-optional readonly client?: lc.LanguageClient; - - setWorkspaces(workspaces: JsonProject[]): void; - notifyRustAnalyzer(): Promise<void>; } export async function deactivate() { @@ -145,7 +141,6 @@ function createCommands(): Record<string, CommandFactory> { analyzerStatus: { enabled: commands.analyzerStatus }, memoryUsage: { enabled: commands.memoryUsage }, - shuffleCrateGraph: { enabled: commands.shuffleCrateGraph }, reloadWorkspace: { enabled: commands.reloadWorkspace }, rebuildProcMacros: { enabled: commands.rebuildProcMacros }, matchingBrace: { enabled: commands.matchingBrace }, @@ -179,7 +174,6 @@ function createCommands(): Record<string, CommandFactory> { toggleCheckOnSave: { enabled: commands.toggleCheckOnSave }, toggleLSPLogs: { enabled: commands.toggleLSPLogs }, openWalkthrough: { enabled: commands.openWalkthrough }, - openFAQ: { enabled: commands.openFAQ }, // Internal commands which are invoked by the server. applyActionGroup: { enabled: commands.applyActionGroup }, applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand }, @@ -190,6 +184,7 @@ function createCommands(): Record<string, CommandFactory> { runSingle: { enabled: commands.runSingle }, showReferences: { enabled: commands.showReferences }, triggerParameterHints: { enabled: commands.triggerParameterHints }, + rename: { enabled: commands.rename }, openLogs: { enabled: commands.openLogs }, revealDependency: { enabled: commands.revealDependency }, }; diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index 783bbc1607d..7179eb37447 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -8,7 +8,6 @@ import { makeDebugConfig } from "./debug"; import type { Config, RunnableEnvCfg, RunnableEnvCfgItem } from "./config"; import type { LanguageClient } from "vscode-languageclient/node"; import { unwrapUndefinable, type RustEditor } from "./util"; -import * as toolchain from "./toolchain"; const quickPickButtons = [ { iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." 
}, @@ -115,7 +114,7 @@ export async function createTaskFromRunnable( let definition: tasks.TaskDefinition; let options; - let cargo; + let cargo = "cargo"; if (runnable.kind === "cargo") { const runnableArgs = runnable.args; let args = createCargoArgs(runnableArgs); @@ -126,8 +125,6 @@ export async function createTaskFromRunnable( cargo = unwrapUndefinable(cargoParts[0]); args = [...cargoParts.slice(1), ...args]; - } else { - cargo = await toolchain.cargoPath(); } definition = { @@ -200,7 +197,7 @@ async function getRunnables( continue; } - if (debuggeeOnly && (r.label.startsWith("doctest") || r.label.startsWith("cargo"))) { + if (debuggeeOnly && r.label.startsWith("doctest")) { continue; } items.push(new RunnableQuickPick(r)); diff --git a/src/tools/rust-analyzer/editors/code/src/rust_project.ts b/src/tools/rust-analyzer/editors/code/src/rust_project.ts deleted file mode 100644 index c983874fc00..00000000000 --- a/src/tools/rust-analyzer/editors/code/src/rust_project.ts +++ /dev/null @@ -1,110 +0,0 @@ -export interface JsonProject { - /// Path to the sysroot directory. - /// - /// The sysroot is where rustc looks for the - /// crates that are built-in to rust, such as - /// std. - /// - /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root - /// - /// To see the current value of sysroot, you - /// can query rustc: - /// - /// ``` - /// $ rustc --print sysroot - /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin - /// ``` - sysroot?: string; - /// Path to the directory with *source code* of - /// sysroot crates. - /// - /// By default, this is `lib/rustlib/src/rust/library` - /// relative to the sysroot. - /// - /// It should point to the directory where std, - /// core, and friends can be found: - /// - /// https://github.com/rust-lang/rust/tree/master/library. - /// - /// If provided, rust-analyzer automatically adds - /// dependencies on sysroot crates. Conversely, - /// if you omit this path, you can specify sysroot - /// dependencies yourself and, for example, have - /// several different "sysroots" in one graph of - /// crates. - sysroot_src?: string; - /// The set of crates comprising the current - /// project. Must include all transitive - /// dependencies as well as sysroot crate (libstd, - /// libcore and such). - crates: Crate[]; -} - -export interface Crate { - /// Optional crate name used for display purposes, - /// without affecting semantics. See the `deps` - /// key for semantically-significant crate names. - display_name?: string; - /// Path to the root module of the crate. - root_module: string; - /// Edition of the crate. - edition: "2015" | "2018" | "2021"; - /// Dependencies - deps: Dep[]; - /// Should this crate be treated as a member of - /// current "workspace". - /// - /// By default, inferred from the `root_module` - /// (members are the crates which reside inside - /// the directory opened in the editor). - /// - /// Set this to `false` for things like standard - /// library and 3rd party crates to enable - /// performance optimizations (rust-analyzer - /// assumes that non-member crates don't change). - is_workspace_member?: boolean; - /// Optionally specify the (super)set of `.rs` - /// files comprising this crate. - /// - /// By default, rust-analyzer assumes that only - /// files under `root_module.parent` can belong - /// to a crate. `include_dirs` are included - /// recursively, unless a subdirectory is in - /// `exclude_dirs`. - /// - /// Different crates can share the same `source`. 
- /// - /// If two crates share an `.rs` file in common, - /// they *must* have the same `source`. - /// rust-analyzer assumes that files from one - /// source can't refer to files in another source. - source?: { - include_dirs: string[]; - exclude_dirs: string[]; - }; - /// The set of cfgs activated for a given crate, like - /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`. - cfg: string[]; - /// Target triple for this Crate. - /// - /// Used when running `rustc --print cfg` - /// to get target-specific cfgs. - target?: string; - /// Environment variables, used for - /// the `env!` macro - env: { [key: string]: string }; - - /// Whether the crate is a proc-macro crate. - is_proc_macro: boolean; - /// For proc-macro crates, path to compiled - /// proc-macro (.so file). - proc_macro_dylib_path?: string; -} - -export interface Dep { - /// Index of a crate in the `crates` array. - crate: number; - /// Name as should appear in the (implicit) - /// `extern crate name` declaration. - name: string; -} diff --git a/src/tools/rust-analyzer/editors/code/src/tasks.ts b/src/tools/rust-analyzer/editors/code/src/tasks.ts index fac1cc6394f..730ec6d1e90 100644 --- a/src/tools/rust-analyzer/editors/code/src/tasks.ts +++ b/src/tools/rust-analyzer/editors/code/src/tasks.ts @@ -125,7 +125,7 @@ export async function targetToExecution( let command, args; if (isCargoTask(definition)) { // FIXME: The server should provide cargo - command = cargo || (await toolchain.cargoPath()); + command = cargo || (await toolchain.cargoPath(options?.env)); args = [definition.command].concat(definition.args || []); } else { command = definition.command; diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts index 6a0b5c26d82..850a6a55616 100644 --- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts +++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts @@ -3,7 +3,7 @@ import * as os from "os"; import * as path from "path"; import * as readline from "readline"; import * as vscode from "vscode"; -import { execute, log, memoizeAsync, unwrapNullable, unwrapUndefinable } from "./util"; +import { log, memoizeAsync, unwrapUndefinable } from "./util"; import type { CargoRunnableArgs } from "./lsp_ext"; interface CompilationArtifact { @@ -55,7 +55,10 @@ export class Cargo { return result; } - private async getArtifacts(spec: ArtifactSpec): Promise<CompilationArtifact[]> { + private async getArtifacts( + spec: ArtifactSpec, + env?: Record<string, string>, + ): Promise<CompilationArtifact[]> { const artifacts: CompilationArtifact[] = []; try { @@ -78,6 +81,7 @@ export class Cargo { } }, (stderr) => this.output.append(stderr), + env, ); } catch (err) { this.output.show(true); @@ -90,6 +94,7 @@ export class Cargo { async executableFromArgs(runnableArgs: CargoRunnableArgs): Promise<string> { const artifacts = await this.getArtifacts( Cargo.artifactSpec(runnableArgs.cargoArgs, runnableArgs.executableArgs), + runnableArgs.environment, ); if (artifacts.length === 0) { @@ -106,8 +111,9 @@ export class Cargo { cargoArgs: string[], onStdoutJson: (obj: any) => void, onStderrString: (data: string) => void, + env?: Record<string, string>, ): Promise<number> { - const path = await cargoPath(); + const path = await cargoPath(env); return await new Promise((resolve, reject) => { const cargo = cp.spawn(path, cargoArgs, { stdio: ["ignore", "pipe", "pipe"], @@ -133,29 +139,12 @@ export class Cargo { } } -/** Mirrors `project_model::sysroot::discover_sysroot_dir()` 
implementation*/ -export async function getSysroot(dir: string): Promise<string> { - const rustcPath = await getPathForExecutable("rustc"); - - // do not memoize the result because the toolchain may change between runs - return await execute(`${rustcPath} --print sysroot`, { cwd: dir }); -} - -export async function getRustcId(dir: string): Promise<string> { - const rustcPath = await getPathForExecutable("rustc"); - - // do not memoize the result because the toolchain may change between runs - const data = await execute(`${rustcPath} -V -v`, { cwd: dir }); - const rx = /commit-hash:\s(.*)$/m; - - const result = unwrapNullable(rx.exec(data)); - const first = unwrapUndefinable(result[1]); - return first; -} - /** Mirrors `toolchain::cargo()` implementation */ // FIXME: The server should provide this -export function cargoPath(): Promise<string> { +export function cargoPath(env?: Record<string, string>): Promise<string> { + if (env?.["RUSTC_TOOLCHAIN"]) { + return Promise.resolve("cargo"); + } return getPathForExecutable("cargo"); } diff --git a/src/tools/rust-analyzer/editors/code/src/util.ts b/src/tools/rust-analyzer/editors/code/src/util.ts index dd1cbe38ff9..db64a013fda 100644 --- a/src/tools/rust-analyzer/editors/code/src/util.ts +++ b/src/tools/rust-analyzer/editors/code/src/util.ts @@ -17,49 +17,48 @@ export type Env = { [name: string]: string; }; -export const log = new (class { - private enabled = true; - private readonly output = vscode.window.createOutputChannel("Rust Analyzer Client"); +class Log { + private readonly output = vscode.window.createOutputChannel("Rust Analyzer Client", { + log: true, + }); - setEnabled(yes: boolean): void { - log.enabled = yes; + trace(...messages: [unknown, ...unknown[]]): void { + this.output.trace(this.stringify(messages)); } - // Hint: the type [T, ...T[]] means a non-empty array - debug(...msg: [unknown, ...unknown[]]): void { - if (!log.enabled) return; - log.write("DEBUG", ...msg); + debug(...messages: [unknown, ...unknown[]]): void { + this.output.debug(this.stringify(messages)); } - info(...msg: [unknown, ...unknown[]]): void { - log.write("INFO", ...msg); + info(...messages: [unknown, ...unknown[]]): void { + this.output.info(this.stringify(messages)); } - warn(...msg: [unknown, ...unknown[]]): void { - debugger; - log.write("WARN", ...msg); + warn(...messages: [unknown, ...unknown[]]): void { + this.output.warn(this.stringify(messages)); } - error(...msg: [unknown, ...unknown[]]): void { - debugger; - log.write("ERROR", ...msg); - log.output.show(true); + error(...messages: [unknown, ...unknown[]]): void { + this.output.error(this.stringify(messages)); + this.output.show(true); } - private write(label: string, ...messageParts: unknown[]): void { - const message = messageParts.map(log.stringify).join(" "); - const dateTime = new Date().toLocaleString(); - log.output.appendLine(`${label} [${dateTime}]: ${message}`); + private stringify(messages: unknown[]): string { + return messages + .map((message) => { + if (typeof message === "string") { + return message; + } + if (message instanceof Error) { + return message.stack || message.message; + } + return inspect(message, { depth: 6, colors: false }); + }) + .join(" "); } +} - private stringify(val: unknown): string { - if (typeof val === "string") return val; - return inspect(val, { - colors: false, - depth: 6, // heuristic - }); - } -})(); +export const log = new Log(); export function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); @@ -135,7 +134,7 @@ export 
function execute(command: string, options: ExecOptions): Promise<string> return new Promise((resolve, reject) => { exec(command, options, (err, stdout, stderr) => { if (err) { - log.error(err); + log.error("error:", err); reject(err); return; } diff --git a/src/tools/rust-analyzer/lib/line-index/src/lib.rs b/src/tools/rust-analyzer/lib/line-index/src/lib.rs index 1ab62e99235..66875e25242 100644 --- a/src/tools/rust-analyzer/lib/line-index/src/lib.rs +++ b/src/tools/rust-analyzer/lib/line-index/src/lib.rs @@ -275,21 +275,21 @@ unsafe fn analyze_source_file_sse2( let ptr = src_bytes.as_ptr() as *const __m128i; // We don't know if the pointer is aligned to 16 bytes, so we // use `loadu`, which supports unaligned loading. - let chunk = _mm_loadu_si128(ptr.add(chunk_index)); + let chunk = unsafe { _mm_loadu_si128(ptr.add(chunk_index)) }; // For character in the chunk, see if its byte value is < 0, which // indicates that it's part of a UTF-8 char. - let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0)); + let multibyte_test = unsafe { _mm_cmplt_epi8(chunk, _mm_set1_epi8(0)) }; // Create a bit mask from the comparison results. - let multibyte_mask = _mm_movemask_epi8(multibyte_test); + let multibyte_mask = unsafe { _mm_movemask_epi8(multibyte_test) }; // If the bit mask is all zero, we only have ASCII chars here: if multibyte_mask == 0 { assert!(intra_chunk_offset == 0); // Check for newlines in the chunk - let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)); - let newlines_mask = _mm_movemask_epi8(newlines_test); + let newlines_test = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)) }; + let newlines_mask = unsafe { _mm_movemask_epi8(newlines_test) }; if newlines_mask != 0 { // All control characters are newlines, record them @@ -349,8 +349,8 @@ unsafe fn analyze_source_file_sse2( unsafe fn move_mask(v: std::arch::aarch64::uint8x16_t) -> u64 { use std::arch::aarch64::*; - let nibble_mask = vshrn_n_u16(vreinterpretq_u16_u8(v), 4); - vget_lane_u64(vreinterpret_u64_u8(nibble_mask), 0) + let nibble_mask = unsafe { vshrn_n_u16(vreinterpretq_u16_u8(v), 4) }; + unsafe { vget_lane_u64(vreinterpret_u64_u8(nibble_mask), 0) } } #[target_feature(enable = "neon")] @@ -368,7 +368,7 @@ unsafe fn analyze_source_file_neon( let chunk_count = src.len() / CHUNK_SIZE; - let newline = vdupq_n_s8(b'\n' as i8); + let newline = unsafe { vdupq_n_s8(b'\n' as i8) }; // This variable keeps track of where we should start decoding a // chunk. If a multi-byte character spans across chunk boundaries, @@ -378,21 +378,21 @@ unsafe fn analyze_source_file_neon( for chunk_index in 0..chunk_count { let ptr = src_bytes.as_ptr() as *const i8; - let chunk = vld1q_s8(ptr.add(chunk_index * CHUNK_SIZE)); + let chunk = unsafe { vld1q_s8(ptr.add(chunk_index * CHUNK_SIZE)) }; // For character in the chunk, see if its byte value is < 0, which // indicates that it's part of a UTF-8 char. - let multibyte_test = vcltzq_s8(chunk); + let multibyte_test = unsafe { vcltzq_s8(chunk) }; // Create a bit mask from the comparison results. 
- let multibyte_mask = move_mask(multibyte_test); + let multibyte_mask = unsafe { move_mask(multibyte_test) }; // If the bit mask is all zero, we only have ASCII chars here: if multibyte_mask == 0 { assert!(intra_chunk_offset == 0); // Check for newlines in the chunk - let newlines_test = vceqq_s8(chunk, newline); - let mut newlines_mask = move_mask(newlines_test); + let newlines_test = unsafe { vceqq_s8(chunk, newline) }; + let mut newlines_mask = unsafe { move_mask(newlines_test) }; // If the bit mask is not all zero, there are newlines in this chunk. if newlines_mask != 0 { diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 424c93a7521..001b900b207 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -bcf1f6db4594ae6132378b179a30cdb3599a863d +1b51d80027919563004918eaadfa0d890ac0eb93 diff --git a/src/tools/rust-analyzer/xtask/src/codegen.rs b/src/tools/rust-analyzer/xtask/src/codegen.rs index acaa65129df..aeb0c00ae6a 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen.rs @@ -162,11 +162,12 @@ fn add_preamble(cg: CodegenType, mut text: String) -> String { /// Checks that the `file` has the specified `contents`. If that is not the /// case, updates the file and then fails the test. #[allow(clippy::print_stderr)] -fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: bool) { +fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: bool) -> bool { + let contents = normalize_newlines(contents); if let Ok(old_contents) = fs::read_to_string(file) { - if normalize_newlines(&old_contents) == normalize_newlines(contents) { + if normalize_newlines(&old_contents) == contents { // File is already up to date. 
- return; + return false; } } @@ -193,6 +194,7 @@ fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: boo let _ = fs::create_dir_all(parent); } fs::write(file, contents).unwrap(); + true } } diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs index 45fa2d37c8f..0352539754b 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs @@ -17,15 +17,22 @@ use quote::{format_ident, quote}; use ungrammar::{Grammar, Rule}; use crate::{ - codegen::{add_preamble, ensure_file_contents, reformat}, + codegen::{add_preamble, ensure_file_contents, grammar::ast_src::generate_kind_src, reformat}, project_root, }; mod ast_src; -use self::ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC}; +use self::ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc}; pub(crate) fn generate(check: bool) { - let syntax_kinds = generate_syntax_kinds(KINDS_SRC); + let grammar = fs::read_to_string(project_root().join("crates/syntax/rust.ungram")) + .unwrap() + .parse() + .unwrap(); + let ast = lower(&grammar); + let kinds_src = generate_kind_src(&ast.nodes, &ast.enums, &grammar); + + let syntax_kinds = generate_syntax_kinds(kinds_src); let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs"); ensure_file_contents( crate::flags::CodegenType::Grammar, @@ -34,12 +41,6 @@ pub(crate) fn generate(check: bool) { check, ); - let grammar = fs::read_to_string(project_root().join("crates/syntax/rust.ungram")) - .unwrap() - .parse() - .unwrap(); - let ast = lower(&grammar); - let ast_tokens = generate_tokens(&ast); let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs"); ensure_file_contents( @@ -49,7 +50,7 @@ pub(crate) fn generate(check: bool) { check, ); - let ast_nodes = generate_nodes(KINDS_SRC, &ast); + let ast_nodes = generate_nodes(kinds_src, &ast); let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs"); ensure_file_contents( crate::flags::CodegenType::Grammar, @@ -96,7 +97,7 @@ fn generate_tokens(grammar: &AstSrc) -> String { .replace("#[derive", "\n#[derive") } -fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { +fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String { let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar .nodes .iter() @@ -117,7 +118,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { }); let methods = node.fields.iter().map(|field| { - let method_name = field.method_name(); + let method_name = format_ident!("{}", field.method_name()); let ty = field.ty(); if field.is_many() { @@ -260,7 +261,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { .iter() .map(|name| format_ident!("{}", to_upper_snake_case(&name.name.to_string()))) .collect(); - + let nodes = nodes.iter().map(|node| format_ident!("{}", node.name)); ( quote! 
{ #[pretty_doc_comment_placeholder_workaround] @@ -293,6 +294,15 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { &self.syntax } } + + #( + impl From<#nodes> for #name { + #[inline] + fn from(node: #nodes) -> #name { + #name { syntax: node.syntax } + } + } + )* }, ) }) @@ -366,7 +376,7 @@ fn write_doc_comment(contents: &[String], dest: &mut String) { } } -fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String { +fn generate_syntax_kinds(grammar: KindsSrc) -> String { let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar .punct .iter() @@ -481,7 +491,9 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String { #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)* #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)* [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT }; + [int_number] => { $crate::SyntaxKind::INT_NUMBER }; [ident] => { $crate::SyntaxKind::IDENT }; + [string] => { $crate::SyntaxKind::STRING }; [shebang] => { $crate::SyntaxKind::SHEBANG }; } }; @@ -550,7 +562,7 @@ impl Field { _ => None, } } - fn method_name(&self) -> proc_macro2::Ident { + fn method_name(&self) -> String { match self { Field::Token(name) => { let name = match name.as_str() { @@ -585,13 +597,13 @@ impl Field { "~" => "tilde", _ => name, }; - format_ident!("{}_token", name) + format!("{name}_token",) } Field::Node { name, .. } => { if name == "type" { - format_ident!("ty") + String::from("ty") } else { - format_ident!("{}", name) + name.to_owned() } } } @@ -604,6 +616,15 @@ impl Field { } } +fn clean_token_name(name: &str) -> String { + let cleaned = name.trim_start_matches(['@', '#', '?']); + if cleaned.is_empty() { + name.to_owned() + } else { + cleaned.to_owned() + } +} + fn lower(grammar: &Grammar) -> AstSrc { let mut res = AstSrc { tokens: @@ -683,14 +704,12 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r } Rule::Token(token) => { assert!(label.is_none()); - let mut name = grammar[*token].name.clone(); - if name != "int_number" && name != "string" { - if "[]{}()".contains(&name) { - name = format!("'{name}'"); - } - let field = Field::Token(name); - acc.push(field); + let mut name = clean_token_name(&grammar[*token].name); + if "[]{}()".contains(&name) { + name = format!("'{name}'"); } + let field = Field::Token(name); + acc.push(field); } Rule::Rep(inner) => { if let Rule::Node(node) = &**inner { @@ -863,7 +882,7 @@ fn extract_struct_traits(ast: &mut AstSrc) { fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) { let mut to_remove = Vec::new(); for (i, field) in node.fields.iter().enumerate() { - let method_name = field.method_name().to_string(); + let method_name = field.method_name(); if methods.iter().any(|&it| it == method_name) { to_remove.push(i); } diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs index c246ee9950c..3444f89908b 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs @@ -1,241 +1,154 @@ //! Defines input for code generation process. 
-pub(crate) struct KindsSrc<'a> { - pub(crate) punct: &'a [(&'a str, &'a str)], - pub(crate) keywords: &'a [&'a str], - pub(crate) contextual_keywords: &'a [&'a str], - pub(crate) literals: &'a [&'a str], - pub(crate) tokens: &'a [&'a str], - pub(crate) nodes: &'a [&'a str], +use crate::codegen::grammar::to_upper_snake_case; + +#[derive(Copy, Clone, Debug)] +pub(crate) struct KindsSrc { + pub(crate) punct: &'static [(&'static str, &'static str)], + pub(crate) keywords: &'static [&'static str], + pub(crate) contextual_keywords: &'static [&'static str], + pub(crate) literals: &'static [&'static str], + pub(crate) tokens: &'static [&'static str], + pub(crate) nodes: &'static [&'static str], } -pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { - punct: &[ - (";", "SEMICOLON"), - (",", "COMMA"), - ("(", "L_PAREN"), - (")", "R_PAREN"), - ("{", "L_CURLY"), - ("}", "R_CURLY"), - ("[", "L_BRACK"), - ("]", "R_BRACK"), - ("<", "L_ANGLE"), - (">", "R_ANGLE"), - ("@", "AT"), - ("#", "POUND"), - ("~", "TILDE"), - ("?", "QUESTION"), - ("$", "DOLLAR"), - ("&", "AMP"), - ("|", "PIPE"), - ("+", "PLUS"), - ("*", "STAR"), - ("/", "SLASH"), - ("^", "CARET"), - ("%", "PERCENT"), - ("_", "UNDERSCORE"), - (".", "DOT"), - ("..", "DOT2"), - ("...", "DOT3"), - ("..=", "DOT2EQ"), - (":", "COLON"), - ("::", "COLON2"), - ("=", "EQ"), - ("==", "EQ2"), - ("=>", "FAT_ARROW"), - ("!", "BANG"), - ("!=", "NEQ"), - ("-", "MINUS"), - ("->", "THIN_ARROW"), - ("<=", "LTEQ"), - (">=", "GTEQ"), - ("+=", "PLUSEQ"), - ("-=", "MINUSEQ"), - ("|=", "PIPEEQ"), - ("&=", "AMPEQ"), - ("^=", "CARETEQ"), - ("/=", "SLASHEQ"), - ("*=", "STAREQ"), - ("%=", "PERCENTEQ"), - ("&&", "AMP2"), - ("||", "PIPE2"), - ("<<", "SHL"), - (">>", "SHR"), - ("<<=", "SHLEQ"), - (">>=", "SHREQ"), - ], - keywords: &[ - "abstract", "as", "async", "await", "become", "box", "break", "const", "continue", "crate", - "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", - "let", "loop", "macro", "match", "mod", "move", "mut", "override", "priv", "pub", "ref", - "return", "self", "Self", "static", "struct", "super", "trait", "true", "try", "type", - "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield", - ], - contextual_keywords: &[ - "auto", - "builtin", - "default", - "existential", - "union", - "raw", - "macro_rules", - "yeet", - "offset_of", - "asm", - "format_args", - ], - literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING", "C_STRING"], - tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"], - nodes: &[ - "SOURCE_FILE", - "STRUCT", - "UNION", - "ENUM", - "FN", - "RET_TYPE", - "EXTERN_CRATE", - "MODULE", - "USE", - "STATIC", - "CONST", - "TRAIT", - "TRAIT_ALIAS", - "IMPL", - "TYPE_ALIAS", - "MACRO_CALL", - "MACRO_RULES", - "MACRO_ARM", - "TOKEN_TREE", - "MACRO_DEF", - "PAREN_TYPE", - "TUPLE_TYPE", - "MACRO_TYPE", - "NEVER_TYPE", - "PATH_TYPE", - "PTR_TYPE", - "ARRAY_TYPE", - "SLICE_TYPE", - "REF_TYPE", - "INFER_TYPE", - "FN_PTR_TYPE", - "FOR_TYPE", - "IMPL_TRAIT_TYPE", - "DYN_TRAIT_TYPE", - "OR_PAT", - "PAREN_PAT", - "REF_PAT", - "BOX_PAT", - "IDENT_PAT", - "WILDCARD_PAT", - "REST_PAT", - "PATH_PAT", - "RECORD_PAT", - "RECORD_PAT_FIELD_LIST", - "RECORD_PAT_FIELD", - "TUPLE_STRUCT_PAT", - "TUPLE_PAT", - "SLICE_PAT", - "RANGE_PAT", - "LITERAL_PAT", - "MACRO_PAT", - "CONST_BLOCK_PAT", - // atoms - "TUPLE_EXPR", - "ARRAY_EXPR", - "PAREN_EXPR", - "PATH_EXPR", - "CLOSURE_EXPR", - "IF_EXPR", - "WHILE_EXPR", - "LOOP_EXPR", - "FOR_EXPR", - 
"CONTINUE_EXPR", - "BREAK_EXPR", - "LABEL", - "BLOCK_EXPR", - "STMT_LIST", - "RETURN_EXPR", - "BECOME_EXPR", - "YIELD_EXPR", - "YEET_EXPR", - "LET_EXPR", - "UNDERSCORE_EXPR", - "MACRO_EXPR", - "MATCH_EXPR", - "MATCH_ARM_LIST", - "MATCH_ARM", - "MATCH_GUARD", - "RECORD_EXPR", - "RECORD_EXPR_FIELD_LIST", - "RECORD_EXPR_FIELD", - "OFFSET_OF_EXPR", - "ASM_EXPR", - "FORMAT_ARGS_EXPR", - "FORMAT_ARGS_ARG", - // postfix - "CALL_EXPR", - "INDEX_EXPR", - "METHOD_CALL_EXPR", - "FIELD_EXPR", - "AWAIT_EXPR", - "TRY_EXPR", - "CAST_EXPR", - // unary - "REF_EXPR", - "PREFIX_EXPR", - "RANGE_EXPR", // just weird - "BIN_EXPR", - "EXTERN_BLOCK", - "EXTERN_ITEM_LIST", - "VARIANT", - "RECORD_FIELD_LIST", - "RECORD_FIELD", - "TUPLE_FIELD_LIST", - "TUPLE_FIELD", - "VARIANT_LIST", - "ITEM_LIST", - "ASSOC_ITEM_LIST", - "ATTR", - "META", - "USE_TREE", - "USE_TREE_LIST", - "PATH", - "PATH_SEGMENT", - "LITERAL", - "RENAME", - "VISIBILITY", - "WHERE_CLAUSE", - "WHERE_PRED", - "ABI", - "NAME", - "NAME_REF", - "LET_STMT", - "LET_ELSE", - "EXPR_STMT", - "GENERIC_PARAM_LIST", - "GENERIC_PARAM", - "LIFETIME_PARAM", - "TYPE_PARAM", - "RETURN_TYPE_ARG", - "CONST_PARAM", - "GENERIC_ARG_LIST", - "LIFETIME", - "LIFETIME_ARG", - "TYPE_ARG", - "ASSOC_TYPE_ARG", - "CONST_ARG", - "PARAM_LIST", - "PARAM", - "SELF_PARAM", - "ARG_LIST", - "TYPE_BOUND", - "TYPE_BOUND_LIST", - // macro related - "MACRO_ITEMS", - "MACRO_STMTS", - "MACRO_EAGER_INPUT", - ], -}; +/// The punctuations of the language. +const PUNCT: &[(&str, &str)] = &[ + // KEEP THE DOLLAR AT THE TOP ITS SPECIAL + ("$", "DOLLAR"), + (";", "SEMICOLON"), + (",", "COMMA"), + ("(", "L_PAREN"), + (")", "R_PAREN"), + ("{", "L_CURLY"), + ("}", "R_CURLY"), + ("[", "L_BRACK"), + ("]", "R_BRACK"), + ("<", "L_ANGLE"), + (">", "R_ANGLE"), + ("@", "AT"), + ("#", "POUND"), + ("~", "TILDE"), + ("?", "QUESTION"), + ("&", "AMP"), + ("|", "PIPE"), + ("+", "PLUS"), + ("*", "STAR"), + ("/", "SLASH"), + ("^", "CARET"), + ("%", "PERCENT"), + ("_", "UNDERSCORE"), + (".", "DOT"), + ("..", "DOT2"), + ("...", "DOT3"), + ("..=", "DOT2EQ"), + (":", "COLON"), + ("::", "COLON2"), + ("=", "EQ"), + ("==", "EQ2"), + ("=>", "FAT_ARROW"), + ("!", "BANG"), + ("!=", "NEQ"), + ("-", "MINUS"), + ("->", "THIN_ARROW"), + ("<=", "LTEQ"), + (">=", "GTEQ"), + ("+=", "PLUSEQ"), + ("-=", "MINUSEQ"), + ("|=", "PIPEEQ"), + ("&=", "AMPEQ"), + ("^=", "CARETEQ"), + ("/=", "SLASHEQ"), + ("*=", "STAREQ"), + ("%=", "PERCENTEQ"), + ("&&", "AMP2"), + ("||", "PIPE2"), + ("<<", "SHL"), + (">>", "SHR"), + ("<<=", "SHLEQ"), + (">>=", "SHREQ"), +]; +const TOKENS: &[&str] = &["ERROR", "WHITESPACE", "NEWLINE", "COMMENT"]; +// &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"],; + +const EOF: &str = "EOF"; + +const RESERVED: &[&str] = &[ + "abstract", "become", "box", "do", "final", "macro", "override", "priv", "typeof", "unsized", + "virtual", "yield", "try", +]; +const CONTEXTUAL_RESERVED: &[&str] = &[]; + +pub(crate) fn generate_kind_src( + nodes: &[AstNodeSrc], + enums: &[AstEnumSrc], + grammar: &ungrammar::Grammar, +) -> KindsSrc { + let mut keywords: Vec<&_> = Vec::new(); + let mut contextual_keywords: Vec<&_> = Vec::new(); + let mut tokens: Vec<&_> = TOKENS.to_vec(); + let mut literals: Vec<&_> = Vec::new(); + let mut used_puncts = vec![false; PUNCT.len()]; + // Mark $ as used + used_puncts[0] = true; + grammar.tokens().for_each(|token| { + let name = &*grammar[token].name; + if name == EOF { + return; + } + match name.split_at(1) { + ("@", lit) if !lit.is_empty() => { + 
literals.push(String::leak(to_upper_snake_case(lit))); + } + ("#", token) if !token.is_empty() => { + tokens.push(String::leak(to_upper_snake_case(token))); + } + ("?", kw) if !kw.is_empty() => { + contextual_keywords.push(String::leak(kw.to_owned())); + } + _ if name.chars().all(char::is_alphabetic) => { + keywords.push(String::leak(name.to_owned())); + } + _ => { + let idx = PUNCT + .iter() + .position(|(punct, _)| punct == &name) + .unwrap_or_else(|| panic!("Grammar references unknown punctuation {name:?}")); + used_puncts[idx] = true; + } + } + }); + PUNCT.iter().zip(used_puncts).filter(|(_, used)| !used).for_each(|((punct, _), _)| { + panic!("Punctuation {punct:?} is not used in grammar"); + }); + keywords.extend(RESERVED.iter().copied()); + keywords.sort(); + keywords.dedup(); + contextual_keywords.extend(CONTEXTUAL_RESERVED.iter().copied()); + contextual_keywords.sort(); + contextual_keywords.dedup(); + + // we leak things here for simplicity, that way we don't have to deal with lifetimes + // The execution is a one shot job so thats fine + let nodes = nodes + .iter() + .map(|it| &it.name) + .chain(enums.iter().map(|it| &it.name)) + .map(|it| to_upper_snake_case(it)) + .map(String::leak) + .map(|it| &*it) + .collect(); + let nodes = Vec::leak(nodes); + nodes.sort(); + let keywords = Vec::leak(keywords); + let contextual_keywords = Vec::leak(contextual_keywords); + let literals = Vec::leak(literals); + literals.sort(); + let tokens = Vec::leak(tokens); + tokens.sort(); + + KindsSrc { punct: PUNCT, nodes, keywords, contextual_keywords, literals, tokens } +} #[derive(Default, Debug)] pub(crate) struct AstSrc { diff --git a/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs b/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs index 5983b06e1b9..f9f73df8eb7 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs @@ -6,52 +6,139 @@ use std::{ collections::HashMap, fs, iter, path::{Path, PathBuf}, + time::SystemTime, }; +use anyhow::Result; +use itertools::Itertools as _; + use crate::{ - codegen::{ensure_file_contents, CommentBlock}, + codegen::{ensure_file_contents, reformat, CommentBlock}, project_root, util::list_rust_files, }; +const PARSER_CRATE_ROOT: &str = "crates/parser"; +const PARSER_TEST_DATA: &str = "crates/parser/test_data"; +const PARSER_TEST_DATA_INLINE: &str = "crates/parser/test_data/parser/inline"; + pub(crate) fn generate(check: bool) { - let grammar_dir = project_root().join(Path::new("crates/parser/src/grammar")); - let tests = tests_from_dir(&grammar_dir); + let tests = tests_from_dir( + &project_root().join(Path::new(&format!("{PARSER_CRATE_ROOT}/src/grammar"))), + ); - install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok", check); - install_tests(&tests.err, "crates/parser/test_data/parser/inline/err", check); + let mut some_file_was_updated = false; + some_file_was_updated |= + install_tests(&tests.ok, &format!("{PARSER_TEST_DATA_INLINE}/ok"), check).unwrap(); + some_file_was_updated |= + install_tests(&tests.err, &format!("{PARSER_TEST_DATA_INLINE}/err"), check).unwrap(); - fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) { - let tests_dir = project_root().join(into); - if !tests_dir.is_dir() { - fs::create_dir_all(&tests_dir).unwrap(); - } - // ok is never actually read, but it needs to be specified to create a Test in existing_tests - let existing = existing_tests(&tests_dir, true); - if let Some(t) = 
existing.keys().find(|&t| !tests.contains_key(t)) { - panic!("Test is deleted: {t}"); - } + if some_file_was_updated { + let _ = fs::File::open(format!("{PARSER_CRATE_ROOT}/src/tests.rs")) + .unwrap() + .set_modified(SystemTime::now()); - let mut new_idx = existing.len() + 1; - for (name, test) in tests { - let path = match existing.get(name) { - Some((path, _test)) => path.clone(), + let ok_tests = tests.ok.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| { + let test_name = quote::format_ident!("{}", test.name); + let test_file = format!("test_data/parser/inline/ok/{test_name}.rs"); + let (test_func, args) = match &test.edition { + Some(edition) => { + let edition = quote::format_ident!("Edition{edition}"); + ( + quote::format_ident!("run_and_expect_no_errors_with_edition"), + quote::quote! {#test_file, crate::Edition::#edition}, + ) + } None => { - let file_name = format!("{new_idx:04}_{name}.rs"); - new_idx += 1; - tests_dir.join(file_name) + (quote::format_ident!("run_and_expect_no_errors"), quote::quote! {#test_file}) + } + }; + quote::quote! { + #[test] + fn #test_name() { + #test_func(#args); + } + } + }); + let err_tests = tests.err.values().sorted_by(|a, b| a.name.cmp(&b.name)).map(|test| { + let test_name = quote::format_ident!("{}", test.name); + let test_file = format!("test_data/parser/inline/err/{test_name}.rs"); + let (test_func, args) = match &test.edition { + Some(edition) => { + let edition = quote::format_ident!("Edition{edition}"); + ( + quote::format_ident!("run_and_expect_errors_with_edition"), + quote::quote! {#test_file, crate::Edition::#edition}, + ) } + None => (quote::format_ident!("run_and_expect_errors"), quote::quote! {#test_file}), }; - ensure_file_contents(crate::flags::CodegenType::ParserTests, &path, &test.text, check); + quote::quote! { + #[test] + fn #test_name() { + #test_func(#args); + } + } + }); + + let output = quote::quote! 
{ + mod ok { + use crate::tests::*; + #(#ok_tests)* + } + mod err { + use crate::tests::*; + #(#err_tests)* + } + }; + + let pretty = reformat(output.to_string()); + ensure_file_contents( + crate::flags::CodegenType::ParserTests, + format!("{PARSER_TEST_DATA}/generated/runner.rs").as_ref(), + &pretty, + check, + ); + } +} + +fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) -> Result<bool> { + let tests_dir = project_root().join(into); + if !tests_dir.is_dir() { + fs::create_dir_all(&tests_dir)?; + } + let existing = existing_tests(&tests_dir, TestKind::Ok)?; + if let Some((t, (path, _))) = existing.iter().find(|&(t, _)| !tests.contains_key(t)) { + panic!("Test `{t}` is deleted: {}", path.display()); + } + + let mut some_file_was_updated = false; + + for (name, test) in tests { + let path = match existing.get(name) { + Some((path, _test)) => path.clone(), + None => tests_dir.join(name).with_extension("rs"), + }; + if ensure_file_contents(crate::flags::CodegenType::ParserTests, &path, &test.text, check) { + some_file_was_updated = true; } } + + Ok(some_file_was_updated) } #[derive(Debug)] struct Test { name: String, text: String, - ok: bool, + kind: TestKind, + edition: Option<String>, +} + +#[derive(Copy, Clone, Debug)] +enum TestKind { + Ok, + Err, } #[derive(Default, Debug)] @@ -64,21 +151,31 @@ fn collect_tests(s: &str) -> Vec<Test> { let mut res = Vec::new(); for comment_block in CommentBlock::extract_untagged(s) { let first_line = &comment_block.contents[0]; - let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") { - (name.to_owned(), true) + let (name, kind) = if let Some(name) = first_line.strip_prefix("test ") { + (name.to_owned(), TestKind::Ok) } else if let Some(name) = first_line.strip_prefix("test_err ") { - (name.to_owned(), false) + (name.to_owned(), TestKind::Err) } else { continue; }; - let text: String = comment_block.contents[1..] 
- .iter() - .cloned() + let (name, edition) = match *name.split(' ').collect_vec().as_slice() { + [name, edition] => { + assert!(!edition.contains(' ')); + (name.to_owned(), Some(edition.to_owned())) + } + [name] => (name.to_owned(), None), + _ => panic!("invalid test name: {:?}", name), + }; + let text: String = edition + .as_ref() + .map(|edition| format!("// {edition}")) + .into_iter() + .chain(comment_block.contents[1..].iter().cloned()) .chain(iter::once(String::new())) .collect::<Vec<_>>() .join("\n"); assert!(!text.trim().is_empty() && text.ends_with('\n')); - res.push(Test { name, text, ok }) + res.push(Test { name, edition, text, kind }) } res } @@ -96,7 +193,7 @@ fn tests_from_dir(dir: &Path) -> Tests { let text = fs::read_to_string(path).unwrap(); for test in collect_tests(&text) { - if test.ok { + if let TestKind::Ok = test.kind { if let Some(old_test) = res.ok.insert(test.name.clone(), test) { panic!("Duplicate test: {}", old_test.name); } @@ -107,25 +204,24 @@ fn tests_from_dir(dir: &Path) -> Tests { } } -fn existing_tests(dir: &Path, ok: bool) -> HashMap<String, (PathBuf, Test)> { - let mut res = HashMap::default(); - for file in fs::read_dir(dir).unwrap() { - let file = file.unwrap(); - let path = file.path(); - if path.extension().unwrap_or_default() != "rs" { - continue; - } - let name = { - let file_name = path.file_name().unwrap().to_str().unwrap(); - file_name[5..file_name.len() - 3].to_string() - }; - let text = fs::read_to_string(&path).unwrap(); - let test = Test { name: name.clone(), text, ok }; - if let Some(old) = res.insert(name, (path, test)) { - println!("Duplicate test: {old:?}"); +fn existing_tests(dir: &Path, ok: TestKind) -> Result<HashMap<String, (PathBuf, Test)>> { + let mut res = HashMap::new(); + for file in fs::read_dir(dir)? { + let path = file?.path(); + let rust_file = path.extension().and_then(|ext| ext.to_str()) == Some("rs"); + + if rust_file { + let name = path.file_stem().map(|x| x.to_string_lossy().to_string()).unwrap(); + let text = fs::read_to_string(&path)?; + let edition = + text.lines().next().and_then(|it| it.strip_prefix("// ")).map(ToOwned::to_owned); + let test = Test { name: name.clone(), text, kind: ok, edition }; + if let Some(old) = res.insert(name, (path, test)) { + println!("Duplicate test: {:?}", old); + } } } - res + Ok(res) } #[test] diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs index cf4a22d476f..ebb9e71a4f4 100644 --- a/src/tools/rust-analyzer/xtask/src/flags.rs +++ b/src/tools/rust-analyzer/xtask/src/flags.rs @@ -2,7 +2,7 @@ use std::{fmt, str::FromStr}; -use crate::install::{ClientOpt, ServerOpt}; +use crate::install::{ClientOpt, ProcMacroServerOpt, ServerOpt}; xflags::xflags! { src "./src/flags.rs" @@ -23,6 +23,10 @@ xflags::xflags! { optional --mimalloc /// Use jemalloc allocator for server. optional --jemalloc + + /// Install the proc-macro server. + optional --proc-macro-server + /// build in release with debug info set to 2. 
optional --dev-rel } @@ -109,6 +113,7 @@ pub struct Install { pub client: bool, pub code_bin: Option<String>, pub server: bool, + pub proc_macro_server: bool, pub mimalloc: bool, pub jemalloc: bool, pub dev_rel: bool, @@ -284,7 +289,7 @@ impl Malloc { impl Install { pub(crate) fn server(&self) -> Option<ServerOpt> { - if self.client && !self.server { + if (self.client || self.proc_macro_server) && !self.server { return None; } let malloc = if self.mimalloc { @@ -296,8 +301,14 @@ impl Install { }; Some(ServerOpt { malloc, dev_rel: self.dev_rel }) } + pub(crate) fn proc_macro_server(&self) -> Option<ProcMacroServerOpt> { + if !self.proc_macro_server { + return None; + } + Some(ProcMacroServerOpt { dev_rel: self.dev_rel }) + } pub(crate) fn client(&self) -> Option<ClientOpt> { - if !self.client && self.server { + if (self.server || self.proc_macro_server) && !self.client { return None; } Some(ClientOpt { code_bin: self.code_bin.clone() }) diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs index 72e612f9e1d..eb33d6f9be6 100644 --- a/src/tools/rust-analyzer/xtask/src/install.rs +++ b/src/tools/rust-analyzer/xtask/src/install.rs @@ -15,6 +15,9 @@ impl flags::Install { if let Some(server) = self.server() { install_server(sh, server).context("install server")?; } + if let Some(server) = self.proc_macro_server() { + install_proc_macro_server(sh, server).context("install proc-macro server")?; + } if let Some(client) = self.client() { install_client(sh, client).context("install client")?; } @@ -34,6 +37,10 @@ pub(crate) struct ServerOpt { pub(crate) dev_rel: bool, } +pub(crate) struct ProcMacroServerOpt { + pub(crate) dev_rel: bool, +} + fn fix_path_for_mac(sh: &Shell) -> anyhow::Result<()> { let mut vscode_path: Vec<PathBuf> = { const COMMON_APP_PATH: &str = @@ -132,3 +139,11 @@ fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> { cmd.run()?; Ok(()) } + +fn install_proc_macro_server(sh: &Shell, opts: ProcMacroServerOpt) -> anyhow::Result<()> { + let profile = if opts.dev_rel { "dev-rel" } else { "release" }; + + let cmd = cmd!(sh, "cargo +nightly install --path crates/proc-macro-srv-cli --profile={profile} --locked --force --features sysroot-abi"); + cmd.run()?; + Ok(()) +} diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs index 9a7785dd438..21001c28da6 100644 --- a/src/tools/rust-analyzer/xtask/src/metrics.rs +++ b/src/tools/rust-analyzer/xtask/src/metrics.rs @@ -6,7 +6,7 @@ use std::{ time::{Instant, SystemTime, UNIX_EPOCH}, }; -use anyhow::{bail, format_err}; +use anyhow::format_err; use xshell::{cmd, Shell}; use crate::flags::{self, MeasurementType}; @@ -193,7 +193,7 @@ impl Metrics { impl Host { fn new(sh: &Shell) -> anyhow::Result<Host> { if cfg!(not(target_os = "linux")) { - bail!("can only collect metrics on Linux "); + return Ok(Host { os: "unknown".into(), cpu: "unknown".into(), mem: "unknown".into() }); } let os = read_field(sh, "/etc/os-release", "PRETTY_NAME=")?.trim_matches('"').to_owned(); diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs index 3eda2bc0274..1a1364c7d10 100644 --- a/src/tools/rust-analyzer/xtask/src/release.rs +++ b/src/tools/rust-analyzer/xtask/src/release.rs @@ -202,7 +202,10 @@ impl flags::RustcPush { let head = cmd!(sh, "git rev-parse HEAD").read()?; let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?; if head != fetch_head { - bail!("Josh created a non-roundtrip push! 
Do NOT merge this into rustc!"); + bail!( + "Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\ + Expected {head}, got {fetch_head}." + ); } println!("Confirmed that the push round-trips back to rust-analyzer properly. Please create a rustc PR:"); // https://github.com/github-linguist/linguist/compare/master...octocat:linguist:master |
