275 files changed, 7363 insertions(+), 4424 deletions(-)
diff --git a/src/tools/rust-analyzer/.github/workflows/release.yaml b/src/tools/rust-analyzer/.github/workflows/release.yaml index 11014338d72..e11d6e15d10 100644 --- a/src/tools/rust-analyzer/.github/workflows/release.yaml +++ b/src/tools/rust-analyzer/.github/workflows/release.yaml @@ -132,7 +132,7 @@ jobs: run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std - name: Upload artifacts - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 with: name: dist-${{ matrix.target }} path: ./dist @@ -177,7 +177,7 @@ jobs: - run: rm -rf editors/code/server - name: Upload artifacts - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 with: name: dist-x86_64-unknown-linux-musl path: ./dist @@ -206,39 +206,39 @@ jobs: - run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV - run: 'echo "HEAD_SHA: $HEAD_SHA"' - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-aarch64-apple-darwin path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-apple-darwin path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-unknown-linux-gnu path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-unknown-linux-musl path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-aarch64-unknown-linux-gnu path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-arm-unknown-linux-gnueabihf path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-pc-windows-msvc path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-i686-pc-windows-msvc path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-aarch64-pc-windows-msvc path: dist diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 57d43dad3fd..e9ebe26f42c 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -167,9 +167,9 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" [[package]] name = "chalk-derive" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92a0aedc4ac2adc5c0b7dc9ec38c5c816284ad28da6d4ecd01873b9683f54972" +checksum = "9426c8fd0fe61c3da880b801d3b510524df17843a8f9ec1f5b9cec24fb7412df" dependencies = [ "proc-macro2", "quote", @@ -179,9 +179,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db18493569b190f7266a04901e520fc3a5c00564475154287906f8a27302c119" +checksum = "d5f2eb1cd6054da221bd1ac0197fb2fe5e2caf3dcb93619398fc1433f8f09093" dependencies = [ "bitflags 2.5.0", "chalk-derive", @@ -189,9 +189,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae4ba8ce5bd2e1b59f1f79495bc8704db09a8285e51cc5ddf01d9baee1bf447d" +checksum = "129dc03458f71cfb9c3cd621c9c68166a94e87b85b16ccd29af015d7ff9a1c61" dependencies = [ "chalk-derive", "chalk-ir", @@ -202,9 +202,9 @@ dependencies = [ [[package]] 
name = "chalk-solve" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ec1b3b7f7b1ec38f099ef39c2bc3ea29335be1b8316d114baff46d96d131e9" +checksum = "d7e8a8c1e928f98cdf227b868416ef21dcd8cc3c61b347576d783713444d41c8" dependencies = [ "chalk-derive", "chalk-ir", @@ -221,11 +221,6 @@ name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" -dependencies = [ - "dashmap", - "once_cell", - "rustc-hash", -] [[package]] name = "cov-mark" @@ -548,10 +543,10 @@ dependencies = [ "limit", "mbe", "once_cell", - "profile", "ra-ap-rustc_abi", "ra-ap-rustc_parse_format", "rustc-hash", + "rustc_apfloat", "smallvec", "span", "stdx", @@ -616,9 +611,10 @@ dependencies = [ "oorandom", "project-model", "ra-ap-rustc_abi", - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "ra-ap-rustc_pattern_analysis", "rustc-hash", + "rustc_apfloat", "scoped-tls", "smallvec", "span", @@ -664,6 +660,7 @@ dependencies = [ "profile", "pulldown-cmark", "pulldown-cmark-to-cmark", + "rustc_apfloat", "smallvec", "span", "stdx", @@ -809,7 +806,6 @@ checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown", - "serde", ] [[package]] @@ -1046,6 +1042,7 @@ checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5" name = "mbe" version = "0.0.0" dependencies = [ + "arrayvec", "cov-mark", "parser", "rustc-hash", @@ -1250,7 +1247,6 @@ dependencies = [ "expect-test", "limit", "ra-ap-rustc_lexer", - "sourcegen", "stdx", "tracing", ] @@ -1328,18 +1324,14 @@ dependencies = [ "base-db", "indexmap", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "memmap2", - "object 0.33.0", "paths", "rustc-hash", "serde", "serde_json", - "snap", "span", "stdx", "text-size", "tracing", - "triomphe", "tt", ] @@ -1357,6 +1349,7 @@ dependencies = [ "proc-macro-api", "proc-macro-test", "ra-ap-rustc_lexer", + "snap", "span", "stdx", "tt", @@ -1403,13 +1396,9 @@ name = "profile" version = "0.0.0" dependencies = [ "cfg-if", - "countme", - "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc", - "once_cell", "perf-event", "tikv-jemalloc-ctl", - "tracing", "windows-sys 0.52.0", ] @@ -1492,46 +1481,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80b1d613eee933486c0613a7bc26e515e46f43adf479d1edd5e537f983e9ce46" dependencies = [ "bitflags 2.5.0", - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "tracing", ] [[package]] name = "ra-ap-rustc_index" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ad68bacffb87dcdbb23a3ce11261375078aaa06b85d348c49f39ffd5510dc20" -dependencies = [ - "arrayvec", - "ra-ap-rustc_index_macros 0.44.0", - "smallvec", -] - -[[package]] -name = "ra-ap-rustc_index" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f072060ac77e9e1a02cc20028095993af7e72cc0804779c68bcbf47b16de49c9" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros 0.53.0", + "ra-ap-rustc_index_macros", "smallvec", ] [[package]] name = "ra-ap-rustc_index_macros" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8782aaf3a113837c533dfb1c45df91cd17e1fdd1d2f9a20c2e0d1976025c4f1f" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - -[[package]] -name = 
"ra-ap-rustc_index_macros" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82f3d6dcb30a66905388e14756b8f2216131d9f8004922c07f13335840e058d1" @@ -1558,17 +1524,17 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70dad7a491c2554590222e0c9212dcb7c2e7aceb668875075012a35ea780d135" dependencies = [ - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "ra-ap-rustc_lexer", ] [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.44.0" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d63d1e1d5b2a13273cee1a10011147418f40e12b70f70578ce1dee0f1cafc334" +checksum = "34768e1faf88c31f2e9ad57b48318a52b507dafac0cddbf01b5d63bfc0b0a365" dependencies = [ - "ra-ap-rustc_index 0.44.0", + "ra-ap-rustc_index", "rustc-hash", "rustc_apfloat", "smallvec", @@ -1685,7 +1651,6 @@ dependencies = [ "ide", "ide-db", "ide-ssr", - "indexmap", "itertools", "load-cargo", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1708,7 +1673,6 @@ dependencies = [ "semver", "serde", "serde_json", - "sourcegen", "stdx", "syntax", "test-fixture", @@ -1908,13 +1872,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" [[package]] -name = "sourcegen" -version = "0.0.0" -dependencies = [ - "xshell", -] - -[[package]] name = "span" version = "0.0.0" dependencies = [ @@ -1985,6 +1942,7 @@ dependencies = [ "rayon", "rowan", "rustc-hash", + "rustc_apfloat", "smol_str", "stdx", "test-utils", @@ -2251,6 +2209,7 @@ dependencies = [ name = "tt" version = "0.0.0" dependencies = [ + "arrayvec", "smol_str", "stdx", "text-size", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 583c7bbe338..d4c3b7a3bfb 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -10,9 +10,7 @@ license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] [profile.dev] -# Disabling debug info speeds up builds a bunch, -# and we don't rely on it for debugging that much. -debug = 0 +debug = 1 [profile.dev.package] # These speed up local tests. @@ -89,10 +87,9 @@ ra-ap-rustc_lexer = { version = "0.53.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.53.0", default-features = false } ra-ap-rustc_index = { version = "0.53.0", default-features = false } ra-ap-rustc_abi = { version = "0.53.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.44.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. 
-sourcegen = { path = "./crates/sourcegen" } test-fixture = { path = "./crates/test-fixture" } test-utils = { path = "./crates/test-utils" } @@ -107,10 +104,10 @@ arrayvec = "0.7.4" bitflags = "2.4.1" cargo_metadata = "0.18.1" camino = "1.1.6" -chalk-solve = { version = "0.97.0", default-features = false } -chalk-ir = "0.97.0" -chalk-recursive = { version = "0.97.0", default-features = false } -chalk-derive = "0.97.0" +chalk-solve = { version = "0.98.0", default-features = false } +chalk-ir = "0.98.0" +chalk-recursive = { version = "0.98.0", default-features = false } +chalk-derive = "0.98.0" crossbeam-channel = "0.5.8" dissimilar = "1.0.7" dot = "0.1.4" @@ -122,6 +119,8 @@ hashbrown = { version = "0.14", features = [ indexmap = "2.1.0" itertools = "0.12.0" libc = "0.2.150" +libloading = "0.8.0" +memmap2 = "0.5.4" nohash-hasher = "0.2.0" oorandom = "11.1.3" object = { version = "0.33.0", default-features = false, features = [ @@ -145,6 +144,7 @@ smallvec = { version = "1.10.0", features = [ "const_generics", ] } smol_str = "0.2.1" +snap = "1.1.0" text-size = "1.1.1" tracing = "0.1.40" tracing-tree = "0.3.0" @@ -158,6 +158,7 @@ url = "2.3.1" xshell = "0.2.5" + # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.5.3", features = ["raw-api"] } diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index f5165ea8a7b..96fbbc317d4 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -1,7 +1,5 @@ //! base_db defines basic database traits. The concrete DB is defined by ide. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod change; mod input; diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index 9a365889e6a..8b30286a0a8 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -1,7 +1,5 @@ //! cfg defines conditional compiling options, `cfg` attribute parser and evaluator -#![warn(rust_2018_idioms, unused_lifetimes)] - mod cfg_expr; mod dnf; #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs index 4584400e66f..778def5d2be 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs +++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs @@ -6,8 +6,6 @@ // addition to `cargo check`. Either split it into 3 crates (one for test, one for check // and one common utilities) or change its name and docs to reflect the current state. 
-#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{fmt, io, process::Command, time::Duration}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; @@ -428,6 +426,8 @@ impl FlycheckActor { } } + cmd.arg("--keep-going"); + options.apply_on_command(&mut cmd); (cmd, options.extra_args.clone()) } diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index 41c59ea0d93..8ac2d003137 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -28,6 +28,7 @@ tracing.workspace = true smallvec.workspace = true hashbrown.workspace = true triomphe.workspace = true +rustc_apfloat = "0.2.0" ra-ap-rustc_parse_format.workspace = true ra-ap-rustc_abi.workspace = true @@ -37,7 +38,6 @@ stdx.workspace = true intern.workspace = true base-db.workspace = true syntax.workspace = true -profile.workspace = true hir-expand.workspace = true mbe.workspace = true cfg.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index faba9050fc4..be7068c807a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -15,8 +15,8 @@ use span::AstIdMap; use stdx::never; use syntax::{ ast::{ - self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName, - RangeItem, SlicePatComponents, + self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasGenericArgs, + HasLoopBody, HasName, RangeItem, SlicePatComponents, }, AstNode, AstPtr, AstToken as _, SyntaxNodePtr, }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs index 61b2481978e..f9e55559dab 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs @@ -30,8 +30,10 @@ pub enum BuiltinUint { #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum BuiltinFloat { + F16, F32, F64, + F128, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -65,8 +67,10 @@ impl BuiltinType { (name![u64], BuiltinType::Uint(BuiltinUint::U64)), (name![u128], BuiltinType::Uint(BuiltinUint::U128)), + (name![f16], BuiltinType::Float(BuiltinFloat::F16)), (name![f32], BuiltinType::Float(BuiltinFloat::F32)), (name![f64], BuiltinType::Float(BuiltinFloat::F64)), + (name![f128], BuiltinType::Float(BuiltinFloat::F128)), ]; pub fn by_name(name: &Name) -> Option<Self> { @@ -97,8 +101,10 @@ impl AsName for BuiltinType { BuiltinUint::U128 => name![u128], }, BuiltinType::Float(it) => match it { + BuiltinFloat::F16 => name![f16], BuiltinFloat::F32 => name![f32], BuiltinFloat::F64 => name![f64], + BuiltinFloat::F128 => name![f128], }, } } @@ -155,8 +161,10 @@ impl BuiltinUint { impl BuiltinFloat { pub fn from_suffix(suffix: &str) -> Option<BuiltinFloat> { let res = match suffix { + "f16" => BuiltinFloat::F16, "f32" => BuiltinFloat::F32, "f64" => BuiltinFloat::F64, + "f128" => BuiltinFloat::F128, _ => return None, }; Some(res) @@ -192,8 +200,10 @@ impl fmt::Display for BuiltinUint { impl fmt::Display for BuiltinFloat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { + BuiltinFloat::F16 => "f16", BuiltinFloat::F32 => "f32", BuiltinFloat::F64 => "f64", + BuiltinFloat::F128 => "f128", }) } } diff --git 
a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs index 106109eb184..0438278ca27 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs @@ -214,8 +214,8 @@ impl ChildBySource for GenericDefId { } let generic_params = db.generic_params(*self); - let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); - let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); + let mut toc_idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx); + let lts_idx_iter = generic_params.iter_lt().map(|(idx, _)| idx); // For traits the first type index is `Self`, skip it. if let GenericDefId::TraitId(_) = *self { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index 43381636721..55043fdc4b0 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -323,7 +323,7 @@ impl TraitAliasData { pub struct ImplData { pub target_trait: Option<Interned<TraitRef>>, pub self_ty: Interned<TypeRef>, - pub items: Vec<AssocItemId>, + pub items: Box<[AssocItemId]>, pub is_negative: bool, pub is_unsafe: bool, // box it as the vec is usually empty anyways @@ -637,10 +637,6 @@ impl<'a> AssocItemCollector<'a> { attr, ) { Ok(ResolvedAttr::Macro(call_id)) => { - // If proc attribute macro expansion is disabled, skip expanding it here - if !self.db.expand_proc_attr_macros() { - continue 'attrs; - } let loc = self.db.lookup_intern_macro_call(call_id); if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind { // If there's no expander for the proc macro (e.g. the diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 61fed71218e..0eb9e7d30b2 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -80,9 +80,11 @@ pub trait InternDatabase: SourceDatabase { #[salsa::query_group(DefDatabaseStorage)] pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> { + /// Whether to expand procedural macros during name resolution. #[salsa::input] fn expand_proc_attr_macros(&self) -> bool; + /// Computes an [`ItemTree`] for the given file or macro expansion. #[salsa::invoke(ItemTree::file_item_tree_query)] fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>; @@ -96,6 +98,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba #[salsa::invoke(DefMap::block_def_map_query)] fn block_def_map(&self, block: BlockId) -> Arc<DefMap>; + /// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution. fn macro_def(&self, m: MacroId) -> MacroDefId; // region:data @@ -190,6 +193,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba #[salsa::invoke(Attrs::fields_attrs_query)] fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>; + // should this really be a query? 
#[salsa::invoke(crate::attr::fields_attrs_source_map)] fn fields_attrs_source_map( &self, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index 58a1872ef25..9a3c0495414 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -183,6 +183,8 @@ fn find_path_for_module( let kind = if name_already_occupied_in_type_ns { cov_mark::hit!(ambiguous_crate_start); PathKind::Abs + } else if ctx.cfg.prefer_absolute { + PathKind::Abs } else { PathKind::Plain }; @@ -564,7 +566,13 @@ mod tests { /// item the `path` refers to returns that same path when called from the /// module the cursor is in. #[track_caller] - fn check_found_path_(ra_fixture: &str, path: &str, prefer_prelude: bool, expect: Expect) { + fn check_found_path_( + ra_fixture: &str, + path: &str, + prefer_prelude: bool, + prefer_absolute: bool, + expect: Expect, + ) { let (db, pos) = TestDB::with_position(ra_fixture); let module = db.module_at_position(pos); let parsed_path_file = @@ -604,7 +612,7 @@ mod tests { module, prefix, ignore_local_imports, - ImportPathConfig { prefer_no_std: false, prefer_prelude }, + ImportPathConfig { prefer_no_std: false, prefer_prelude, prefer_absolute }, ); format_to!( res, @@ -619,11 +627,15 @@ mod tests { } fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, false, expect); + check_found_path_(ra_fixture, path, false, false, expect); } fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, true, expect); + check_found_path_(ra_fixture, path, true, false, expect); + } + + fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) { + check_found_path_(ra_fixture, path, false, true, expect); } #[test] @@ -871,6 +883,39 @@ pub mod ast { } #[test] + fn partially_imported_with_prefer_absolute() { + cov_mark::check!(partially_imported); + // Similar to partially_imported test case above, but with prefer_absolute enabled. + // Even if the actual imported item is in external crate, if the path to that item + // is starting from the imported name, then the path should not start from "::". + // i.e. The first line in the expected output should not start from "::". 
+ check_found_path_absolute( + r#" +//- /main.rs crate:main deps:syntax + +use syntax::ast; +$0 + +//- /lib.rs crate:syntax +pub mod ast { + pub enum ModuleItem { + A, B, C, + } +} + "#, + "syntax::ast::ModuleItem", + expect![[r#" + Plain (imports ✔): ast::ModuleItem + Plain (imports ✖): ::syntax::ast::ModuleItem + ByCrate(imports ✔): crate::ast::ModuleItem + ByCrate(imports ✖): ::syntax::ast::ModuleItem + BySelf (imports ✔): self::ast::ModuleItem + BySelf (imports ✖): ::syntax::ast::ModuleItem + "#]], + ); + } + + #[test] fn same_crate_reexport() { check_found_path( r#" @@ -1770,6 +1815,43 @@ pub mod foo { } #[test] + fn respects_absolute_setting() { + let ra_fixture = r#" +//- /main.rs crate:main deps:krate +$0 +//- /krate.rs crate:krate +pub mod foo { + pub struct Foo; +} +"#; + check_found_path( + ra_fixture, + "krate::foo::Foo", + expect![[r#" + Plain (imports ✔): krate::foo::Foo + Plain (imports ✖): krate::foo::Foo + ByCrate(imports ✔): krate::foo::Foo + ByCrate(imports ✖): krate::foo::Foo + BySelf (imports ✔): krate::foo::Foo + BySelf (imports ✖): krate::foo::Foo + "#]], + ); + + check_found_path_absolute( + ra_fixture, + "krate::foo::Foo", + expect![[r#" + Plain (imports ✔): ::krate::foo::Foo + Plain (imports ✖): ::krate::foo::Foo + ByCrate(imports ✔): ::krate::foo::Foo + ByCrate(imports ✖): ::krate::foo::Foo + BySelf (imports ✔): ::krate::foo::Foo + BySelf (imports ✖): ::krate::foo::Foo + "#]], + ); + } + + #[test] fn respect_segment_length() { check_found_path( r#" diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs index b9f8082391f..ebaaef66db6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs @@ -28,6 +28,7 @@ use crate::{ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; +/// The index of the self param in the generic of the non-parent definition. const SELF_PARAM_ID_IN_SELF: la_arena::Idx<TypeOrConstParamData> = LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0)); @@ -158,9 +159,9 @@ pub enum GenericParamDataRef<'a> { /// Data about the generic parameters of a function, struct, impl, etc. 
#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct GenericParams { - pub type_or_consts: Arena<TypeOrConstParamData>, - pub lifetimes: Arena<LifetimeParamData>, - pub where_predicates: Box<[WherePredicate]>, + type_or_consts: Arena<TypeOrConstParamData>, + lifetimes: Arena<LifetimeParamData>, + where_predicates: Box<[WherePredicate]>, } impl ops::Index<LocalTypeOrConstParamId> for GenericParams { @@ -205,6 +206,219 @@ pub enum WherePredicateTypeTarget { TypeOrConstParam(LocalTypeOrConstParamId), } +impl GenericParams { + /// Number of Generic parameters (type_or_consts + lifetimes) + #[inline] + pub fn len(&self) -> usize { + self.type_or_consts.len() + self.lifetimes.len() + } + + #[inline] + pub fn len_lifetimes(&self) -> usize { + self.lifetimes.len() + } + + #[inline] + pub fn len_type_or_consts(&self) -> usize { + self.type_or_consts.len() + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + #[inline] + pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> { + self.where_predicates.iter() + } + + /// Iterator of type_or_consts field + #[inline] + pub fn iter_type_or_consts( + &self, + ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> { + self.type_or_consts.iter() + } + + /// Iterator of lifetimes field + #[inline] + pub fn iter_lt( + &self, + ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> { + self.lifetimes.iter() + } + + pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.type_param().is_some() { + Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.const_param().is_some() { + Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + #[inline] + pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> { + if self.type_or_consts.is_empty() { + return None; + } + matches!( + self.type_or_consts[SELF_PARAM_ID_IN_SELF], + TypeOrConstParamData::TypeParamData(TypeParamData { + provenance: TypeParamProvenance::TraitSelf, + .. 
+ }) + ) + .then(|| SELF_PARAM_ID_IN_SELF) + } + + pub fn find_lifetime_by_name( + &self, + name: &Name, + parent: GenericDefId, + ) -> Option<LifetimeParamId> { + self.lifetimes.iter().find_map(|(id, p)| { + if &p.name == name { + Some(LifetimeParamId { local_id: id, parent }) + } else { + None + } + }) + } + + pub(crate) fn generic_params_query( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> Interned<GenericParams> { + let _p = tracing::info_span!("generic_params_query").entered(); + + let krate = def.krate(db); + let cfg_options = db.crate_graph(); + let cfg_options = &cfg_options[krate].cfg_options; + + // Returns the generic parameters that are enabled under the current `#[cfg]` options + let enabled_params = + |params: &Interned<GenericParams>, item_tree: &ItemTree, parent: GenericModItem| { + let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); + let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); + let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); + + // In the common case, no parameters will by disabled by `#[cfg]` attributes. + // Therefore, make a first pass to check if all parameters are enabled and, if so, + // clone the `Interned<GenericParams>` instead of recreating an identical copy. + let all_type_or_consts_enabled = + params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx))); + let all_lifetimes_enabled = + params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx))); + + if all_type_or_consts_enabled && all_lifetimes_enabled { + params.clone() + } else { + Interned::new(GenericParams { + type_or_consts: all_type_or_consts_enabled + .then(|| params.type_or_consts.clone()) + .unwrap_or_else(|| { + params + .type_or_consts + .iter() + .filter(|&(idx, _)| enabled(attr_owner_ct(idx))) + .map(|(_, param)| param.clone()) + .collect() + }), + lifetimes: all_lifetimes_enabled + .then(|| params.lifetimes.clone()) + .unwrap_or_else(|| { + params + .lifetimes + .iter() + .filter(|&(idx, _)| enabled(attr_owner_lt(idx))) + .map(|(_, param)| param.clone()) + .collect() + }), + where_predicates: params.where_predicates.clone(), + }) + } + }; + fn id_to_generics<Id: GenericsItemTreeNode>( + db: &dyn DefDatabase, + id: impl for<'db> Lookup< + Database<'db> = dyn DefDatabase + 'db, + Data = impl ItemTreeLoc<Id = Id>, + >, + enabled_params: impl Fn( + &Interned<GenericParams>, + &ItemTree, + GenericModItem, + ) -> Interned<GenericParams>, + ) -> Interned<GenericParams> + where + FileItemTreeId<Id>: Into<GenericModItem>, + { + let id = id.lookup(db).item_tree_id(); + let tree = id.item_tree(db); + let item = &tree[id.value]; + enabled_params(item.generic_params(), &tree, id.value.into()) + } + + match def { + GenericDefId::FunctionId(id) => { + let loc = id.lookup(db); + let tree = loc.id.item_tree(db); + let item = &tree[loc.id.value]; + + let enabled_params = + enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into()); + + let module = loc.container.module(db); + let func_data = db.function_data(id); + if func_data.params.is_empty() { + enabled_params + } else { + let mut generic_params = GenericParamsCollector { + type_or_consts: enabled_params.type_or_consts.clone(), + lifetimes: enabled_params.lifetimes.clone(), + where_predicates: enabled_params.where_predicates.clone().into(), + }; + + // Don't create an `Expander` if not needed since this + // could cause a reparse after the `ItemTree` has been created due to the spanmap. 
+ let mut expander = Lazy::new(|| { + (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) + }); + for param in func_data.params.iter() { + generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); + } + Interned::new(generic_params.finish()) + } + } + GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::ConstId(_) => Interned::new(GenericParams { + type_or_consts: Default::default(), + lifetimes: Default::default(), + where_predicates: Default::default(), + }), + } + } +} + #[derive(Clone, Default)] pub(crate) struct GenericParamsCollector { pub(crate) type_or_consts: Arena<TypeOrConstParamData>, @@ -441,202 +655,3 @@ impl GenericParamsCollector { } } } - -impl GenericParams { - /// Number of Generic parameters (type_or_consts + lifetimes) - #[inline] - pub fn len(&self) -> usize { - self.type_or_consts.len() + self.lifetimes.len() - } - - #[inline] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Iterator of type_or_consts field - #[inline] - pub fn iter_type_or_consts( - &self, - ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> { - self.type_or_consts.iter() - } - - /// Iterator of lifetimes field - #[inline] - pub fn iter_lt( - &self, - ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> { - self.lifetimes.iter() - } - - pub(crate) fn generic_params_query( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> Interned<GenericParams> { - let _p = tracing::info_span!("generic_params_query").entered(); - - let krate = def.module(db).krate; - let cfg_options = db.crate_graph(); - let cfg_options = &cfg_options[krate].cfg_options; - - // Returns the generic parameters that are enabled under the current `#[cfg]` options - let enabled_params = - |params: &Interned<GenericParams>, item_tree: &ItemTree, parent: GenericModItem| { - let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); - let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); - let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); - - // In the common case, no parameters will by disabled by `#[cfg]` attributes. - // Therefore, make a first pass to check if all parameters are enabled and, if so, - // clone the `Interned<GenericParams>` instead of recreating an identical copy. 
- let all_type_or_consts_enabled = - params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx))); - let all_lifetimes_enabled = - params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx))); - - if all_type_or_consts_enabled && all_lifetimes_enabled { - params.clone() - } else { - Interned::new(GenericParams { - type_or_consts: all_type_or_consts_enabled - .then(|| params.type_or_consts.clone()) - .unwrap_or_else(|| { - params - .type_or_consts - .iter() - .filter(|&(idx, _)| enabled(attr_owner_ct(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - lifetimes: all_lifetimes_enabled - .then(|| params.lifetimes.clone()) - .unwrap_or_else(|| { - params - .lifetimes - .iter() - .filter(|&(idx, _)| enabled(attr_owner_lt(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - where_predicates: params.where_predicates.clone(), - }) - } - }; - fn id_to_generics<Id: GenericsItemTreeNode>( - db: &dyn DefDatabase, - id: impl for<'db> Lookup< - Database<'db> = dyn DefDatabase + 'db, - Data = impl ItemTreeLoc<Id = Id>, - >, - enabled_params: impl Fn( - &Interned<GenericParams>, - &ItemTree, - GenericModItem, - ) -> Interned<GenericParams>, - ) -> Interned<GenericParams> - where - FileItemTreeId<Id>: Into<GenericModItem>, - { - let id = id.lookup(db).item_tree_id(); - let tree = id.item_tree(db); - let item = &tree[id.value]; - enabled_params(item.generic_params(), &tree, id.value.into()) - } - - match def { - GenericDefId::FunctionId(id) => { - let loc = id.lookup(db); - let tree = loc.id.item_tree(db); - let item = &tree[loc.id.value]; - - let enabled_params = - enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into()); - - let module = loc.container.module(db); - let func_data = db.function_data(id); - if func_data.params.is_empty() { - enabled_params - } else { - let mut generic_params = GenericParamsCollector { - type_or_consts: enabled_params.type_or_consts.clone(), - lifetimes: enabled_params.lifetimes.clone(), - where_predicates: enabled_params.where_predicates.clone().into(), - }; - - // Don't create an `Expander` if not needed since this - // could cause a reparse after the `ItemTree` has been created due to the spanmap. 
- let mut expander = Lazy::new(|| { - (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) - }); - for param in func_data.params.iter() { - generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); - } - Interned::new(generic_params.finish()) - } - } - GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => { - Interned::new(GenericParams { - type_or_consts: Default::default(), - lifetimes: Default::default(), - where_predicates: Default::default(), - }) - } - } - } - - pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.type_param().is_some() { - Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.const_param().is_some() { - Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> { - if self.type_or_consts.is_empty() { - return None; - } - matches!( - self.type_or_consts[SELF_PARAM_ID_IN_SELF], - TypeOrConstParamData::TypeParamData(TypeParamData { - provenance: TypeParamProvenance::TraitSelf, - .. - }) - ) - .then(|| SELF_PARAM_ID_IN_SELF) - } - - pub fn find_lifetime_by_name( - &self, - name: &Name, - parent: GenericDefId, - ) -> Option<LifetimeParamId> { - self.lifetimes.iter().find_map(|(id, p)| { - if &p.name == name { - Some(LifetimeParamId { local_id: id, parent }) - } else { - None - } - }) - } -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs index fd6f4a3d089..d306f9be657 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs @@ -20,6 +20,7 @@ use std::fmt; use hir_expand::name::Name; use intern::Interned; use la_arena::{Idx, RawIdx}; +use rustc_apfloat::ieee::{Half as f16, Quad as f128}; use smallvec::SmallVec; use syntax::ast; @@ -56,29 +57,38 @@ pub struct Label { } pub type LabelId = Idx<Label>; -// We convert float values into bits and that's how we don't need to deal with f32 and f64. -// For PartialEq, bits comparison should work, as ordering is not important +// We leave float values as a string to avoid double rounding. +// For PartialEq, string comparison should work, as ordering is not important // https://github.com/rust-lang/rust-analyzer/issues/12380#issuecomment-1137284360 -#[derive(Default, Debug, Clone, Copy, Eq, PartialEq)] -pub struct FloatTypeWrapper(u64); +#[derive(Default, Debug, Clone, Eq, PartialEq)] +pub struct FloatTypeWrapper(Box<str>); +// FIXME(#17451): Use builtin types once stabilised. 
impl FloatTypeWrapper { - pub fn new(value: f64) -> Self { - Self(value.to_bits()) + pub fn new(value: String) -> Self { + Self(value.into()) } - pub fn into_f64(self) -> f64 { - f64::from_bits(self.0) + pub fn to_f128(&self) -> f128 { + self.0.parse().unwrap_or_default() } - pub fn into_f32(self) -> f32 { - f64::from_bits(self.0) as f32 + pub fn to_f64(&self) -> f64 { + self.0.parse().unwrap_or_default() + } + + pub fn to_f32(&self) -> f32 { + self.0.parse().unwrap_or_default() + } + + pub fn to_f16(&self) -> f16 { + self.0.parse().unwrap_or_default() } } impl fmt::Display for FloatTypeWrapper { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", f64::from_bits(self.0)) + f.write_str(&self.0) } } @@ -91,7 +101,7 @@ pub enum Literal { Bool(bool), Int(i128, Option<BuiltinInt>), Uint(u128, Option<BuiltinUint>), - // Here we are using a wrapper around float because f32 and f64 do not implement Eq, so they + // Here we are using a wrapper around float because float primitives do not implement Eq, so they // could not be used directly here, to understand how the wrapper works go to definition of // FloatTypeWrapper Float(FloatTypeWrapper, Option<BuiltinFloat>), @@ -120,10 +130,7 @@ impl From<ast::LiteralKind> for Literal { match ast_lit_kind { LiteralKind::IntNumber(lit) => { if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) { - Literal::Float( - FloatTypeWrapper::new(lit.float_value().unwrap_or(Default::default())), - builtin, - ) + Literal::Float(FloatTypeWrapper::new(lit.value_string()), builtin) } else if let builtin @ Some(_) = lit.suffix().and_then(BuiltinUint::from_suffix) { Literal::Uint(lit.value().unwrap_or(0), builtin) } else { @@ -133,7 +140,7 @@ impl From<ast::LiteralKind> for Literal { } LiteralKind::FloatNumber(lit) => { let ty = lit.suffix().and_then(BuiltinFloat::from_suffix); - Literal::Float(FloatTypeWrapper::new(lit.value().unwrap_or(Default::default())), ty) + Literal::Float(FloatTypeWrapper::new(lit.value_string()), ty) } LiteralKind::ByteString(bs) => { let text = bs.value().map_or_else(|_| Default::default(), Box::from); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index ec207a7f965..7272ed98ceb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -10,7 +10,7 @@ use hir_expand::{ AstId, }; use intern::Interned; -use syntax::ast::{self, HasName, IsString}; +use syntax::ast::{self, HasGenericArgs, HasName, IsString}; use crate::{ builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, @@ -245,7 +245,13 @@ impl TypeRef { // for types are close enough for our purposes to the inner type for now... 
ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()), ast::Type::ImplTraitType(inner) => { - TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) + if ctx.outer_impl_trait() { + // Disallow nested impl traits + TypeRef::Error + } else { + let _guard = ctx.outer_impl_trait_scope(true); + TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) + } } ast::Type::DynTraitType(inner) => { TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index 9c7dfa05b0e..d86c0667a0b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -8,7 +8,6 @@ use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCall use itertools::Itertools; use la_arena::Idx; use once_cell::sync::Lazy; -use profile::Count; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; use stdx::format_to; @@ -65,8 +64,6 @@ pub struct ImportId { #[derive(Debug, Default, PartialEq, Eq)] pub struct ItemScope { - _c: Count<Self>, - /// Defs visible in this scope. This includes `declarations`, but also /// imports. The imports belong to this module and can be resolved by using them on /// the `use_imports_*` fields. @@ -722,7 +719,6 @@ impl ItemScope { pub(crate) fn shrink_to_fit(&mut self) { // Exhaustive match to require handling new fields. let Self { - _c: _, types, values, macros, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index c3b7a78301d..7650dfe9f37 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -48,6 +48,7 @@ use either::Either; use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile}; use intern::Interned; use la_arena::{Arena, Idx, IdxRange, RawIdx}; +use once_cell::sync::OnceCell; use rustc_hash::FxHashMap; use smallvec::SmallVec; use span::{AstIdNode, FileAstId, SyntaxContextId}; @@ -100,6 +101,7 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> { let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered(); + static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new(); let syntax = db.parse_or_expand(file_id); @@ -131,18 +133,47 @@ impl ItemTree { if let Some(attrs) = top_attrs { item_tree.attrs.insert(AttrOwner::TopLevel, attrs); } - item_tree.shrink_to_fit(); - Arc::new(item_tree) + if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty() + { + EMPTY + .get_or_init(|| { + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }) + }) + .clone() + } else { + item_tree.shrink_to_fit(); + Arc::new(item_tree) + } } pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> { + let _p = tracing::info_span!("block_item_tree_query", ?block).entered(); + static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new(); + let loc = block.lookup(db); let block = loc.ast_id.to_node(db.upcast()); let ctx = lower::Ctx::new(db, loc.ast_id.file_id); let mut item_tree = ctx.lower_block(&block); - item_tree.shrink_to_fit(); - Arc::new(item_tree) + if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty() + { + EMPTY + 
.get_or_init(|| { + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }) + }) + .clone() + } else { + item_tree.shrink_to_fit(); + Arc::new(item_tree) + } } /// Returns an iterator over all items located at the top level of the `HirFileId` this @@ -585,24 +616,30 @@ impl Index<RawVisibilityId> for ItemTree { type Output = RawVisibility; fn index(&self, index: RawVisibilityId) -> &Self::Output { static VIS_PUB: RawVisibility = RawVisibility::Public; - static VIS_PRIV_IMPLICIT: RawVisibility = RawVisibility::Module( - ModPath::from_kind(PathKind::SELF), - VisibilityExplicitness::Implicit, - ); - static VIS_PRIV_EXPLICIT: RawVisibility = RawVisibility::Module( - ModPath::from_kind(PathKind::SELF), - VisibilityExplicitness::Explicit, - ); - static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module( - ModPath::from_kind(PathKind::Crate), - VisibilityExplicitness::Explicit, - ); + static VIS_PRIV_IMPLICIT: OnceCell<RawVisibility> = OnceCell::new(); + static VIS_PRIV_EXPLICIT: OnceCell<RawVisibility> = OnceCell::new(); + static VIS_PUB_CRATE: OnceCell<RawVisibility> = OnceCell::new(); match index { - RawVisibilityId::PRIV_IMPLICIT => &VIS_PRIV_IMPLICIT, - RawVisibilityId::PRIV_EXPLICIT => &VIS_PRIV_EXPLICIT, + RawVisibilityId::PRIV_IMPLICIT => VIS_PRIV_IMPLICIT.get_or_init(|| { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::SELF)), + VisibilityExplicitness::Implicit, + ) + }), + RawVisibilityId::PRIV_EXPLICIT => VIS_PRIV_EXPLICIT.get_or_init(|| { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::SELF)), + VisibilityExplicitness::Explicit, + ) + }), RawVisibilityId::PUB => &VIS_PUB, - RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE, + RawVisibilityId::PUB_CRATE => VIS_PUB_CRATE.get_or_init(|| { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::Crate)), + VisibilityExplicitness::Explicit, + ) + }), _ => &self.data().vis.arena[Idx::from_raw(index.0.into())], } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 2803678a330..6283ae23b52 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -532,7 +532,7 @@ impl Printer<'_> { w!(self, "<"); let mut first = true; - for (idx, lt) in params.lifetimes.iter() { + for (idx, lt) in params.iter_lt() { if !first { w!(self, ", "); } @@ -540,7 +540,7 @@ impl Printer<'_> { self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " "); w!(self, "{}", lt.name.display(self.db.upcast())); } - for (idx, x) in params.type_or_consts.iter() { + for (idx, x) in params.iter_type_or_consts() { if !first { w!(self, ", "); } @@ -570,13 +570,13 @@ impl Printer<'_> { } fn print_where_clause(&mut self, params: &GenericParams) -> bool { - if params.where_predicates.is_empty() { + if params.where_predicates().next().is_none() { return false; } w!(self, "\nwhere"); self.indented(|this| { - for (i, pred) in params.where_predicates.iter().enumerate() { + for (i, pred) in params.where_predicates().enumerate() { if i != 0 { wln!(this, ","); } @@ -607,12 +607,10 @@ impl Printer<'_> { match target { WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty), - WherePredicateTypeTarget::TypeOrConstParam(id) => { - match ¶ms.type_or_consts[*id].name() { - Some(name) => w!(this, "{}", name.display(self.db.upcast())), - None => w!(this, "_anon_{}", id.into_raw()), - } - } + 
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { + Some(name) => w!(this, "{}", name.display(self.db.upcast())), + None => w!(this, "_anon_{}", id.into_raw()), + }, } w!(this, ": "); this.print_type_bounds(std::slice::from_ref(bound)); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index f6fe0c618a2..fc026a14d44 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -7,7 +7,6 @@ //! Note that `hir_def` is a work in progress, so not all of the above is //! actually true. -#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[cfg(feature = "in-rust-tree")] @@ -117,6 +116,8 @@ pub struct ImportPathConfig { pub prefer_no_std: bool, /// If true, prefer import paths containing a prelude module. pub prefer_prelude: bool, + /// If true, prefer abs path (starting with `::`) where it is available. + pub prefer_absolute: bool, } #[derive(Debug)] @@ -689,7 +690,7 @@ pub enum TypeOwnerId { } impl TypeOwnerId { - fn as_generic_def_id(self) -> Option<GenericDefId> { + fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option<GenericDefId> { Some(match self { TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it), TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it), @@ -698,7 +699,9 @@ impl TypeOwnerId { TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it), TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it), TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it), - TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it), + TypeOwnerId::EnumVariantId(it) => { + GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent)) + } TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None, }) } @@ -740,7 +743,6 @@ impl From<GenericDefId> for TypeOwnerId { GenericDefId::TraitAliasId(it) => it.into(), GenericDefId::TypeAliasId(it) => it.into(), GenericDefId::ImplId(it) => it.into(), - GenericDefId::EnumVariantId(it) => it.into(), GenericDefId::ConstId(it) => it.into(), } } @@ -849,8 +851,8 @@ impl GeneralConstId { pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> { match self { GeneralConstId::ConstId(it) => Some(it.into()), - GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(), - GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(), + GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db), + GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db), } } @@ -888,12 +890,12 @@ impl From<EnumVariantId> for DefWithBodyId { } impl DefWithBodyId { - pub fn as_generic_def_id(self) -> Option<GenericDefId> { + pub fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option<GenericDefId> { match self { DefWithBodyId::FunctionId(f) => Some(f.into()), DefWithBodyId::StaticId(_) => None, DefWithBodyId::ConstId(c) => Some(c.into()), - DefWithBodyId::VariantId(c) => Some(c.into()), + DefWithBodyId::VariantId(c) => Some(c.lookup(db).parent.into()), // FIXME: stable rust doesn't allow generics in constants, but we should // use `TypeOwnerId::as_generic_def_id` when it does. 
DefWithBodyId::InTypeConstId(_) => None, @@ -921,10 +923,6 @@ pub enum GenericDefId { TraitAliasId(TraitAliasId), TypeAliasId(TypeAliasId), ImplId(ImplId), - // enum variants cannot have generics themselves, but their parent enums - // can, and this makes some code easier to write - // FIXME: Try to remove this as that will reduce the amount of query slots generated per enum? - EnumVariantId(EnumVariantId), // consts can have type parameters from their parents (i.e. associated consts of traits) ConstId(ConstId), } @@ -935,7 +933,6 @@ impl_from!( TraitAliasId, TypeAliasId, ImplId, - EnumVariantId, ConstId for GenericDefId ); @@ -967,7 +964,6 @@ impl GenericDefId { GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None), - GenericDefId::EnumVariantId(it) => (it.lookup(db).id.file_id(), None), } } @@ -982,6 +978,14 @@ impl GenericDefId { _ => None, } } + + pub fn from_callable(db: &dyn DefDatabase, def: CallableDefId) -> GenericDefId { + match def { + CallableDefId::FunctionId(f) => f.into(), + CallableDefId::StructId(s) => s.into(), + CallableDefId::EnumVariantId(e) => e.lookup(db).parent.into(), + } + } } impl From<AssocItemId> for GenericDefId { @@ -995,6 +999,36 @@ impl From<AssocItemId> for GenericDefId { } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum CallableDefId { + FunctionId(FunctionId), + StructId(StructId), + EnumVariantId(EnumVariantId), +} + +impl InternValueTrivial for CallableDefId {} + +impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); +impl From<CallableDefId> for ModuleDefId { + fn from(def: CallableDefId) -> ModuleDefId { + match def { + CallableDefId::FunctionId(f) => ModuleDefId::FunctionId(f), + CallableDefId::StructId(s) => ModuleDefId::AdtId(AdtId::StructId(s)), + CallableDefId::EnumVariantId(e) => ModuleDefId::EnumVariantId(e), + } + } +} + +impl CallableDefId { + pub fn krate(self, db: &dyn DefDatabase) -> CrateId { + match self { + CallableDefId::FunctionId(f) => f.krate(db), + CallableDefId::StructId(s) => s.krate(db), + CallableDefId::EnumVariantId(e) => e.krate(db), + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum AttrDefId { ModuleId(ModuleId), FieldId(FieldId), @@ -1310,7 +1344,6 @@ impl HasModule for GenericDefId { GenericDefId::TraitAliasId(it) => it.module(db), GenericDefId::TypeAliasId(it) => it.module(db), GenericDefId::ImplId(it) => it.module(db), - GenericDefId::EnumVariantId(it) => it.module(db), GenericDefId::ConstId(it) => it.module(db), } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs index ecd8d79f20b..e4786a1dd40 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs @@ -18,6 +18,26 @@ pub struct LowerCtx<'a> { span_map: OnceCell<SpanMap>, ast_id_map: OnceCell<Arc<AstIdMap>>, impl_trait_bounds: RefCell<Vec<Vec<Interned<TypeBound>>>>, + // Prevent nested impl traits like `impl Foo<impl Bar>`. 
+ outer_impl_trait: RefCell<bool>, +} + +pub(crate) struct OuterImplTraitGuard<'a> { + ctx: &'a LowerCtx<'a>, + old: bool, +} + +impl<'a> OuterImplTraitGuard<'a> { + fn new(ctx: &'a LowerCtx<'a>, impl_trait: bool) -> Self { + let old = ctx.outer_impl_trait.replace(impl_trait); + Self { ctx, old } + } +} + +impl<'a> Drop for OuterImplTraitGuard<'a> { + fn drop(&mut self) { + self.ctx.outer_impl_trait.replace(self.old); + } } impl<'a> LowerCtx<'a> { @@ -28,6 +48,7 @@ impl<'a> LowerCtx<'a> { span_map: OnceCell::new(), ast_id_map: OnceCell::new(), impl_trait_bounds: RefCell::new(Vec::new()), + outer_impl_trait: RefCell::default(), } } @@ -42,6 +63,7 @@ impl<'a> LowerCtx<'a> { span_map, ast_id_map: OnceCell::new(), impl_trait_bounds: RefCell::new(Vec::new()), + outer_impl_trait: RefCell::default(), } } @@ -67,4 +89,12 @@ impl<'a> LowerCtx<'a> { pub fn take_impl_traits_bounds(&self) -> Vec<Vec<Interned<TypeBound>>> { self.impl_trait_bounds.take() } + + pub(crate) fn outer_impl_trait(&self) -> bool { + *self.outer_impl_trait.borrow() + } + + pub(crate) fn outer_impl_trait_scope(&'a self, impl_trait: bool) -> OuterImplTraitGuard<'a> { + OuterImplTraitGuard::new(self, impl_trait) + } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index fb5797d6e53..c365a603d2a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -36,6 +36,7 @@ macro_rules! m { let _ = 'c'; let _ = 1000; let _ = 12E+99_f64; + let _ = 45E+1234_f128; let _ = "rust1"; let _ = -92; } @@ -50,6 +51,7 @@ macro_rules! m { let _ = 'c'; let _ = 1000; let _ = 12E+99_f64; + let _ = 45E+1234_f128; let _ = "rust1"; let _ = -92; } @@ -58,6 +60,7 @@ fn f() { let _ = 'c'; let _ = 1000; let _ = 12E+99_f64; + let _ = 45E+1234_f128; let _ = "rust1"; let _ = -92; } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 162b6429c34..8e7ef48112f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -103,12 +103,13 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[ /// is computed by the `block_def_map` query. #[derive(Debug, PartialEq, Eq)] pub struct DefMap { + /// The crate this `DefMap` belongs to. + krate: CrateId, /// When this is a block def map, this will hold the block id of the block and module that /// contains this block. block: Option<BlockInfo>, /// The modules and their data declared in this crate. pub modules: Arena<ModuleData>, - krate: CrateId, /// The prelude module for this crate. This either comes from an import /// marked with the `prelude_import` attribute, or (in the normal case) from /// a dependency (`std` or `core`). @@ -124,6 +125,7 @@ pub struct DefMap { /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper /// attributes. + // FIXME: Figure out a better way for the IDE layer to resolve these? derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>, /// The diagnostics that need to be emitted for this crate. 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 6d2eb71549e..b5045efb621 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -83,7 +83,9 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI let name = Name::new_text_dont_use(it.name.clone()); ( name, - if it.disabled { + if !db.expand_proc_attr_macros() { + CustomProcMacroExpander::dummy() + } else if it.disabled { CustomProcMacroExpander::disabled() } else { CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new( @@ -1331,16 +1333,6 @@ impl DefCollector<'_> { let call_id = call_id(); if let MacroDefKind::ProcMacro(_, exp, _) = def.kind { - // If proc attribute macro expansion is disabled, skip expanding it here - if !self.db.expand_proc_attr_macros() { - self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( - directive.module_id, - self.db.lookup_intern_macro_call(call_id).kind, - def.krate, - )); - return recollect_without(self); - } - // If there's no expander for the proc macro (e.g. // because proc macros are disabled, or building the // proc macro crate failed), report this and skip diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs index 523a4c107b3..4ab53d20b57 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs @@ -17,16 +17,47 @@ use crate::{ #[derive(Debug, PartialEq, Eq)] pub enum DefDiagnosticKind { - UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> }, - UnresolvedExternCrate { ast: AstId<ast::ExternCrate> }, - UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> }, - UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions }, - UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId }, - UnresolvedMacroCall { ast: MacroCallKind, path: ModPath }, - UnimplementedBuiltinMacro { ast: AstId<ast::Macro> }, - InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize }, - MalformedDerive { ast: AstId<ast::Adt>, id: usize }, - MacroDefError { ast: AstId<ast::Macro>, message: String }, + UnresolvedModule { + ast: AstId<ast::Module>, + candidates: Box<[String]>, + }, + UnresolvedExternCrate { + ast: AstId<ast::ExternCrate>, + }, + UnresolvedImport { + id: ItemTreeId<item_tree::Use>, + index: Idx<ast::UseTree>, + }, + UnconfiguredCode { + ast: ErasedAstId, + cfg: CfgExpr, + opts: CfgOptions, + }, + /// A proc-macro that is lacking an expander, this might be due to build scripts not yet having + /// run or proc-macro expansion being disabled. 
+ UnresolvedProcMacro { + ast: MacroCallKind, + krate: CrateId, + }, + UnresolvedMacroCall { + ast: MacroCallKind, + path: ModPath, + }, + UnimplementedBuiltinMacro { + ast: AstId<ast::Macro>, + }, + InvalidDeriveTarget { + ast: AstId<ast::Item>, + id: usize, + }, + MalformedDerive { + ast: AstId<ast::Adt>, + id: usize, + }, + MacroDefError { + ast: AstId<ast::Macro>, + message: String, + }, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -92,10 +123,6 @@ impl DefDiagnostic { Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } } } - // FIXME: Whats the difference between this and unresolved_macro_call - // FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc - // yet the diagnostic handler in ide-diagnostics has to figure out what happened because this - // struct loses all that information! pub fn unresolved_proc_macro( container: LocalModuleId, ast: MacroCallKind, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs index 2b555b3998a..cee9e055459 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs @@ -9,7 +9,7 @@ use hir_expand::{ name::{name, AsName}, }; use intern::Interned; -use syntax::ast::{self, AstNode, HasTypeBounds}; +use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds}; use crate::{ path::{AssociatedTypeBinding, GenericArg, GenericArgs, ModPath, Path, PathKind}, @@ -202,6 +202,8 @@ pub(super) fn lower_generic_args( continue; } if let Some(name_ref) = assoc_type_arg.name_ref() { + // Nested impl traits like `impl Foo<Assoc = impl Bar>` are allowed + let _guard = lower_ctx.outer_impl_trait_scope(false); let name = name_ref.as_name(); let args = assoc_type_arg .generic_arg_list() diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 9794963203b..e5c1f93bbde 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -596,7 +596,7 @@ impl Resolver { Scope::GenericParams { params, def } => Some((params, def)), _ => None, }) - .flat_map(|(params, def)| params.where_predicates.iter().zip(iter::repeat(def))) + .flat_map(|(params, def)| params.where_predicates().zip(iter::repeat(def))) } pub fn generic_def(&self) -> Option<GenericDefId> { @@ -758,10 +758,10 @@ impl Scope { } Scope::GenericParams { params, def: parent } => { let parent = *parent; - for (local_id, param) in params.type_or_consts.iter() { + for (local_id, param) in params.iter_type_or_consts() { if let Some(name) = ¶m.name() { let id = TypeOrConstParamId { parent, local_id }; - let data = &db.generic_params(parent).type_or_consts[local_id]; + let data = &db.generic_params(parent)[local_id]; acc.add( name, ScopeDef::GenericParam(match data { @@ -775,7 +775,7 @@ impl Scope { ); } } - for (local_id, param) in params.lifetimes.iter() { + for (local_id, param) in params.iter_lt() { let id = LifetimeParamId { parent, local_id }; acc.add(¶m.name, ScopeDef::GenericParam(id.into())) } @@ -1164,7 +1164,6 @@ impl HasResolver for GenericDefId { GenericDefId::TraitAliasId(inner) => inner.resolver(db), GenericDefId::TypeAliasId(inner) => inner.resolver(db), GenericDefId::ImplId(inner) => inner.resolver(db), - GenericDefId::EnumVariantId(inner) => inner.resolver(db), GenericDefId::ConstId(inner) => inner.resolver(db), } } diff --git 
a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs index 2b1da8c34e1..a0d2079e0d4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs @@ -64,7 +64,7 @@ impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId { db: &dyn DefDatabase, ) -> InFile<ArenaMap<LocalTypeOrConstParamId, Self::Value>> { let generic_params = db.generic_params(*self); - let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); + let mut idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx); let (file_id, generic_params_list) = self.file_id_and_params_of(db); @@ -103,7 +103,7 @@ impl HasChildSource<LocalLifetimeParamId> for GenericDefId { db: &dyn DefDatabase, ) -> InFile<ArenaMap<LocalLifetimeParamId, Self::Value>> { let generic_params = db.generic_params(*self); - let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); + let idx_iter = generic_params.iter_lt().map(|(idx, _)| idx); let (file_id, generic_params_list) = self.file_id_and_params_of(db); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs index e08718fc836..11d91513f12 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs @@ -2,6 +2,7 @@ use std::iter; +use intern::Interned; use la_arena::ArenaMap; use span::SyntaxContextId; use syntax::ast; @@ -20,14 +21,17 @@ use crate::{ pub enum RawVisibility { /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is /// equivalent to `pub(self)`. - Module(ModPath, VisibilityExplicitness), + Module(Interned<ModPath>, VisibilityExplicitness), /// `pub`. 
Public, } impl RawVisibility { - pub(crate) const fn private() -> RawVisibility { - RawVisibility::Module(ModPath::from_kind(PathKind::SELF), VisibilityExplicitness::Implicit) + pub(crate) fn private() -> RawVisibility { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::SELF)), + VisibilityExplicitness::Implicit, + ) } pub(crate) fn from_ast( @@ -60,7 +64,7 @@ impl RawVisibility { ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF), ast::VisibilityKind::Pub => return RawVisibility::Public, }; - RawVisibility::Module(path, VisibilityExplicitness::Explicit) + RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit) } pub fn resolve( diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs index 1a3dd0e7ddb..08491db3726 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs @@ -25,7 +25,8 @@ impl ChangeWithProcMacros { pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) { self.source_change.apply(db); - if let Some(proc_macros) = self.proc_macros { + if let Some(mut proc_macros) = self.proc_macros { + proc_macros.shrink_to_fit(); db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } if let Some(target_data_layouts) = self.target_data_layouts { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs index 7c3bf995b12..29408902f16 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs @@ -172,15 +172,30 @@ impl DeclarativeMacroExpander { ), ast::Macro::MacroDef(macro_def) => ( match macro_def.body() { - Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), + Some(body) => { + let span = + map.span_for_range(macro_def.macro_token().unwrap().text_range()); + let args = macro_def.args().map(|args| { + mbe::syntax_node_to_token_tree( + args.syntax(), + map.as_ref(), + span, + DocCommentDesugarMode::Mbe, + ) + }); + let body = mbe::syntax_node_to_token_tree( + body.syntax(), map.as_ref(), - map.span_for_range(macro_def.macro_token().unwrap().text_range()), + span, DocCommentDesugarMode::Mbe, ); - mbe::DeclarativeMacro::parse_macro2(&tt, edition, new_meta_vars) + mbe::DeclarativeMacro::parse_macro2( + args.as_ref(), + &body, + edition, + new_meta_vars, + ) } None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected( "expected a token tree".into(), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index 743fac50f4e..fc9fa93268e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -1,4 +1,6 @@ //! Things to wrap other things in file ids. 
+use std::borrow::Borrow; + use either::Either; use span::{ AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr, @@ -76,6 +78,13 @@ impl<FileKind: Copy, T> InFileWrapper<FileKind, T> { pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> { self.with_value(&self.value) } + + pub fn borrow<U>(&self) -> InFileWrapper<FileKind, &U> + where + T: Borrow<U>, + { + self.with_value(self.value.borrow()) + } } impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> { @@ -156,14 +165,61 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> { } // region:specific impls +impl<SN: Borrow<SyntaxNode>> InRealFile<SN> { + pub fn file_range(&self) -> FileRange { + FileRange { file_id: self.file_id, range: self.value.borrow().text_range() } + } +} + +impl<SN: Borrow<SyntaxNode>> InFile<SN> { + pub fn parent_ancestors_with_macros( + self, + db: &dyn db::ExpandDatabase, + ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ { + let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => db + .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) + .to_node_item(db) + .syntax() + .cloned() + .map(|node| node.parent()) + .transpose(), + }; + std::iter::successors(succ(&self.borrow().cloned()), succ) + } + + pub fn ancestors_with_macros( + self, + db: &dyn db::ExpandDatabase, + ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ { + let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => db + .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) + .to_node_item(db) + .syntax() + .cloned() + .map(|node| node.parent()) + .transpose(), + }; + std::iter::successors(Some(self.borrow().cloned()), succ) + } + + pub fn kind(&self) -> parser::SyntaxKind { + self.value.borrow().kind() + } + + pub fn text_range(&self) -> TextRange { + self.value.borrow().text_range() + } -impl InFile<&SyntaxNode> { /// Falls back to the macro call range if the node cannot be mapped up fully. /// /// For attributes and derives, this will point back to the attribute only. /// For the entire item use [`InFile::original_file_range_full`]. pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange { - self.map(SyntaxNode::text_range).original_node_file_range_rooted(db) + self.borrow().map(SyntaxNode::text_range).original_node_file_range_rooted(db) } /// Falls back to the macro call range if the node cannot be mapped up fully. @@ -171,15 +227,7 @@ impl InFile<&SyntaxNode> { self, db: &dyn db::ExpandDatabase, ) -> FileRange { - self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db) - } - - /// Attempts to map the syntax node back up its macro calls. 
- pub fn original_file_range_opt( - self, - db: &dyn db::ExpandDatabase, - ) -> Option<(FileRange, SyntaxContextId)> { - self.map(SyntaxNode::text_range).original_node_file_range_opt(db) + self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db) } pub fn original_syntax_node_rooted( @@ -190,16 +238,19 @@ impl InFile<&SyntaxNode> { // as we don't have node inputs otherwise and therefore can't find an `N` node in the input let file_id = match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => { - return Some(InRealFile { file_id, value: self.value.clone() }) + return Some(InRealFile { file_id, value: self.value.borrow().clone() }) } HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m, _ => return None, }; - let FileRange { file_id, range } = - map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?; + let FileRange { file_id, range } = map_node_range_up_rooted( + db, + &db.expansion_span_map(file_id), + self.value.borrow().text_range(), + )?; - let kind = self.value.kind(); + let kind = self.kind(); let value = db .parse(file_id) .syntax_node() @@ -211,6 +262,16 @@ impl InFile<&SyntaxNode> { } } +impl InFile<&SyntaxNode> { + /// Attempts to map the syntax node back up its macro calls. + pub fn original_file_range_opt( + self, + db: &dyn db::ExpandDatabase, + ) -> Option<(FileRange, SyntaxContextId)> { + self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db) + } +} + impl InMacroFile<SyntaxToken> { pub fn upmap_once( self, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index a7150cf3087..e7c34e51e85 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -4,7 +4,6 @@ //! tree originates not from the text of some `FileId`, but from some macro //! expansion. 
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] -#![warn(rust_2018_idioms, unused_lifetimes)] pub mod attrs; pub mod builtin_attr_macro; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 8f74bffc2b9..fe754bc8249 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -275,8 +275,10 @@ pub mod known { u32, u64, u128, + f16, f32, f64, + f128, bool, char, str, diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index a83ee9824e2..b6c33683ff6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -33,6 +33,7 @@ triomphe.workspace = true nohash-hasher.workspace = true typed-arena = "2.0.1" indexmap.workspace = true +rustc_apfloat = "0.2.0" ra-ap-rustc_abi.workspace = true ra-ap-rustc_index.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs index 52411f94ad0..76d9c60f6f9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs @@ -63,7 +63,14 @@ impl<D> TyBuilder<D> { } fn build_internal(self) -> (D, Substitution) { - assert_eq!(self.vec.len(), self.param_kinds.len(), "{:?}", &self.param_kinds); + assert_eq!( + self.vec.len(), + self.param_kinds.len(), + "{} args received, {} expected ({:?})", + self.vec.len(), + self.param_kinds.len(), + &self.param_kinds + ); for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) { self.assert_match_kind(a, e); } @@ -252,8 +259,9 @@ impl TyBuilder<()> { /// This method prepopulates the builder with placeholder substitution of `parent`, so you /// should only push exactly 3 `GenericArg`s before building. pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> { - let parent_subst = - parent.as_generic_def_id().map(|p| generics(db.upcast(), p).placeholder_subst(db)); + let parent_subst = parent + .as_generic_def_id(db.upcast()) + .map(|p| generics(db.upcast(), p).placeholder_subst(db)); // These represent resume type, yield type, and return type of coroutine. let params = std::iter::repeat(ParamKind::Type).take(3).collect(); TyBuilder::new((), params, parent_subst) @@ -266,7 +274,7 @@ impl TyBuilder<()> { ) -> Substitution { let sig_ty = sig_ty.cast(Interner); let self_subst = iter::once(&sig_ty); - let Some(parent) = parent.as_generic_def_id() else { + let Some(parent) = parent.as_generic_def_id(db.upcast()) else { return Substitution::from_iter(Interner, self_subst); }; Substitution::from_iter( @@ -296,7 +304,8 @@ impl TyBuilder<hir_def::AdtId> { ) -> Self { // Note that we're building ADT, so we never have parent generic parameters. let defaults = db.generic_defaults(self.data.into()); - for default_ty in defaults.iter().skip(self.vec.len()) { + + for default_ty in &defaults[self.vec.len()..] { // NOTE(skip_binders): we only check if the arg type is error type. 
if let Some(x) = default_ty.skip_binders().ty(Interner) { if x.is_unknown() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index debae1fe123..3ac8cbaaf8b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -13,7 +13,8 @@ use hir_def::{ data::adt::StructFlags, hir::Movability, lang_item::{LangItem, LangItemTarget}, - AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId, + AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup, + TypeAliasId, VariantId, }; use hir_expand::name::name; @@ -28,9 +29,9 @@ use crate::{ to_assoc_type_id, to_chalk_trait_id, traits::ChalkContext, utils::ClosureSubst, - wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, - Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, - TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause, + wrap_empty_binders, AliasEq, AliasTy, BoundVar, DebruijnIndex, FnDefId, Interner, ProjectionTy, + ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, + TyExt, TyKind, WhereClause, }; pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>; @@ -102,7 +103,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> { &self, fn_def_id: chalk_ir::FnDefId<Interner>, ) -> Arc<rust_ir::FnDefDatum<Interner>> { - self.db.fn_def_datum(self.krate, fn_def_id) + self.db.fn_def_datum(fn_def_id) } fn impls_for_trait( @@ -912,16 +913,13 @@ fn type_alias_associated_ty_value( Arc::new(value) } -pub(crate) fn fn_def_datum_query( - db: &dyn HirDatabase, - _krate: CrateId, - fn_def_id: FnDefId, -) -> Arc<FnDefDatum> { +pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Arc<FnDefDatum> { let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_params = generics(db.upcast(), callable_def.into()); + let generic_def = GenericDefId::from_callable(db.upcast(), callable_def); + let generic_params = generics(db.upcast(), generic_def); let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders(); let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars); + let where_clauses = convert_where_clauses(db, generic_def, &bound_vars); let bound = rust_ir::FnDefDatumBound { // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway inputs_and_output: chalk_ir::Binders::empty( @@ -948,7 +946,8 @@ pub(crate) fn fn_def_datum_query( pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances { let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_params = generics(db.upcast(), callable_def.into()); + let generic_params = + generics(db.upcast(), GenericDefId::from_callable(db.upcast(), callable_def)); Variances::from_iter( Interner, std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index 4279c756519..5765262b08b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -119,8 +119,10 @@ impl TyExt for Ty { 
TyKind::Scalar(Scalar::Bool) => Some(BuiltinType::Bool), TyKind::Scalar(Scalar::Char) => Some(BuiltinType::Char), TyKind::Scalar(Scalar::Float(fty)) => Some(BuiltinType::Float(match fty { + FloatTy::F128 => BuiltinFloat::F128, FloatTy::F64 => BuiltinFloat::F64, FloatTy::F32 => BuiltinFloat::F32, + FloatTy::F16 => BuiltinFloat::F16, })), TyKind::Scalar(Scalar::Int(ity)) => Some(BuiltinType::Int(match ity { IntTy::Isize => BuiltinInt::Isize, @@ -188,9 +190,10 @@ impl TyExt for Ty { fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId> { match *self.kind(Interner) { TyKind::Adt(AdtId(adt), ..) => Some(adt.into()), - TyKind::FnDef(callable, ..) => { - Some(db.lookup_intern_callable_def(callable.into()).into()) - } + TyKind::FnDef(callable, ..) => Some(GenericDefId::from_callable( + db.upcast(), + db.lookup_intern_callable_def(callable.into()), + )), TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()), TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()), _ => None, @@ -308,7 +311,7 @@ impl TyExt for Ty { TyKind::Placeholder(idx) => { let id = from_placeholder_idx(db, *idx); let generic_params = db.generic_params(id.parent); - let param_data = &generic_params.type_or_consts[id.local_id]; + let param_data = &generic_params[id.local_id]; match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { hir_def::generics::TypeParamProvenance::ArgumentImplTrait => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index 1b4584a18d7..095f2eb6c9f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -1,6 +1,10 @@ use base_db::FileId; use chalk_ir::Substitution; use hir_def::db::DefDatabase; +use rustc_apfloat::{ + ieee::{Half as f16, Quad as f128}, + Float, +}; use test_fixture::WithFixture; use test_utils::skip_slow_tests; @@ -141,6 +145,14 @@ fn bit_op() { #[test] fn floating_point() { check_number( + r#"const GOAL: f128 = 2.0 + 3.0 * 5.5 - 8.;"#, + "10.5".parse::<f128>().unwrap().to_bits() as i128, + ); + check_number( + r#"const GOAL: f128 = -90.0 + 36.0;"#, + "-54.0".parse::<f128>().unwrap().to_bits() as i128, + ); + check_number( r#"const GOAL: f64 = 2.0 + 3.0 * 5.5 - 8.;"#, i128::from_le_bytes(pad16(&f64::to_le_bytes(10.5), true)), ); @@ -152,6 +164,20 @@ fn floating_point() { r#"const GOAL: f32 = -90.0 + 36.0;"#, i128::from_le_bytes(pad16(&f32::to_le_bytes(-54.0), true)), ); + check_number( + r#"const GOAL: f16 = 2.0 + 3.0 * 5.5 - 8.;"#, + i128::from_le_bytes(pad16( + &u16::try_from("10.5".parse::<f16>().unwrap().to_bits()).unwrap().to_le_bytes(), + true, + )), + ); + check_number( + r#"const GOAL: f16 = -90.0 + 36.0;"#, + i128::from_le_bytes(pad16( + &u16::try_from("-54.0".parse::<f16>().unwrap().to_bits()).unwrap().to_le_bytes(), + true, + )), + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs index 44a4ac27af0..5972b80d169 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs @@ -411,6 +411,7 @@ fn likely() { #[test] fn floating_point() { + // FIXME(#17451): Add `f16` and `f128` tests once intrinsics are added. 
check_number( r#" extern "rust-intrinsic" { @@ -426,6 +427,7 @@ fn floating_point() { true, )), ); + #[allow(unknown_lints, clippy::unnecessary_min_or_max)] check_number( r#" extern "rust-intrinsic" { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index e951048021d..734aad49458 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -9,8 +9,8 @@ use base_db::{ CrateId, Upcast, }; use hir_def::{ - db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId, - DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, + db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId, + ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TypeAliasId, TypeOrConstParamId, VariantId, }; use la_arena::ArenaMap; @@ -24,9 +24,8 @@ use crate::{ lower::{GenericDefaults, GenericPredicates}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, - Binders, CallableDefId, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, - Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment, TraitRef, Ty, - TyDefId, ValueTyDefId, + Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner, + PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, }; use hir_expand::name::Name; @@ -81,8 +80,32 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)] fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>; + #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] + fn lookup_impl_method( + &self, + env: Arc<TraitEnvironment>, + func: FunctionId, + fn_subst: Substitution, + ) -> (FunctionId, Substitution); + // endregion:mir + #[salsa::invoke(crate::layout::layout_of_adt_query)] + #[salsa::cycle(crate::layout::layout_of_adt_recover)] + fn layout_of_adt( + &self, + def: AdtId, + subst: Substitution, + env: Arc<TraitEnvironment>, + ) -> Result<Arc<Layout>, LayoutError>; + + #[salsa::invoke(crate::layout::layout_of_ty_query)] + #[salsa::cycle(crate::layout::layout_of_ty_recover)] + fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>; + + #[salsa::invoke(crate::layout::target_data_layout_query)] + fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>; + #[salsa::invoke(crate::lower::ty_query)] #[salsa::cycle(crate::lower::ty_recover)] fn ty(&self, def: TyDefId) -> Binders<Ty>; @@ -105,30 +128,6 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { #[salsa::invoke(crate::lower::field_types_query)] fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>; - #[salsa::invoke(crate::layout::layout_of_adt_query)] - #[salsa::cycle(crate::layout::layout_of_adt_recover)] - fn layout_of_adt( - &self, - def: AdtId, - subst: Substitution, - env: Arc<TraitEnvironment>, - ) -> Result<Arc<Layout>, LayoutError>; - - #[salsa::invoke(crate::layout::layout_of_ty_query)] - #[salsa::cycle(crate::layout::layout_of_ty_recover)] - fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>; - - #[salsa::invoke(crate::layout::target_data_layout_query)] - fn 
target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>; - - #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] - fn lookup_impl_method( - &self, - env: Arc<TraitEnvironment>, - func: FunctionId, - fn_subst: Substitution, - ) -> (FunctionId, Substitution); - #[salsa::invoke(crate::lower::callable_item_sig)] fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; @@ -145,7 +144,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { def: GenericDefId, param_id: TypeOrConstParamId, assoc_name: Option<Name>, - ) -> Arc<[Binders<QuantifiedWhereClause>]>; + ) -> GenericPredicates; #[salsa::invoke(crate::lower::generic_predicates_query)] fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates; @@ -232,7 +231,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { ) -> sync::Arc<chalk_db::ImplDatum>; #[salsa::invoke(chalk_db::fn_def_datum_query)] - fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>; + fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>; #[salsa::invoke(chalk_db::fn_def_variance_query)] fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index ce3fa53f7ad..c28ab2e98af 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -196,6 +196,9 @@ impl ExprValidator { let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else { return; }; + if pat_ty.contains_unknown() { + return; + } // We only include patterns whose type matches the type // of the scrutinee expression. If we had an InvalidMatchArmPattern diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index 8d6e502c6ab..8dcc14feb27 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -51,6 +51,7 @@ pub(crate) struct Pat { #[derive(Clone, Debug, PartialEq)] pub(crate) enum PatKind { Wild, + Never, /// `x`, `ref x`, `x @ P`, etc. 
Binding { @@ -294,6 +295,7 @@ impl HirDisplay for Pat { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match &*self.kind { PatKind::Wild => write!(f, "_"), + PatKind::Never => write!(f, "!"), PatKind::Binding { name, subpattern } => { write!(f, "{}", name.display(f.db.upcast()))?; if let Some(subpattern) = subpattern { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index c171dbc1700..bf2ff1a917c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -4,12 +4,10 @@ use std::fmt; use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use once_cell::unsync::Lazy; -use rustc_hash::FxHashMap; use rustc_pattern_analysis::{ constructor::{Constructor, ConstructorSet, VariantVisibility}, - index::IdxContainer, usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport}, - Captures, PatCx, PrivateUninhabitedField, + Captures, IndexVec, PatCx, PrivateUninhabitedField, }; use smallvec::{smallvec, SmallVec}; use stdx::never; @@ -26,10 +24,10 @@ use super::{is_box, FieldPat, Pat, PatKind}; use Constructor::*; // Re-export r-a-specific versions of all these types. -pub(crate) type DeconstructedPat<'p> = - rustc_pattern_analysis::pat::DeconstructedPat<MatchCheckCtx<'p>>; -pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>; -pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'p>>; +pub(crate) type DeconstructedPat<'db> = + rustc_pattern_analysis::pat::DeconstructedPat<MatchCheckCtx<'db>>; +pub(crate) type MatchArm<'db> = rustc_pattern_analysis::MatchArm<'db, MatchCheckCtx<'db>>; +pub(crate) type WitnessPat<'db> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'db>>; /// [Constructor] uses this in unimplemented variants. /// It allows porting match expressions from upstream algorithm without losing semantics. @@ -54,23 +52,27 @@ impl EnumVariantContiguousIndex { } } +impl rustc_pattern_analysis::Idx for EnumVariantContiguousIndex { + fn new(idx: usize) -> Self { + EnumVariantContiguousIndex(idx) + } + + fn index(self) -> usize { + self.0 + } +} + #[derive(Clone)] -pub(crate) struct MatchCheckCtx<'p> { +pub(crate) struct MatchCheckCtx<'db> { module: ModuleId, body: DefWithBodyId, - pub(crate) db: &'p dyn HirDatabase, + pub(crate) db: &'db dyn HirDatabase, exhaustive_patterns: bool, min_exhaustive_patterns: bool, } -#[derive(Clone)] -pub(crate) struct PatData<'p> { - /// Keep db around so that we can print variant names in `Debug`. - pub(crate) db: &'p dyn HirDatabase, -} - -impl<'p> MatchCheckCtx<'p> { - pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self { +impl<'db> MatchCheckCtx<'db> { + pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); let min_exhaustive_patterns = @@ -80,9 +82,9 @@ impl<'p> MatchCheckCtx<'p> { pub(crate) fn compute_match_usefulness( &self, - arms: &[MatchArm<'p>], + arms: &[MatchArm<'db>], scrut_ty: Ty, - ) -> Result<UsefulnessReport<'p, Self>, ()> { + ) -> Result<UsefulnessReport<'db, Self>, ()> { // FIXME: Determine place validity correctly. 
For now, err on the safe side. let place_validity = PlaceValidity::MaybeInvalid; // Measured to take ~100ms on modern hardware. @@ -101,7 +103,7 @@ impl<'p> MatchCheckCtx<'p> { } fn variant_id_for_adt( - db: &'p dyn HirDatabase, + db: &'db dyn HirDatabase, ctor: &Constructor<Self>, adt: hir_def::AdtId, ) -> Option<VariantId> { @@ -126,7 +128,7 @@ impl<'p> MatchCheckCtx<'p> { &'a self, ty: &'a Ty, variant: VariantId, - ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> { + ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'db> { let (_, substs) = ty.as_adt().unwrap(); let field_tys = self.db.field_types(variant); @@ -139,8 +141,8 @@ impl<'p> MatchCheckCtx<'p> { }) } - pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> { - let singleton = |pat: DeconstructedPat<'p>| vec![pat.at_index(0)]; + pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'db> { + let singleton = |pat: DeconstructedPat<'db>| vec![pat.at_index(0)]; let ctor; let mut fields: Vec<_>; let arity; @@ -228,6 +230,11 @@ impl<'p> MatchCheckCtx<'p> { fields = Vec::new(); arity = 0; } + PatKind::Never => { + ctor = Never; + fields = Vec::new(); + arity = 0; + } PatKind::Or { pats } => { ctor = Or; fields = pats @@ -238,11 +245,10 @@ impl<'p> MatchCheckCtx<'p> { arity = pats.len(); } } - let data = PatData { db: self.db }; - DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), data) + DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), ()) } - pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat { + pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'db>) -> Pat { let mut subpatterns = pat.iter_fields().map(|p| self.hoist_witness_pat(p)); let kind = match pat.ctor() { &Bool(value) => PatKind::LiteralBool { value }, @@ -290,6 +296,7 @@ impl<'p> MatchCheckCtx<'p> { Slice(_) => unimplemented!(), &Str(void) => match void {}, Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild, + Never => PatKind::Never, Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => { never!("can't convert to pattern: {:?}", pat.ctor()); PatKind::Wild @@ -299,13 +306,13 @@ impl<'p> MatchCheckCtx<'p> { } } -impl<'p> PatCx for MatchCheckCtx<'p> { +impl<'db> PatCx for MatchCheckCtx<'db> { type Error = (); type Ty = Ty; type VariantIdx = EnumVariantContiguousIndex; type StrLit = Void; type ArmData = (); - type PatData = PatData<'p>; + type PatData = (); fn is_exhaustive_patterns_feature_on(&self) -> bool { self.exhaustive_patterns @@ -339,8 +346,8 @@ impl<'p> PatCx for MatchCheckCtx<'p> { }, Ref => 1, Slice(..) => unimplemented!(), - Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..) - | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0, + Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) + | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0, Or => { never!("The `Or` constructor doesn't have a fixed arity"); 0 @@ -402,8 +409,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> { } }, Slice(_) => unreachable!("Found a `Slice` constructor in match checking"), - Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..) - | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => smallvec![], + Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) + | Opaque(..) 
| NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => { + smallvec![] + } Or => { never!("called `Fields::wildcards` on an `Or` ctor"); smallvec![] @@ -442,11 +451,8 @@ impl<'p> PatCx for MatchCheckCtx<'p> { if enum_data.variants.is_empty() && !is_declared_nonexhaustive { ConstructorSet::NoConstructors } else { - let mut variants = FxHashMap::with_capacity_and_hasher( - enum_data.variants.len(), - Default::default(), - ); - for (i, &(variant, _)) in enum_data.variants.iter().enumerate() { + let mut variants = IndexVec::with_capacity(enum_data.variants.len()); + for &(variant, _) in enum_data.variants.iter() { let is_uninhabited = is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module); let visibility = if is_uninhabited { @@ -454,13 +460,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> { } else { VariantVisibility::Visible }; - variants.insert(EnumVariantContiguousIndex(i), visibility); + variants.push(visibility); } - ConstructorSet::Variants { - variants: IdxContainer(variants), - non_exhaustive: is_declared_nonexhaustive, - } + ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive } } } TyKind::Adt(AdtId(hir_def::AdtId::UnionId(_)), _) => ConstructorSet::Union, @@ -476,26 +479,27 @@ impl<'p> PatCx for MatchCheckCtx<'p> { fn write_variant_name( f: &mut fmt::Formatter<'_>, - pat: &rustc_pattern_analysis::pat::DeconstructedPat<Self>, + _ctor: &Constructor<Self>, + _ty: &Self::Ty, ) -> fmt::Result { - let db = pat.data().db; - let variant = - pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, pat.ctor(), adt)); - - if let Some(variant) = variant { - match variant { - VariantId::EnumVariantId(v) => { - write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?; - } - VariantId::StructId(s) => { - write!(f, "{}", db.struct_data(s).name.display(db.upcast()))? - } - VariantId::UnionId(u) => { - write!(f, "{}", db.union_data(u).name.display(db.upcast()))? - } - } - } - Ok(()) + write!(f, "<write_variant_name unsupported>") + // We lack the database here ... + // let variant = ty.as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, ctor, adt)); + + // if let Some(variant) = variant { + // match variant { + // VariantId::EnumVariantId(v) => { + // write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?; + // } + // VariantId::StructId(s) => { + // write!(f, "{}", db.struct_data(s).name.display(db.upcast()))? + // } + // VariantId::UnionId(u) => { + // write!(f, "{}", db.union_data(u).name.display(db.upcast()))? 
+ // } + // } + // } + // Ok(()) } fn bug(&self, fmt: fmt::Arguments<'_>) { @@ -507,7 +511,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> { } } -impl<'p> fmt::Debug for MatchCheckCtx<'p> { +impl<'db> fmt::Debug for MatchCheckCtx<'db> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("MatchCheckCtx").finish() } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 66b5398b88e..a9a5d829f5f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -21,13 +21,17 @@ use hir_def::{ path::{Path, PathKind}, type_ref::{TraitBoundModifier, TypeBound, TypeRef}, visibility::Visibility, - HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, - TraitId, + GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, + ModuleId, TraitId, }; use hir_expand::name::Name; use intern::{Internable, Interned}; use itertools::Itertools; use la_arena::ArenaMap; +use rustc_apfloat::{ + ieee::{Half as f16, Quad as f128}, + Float, +}; use smallvec::SmallVec; use stdx::{never, IsNoneOr}; use triomphe::Arc; @@ -545,6 +549,17 @@ fn render_const_scalar( write!(f, "{it}") } Scalar::Float(fl) => match fl { + chalk_ir::FloatTy::F16 => { + // FIXME(#17451): Replace with builtins once they are stabilised. + let it = f16::from_bits(u16::from_le_bytes(b.try_into().unwrap()).into()); + let s = it.to_string(); + if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) { + // Match Rust debug formatting + write!(f, "{s}.0") + } else { + write!(f, "{s}") + } + } chalk_ir::FloatTy::F32 => { let it = f32::from_le_bytes(b.try_into().unwrap()); write!(f, "{it:?}") @@ -553,6 +568,17 @@ fn render_const_scalar( let it = f64::from_le_bytes(b.try_into().unwrap()); write!(f, "{it:?}") } + chalk_ir::FloatTy::F128 => { + // FIXME(#17451): Replace with builtins once they are stabilised. 
+ let it = f128::from_bits(u128::from_le_bytes(b.try_into().unwrap())); + let s = it.to_string(); + if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) { + // Match Rust debug formatting + write!(f, "{s}.0") + } else { + write!(f, "{s}") + } + } }, }, TyKind::Ref(_, _, t) => match t.kind(Interner) { @@ -988,7 +1014,8 @@ impl HirDisplay for Ty { f.end_location_link(); if parameters.len(Interner) > 0 { - let generics = generics(db.upcast(), def.into()); + let generic_def_id = GenericDefId::from_callable(db.upcast(), def); + let generics = generics(db.upcast(), generic_def_id); let (parent_len, self_param, type_, const_, impl_, lifetime) = generics.provenance_split(); let parameters = parameters.as_slice(Interner); @@ -1002,8 +1029,9 @@ impl HirDisplay for Ty { debug_assert_eq!(parent_params.len(), parent_len); let parent_params = - generic_args_sans_defaults(f, Some(def.into()), parent_params); - let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params); + generic_args_sans_defaults(f, Some(generic_def_id), parent_params); + let fn_params = + generic_args_sans_defaults(f, Some(generic_def_id), fn_params); write!(f, "<")?; hir_fmt_generic_arguments(f, parent_params, None)?; @@ -1041,7 +1069,11 @@ impl HirDisplay for Ty { module_id, PrefixKind::Plain, false, - ImportPathConfig { prefer_no_std: false, prefer_prelude: true }, + ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }, ) { write!(f, "{}", path.display(f.db.upcast()))?; } else { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index ea10e6881e7..a96c101a388 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -2,8 +2,8 @@ //! //! The layout for generics as expected by chalk are as follows: //! - Optional Self parameter -//! - Type or Const parameters //! - Lifetime parameters +//! - Type or Const parameters //! - Parent parameters //! //! where parent follows the same scheme. 
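Editor's note: the generics.rs module doc above changes the chalk-facing parameter layout to: optional Self parameter, then lifetimes, then type/const parameters, then the parent's parameters. The standalone sketch below illustrates how indices fall out of that ordering, mirroring the `find_type_or_const_param`/`find_lifetime` arithmetic further down in this diff; `Counts` and its methods are illustrative assumptions, not rust-analyzer APIs.

struct Counts {
    has_trait_self: bool,
    lifetimes: usize,
}

impl Counts {
    // index of the idx-th lifetime parameter within this def's own slice of the substitution
    fn lifetime_index(&self, idx: usize) -> usize {
        self.has_trait_self as usize + idx
    }

    // index of the idx-th type/const parameter as stored (with the trait Self param at slot 0)
    fn type_or_const_index(&self, idx: usize) -> usize {
        if self.has_trait_self && idx == 0 {
            0 // the trait Self parameter stays at the front
        } else {
            self.lifetimes + idx
        }
    }
}

fn main() {
    // e.g. `trait Tr<'a, T>`: type/consts are stored as [Self, T], lifetimes as ['a]
    let tr = Counts { has_trait_self: true, lifetimes: 1 };
    assert_eq!(tr.type_or_const_index(0), 0); // Self
    assert_eq!(tr.lifetime_index(0), 1);      // 'a
    assert_eq!(tr.type_or_const_index(1), 2); // T
    println!("layout: [Self, 'a, T]");
}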
@@ -20,18 +20,23 @@ use hir_def::{ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; use intern::Interned; +use itertools::chain; +use stdx::TupleExt; use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution}; pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); - Generics { def, params: db.generic_params(def), parent_generics } + let params = db.generic_params(def); + let has_trait_self_param = params.trait_self_param().is_some(); + Generics { def, params, parent_generics, has_trait_self_param } } #[derive(Clone, Debug)] pub(crate) struct Generics { def: GenericDefId, params: Interned<GenericParams>, parent_generics: Option<Box<Generics>>, + has_trait_self_param: bool, } impl<T> ops::Index<T> for Generics @@ -57,7 +62,7 @@ impl Generics { self.iter_self().map(|(id, _)| id) } - fn iter_parent_id(&self) -> impl Iterator<Item = GenericParamId> + '_ { + pub(crate) fn iter_parent_id(&self) -> impl Iterator<Item = GenericParamId> + '_ { self.iter_parent().map(|(id, _)| id) } @@ -67,6 +72,12 @@ impl Generics { self.params.iter_type_or_consts() } + pub(crate) fn iter_self_type_or_consts_id( + &self, + ) -> impl DoubleEndedIterator<Item = GenericParamId> + '_ { + self.params.iter_type_or_consts().map(from_toc_id(self)).map(TupleExt::head) + } + /// Iterate over the params followed by the parent params. pub(crate) fn iter( &self, @@ -78,10 +89,9 @@ impl Generics { pub(crate) fn iter_self( &self, ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ { - self.params - .iter_type_or_consts() - .map(from_toc_id(self)) - .chain(self.params.iter_lt().map(from_lt_id(self))) + let mut toc = self.params.iter_type_or_consts().map(from_toc_id(self)); + let trait_self_param = self.has_trait_self_param.then(|| toc.next()).flatten(); + chain!(trait_self_param, self.params.iter_lt().map(from_lt_id(self)), toc) } /// Iterator over types and const params of parent. 
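Editor's note: the new `iter_self` above produces that ordering by optionally pulling the trait Self param off the front of the type/const iterator and chaining it ahead of the lifetimes. A small self-contained sketch of the same iterator trick, under the assumption that type/consts are stored with Self first; `reorder` is a hypothetical helper, not a rust-analyzer API.

fn reorder<T>(has_trait_self: bool, type_or_consts: Vec<T>, lifetimes: Vec<T>) -> Vec<T> {
    let mut toc = type_or_consts.into_iter();
    // `then(..).flatten()` only consumes the first element when a trait Self param exists
    let trait_self = has_trait_self.then(|| toc.next()).flatten();
    trait_self
        .into_iter()
        .chain(lifetimes) // lifetimes come right after Self
        .chain(toc)       // then the remaining type/const params
        .collect()
}

fn main() {
    // `trait Tr<'a, T>` stores its type/consts as [Self, T] and its lifetimes as ['a]
    let ordered = reorder(true, vec!["Self", "T"], vec!["'a"]);
    assert_eq!(ordered, ["Self", "'a", "T"]);
}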
@@ -89,8 +99,9 @@ impl Generics { &self, ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ { self.parent_generics().into_iter().flat_map(|it| { - let lt_iter = it.params.iter_lt().map(from_lt_id(it)); - it.params.iter_type_or_consts().map(from_toc_id(it)).chain(lt_iter) + let mut toc = it.params.iter_type_or_consts().map(from_toc_id(it)); + let trait_self_param = it.has_trait_self_param.then(|| toc.next()).flatten(); + chain!(trait_self_param, it.params.iter_lt().map(from_lt_id(it)), toc) }) } @@ -134,8 +145,11 @@ impl Generics { fn find_type_or_const_param(&self, param: TypeOrConstParamId) -> Option<usize> { if param.parent == self.def { let idx = param.local_id.into_raw().into_u32() as usize; - debug_assert!(idx <= self.params.type_or_consts.len()); - Some(idx) + debug_assert!(idx <= self.params.len_type_or_consts()); + if self.params.trait_self_param() == Some(param.local_id) { + return Some(idx); + } + Some(self.params.len_lifetimes() + idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent)); self.parent_generics() @@ -152,8 +166,8 @@ impl Generics { fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option<usize> { if lifetime.parent == self.def { let idx = lifetime.local_id.into_raw().into_u32() as usize; - debug_assert!(idx <= self.params.lifetimes.len()); - Some(self.params.type_or_consts.len() + idx) + debug_assert!(idx <= self.params.len_lifetimes()); + Some(self.params.trait_self_param().is_some() as usize + idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent)); self.parent_generics() @@ -216,7 +230,6 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<Generic GenericDefId::FunctionId(it) => it.lookup(db).container, GenericDefId::TypeAliasId(it) => it.lookup(db).container, GenericDefId::ConstId(it) => it.lookup(db).container, - GenericDefId::EnumVariantId(it) => return Some(it.lookup(db).parent.into()), GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 96431ba4ce9..66ee02d74d9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -701,18 +701,23 @@ impl<'a> InferenceContext<'a> { table.propagate_diverging_flag(); for ty in type_of_expr.values_mut() { *ty = table.resolve_completely(ty.clone()); + *has_errors = *has_errors || ty.contains_unknown(); } for ty in type_of_pat.values_mut() { *ty = table.resolve_completely(ty.clone()); + *has_errors = *has_errors || ty.contains_unknown(); } for ty in type_of_binding.values_mut() { *ty = table.resolve_completely(ty.clone()); + *has_errors = *has_errors || ty.contains_unknown(); } for ty in type_of_rpit.values_mut() { *ty = table.resolve_completely(ty.clone()); + *has_errors = *has_errors || ty.contains_unknown(); } for ty in type_of_for_iterator.values_mut() { *ty = table.resolve_completely(ty.clone()); + *has_errors = *has_errors || ty.contains_unknown(); } *has_errors = !type_mismatches.is_empty(); @@ -835,11 +840,7 @@ impl<'a> InferenceContext<'a> { let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { // RPIT opaque types use substitution of their parent function. 
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); - let result = self.insert_inference_vars_for_impl_trait( - return_ty, - rpits.clone(), - fn_placeholders, - ); + let result = self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders); let rpits = rpits.skip_binders(); for (id, _) in rpits.impl_traits.iter() { if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) { @@ -862,12 +863,7 @@ impl<'a> InferenceContext<'a> { self.insert_atpit_coercion_table(params_and_ret_tys.iter()); } - fn insert_inference_vars_for_impl_trait<T>( - &mut self, - t: T, - rpits: Arc<chalk_ir::Binders<crate::ImplTraits>>, - placeholders: Substitution, - ) -> T + fn insert_inference_vars_for_impl_trait<T>(&mut self, t: T, placeholders: Substitution) -> T where T: crate::HasInterner<Interner = Interner> + crate::TypeFoldable<Interner>, { @@ -878,13 +874,21 @@ impl<'a> InferenceContext<'a> { TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id, _ => return ty, }; - let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { - ImplTraitId::ReturnTypeImplTrait(_, idx) => idx, - ImplTraitId::AssociatedTypeImplTrait(_, idx) => idx, - _ => unreachable!(), + let (impl_traits, idx) = + match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { + ImplTraitId::ReturnTypeImplTrait(def, idx) => { + (self.db.return_type_impl_traits(def), idx) + } + ImplTraitId::AssociatedTypeImplTrait(def, idx) => { + (self.db.type_alias_impl_traits(def), idx) + } + _ => unreachable!(), + }; + let Some(impl_traits) = impl_traits else { + return ty; }; - let bounds = - (*rpits).map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter())); + let bounds = (*impl_traits) + .map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter())); let var = self.table.new_type_var(); let var_subst = Substitution::from1(Interner, var.clone()); for bound in bounds { @@ -892,11 +896,8 @@ impl<'a> InferenceContext<'a> { let (var_predicate, binders) = predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders(); always!(binders.is_empty(Interner)); // quantified where clauses not yet handled - let var_predicate = self.insert_inference_vars_for_impl_trait( - var_predicate, - rpits.clone(), - placeholders.clone(), - ); + let var_predicate = self + .insert_inference_vars_for_impl_trait(var_predicate, placeholders.clone()); self.push_obligation(var_predicate.cast(Interner)); } self.result.type_of_rpit.insert(idx, var.clone()); @@ -983,16 +984,8 @@ impl<'a> InferenceContext<'a> { self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { if assoc_tys.contains(&alias_id) { - let atpits = self - .db - .type_alias_impl_traits(alias_id) - .expect("Marked as ATPIT but no impl traits!"); let alias_placeholders = TyBuilder::placeholder_subst(self.db, alias_id); - let ty = self.insert_inference_vars_for_impl_trait( - ty, - atpits, - alias_placeholders, - ); + let ty = self.insert_inference_vars_for_impl_trait(ty, alias_placeholders); return Some((opaque_ty_id, ty)); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 95f28531acf..7a0f7872a64 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -13,7 +13,7 @@ use hir_def::{ }, lang_item::{LangItem, LangItemTarget}, path::{GenericArgs, Path}, - BlockId, FieldId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, + BlockId, FieldId, GenericDefId, GenericParamId, 
ItemContainerId, Lookup, TupleFieldId, TupleId, }; use hir_expand::name::{name, Name}; use stdx::always; @@ -440,7 +440,8 @@ impl InferenceContext<'_> { let ty = match self.infer_path(p, tgt_expr.into()) { Some(ty) => ty, None => { - if matches!(p, Path::Normal { mod_path, .. } if mod_path.is_ident()) { + if matches!(p, Path::Normal { mod_path, .. } if mod_path.is_ident() || mod_path.is_self()) + { self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { expr: tgt_expr, }); @@ -1895,7 +1896,8 @@ impl InferenceContext<'_> { let callable_ty = self.resolve_ty_shallow(callable_ty); if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) { let def: CallableDefId = from_chalk(self.db, *fn_def); - let generic_predicates = self.db.generic_predicates(def.into()); + let generic_predicates = + self.db.generic_predicates(GenericDefId::from_callable(self.db.upcast(), def)); for predicate in generic_predicates.iter() { let (predicate, binders) = predicate .clone() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index d876008cd58..490ecfd7fa3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -41,14 +41,7 @@ impl InferenceContext<'_> { fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> { let (value, self_subst) = self.resolve_value_path_inner(path, id)?; - let value_def = match value { - ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) { - Some(ty) => return Some(ValuePathResolution::NonGeneric(ty.clone())), - None => { - never!("uninferred pattern?"); - return None; - } - }, + let value_def: ValueTyDefId = match value { ValueNs::FunctionId(it) => it.into(), ValueNs::ConstId(it) => it.into(), ValueNs::StaticId(it) => it.into(), @@ -62,48 +55,79 @@ impl InferenceContext<'_> { it.into() } + ValueNs::LocalBinding(pat) => { + return match self.result.type_of_binding.get(pat) { + Some(ty) => Some(ValuePathResolution::NonGeneric(ty.clone())), + None => { + never!("uninferred pattern?"); + None + } + } + } ValueNs::ImplSelf(impl_id) => { let generics = crate::generics::generics(self.db.upcast(), impl_id.into()); let substs = generics.placeholder_subst(self.db); let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); - if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() { - return Some(ValuePathResolution::GenericDef( + return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() { + Some(ValuePathResolution::GenericDef( struct_id.into(), struct_id.into(), substs.clone(), - )); + )) } else { // FIXME: report error, invalid Self reference - return None; - } + None + }; } ValueNs::GenericParam(it) => { return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it))) } }; + let generic_def_id = value_def.to_generic_def_id(self.db); + let Some(generic_def) = generic_def_id else { + // `value_def` is the kind of item that can never be generic (i.e. statics, at least + // currently). We can just skip the binders to get its type. 
+ let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders(); + stdx::always!(binders.is_empty(Interner), "non-empty binders for non-generic def",); + return Some(ValuePathResolution::NonGeneric(ty)); + }; + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); let substs = ctx.substs_from_path(path, value_def, true); let substs = substs.as_slice(Interner); + + if let ValueNs::EnumVariantId(_) = value { + let mut it = self_subst + .as_ref() + .map_or(&[][..], |s| s.as_slice(Interner)) + .iter() + .chain(substs) + .cloned(); + let builder = TyBuilder::subst_for_def(self.db, generic_def, None); + let substs = builder + .fill(|x| { + it.next().unwrap_or_else(|| match x { + ParamKind::Type => { + self.result.standard_types.unknown.clone().cast(Interner) + } + ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), + }) + }) + .build(); + + return Some(ValuePathResolution::GenericDef(value_def, generic_def, substs)); + } + let parent_substs = self_subst.or_else(|| { - let generics = generics(self.db.upcast(), value_def.to_generic_def_id()?); + let generics = generics(self.db.upcast(), generic_def_id?); let parent_params_len = generics.parent_generics()?.len(); let parent_args = &substs[substs.len() - parent_params_len..]; Some(Substitution::from_iter(Interner, parent_args)) }); let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner)); let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned(); - - let Some(generic_def) = value_def.to_generic_def_id() else { - // `value_def` is the kind of item that can never be generic (i.e. statics, at least - // currently). We can just skip the binders to get its type. - let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders(); - stdx::always!( - parent_substs.is_none() && binders.is_empty(Interner), - "non-empty binders for non-generic def", - ); - return Some(ValuePathResolution::NonGeneric(ty)); - }; let builder = TyBuilder::subst_for_def(self.db, generic_def, parent_substs); let substs = builder .fill(|x| { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index d9fd029d378..034b9c773c2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -265,8 +265,10 @@ pub fn layout_of_ty_query( chalk_ir::Scalar::Float(f) => scalar( dl, Primitive::Float(match f { + FloatTy::F16 => Float::F16, FloatTy::F32 => Float::F32, FloatTy::F64 => Float::F64, + FloatTy::F128 => Float::F128, }), ), }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index 392bda51b52..35ea13eb119 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -426,6 +426,7 @@ fn enums() { #[test] fn primitives() { + // FIXME(#17451): Add `f16` and `f128` once they are stabilised. size_and_align! { struct Goal(i32, i128, isize, usize, f32, f64, bool, char); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 5e33e1285ee..bd650869bb3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -1,6 +1,6 @@ //! The type system. 
We currently use this to infer types for completion, hover //! information and various assists. -#![warn(rust_2018_idioms, unused_lifetimes)] + #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[cfg(feature = "in-rust-tree")] @@ -60,7 +60,7 @@ use chalk_ir::{ NoSolution, }; use either::Either; -use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId}; +use hir_def::{hir::ExprId, type_ref::Rawness, CallableDefId, GeneralConstId, TypeOrConstParamId}; use hir_expand::name; use la_arena::{Arena, Idx}; use mir::{MirEvalError, VTableMap}; @@ -84,8 +84,8 @@ pub use infer::{ }; pub use interner::Interner; pub use lower::{ - associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode, - TyDefId, TyLoweringContext, ValueTyDefId, + associated_type_shorthand_candidates, ImplTraitLoweringMode, ParamLoweringMode, TyDefId, + TyLoweringContext, ValueTyDefId, }; pub use mapping::{ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 96f545415e2..d421e72d364 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -11,10 +11,7 @@ use std::{ ops::{self, Not as _}, }; -use base_db::{ - salsa::{Cycle, InternValueTrivial}, - CrateId, -}; +use base_db::{salsa::Cycle, CrateId}; use chalk_ir::{ cast::Cast, fold::{Shift, TypeFoldable}, @@ -38,10 +35,10 @@ use hir_def::{ type_ref::{ ConstRef, LifetimeRef, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef, }, - AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, - GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, LocalFieldId, - Lookup, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, - UnionId, VariantId, + AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, + LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, + TypeOwnerId, UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; use intern::Interned; @@ -387,14 +384,18 @@ impl<'a> TyLoweringContext<'a> { type_params, const_params, _impl_trait_params, - _lifetime_params, + lifetime_params, ) = self .generics() .expect("variable impl trait lowering must be in a generic def") .provenance_split(); TyKind::BoundVar(BoundVar::new( self.in_binders, - idx as usize + self_param as usize + type_params + const_params, + idx as usize + + self_param as usize + + type_params + + const_params + + lifetime_params, )) .intern(Interner) } @@ -815,13 +816,13 @@ impl<'a> TyLoweringContext<'a> { infer_args: bool, explicit_self_ty: Option<Ty>, ) -> Substitution { - // Remember that the item's own generic args come before its parent's. 
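A minimal sketch, assuming only the ordering spelled out in the comments just below (optional Self, then lifetimes, then type/const parameters, then parent parameters); the `Wrapper` type and `map` method here are hypothetical, not part of the patch:

struct Wrapper<P>(P);

impl<P> Wrapper<P> {
    // Assuming the documented order, the substitution for `map` would be
    // assembled as: (no Self param) + ['a] + [T, N] + [parent: P].
    fn map<'a, T, const N: usize>(&'a self, item: T) -> (T, [u8; N]) {
        (item, [0; N])
    }
}

The `lifetime_params_move_param_defaults` regression test added further down exercises the same ordering in combination with parameter defaults.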
- let mut substs = Vec::new(); - let def = if let Some(d) = def { - d - } else { - return Substitution::empty(Interner); - }; + let Some(def) = def else { return Substitution::empty(Interner) }; + + // Order is + // - Optional Self parameter + // - Lifetime parameters + // - Type or Const parameters + // - Parent parameters let def_generics = generics(self.db.upcast(), def); let ( parent_params, @@ -835,130 +836,121 @@ impl<'a> TyLoweringContext<'a> { self_param as usize + type_params + const_params + impl_trait_params + lifetime_params; let total_len = parent_params + item_len; - let ty_error = TyKind::Error.intern(Interner).cast(Interner); + let mut substs = Vec::new(); - let mut def_generic_iter = def_generics.iter_id(); + // we need to iterate the lifetime and type/const params separately as our order of them + // differs from the supplied syntax - let fill_self_params = || { + let ty_error = || TyKind::Error.intern(Interner).cast(Interner); + let mut def_toc_iter = def_generics.iter_self_type_or_consts_id(); + let fill_self_param = || { if self_param { - let self_ty = - explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(|| ty_error.clone()); + let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error); - if let Some(id) = def_generic_iter.next() { - assert!(matches!( - id, - GenericParamId::TypeParamId(_) | GenericParamId::LifetimeParamId(_) - )); + if let Some(id) = def_toc_iter.next() { + assert!(matches!(id, GenericParamId::TypeParamId(_))); substs.push(self_ty); } } }; let mut had_explicit_args = false; - if let Some(generic_args) = &args_and_bindings { - if !generic_args.has_self_type { - fill_self_params(); - } - let expected_num = if generic_args.has_self_type { - self_param as usize + type_params + const_params + if let Some(&GenericArgs { ref args, has_self_type, .. 
}) = args_and_bindings { + // Fill in the self param first + if has_self_type && self_param { + had_explicit_args = true; + if let Some(id) = def_toc_iter.next() { + assert!(matches!(id, GenericParamId::TypeParamId(_))); + had_explicit_args = true; + if let GenericArg::Type(ty) = &args[0] { + substs.push(self.lower_ty(ty).cast(Interner)); + } + } } else { - type_params + const_params + fill_self_param() }; - let skip = if generic_args.has_self_type && !self_param { 1 } else { 0 }; - // if args are provided, it should be all of them, but we can't rely on that - for arg in generic_args - .args + + // Then fill in the supplied lifetime args, or error lifetimes if there are too few + // (default lifetimes aren't a thing) + for arg in args .iter() - .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) - .skip(skip) - .take(expected_num) + .filter_map(|arg| match arg { + GenericArg::Lifetime(arg) => Some(self.lower_lifetime(arg)), + _ => None, + }) + .chain(iter::repeat(error_lifetime())) + .take(lifetime_params) { - if let Some(id) = def_generic_iter.next() { - let arg = generic_arg_to_chalk( - self.db, - id, - arg, - &mut (), - |_, type_ref| self.lower_ty(type_ref), - |_, const_ref, ty| self.lower_const(const_ref, ty), - |_, lifetime_ref| self.lower_lifetime(lifetime_ref), - ); - had_explicit_args = true; - substs.push(arg); - } + substs.push(arg.cast(Interner)); } - for arg in generic_args - .args + let skip = if has_self_type { 1 } else { 0 }; + // Fill in supplied type and const args + // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that + for (arg, id) in args .iter() - .filter(|arg| matches!(arg, GenericArg::Lifetime(_))) - .take(lifetime_params) + .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) + .skip(skip) + .take(type_params + const_params) + .zip(def_toc_iter) { - // Taking into the fact that def_generic_iter will always have lifetimes at the end - // Should have some test cases tho to test this behaviour more properly - if let Some(id) = def_generic_iter.next() { - let arg = generic_arg_to_chalk( - self.db, - id, - arg, - &mut (), - |_, type_ref| self.lower_ty(type_ref), - |_, const_ref, ty| self.lower_const(const_ref, ty), - |_, lifetime_ref| self.lower_lifetime(lifetime_ref), - ); - had_explicit_args = true; - substs.push(arg); - } + had_explicit_args = true; + let arg = generic_arg_to_chalk( + self.db, + id, + arg, + &mut (), + |_, type_ref| self.lower_ty(type_ref), + |_, const_ref, ty| self.lower_const(const_ref, ty), + |_, lifetime_ref| self.lower_lifetime(lifetime_ref), + ); + substs.push(arg); } } else { - fill_self_params(); + fill_self_param(); } - // These params include those of parent. - let remaining_params: SmallVec<[_; 2]> = def_generic_iter - .map(|id| match id { - GenericParamId::ConstParamId(x) => { - unknown_const_as_generic(self.db.const_param_ty(x)) - } - GenericParamId::TypeParamId(_) => ty_error.clone(), - GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), - }) - .collect(); - assert_eq!(remaining_params.len() + substs.len(), total_len); - + let param_to_err = |id| match id { + GenericParamId::ConstParamId(x) => unknown_const_as_generic(self.db.const_param_ty(x)), + GenericParamId::TypeParamId(_) => ty_error(), + GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), + }; // handle defaults. In expression or pattern path segments without // explicitly specified type arguments, missing type arguments are inferred // (i.e. defaults aren't used). 
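        // For illustration: given a hypothetical `struct S<T, U = i32>(T, U);`, the
        // expression path `S::<String>` supplies explicit args, so the missing `U` is
        // filled from its `i32` default, whereas a bare `S` leaves both `T` and `U`
        // to be inferred.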
// Generic parameters for associated types are not supposed to have defaults, so we just // ignore them. - let is_assoc_ty = if let GenericDefId::TypeAliasId(id) = def { - let container = id.lookup(self.db.upcast()).container; - matches!(container, ItemContainerId::TraitId(_)) - } else { - false + let is_assoc_ty = || match def { + GenericDefId::TypeAliasId(id) => { + matches!(id.lookup(self.db.upcast()).container, ItemContainerId::TraitId(_)) + } + _ => false, }; - if !is_assoc_ty && (!infer_args || had_explicit_args) { - let defaults = self.db.generic_defaults(def); - assert_eq!(total_len, defaults.len()); + let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty(); + if fill_defaults { + let defaults = &*self.db.generic_defaults(def); + let (item, _parent) = defaults.split_at(item_len); let parent_from = item_len - substs.len(); - for (idx, default_ty) in defaults[substs.len()..item_len].iter().enumerate() { + let mut rem = + def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>(); + // Fill in defaults for type/const params + for (idx, default_ty) in item[substs.len()..].iter().enumerate() { // each default can depend on the previous parameters let substs_so_far = Substitution::from_iter( Interner, - substs.iter().cloned().chain(remaining_params[idx..].iter().cloned()), + substs.iter().cloned().chain(rem[idx..].iter().cloned()), ); substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); } - - // Keep parent's params as unknown. - let mut remaining_params = remaining_params; - substs.extend(remaining_params.drain(parent_from..)); + // Fill in remaining parent params + substs.extend(rem.drain(parent_from..)); } else { - substs.extend(remaining_params); + // Fill in remaining def params and parent params + substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err)); } - assert_eq!(substs.len(), total_len); + assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len()); Substitution::from_iter(Interner, substs) } @@ -1535,7 +1527,7 @@ pub(crate) fn generic_predicates_for_param_query( def: GenericDefId, param_id: TypeOrConstParamId, assoc_name: Option<Name>, -) -> Arc<[Binders<QuantifiedWhereClause>]> { +) -> GenericPredicates { let resolver = def.resolver(db.upcast()); let ctx = if let GenericDefId::FunctionId(_) = def { TyLoweringContext::new(db, &resolver, def.into()) @@ -1611,7 +1603,7 @@ pub(crate) fn generic_predicates_for_param_query( ); }; } - predicates.into() + GenericPredicates(predicates.is_empty().not().then(|| predicates.into())) } pub(crate) fn generic_predicates_for_param_recover( @@ -1620,15 +1612,15 @@ pub(crate) fn generic_predicates_for_param_recover( _def: &GenericDefId, _param_id: &TypeOrConstParamId, _assoc_name: &Option<Name>, -) -> Arc<[Binders<QuantifiedWhereClause>]> { - Arc::from_iter(None) +) -> GenericPredicates { + GenericPredicates(None) } pub(crate) fn trait_environment_for_body_query( db: &dyn HirDatabase, def: DefWithBodyId, ) -> Arc<TraitEnvironment> { - let Some(def) = def.as_generic_def_id() else { + let Some(def) = def.as_generic_def_id(db.upcast()) else { let krate = def.module(db.upcast()).krate(); return TraitEnvironment::empty(krate); }; @@ -1725,8 +1717,8 @@ pub(crate) fn generic_predicates_query( }) .collect::<Vec<_>>(); - let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); - if !subst.is_empty(Interner) { + if generics.len() > 0 { + let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); let explicitly_unsized_tys = 
ctx.unsized_types.into_inner(); if let Some(implicitly_sized_predicates) = implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver) @@ -1995,47 +1987,6 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> { } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum CallableDefId { - FunctionId(FunctionId), - StructId(StructId), - EnumVariantId(EnumVariantId), -} - -impl InternValueTrivial for CallableDefId {} - -impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); -impl From<CallableDefId> for ModuleDefId { - fn from(def: CallableDefId) -> ModuleDefId { - match def { - CallableDefId::FunctionId(f) => ModuleDefId::FunctionId(f), - CallableDefId::StructId(s) => ModuleDefId::AdtId(AdtId::StructId(s)), - CallableDefId::EnumVariantId(e) => ModuleDefId::EnumVariantId(e), - } - } -} - -impl CallableDefId { - pub fn krate(self, db: &dyn HirDatabase) -> CrateId { - let db = db.upcast(); - match self { - CallableDefId::FunctionId(f) => f.krate(db), - CallableDefId::StructId(s) => s.krate(db), - CallableDefId::EnumVariantId(e) => e.krate(db), - } - } -} - -impl From<CallableDefId> for GenericDefId { - fn from(def: CallableDefId) -> GenericDefId { - match def { - CallableDefId::FunctionId(f) => f.into(), - CallableDefId::StructId(s) => s.into(), - CallableDefId::EnumVariantId(e) => e.into(), - } - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum TyDefId { BuiltinType(BuiltinType), @@ -2056,12 +2007,12 @@ pub enum ValueTyDefId { impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); impl ValueTyDefId { - pub(crate) fn to_generic_def_id(self) -> Option<GenericDefId> { + pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> Option<GenericDefId> { match self { Self::FunctionId(id) => Some(id.into()), Self::StructId(id) => Some(id.into()), Self::UnionId(id) => Some(id.into()), - Self::EnumVariantId(var) => Some(var.into()), + Self::EnumVariantId(var) => Some(var.lookup(db.upcast()).parent.into()), Self::ConstId(id) => Some(id.into()), Self::StaticId(_) => None, } @@ -2112,7 +2063,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde // returns None if def is a type arg pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty { let parent_data = db.generic_params(def.parent()); - let data = &parent_data.type_or_consts[def.local_id()]; + let data = &parent_data[def.local_id()]; let resolver = def.parent().resolver(db.upcast()); let ctx = TyLoweringContext::new(db, &resolver, def.parent().into()); match data { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index 5ce124d6d27..fad74c2448c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -127,9 +127,11 @@ pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [ TyFingerprint::Scalar(Scalar::Uint(UintTy::Usize)), ]; -pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [ +pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 4] = [ + TyFingerprint::Scalar(Scalar::Float(FloatTy::F16)), TyFingerprint::Scalar(Scalar::Float(FloatTy::F32)), TyFingerprint::Scalar(Scalar::Float(FloatTy::F64)), + TyFingerprint::Scalar(Scalar::Float(FloatTy::F128)), ]; type TraitFpMap = FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Box<[ImplId]>>>; @@ -1322,7 +1324,7 @@ fn iterate_inherent_methods( callback: &mut 
dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { for &impl_id in impls.for_self_ty(self_ty) { - for &item in &table.db.impl_data(impl_id).items { + for &item in table.db.impl_data(impl_id).items.iter() { let visible = match is_valid_impl_method_candidate( table, self_ty, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 4ee96a66a39..2d9c221b732 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -18,6 +18,10 @@ use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile}; use intern::Interned; use la_arena::ArenaMap; use rustc_abi::TargetDataLayout; +use rustc_apfloat::{ + ieee::{Half as f16, Quad as f128}, + Float, +}; use rustc_hash::{FxHashMap, FxHashSet}; use stdx::never; use syntax::{SyntaxNodePtr, TextRange}; @@ -55,6 +59,13 @@ macro_rules! from_bytes { Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())), })) }; + ($apfloat:tt, $bits:tt, $value:expr) => { + // FIXME(#17451): Switch to builtin `f16` and `f128` once they are stable. + $apfloat::from_bits($bits::from_le_bytes(match ($value).try_into() { + Ok(it) => it, + Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $apfloat).into())), + }).into()) + }; } macro_rules! not_supported { @@ -1110,6 +1121,10 @@ impl Evaluator<'_> { } if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) { match f { + chalk_ir::FloatTy::F16 => { + let c = -from_bytes!(f16, u16, c); + Owned(u16::try_from(c.to_bits()).unwrap().to_le_bytes().into()) + } chalk_ir::FloatTy::F32 => { let c = -from_bytes!(f32, c); Owned(c.to_le_bytes().into()) @@ -1118,6 +1133,10 @@ impl Evaluator<'_> { let c = -from_bytes!(f64, c); Owned(c.to_le_bytes().into()) } + chalk_ir::FloatTy::F128 => { + let c = -from_bytes!(f128, u128, c); + Owned(c.to_bits().to_le_bytes().into()) + } } } else { let mut c = c.to_vec(); @@ -1169,6 +1188,39 @@ impl Evaluator<'_> { } if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) { match f { + chalk_ir::FloatTy::F16 => { + let l = from_bytes!(f16, u16, lc); + let r = from_bytes!(f16, u16, rc); + match op { + BinOp::Ge + | BinOp::Gt + | BinOp::Le + | BinOp::Lt + | BinOp::Eq + | BinOp::Ne => { + let r = op.run_compare(l, r) as u8; + Owned(vec![r]) + } + BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => { + let r = match op { + BinOp::Add => l + r, + BinOp::Sub => l - r, + BinOp::Mul => l * r, + BinOp::Div => l / r, + _ => unreachable!(), + }; + Owned( + u16::try_from(r.value.to_bits()) + .unwrap() + .to_le_bytes() + .into(), + ) + } + it => not_supported!( + "invalid binop {it:?} on floating point operators" + ), + } + } chalk_ir::FloatTy::F32 => { let l = from_bytes!(f32, lc); let r = from_bytes!(f32, rc); @@ -1225,6 +1277,34 @@ impl Evaluator<'_> { ), } } + chalk_ir::FloatTy::F128 => { + let l = from_bytes!(f128, u128, lc); + let r = from_bytes!(f128, u128, rc); + match op { + BinOp::Ge + | BinOp::Gt + | BinOp::Le + | BinOp::Lt + | BinOp::Eq + | BinOp::Ne => { + let r = op.run_compare(l, r) as u8; + Owned(vec![r]) + } + BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => { + let r = match op { + BinOp::Add => l + r, + BinOp::Sub => l - r, + BinOp::Mul => l * r, + BinOp::Div => l / r, + _ => unreachable!(), + }; + Owned(r.value.to_bits().to_le_bytes().into()) + } + it => not_supported!( + "invalid binop {it:?} on floating 
point operators" + ), + } + } } } else { let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_))); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index 3438712049e..ce22e9d2c2c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -627,6 +627,7 @@ impl Evaluator<'_> { if let Some(name) = name.strip_prefix("atomic_") { return self.exec_atomic_intrinsic(name, args, generic_args, destination, locals, span); } + // FIXME(#17451): Add `f16` and `f128` intrinsics. if let Some(name) = name.strip_suffix("f64") { let result = match name { "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 09302846f1b..1a0a1b780a1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -19,6 +19,7 @@ use hir_def::{ }; use hir_expand::name::Name; use la_arena::ArenaMap; +use rustc_apfloat::Float; use rustc_hash::FxHashMap; use syntax::TextRange; use triomphe::Arc; @@ -183,7 +184,7 @@ impl MirLowerError { }, MirLowerError::GenericArgNotProvided(id, subst) => { let parent = id.parent; - let param = &db.generic_params(parent).type_or_consts[id.local_id]; + let param = &db.generic_params(parent)[id.local_id]; writeln!( f, "Generic arg not provided for {}", @@ -483,7 +484,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) } ValueNs::GenericParam(p) => { - let Some(def) = self.owner.as_generic_def_id() else { + let Some(def) = self.owner.as_generic_def_id(self.db.upcast()) else { not_supported!("owner without generic def id"); }; let gen = generics(self.db.upcast(), def); @@ -1330,7 +1331,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn placeholder_subst(&mut self) -> Substitution { - match self.owner.as_generic_def_id() { + match self.owner.as_generic_def_id(self.db.upcast()) { Some(it) => TyBuilder::placeholder_subst(self.db, it), None => Substitution::empty(Interner), } @@ -1432,10 +1433,14 @@ impl<'ctx> MirLowerCtx<'ctx> { hir_def::hir::Literal::Int(it, _) => Box::from(&it.to_le_bytes()[0..size()?]), hir_def::hir::Literal::Uint(it, _) => Box::from(&it.to_le_bytes()[0..size()?]), hir_def::hir::Literal::Float(f, _) => match size()? 
{ - 8 => Box::new(f.into_f64().to_le_bytes()), - 4 => Box::new(f.into_f32().to_le_bytes()), + 16 => Box::new(f.to_f128().to_bits().to_le_bytes()), + 8 => Box::new(f.to_f64().to_le_bytes()), + 4 => Box::new(f.to_f32().to_le_bytes()), + 2 => Box::new(u16::try_from(f.to_f16().to_bits()).unwrap().to_le_bytes()), _ => { - return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes")) + return Err(MirLowerError::TypeError( + "float with size other than 2, 4, 8 or 16 bytes", + )) } }, }; @@ -2160,9 +2165,7 @@ pub fn lower_to_mir( root_expr: ExprId, ) -> Result<MirBody> { if infer.has_errors { - return Err(MirLowerError::TypeMismatch( - infer.type_mismatches().next().map(|(_, it)| it.clone()), - )); + return Err(MirLowerError::TypeMismatch(None)); } let mut ctx = MirLowerCtx::new(db, owner, body, infer); // 0 is return local diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index 43afa615048..172dea02e61 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -302,7 +302,7 @@ pub fn monomorphized_mir_body_query( subst: Substitution, trait_env: Arc<crate::TraitEnvironment>, ) -> Result<Arc<MirBody>, MirLowerError> { - let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; let body = db.mir_body(owner)?; let mut body = (*body).clone(); @@ -327,7 +327,7 @@ pub fn monomorphized_mir_body_for_closure_query( trait_env: Arc<crate::TraitEnvironment>, ) -> Result<Arc<MirBody>, MirLowerError> { let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into()); - let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; let body = db.mir_body_for_closure(closure)?; let mut body = (*body).clone(); @@ -343,7 +343,7 @@ pub fn monomorphize_mir_body_bad( trait_env: Arc<crate::TraitEnvironment>, ) -> Result<MirBody, MirLowerError> { let owner = body.owner; - let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; filler.fill_body(&mut body)?; Ok(body) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs index d7f48c69a56..a4e077ba635 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs @@ -27,8 +27,10 @@ pub fn uint_ty_to_string(ty: UintTy) -> &'static str { pub fn float_ty_to_string(ty: FloatTy) -> &'static str { match ty { + FloatTy::F16 => "f16", FloatTy::F32 => "f32", FloatTy::F64 => "f64", + FloatTy::F128 => "f128", } } @@ -56,7 +58,9 @@ pub(super) fn uint_ty_from_builtin(t: BuiltinUint) -> UintTy { pub(super) fn float_ty_from_builtin(t: BuiltinFloat) -> FloatTy { match t { + BuiltinFloat::F16 => FloatTy::F16, BuiltinFloat::F32 => FloatTy::F32, BuiltinFloat::F64 => FloatTy::F64, + BuiltinFloat::F128 => FloatTy::F128, } } diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs index 5e040a60e29..1c1f7055efd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs @@ -111,8 +111,10 @@ fn infer_literal_pattern() { if let "foo" = any() {} if let 1 = any() {} if let 1u32 = any() {} + if let 1f16 = any() {} if let 1f32 = any() {} if let 1.0 = any() {} + if let 1f128 = any() {} if let true = any() {} } "#, @@ -121,7 +123,7 @@ fn infer_literal_pattern() { 19..26 'loop {}': ! 24..26 '{}': () 37..38 'x': &'? i32 - 46..208 '{ ...) {} }': () + 46..263 '{ ...) {} }': () 52..75 'if let...y() {}': () 55..72 'let "f... any()': bool 59..64 '"foo"': &'static str @@ -145,25 +147,39 @@ fn infer_literal_pattern() { 124..126 '{}': () 131..153 'if let...y() {}': () 134..150 'let 1f... any()': bool - 138..142 '1f32': f32 - 138..142 '1f32': f32 - 145..148 'any': fn any<f32>() -> f32 - 145..150 'any()': f32 + 138..142 '1f16': f16 + 138..142 '1f16': f16 + 145..148 'any': fn any<f16>() -> f16 + 145..150 'any()': f16 151..153 '{}': () - 158..179 'if let...y() {}': () - 161..176 'let 1.0 = any()': bool - 165..168 '1.0': f64 - 165..168 '1.0': f64 - 171..174 'any': fn any<f64>() -> f64 - 171..176 'any()': f64 - 177..179 '{}': () - 184..206 'if let...y() {}': () - 187..203 'let tr... any()': bool - 191..195 'true': bool - 191..195 'true': bool - 198..201 'any': fn any<bool>() -> bool - 198..203 'any()': bool + 158..180 'if let...y() {}': () + 161..177 'let 1f... any()': bool + 165..169 '1f32': f32 + 165..169 '1f32': f32 + 172..175 'any': fn any<f32>() -> f32 + 172..177 'any()': f32 + 178..180 '{}': () + 185..206 'if let...y() {}': () + 188..203 'let 1.0 = any()': bool + 192..195 '1.0': f64 + 192..195 '1.0': f64 + 198..201 'any': fn any<f64>() -> f64 + 198..203 'any()': f64 204..206 '{}': () + 211..234 'if let...y() {}': () + 214..231 'let 1f... any()': bool + 218..223 '1f128': f128 + 218..223 '1f128': f128 + 226..229 'any': fn any<f128>() -> f128 + 226..231 'any()': f128 + 232..234 '{}': () + 239..261 'if let...y() {}': () + 242..258 'let tr... any()': bool + 246..250 'true': bool + 246..250 'true': bool + 253..256 'any': fn any<bool>() -> bool + 253..258 'any()': bool + 259..261 '{}': () "#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 3aa94be755c..aa7b00b8deb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -1999,3 +1999,45 @@ where "#, ); } + +#[test] +fn tait_async_stack_overflow_17199() { + check_types( + r#" + //- minicore: fmt, future + type Foo = impl core::fmt::Debug; + + async fn foo() -> Foo { + () + } + + async fn test() { + let t = foo().await; + // ^ impl Debug + } +"#, + ); +} + +#[test] +fn lifetime_params_move_param_defaults() { + check_types( + r#" +pub struct Thing<'s, T = u32>; + +impl <'s> Thing<'s> { + pub fn new() -> Thing<'s> { + Thing + //^^^^^ Thing<'?, u32> + } +} + +fn main() { + let scope = + //^^^^^ &'? 
Thing<'?, u32> + &Thing::new(); + //^^^^^^^^^^^^ Thing<'?, u32> +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index e2cd7fa26b2..d83a34298ea 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -397,8 +397,10 @@ fn infer_literals() { r##" fn test() { 5i32; + 5f16; 5f32; 5f64; + 5f128; "hello"; b"bytes"; 'c'; @@ -421,26 +423,28 @@ h"; } "##, expect![[r##" - 18..478 '{ ... }': () + 18..515 '{ ... }': () 32..36 '5i32': i32 - 50..54 '5f32': f32 - 68..72 '5f64': f64 - 86..93 '"hello"': &'static str - 107..115 'b"bytes"': &'static [u8; 5] - 129..132 ''c'': char - 146..150 'b'b'': u8 - 164..168 '3.14': f64 - 182..186 '5000': i32 - 200..205 'false': bool - 219..223 'true': bool - 237..333 'r#" ... "#': &'static str - 347..357 'br#"yolo"#': &'static [u8; 4] - 375..376 'a': &'static [u8; 4] - 379..403 'b"a\x2... c"': &'static [u8; 4] - 421..422 'b': &'static [u8; 4] - 425..433 'br"g\ h"': &'static [u8; 4] - 451..452 'c': &'static [u8; 6] - 455..467 'br#"x"\"yb"#': &'static [u8; 6] + 50..54 '5f16': f16 + 68..72 '5f32': f32 + 86..90 '5f64': f64 + 104..109 '5f128': f128 + 123..130 '"hello"': &'static str + 144..152 'b"bytes"': &'static [u8; 5] + 166..169 ''c'': char + 183..187 'b'b'': u8 + 201..205 '3.14': f64 + 219..223 '5000': i32 + 237..242 'false': bool + 256..260 'true': bool + 274..370 'r#" ... "#': &'static str + 384..394 'br#"yolo"#': &'static [u8; 4] + 412..413 'a': &'static [u8; 4] + 416..440 'b"a\x2... c"': &'static [u8; 4] + 458..459 'b': &'static [u8; 4] + 462..470 'br"g\ h"': &'static [u8; 4] + 488..489 'c': &'static [u8; 6] + 492..504 'br#"x"\"yb"#': &'static [u8; 6] "##]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index 18fc8afd183..fb07e718d10 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -4824,3 +4824,76 @@ fn foo() { "#, ) } + +#[test] +fn nested_impl_traits() { + check_infer( + r#" +//- minicore: fn +trait Foo {} + +trait Bar<T> {} + +trait Baz { + type Assoc; +} + +struct Qux<T> { + qux: T, +} + +struct S; + +impl Foo for S {} + +fn not_allowed1(f: impl Fn(impl Foo)) { + let foo = S; + f(foo); +} + +// This caused stack overflow in #17498 +fn not_allowed2(f: impl Fn(&impl Foo)) { + let foo = S; + f(&foo); +} + +fn not_allowed3(bar: impl Bar<impl Foo>) {} + +// This also caused stack overflow +fn not_allowed4(bar: impl Bar<&impl Foo>) {} + +fn allowed1(baz: impl Baz<Assoc = impl Foo>) {} + +fn allowed2<'a>(baz: impl Baz<Assoc = &'a (impl Foo + 'a)>) {} + +fn allowed3(baz: impl Baz<Assoc = Qux<impl Foo>>) {} +"#, + expect![[r#" + 139..140 'f': impl Fn({unknown}) + ?Sized + 161..193 '{ ...oo); }': () + 171..174 'foo': S + 177..178 'S': S + 184..185 'f': impl Fn({unknown}) + ?Sized + 184..190 'f(foo)': () + 186..189 'foo': S + 251..252 'f': impl Fn(&'? {unknown}) + ?Sized + 274..307 '{ ...oo); }': () + 284..287 'foo': S + 290..291 'S': S + 297..298 'f': impl Fn(&'? {unknown}) + ?Sized + 297..304 'f(&foo)': () + 299..303 '&foo': &'? S + 300..303 'foo': S + 325..328 'bar': impl Bar<{unknown}> + ?Sized + 350..352 '{}': () + 405..408 'bar': impl Bar<&'? 
{unknown}> + ?Sized + 431..433 '{}': () + 447..450 'baz': impl Baz<Assoc = impl Foo + ?Sized> + ?Sized + 480..482 '{}': () + 500..503 'baz': impl Baz<Assoc = &'a impl Foo + 'a + ?Sized> + ?Sized + 544..546 '{}': () + 560..563 'baz': impl Baz<Assoc = Qux<impl Foo + ?Sized>> + ?Sized + 598..600 '{}': () + "#]], + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 969999cdb84..738e8421463 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -157,8 +157,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra let generic_params = db.generic_params(trait_.into()); let trait_self = generic_params.trait_self_param(); generic_params - .where_predicates - .iter() + .where_predicates() .filter_map(|pred| match pred { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 79069ed66bf..72e79af75df 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -452,7 +452,7 @@ impl HirDisplay for TypeOrConstParam { impl HirDisplay for TypeParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { let params = f.db.generic_params(self.id.parent()); - let param_data = ¶ms.type_or_consts[self.id.local_id()]; + let param_data = ¶ms[self.id.local_id()]; let substs = TyBuilder::placeholder_subst(f.db, self.id.parent()); let krate = self.id.parent().krate(f.db).id; let ty = @@ -539,11 +539,10 @@ fn write_generic_params( f: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { let params = f.db.generic_params(def); - if params.lifetimes.is_empty() - && params.type_or_consts.iter().all(|it| it.1.const_param().is_none()) + if params.iter_lt().next().is_none() + && params.iter_type_or_consts().all(|it| it.1.const_param().is_none()) && params - .type_or_consts - .iter() + .iter_type_or_consts() .filter_map(|it| it.1.type_param()) .all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList)) { @@ -560,11 +559,11 @@ fn write_generic_params( f.write_str(", ") } }; - for (_, lifetime) in params.lifetimes.iter() { + for (_, lifetime) in params.iter_lt() { delim(f)?; write!(f, "{}", lifetime.name.display(f.db.upcast()))?; } - for (_, ty) in params.type_or_consts.iter() { + for (_, ty) in params.iter_type_or_consts() { if let Some(name) = &ty.name() { match ty { TypeOrConstParamData::TypeParamData(ty) => { @@ -612,11 +611,11 @@ fn write_where_clause( } fn has_disaplayable_predicates(params: &Interned<GenericParams>) -> bool { - params.where_predicates.iter().any(|pred| { + params.where_predicates().any(|pred| { !matches!( pred, WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. 
} - if params.type_or_consts[*id].name().is_none() + if params[*id].name().is_none() ) }) } @@ -631,13 +630,13 @@ fn write_where_predicates( let is_unnamed_type_target = |params: &Interned<GenericParams>, target: &WherePredicateTypeTarget| { matches!(target, - WherePredicateTypeTarget::TypeOrConstParam(id) if params.type_or_consts[*id].name().is_none() + WherePredicateTypeTarget::TypeOrConstParam(id) if params[*id].name().is_none() ) }; let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target { WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f), - WherePredicateTypeTarget::TypeOrConstParam(id) => match params.type_or_consts[*id].name() { + WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { Some(name) => write!(f, "{}", name.display(f.db.upcast())), None => f.write_str("{unnamed}"), }, @@ -653,7 +652,7 @@ fn write_where_predicates( _ => false, }; - let mut iter = params.where_predicates.iter().peekable(); + let mut iter = params.where_predicates().peekable(); while let Some(pred) = iter.next() { if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) { continue; diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs index 887227bf4d0..2ad39817b2f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs +++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs @@ -182,7 +182,6 @@ impl From<GenericDef> for GenericDefId { GenericDef::TraitAlias(it) => GenericDefId::TraitAliasId(it.id), GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), GenericDef::Impl(it) => GenericDefId::ImplId(it.id), - GenericDef::Variant(it) => GenericDefId::EnumVariantId(it.into()), GenericDef::Const(it) => GenericDefId::ConstId(it.id), } } @@ -197,7 +196,6 @@ impl From<GenericDefId> for GenericDef { GenericDefId::TraitAliasId(it) => GenericDef::TraitAlias(it.into()), GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()), GenericDefId::ImplId(it) => GenericDef::Impl(it.into()), - GenericDefId::EnumVariantId(it) => GenericDef::Variant(it.into()), GenericDefId::ConstId(it) => GenericDef::Const(it.into()), } } diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs index 929c8b3c09e..18e27130f37 100644 --- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs @@ -5,10 +5,10 @@ use either::Either; use hir_def::{ nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource as _}, - Lookup, MacroId, VariantId, + CallableDefId, Lookup, MacroId, VariantId, }; use hir_expand::{HirFileId, InFile}; -use hir_ty::{db::InternedClosure, CallableDefId}; +use hir_ty::db::InternedClosure; use syntax::ast; use tt::TextRange; diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index c1fe8a8b316..016f3418517 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -17,7 +17,6 @@ //! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: //! <https://www.tedinski.com/2018/02/06/system-boundaries.html>. 
-#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "512"] @@ -52,11 +51,11 @@ use hir_def::{ path::ImportAlias, per_ns::PerNs, resolver::{HasResolver, Resolver}, - AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, - EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, - ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander, - ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId, TypeOrConstParamId, - TypeParamId, UnionId, + AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId, + DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, + HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, + MacroExpander, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId, + TypeOrConstParamId, TypeParamId, UnionId, }; use hir_expand::{ attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, AstId, MacroCallKind, ValueResult, @@ -71,7 +70,7 @@ use hir_ty::{ mir::{interpret_mir, MutBorrowKind}, primitive::UintTy, traits::FnTrait, - AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, + AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId, WhereClause, @@ -666,7 +665,7 @@ impl Module { } let parent = impl_def.id.into(); let generic_params = db.generic_params(parent); - let lifetime_params = generic_params.lifetimes.iter().map(|(local_id, _)| { + let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| { GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id }) }); let type_params = generic_params @@ -760,7 +759,7 @@ impl Module { impl_assoc_items_scratch.clear(); } - for &item in &db.impl_data(impl_def.id).items { + for &item in db.impl_data(impl_def.id).items.iter() { AssocItem::from(item).diagnostics(db, acc, style_lints); } } @@ -1144,7 +1143,7 @@ impl Field { let generic_def_id: GenericDefId = match self.parent { VariantDef::Struct(it) => it.id.into(), VariantDef::Union(it) => it.id.into(), - VariantDef::Variant(it) => it.id.into(), + VariantDef::Variant(it) => it.id.lookup(db.upcast()).parent.into(), }; let substs = TyBuilder::placeholder_subst(db, generic_def_id); let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); @@ -1177,7 +1176,9 @@ impl Field { db.layout_of_ty( self.ty(db).ty, db.trait_environment(match hir_def::VariantId::from(self.parent) { - hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id), + hir_def::VariantId::EnumVariantId(id) => { + GenericDefId::AdtId(id.lookup(db.upcast()).parent.into()) + } hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()), hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()), }), @@ -1539,8 +1540,7 @@ impl Adt { resolver .generic_params() .and_then(|gp| { - gp.lifetimes - .iter() + gp.iter_lt() // there should only be a single lifetime // but `Arena` requires to use an iterator .nth(0) @@ -2501,9 +2501,8 @@ impl Trait { db: &dyn HirDatabase, count_required_only: bool, ) -> usize { - db.generic_params(GenericDefId::from(self.id)) - .type_or_consts - .iter() + 
db.generic_params(self.id.into()) + .iter_type_or_consts() .filter(|(_, ty)| !matches!(ty, TypeOrConstParamData::TypeParamData(ty) if ty.provenance != TypeParamProvenance::TypeParamList)) .filter(|(_, ty)| !count_required_only || !ty.has_default()) .count() @@ -2623,6 +2622,13 @@ impl BuiltinType { matches!(self.inner, hir_def::builtin_type::BuiltinType::Float(_)) } + pub fn is_f16(&self) -> bool { + matches!( + self.inner, + hir_def::builtin_type::BuiltinType::Float(hir_def::builtin_type::BuiltinFloat::F16) + ) + } + pub fn is_f32(&self) -> bool { matches!( self.inner, @@ -2637,6 +2643,13 @@ impl BuiltinType { ) } + pub fn is_f128(&self) -> bool { + matches!( + self.inner, + hir_def::builtin_type::BuiltinType::Float(hir_def::builtin_type::BuiltinFloat::F128) + ) + } + pub fn is_char(&self) -> bool { matches!(self.inner, hir_def::builtin_type::BuiltinType::Char) } @@ -3107,9 +3120,6 @@ pub enum GenericDef { TraitAlias(TraitAlias), TypeAlias(TypeAlias), Impl(Impl), - // enum variants cannot have generics themselves, but their parent enums - // can, and this makes some code easier to write - Variant(Variant), // consts can have type parameters from their parents (i.e. associated consts of traits) Const(Const), } @@ -3120,7 +3130,6 @@ impl_from!( TraitAlias, TypeAlias, Impl, - Variant, Const for GenericDef ); @@ -3128,7 +3137,7 @@ impl_from!( impl GenericDef { pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> { let generics = db.generic_params(self.into()); - let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| { + let ty_params = generics.iter_type_or_consts().map(|(local_id, _)| { let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } }; match toc.split(db) { Either::Left(it) => GenericParam::ConstParam(it), @@ -3145,8 +3154,7 @@ impl GenericDef { pub fn lifetime_params(self, db: &dyn HirDatabase) -> Vec<LifetimeParam> { let generics = db.generic_params(self.into()); generics - .lifetimes - .iter() + .iter_lt() .map(|(local_id, _)| LifetimeParam { id: LifetimeParamId { parent: self.into(), local_id }, }) @@ -3156,8 +3164,7 @@ impl GenericDef { pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> { let generics = db.generic_params(self.into()); generics - .type_or_consts - .iter() + .iter_type_or_consts() .map(|(local_id, _)| TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id }, }) @@ -3499,7 +3506,7 @@ impl TypeParam { /// argument)? 
pub fn is_implicit(self, db: &dyn HirDatabase) -> bool { let params = db.generic_params(self.id.parent()); - let data = ¶ms.type_or_consts[self.id.local_id()]; + let data = ¶ms[self.id.local_id()]; match data.type_param().unwrap().provenance { hir_def::generics::TypeParamProvenance::TypeParamList => false, hir_def::generics::TypeParamProvenance::TraitSelf @@ -3553,7 +3560,7 @@ pub struct LifetimeParam { impl LifetimeParam { pub fn name(self, db: &dyn HirDatabase) -> Name { let params = db.generic_params(self.id.parent); - params.lifetimes[self.id.local_id].name.clone() + params[self.id.local_id].name.clone() } pub fn module(self, db: &dyn HirDatabase) -> Module { @@ -3577,7 +3584,7 @@ impl ConstParam { pub fn name(self, db: &dyn HirDatabase) -> Name { let params = db.generic_params(self.id.parent()); - match params.type_or_consts[self.id.local_id()].name() { + match params[self.id.local_id()].name() { Some(it) => it.clone(), None => { never!(); @@ -3605,9 +3612,9 @@ impl ConstParam { } fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<GenericArg> { - let params = db.generic_defaults(id.parent); let local_idx = hir_ty::param_idx(db, id)?; - let ty = params.get(local_idx)?.clone(); + let defaults = db.generic_defaults(id.parent); + let ty = defaults.get(local_idx)?.clone(); let subst = TyBuilder::placeholder_subst(db, id.parent); Some(ty.substitute(Interner, &subst)) } @@ -3620,7 +3627,7 @@ pub struct TypeOrConstParam { impl TypeOrConstParam { pub fn name(self, db: &dyn HirDatabase) -> Name { let params = db.generic_params(self.id.parent); - match params.type_or_consts[self.id.local_id].name() { + match params[self.id.local_id].name() { Some(n) => n.clone(), _ => Name::missing(), } @@ -3636,7 +3643,7 @@ impl TypeOrConstParam { pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> { let params = db.generic_params(self.id.parent); - match ¶ms.type_or_consts[self.id.local_id] { + match ¶ms[self.id.local_id] { hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) }) } @@ -3655,7 +3662,7 @@ impl TypeOrConstParam { pub fn as_type_param(self, db: &dyn HirDatabase) -> Option<TypeParam> { let params = db.generic_params(self.id.parent); - match ¶ms.type_or_consts[self.id.local_id] { + match ¶ms[self.id.local_id] { hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { Some(TypeParam { id: TypeParamId::from_unchecked(self.id) }) } @@ -3665,7 +3672,7 @@ impl TypeOrConstParam { pub fn as_const_param(self, db: &dyn HirDatabase) -> Option<ConstParam> { let params = db.generic_params(self.id.parent); - match ¶ms.type_or_consts[self.id.local_id] { + match ¶ms[self.id.local_id] { hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None, hir_def::generics::TypeOrConstParamData::ConstParamData(_) => { Some(ConstParam { id: ConstParamId::from_unchecked(self.id) }) @@ -4052,7 +4059,9 @@ impl Type { ValueTyDefId::FunctionId(it) => GenericDefId::FunctionId(it), ValueTyDefId::StructId(it) => GenericDefId::AdtId(AdtId::StructId(it)), ValueTyDefId::UnionId(it) => GenericDefId::AdtId(AdtId::UnionId(it)), - ValueTyDefId::EnumVariantId(it) => GenericDefId::EnumVariantId(it), + ValueTyDefId::EnumVariantId(it) => { + GenericDefId::AdtId(AdtId::EnumId(it.lookup(db.upcast()).parent)) + } ValueTyDefId::StaticId(_) => return Type::new(db, def, ty.skip_binders().clone()), }, ); diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs 
b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 8e71a54f804..81c57f6caeb 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -68,38 +68,44 @@ impl SourceAnalyzer { pub(crate) fn new_for_body( db: &dyn HirDatabase, def: DefWithBodyId, - node @ InFile { file_id, .. }: InFile<&SyntaxNode>, + node: InFile<&SyntaxNode>, offset: Option<TextSize>, ) -> SourceAnalyzer { - let (body, source_map) = db.body_with_source_map(def); - let scopes = db.expr_scopes(def); - let scope = match offset { - None => scope_for(&scopes, &source_map, node), - Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset), - }; - let resolver = resolver_for_scope(db.upcast(), def, scope); - SourceAnalyzer { - resolver, - def: Some((def, body, source_map)), - infer: Some(db.infer(def)), - file_id, - } + Self::new_for_body_(db, def, node, offset, Some(db.infer(def))) } pub(crate) fn new_for_body_no_infer( db: &dyn HirDatabase, def: DefWithBodyId, + node: InFile<&SyntaxNode>, + offset: Option<TextSize>, + ) -> SourceAnalyzer { + Self::new_for_body_(db, def, node, offset, None) + } + + pub(crate) fn new_for_body_( + db: &dyn HirDatabase, + def: DefWithBodyId, node @ InFile { file_id, .. }: InFile<&SyntaxNode>, offset: Option<TextSize>, + infer: Option<Arc<InferenceResult>>, ) -> SourceAnalyzer { let (body, source_map) = db.body_with_source_map(def); let scopes = db.expr_scopes(def); let scope = match offset { - None => scope_for(&scopes, &source_map, node), - Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset), + None => scope_for(db, &scopes, &source_map, node), + Some(offset) => { + debug_assert!( + node.text_range().contains_inclusive(offset), + "{:?} not in {:?}", + offset, + node.text_range() + ); + scope_for_offset(db, &scopes, &source_map, node.file_id, offset) + } }; let resolver = resolver_for_scope(db.upcast(), def, scope); - SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id } + SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer, file_id } } pub(crate) fn new_for_resolver( @@ -662,7 +668,6 @@ impl SourceAnalyzer { return resolved; } - // This must be a normal source file rather than macro file. 
let ctx = LowerCtx::new(db.upcast(), self.file_id); let hir_path = Path::from_src(&ctx, path.clone())?; @@ -955,14 +960,15 @@ impl SourceAnalyzer { } fn scope_for( + db: &dyn HirDatabase, scopes: &ExprScopes, source_map: &BodySourceMap, node: InFile<&SyntaxNode>, ) -> Option<ScopeId> { - node.value - .ancestors() - .filter_map(ast::Expr::cast) - .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it))) + node.ancestors_with_macros(db.upcast()) + .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind())) + .filter_map(|it| it.map(ast::Expr::cast).transpose()) + .filter_map(|it| source_map.node_expr(it.as_ref())) .find_map(|it| scopes.scope_for(it)) } @@ -988,8 +994,8 @@ fn scope_for_offset( Some(it.file_id.macro_file()?.call_node(db.upcast())) }) .find(|it| it.file_id == from_file) - .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?; - Some((source.value.text_range(), scope)) + .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?; + Some((source.text_range(), scope)) }) .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end()) // find containing scope diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index 3b88836c24b..02905ca2ce4 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -231,7 +231,7 @@ impl<'a> SymbolCollector<'a> { let impl_data = self.db.impl_data(impl_id); let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string())); self.with_container_name(impl_name, |s| { - for &assoc_item_id in &impl_data.items { + for &assoc_item_id in impl_data.items.iter() { s.push_assoc_item(assoc_item_id) } }) diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search.rs b/src/tools/rust-analyzer/crates/hir/src/term_search.rs index aa046b02e2e..6f845137084 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search.rs @@ -93,12 +93,6 @@ struct LookupTable { data: FxHashMap<Type, AlternativeExprs>, /// New types reached since last query by the `NewTypesKey` new_types: FxHashMap<NewTypesKey, Vec<Type>>, - /// ScopeDefs that are not interesting any more - exhausted_scopedefs: FxHashSet<ScopeDef>, - /// ScopeDefs that were used in current round - round_scopedef_hits: FxHashSet<ScopeDef>, - /// Amount of rounds since scopedef was first used. - rounds_since_sopedef_hit: FxHashMap<ScopeDef, u32>, /// Types queried but not present types_wishlist: FxHashSet<Type>, /// Threshold to squash trees to `Many` @@ -212,37 +206,6 @@ impl LookupTable { } } - /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more - fn mark_exhausted(&mut self, def: ScopeDef) { - self.exhausted_scopedefs.insert(def); - } - - /// Mark `ScopeDef` as used meaning we managed to produce something useful from it - fn mark_fulfilled(&mut self, def: ScopeDef) { - self.round_scopedef_hits.insert(def); - } - - /// Start new round (meant to be called at the beginning of iteration in `term_search`) - /// - /// This functions marks some `ScopeDef`s as exhausted if there have been - /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`. 
- fn new_round(&mut self) { - for def in &self.round_scopedef_hits { - let hits = - self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0); - const MAX_ROUNDS_AFTER_HIT: u32 = 2; - if *hits > MAX_ROUNDS_AFTER_HIT { - self.exhausted_scopedefs.insert(*def); - } - } - self.round_scopedef_hits.clear(); - } - - /// Get exhausted `ScopeDef`s - fn exhausted_scopedefs(&self) -> &FxHashSet<ScopeDef> { - &self.exhausted_scopedefs - } - /// Types queried but not found fn types_wishlist(&mut self) -> &FxHashSet<Type> { &self.types_wishlist @@ -275,7 +238,7 @@ pub struct TermSearchConfig { impl Default for TermSearchConfig { fn default() -> Self { - Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 400 } + Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 1200 } } } @@ -328,19 +291,12 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> { solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup)); while should_continue() { - lookup.new_round(); - solutions.extend(tactics::data_constructor(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::free_function(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::make_tuple(ctx, &defs, &mut lookup, should_continue)); - - // Discard not interesting `ScopeDef`s for speedup - for def in lookup.exhausted_scopedefs() { - defs.remove(def); - } } solutions.into_iter().filter(|it| !it.is_many()).unique().collect() diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs index bb687f5e73d..0c8f6932c71 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs @@ -9,8 +9,8 @@ use hir_ty::{ use itertools::Itertools; use crate::{ - Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, GenericDef, Local, - ModuleDef, SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, + Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, Local, ModuleDef, + SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, }; /// Helper function to get path to `ModuleDef` @@ -35,43 +35,6 @@ fn mod_item_path_str( .ok_or(DisplaySourceCodeError::PathNotFound) } -/// Helper function to get path to `Type` -fn type_path( - sema_scope: &SemanticsScope<'_>, - ty: &Type, - cfg: ImportPathConfig, -) -> Result<String, DisplaySourceCodeError> { - let db = sema_scope.db; - let m = sema_scope.module(); - - match ty.as_adt() { - Some(adt) => { - let ty_name = ty.display_source_code(db, m.id, true)?; - - let mut path = mod_item_path(sema_scope, &ModuleDef::Adt(adt), cfg).unwrap(); - path.pop_segment(); - let path = path.display(db.upcast()).to_string(); - let res = match path.is_empty() { - true => ty_name, - false => format!("{path}::{ty_name}"), - }; - Ok(res) - } - None => ty.display_source_code(db, m.id, true), - } -} - -/// Helper function to filter out generic parameters that are default -fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec<Type> { - def.type_or_const_params(db) - .into_iter() - .filter_map(|it| it.as_type_param(db)) - .zip(generics) - .filter(|(tp, arg)| 
tp.default(db).as_ref() != Some(arg)) - .map(|(_, arg)| arg.clone()) - .collect() -} - /// Type tree shows how can we get from set of types to some type. /// /// Consider the following code as an example @@ -208,20 +171,7 @@ impl Expr { None => Ok(format!("{target_str}.{func_name}({args})")), } } - Expr::Variant { variant, generics, params } => { - let generics = non_default_generics(db, (*variant).into(), generics); - let generics_str = match generics.is_empty() { - true => String::new(), - false => { - let generics = generics - .iter() - .map(|it| type_path(sema_scope, it, cfg)) - .collect::<Result<Vec<String>, DisplaySourceCodeError>>()? - .into_iter() - .join(", "); - format!("::<{generics}>") - } - }; + Expr::Variant { variant, params, .. } => { let inner = match variant.kind(db) { StructKind::Tuple => { let args = params @@ -230,7 +180,7 @@ impl Expr { .collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .into_iter() .join(", "); - format!("{generics_str}({args})") + format!("({args})") } StructKind::Record => { let fields = variant.fields(db); @@ -248,16 +198,15 @@ impl Expr { .collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .into_iter() .join(", "); - format!("{generics_str}{{ {args} }}") + format!("{{ {args} }}") } - StructKind::Unit => generics_str, + StructKind::Unit => String::new(), }; let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?; Ok(format!("{prefix}{inner}")) } - Expr::Struct { strukt, generics, params } => { - let generics = non_default_generics(db, (*strukt).into(), generics); + Expr::Struct { strukt, params, .. } => { let inner = match strukt.kind(db) { StructKind::Tuple => { let args = params @@ -286,18 +235,7 @@ impl Expr { .join(", "); format!(" {{ {args} }}") } - StructKind::Unit => match generics.is_empty() { - true => String::new(), - false => { - let generics = generics - .iter() - .map(|it| type_path(sema_scope, it, cfg)) - .collect::<Result<Vec<String>, DisplaySourceCodeError>>()? 
- .into_iter() - .join(", "); - format!("::<{generics}>") - } - }, + StructKind::Unit => String::new(), }; let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?; diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs index b738e6af77b..1b0e6f8bd5b 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs @@ -17,11 +17,11 @@ use itertools::Itertools; use rustc_hash::FxHashSet; use crate::{ - Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type, - TypeParam, Variant, + Adt, AssocItem, GenericDef, GenericParam, HasAttrs, HasVisibility, Impl, ModuleDef, ScopeDef, + Type, TypeParam, }; -use crate::term_search::{Expr, TermSearchConfig}; +use crate::term_search::Expr; use super::{LookupTable, NewTypesKey, TermSearchCtx}; @@ -74,8 +74,6 @@ pub(super) fn trivial<'a, DB: HirDatabase>( _ => None, }?; - lookup.mark_exhausted(*def); - let ty = expr.ty(db); lookup.insert(ty.clone(), std::iter::once(expr.clone())); @@ -124,6 +122,10 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>( .filter(move |it| it.is_visible_from(db, module)) .filter_map(AssocItem::as_const) .filter_map(|it| { + if it.attrs(db).is_unstable() { + return None; + } + let expr = Expr::Const(it); let ty = it.ty(db); @@ -151,212 +153,101 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>( /// * `should_continue` - Function that indicates when to stop iterating pub(super) fn data_constructor<'a, DB: HirDatabase>( ctx: &'a TermSearchCtx<'a, DB>, - defs: &'a FxHashSet<ScopeDef>, + _defs: &'a FxHashSet<ScopeDef>, lookup: &'a mut LookupTable, should_continue: &'a dyn std::ops::Fn() -> bool, ) -> impl Iterator<Item = Expr> + 'a { let db = ctx.sema.db; let module = ctx.scope.module(); - fn variant_helper( - db: &dyn HirDatabase, - lookup: &mut LookupTable, - should_continue: &dyn std::ops::Fn() -> bool, - parent_enum: Enum, - variant: Variant, - config: &TermSearchConfig, - ) -> Vec<(Type, Vec<Expr>)> { - // Ignore unstable - if variant.is_unstable(db) { - return Vec::new(); - } - - let generics = GenericDef::from(variant.parent_enum(db)); - let Some(type_params) = generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::<Option<Vec<TypeParam>>>() - else { - // Ignore enums with const generics - return Vec::new(); - }; - - // We currently do not check lifetime bounds so ignore all types that have something to do - // with them - if !generics.lifetime_params(db).is_empty() { - return Vec::new(); - } + lookup + .types_wishlist() + .clone() + .into_iter() + .chain(iter::once(ctx.goal.clone())) + .filter_map(|ty| ty.as_adt().map(|adt| (adt, ty))) + .filter(|_| should_continue()) + .filter_map(move |(adt, ty)| match adt { + Adt::Struct(strukt) => { + // Ignore unstable or not visible + if strukt.is_unstable(db) || !strukt.is_visible_from(db, module) { + return None; + } - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box<T, #[unstable] A: Allocator>` - if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { - return Vec::new(); - } + let generics = GenericDef::from(strukt); - let non_default_type_params_len = - type_params.iter().filter(|it| it.default(db).is_none()).count(); - - let enum_ty_shallow = Adt::from(parent_enum).ty(db); - let generic_params = lookup - .types_wishlist() - 
.clone() - .into_iter() - .filter(|ty| ty.could_unify_with(db, &enum_ty_shallow)) - .map(|it| it.type_arguments().collect::<Vec<Type>>()) - .chain((non_default_type_params_len == 0).then_some(Vec::new())); - - generic_params - .filter(|_| should_continue()) - .filter_map(move |generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = type_params - .iter() - .map(|it| it.default(db).or_else(|| g.next())) - .collect::<Option<_>>()?; + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return None; + } - let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned()); + if ty.contains_unknown() { + return None; + } // Ignore types that have something to do with lifetimes - if config.enable_borrowcheck && enum_ty.contains_reference(db) { + if ctx.config.enable_borrowcheck && ty.contains_reference(db) { return None; } + let fields = strukt.fields(db); + // Check if all fields are visible, otherwise we cannot fill them + if fields.iter().any(|it| !it.is_visible_from(db, module)) { + return None; + } + + let generics: Vec<_> = ty.type_arguments().collect(); // Early exit if some param cannot be filled from lookup - let param_exprs: Vec<Vec<Expr>> = variant - .fields(db) + let param_exprs: Vec<Vec<Expr>> = fields .into_iter() .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))) .collect::<Option<_>>()?; // Note that we need special case for 0 param constructors because of multi cartesian // product - let variant_exprs: Vec<Expr> = if param_exprs.is_empty() { - vec![Expr::Variant { variant, generics, params: Vec::new() }] + let exprs: Vec<Expr> = if param_exprs.is_empty() { + vec![Expr::Struct { strukt, generics, params: Vec::new() }] } else { param_exprs .into_iter() .multi_cartesian_product() - .map(|params| Expr::Variant { variant, generics: generics.clone(), params }) + .map(|params| Expr::Struct { strukt, generics: generics.clone(), params }) .collect() }; - lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned()); - - Some((enum_ty, variant_exprs)) - }) - .collect() - } - defs.iter() - .filter_map(move |def| match def { - ScopeDef::ModuleDef(ModuleDef::Variant(it)) => { - let variant_exprs = variant_helper( - db, - lookup, - should_continue, - it.parent_enum(db), - *it, - &ctx.config, - ); - if variant_exprs.is_empty() { - return None; - } - if GenericDef::from(it.parent_enum(db)) - .type_or_const_params(db) - .into_iter() - .filter_map(|it| it.as_type_param(db)) - .all(|it| it.default(db).is_some()) - { - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it))); - } - Some(variant_exprs) - } - ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => { - let exprs: Vec<(Type, Vec<Expr>)> = enum_ - .variants(db) - .into_iter() - .flat_map(|it| { - variant_helper(db, lookup, should_continue, *enum_, it, &ctx.config) - }) - .collect(); - - if exprs.is_empty() { - return None; - } - if GenericDef::from(*enum_) - .type_or_const_params(db) - .into_iter() - .filter_map(|it| it.as_type_param(db)) - .all(|it| it.default(db).is_some()) - { - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_)))); - } - - Some(exprs) + lookup.insert(ty.clone(), exprs.iter().cloned()); + Some((ty, exprs)) } - ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => { - // Ignore unstable and not visible - if it.is_unstable(db) || !it.is_visible_from(db, module) { + Adt::Enum(enum_) => { + // 
Ignore unstable or not visible + if enum_.is_unstable(db) || !enum_.is_visible_from(db, module) { return None; } - let generics = GenericDef::from(*it); - - // Ignore const params for now - let type_params = generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::<Option<Vec<TypeParam>>>()?; - + let generics = GenericDef::from(enum_); // We currently do not check lifetime bounds so ignore all types that have something to do // with them if !generics.lifetime_params(db).is_empty() { return None; } - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box<T, #[unstable] A: Allocator>` - if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + if ty.contains_unknown() { return None; } - let non_default_type_params_len = - type_params.iter().filter(|it| it.default(db).is_none()).count(); + // Ignore types that have something to do with lifetimes + if ctx.config.enable_borrowcheck && ty.contains_reference(db) { + return None; + } - let struct_ty_shallow = Adt::from(*it).ty(db); - let generic_params = lookup - .types_wishlist() - .clone() + let generics: Vec<_> = ty.type_arguments().collect(); + let exprs = enum_ + .variants(db) .into_iter() - .filter(|ty| ty.could_unify_with(db, &struct_ty_shallow)) - .map(|it| it.type_arguments().collect::<Vec<Type>>()) - .chain((non_default_type_params_len == 0).then_some(Vec::new())); - - let exprs = generic_params - .filter(|_| should_continue()) - .filter_map(|generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = type_params - .iter() - .map(|it| it.default(db).or_else(|| g.next())) - .collect::<Option<_>>()?; - - let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned()); - - // Ignore types that have something to do with lifetimes - if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { - return None; - } - let fields = it.fields(db); - // Check if all fields are visible, otherwise we cannot fill them - if fields.iter().any(|it| !it.is_visible_from(db, module)) { - return None; - } - + .filter_map(|variant| { // Early exit if some param cannot be filled from lookup - let param_exprs: Vec<Vec<Expr>> = fields + let param_exprs: Vec<Vec<Expr>> = variant + .fields(db) .into_iter() .map(|field| { lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())) @@ -365,36 +256,33 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>( // Note that we need special case for 0 param constructors because of multi cartesian // product - let struct_exprs: Vec<Expr> = if param_exprs.is_empty() { - vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }] + let variant_exprs: Vec<Expr> = if param_exprs.is_empty() { + vec![Expr::Variant { + variant, + generics: generics.clone(), + params: Vec::new(), + }] } else { param_exprs .into_iter() .multi_cartesian_product() - .map(|params| Expr::Struct { - strukt: *it, + .map(|params| Expr::Variant { + variant, generics: generics.clone(), params, }) .collect() }; - - if non_default_type_params_len == 0 { - // Fulfilled only if there are no generic parameters - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt( - Adt::Struct(*it), - ))); - } - lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned()); - - Some((struct_ty, struct_exprs)) + lookup.insert(ty.clone(), variant_exprs.iter().cloned()); + Some(variant_exprs) }) + .flatten() .collect(); - Some(exprs) + + Some((ty, exprs)) } - _ => None, + 
Adt::Union(_) => None, }) - .flatten() .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .flatten() } @@ -515,7 +403,6 @@ pub(super) fn free_function<'a, DB: HirDatabase>( .collect() }; - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it))); lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); Some((ret_ty, fn_exprs)) }) @@ -555,6 +442,8 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( lookup .new_types(NewTypesKey::ImplMethod) .into_iter() + .filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown())) + .filter(|_| should_continue()) .flat_map(|ty| { Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) }) @@ -563,26 +452,15 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( AssocItem::Function(f) => Some((imp, ty, f)), _ => None, }) + .filter(|_| should_continue()) .filter_map(move |(imp, ty, it)| { let fn_generics = GenericDef::from(it); let imp_generics = GenericDef::from(imp); - // Ignore const params for now - let imp_type_params = imp_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::<Option<Vec<TypeParam>>>()?; - - // Ignore const params for now - let fn_type_params = fn_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::<Option<Vec<TypeParam>>>()?; - // Ignore all functions that have something to do with lifetimes as we don't check them - if !fn_generics.lifetime_params(db).is_empty() { + if !fn_generics.lifetime_params(db).is_empty() + || !imp_generics.lifetime_params(db).is_empty() + { return None; } @@ -596,112 +474,59 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( return None; } - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box<T, #[unstable] A: Allocator>` - if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) - || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) - { + // Ignore functions with generics for now as they kill the performance + // Also checking bounds for generics is problematic + if !fn_generics.type_or_const_params(db).is_empty() { return None; } - // Double check that we have fully known type - if ty.type_arguments().any(|it| it.contains_unknown()) { + let ret_ty = it.ret_type_with_args(db, ty.type_arguments()); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr() + { return None; } - let non_default_fn_type_params_len = - fn_type_params.iter().filter(|it| it.default(db).is_none()).count(); - - // Ignore functions with generics for now as they kill the performance - // Also checking bounds for generics is problematic - if non_default_fn_type_params_len > 0 { + // Ignore functions that do not change the type + if ty.could_unify_with_deeply(db, &ret_ty) { return None; } - let generic_params = lookup - .iter_types() - .collect::<Vec<_>>() // Force take ownership - .into_iter() - .permutations(non_default_fn_type_params_len); + let self_ty = + it.self_param(db).expect("No self param").ty_with_args(db, ty.type_arguments()); - let exprs: Vec<_> = generic_params - .filter(|_| should_continue()) - .filter_map(|generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = ty - .type_arguments() - .map(Some) - .chain(fn_type_params.iter().map(|it| match it.default(db) { - Some(ty) => Some(ty), - None => { - let generic = 
g.next().expect("Missing type param"); - // Filter out generics that do not unify due to trait bounds - it.ty(db).could_unify_with(db, &generic).then_some(generic) - } - })) - .collect::<Option<_>>()?; - - let ret_ty = it.ret_type_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ); - // Filter out functions that return references - if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) - || ret_ty.is_raw_ptr() - { - return None; - } + // Ignore functions that have different self type + if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { + return None; + } - // Ignore functions that do not change the type - if ty.could_unify_with_deeply(db, &ret_ty) { - return None; - } + let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup"); - let self_ty = it - .self_param(db) - .expect("No self param") - .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned())); + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec<Vec<Expr>> = it + .params_without_self_with_args(db, ty.type_arguments()) + .into_iter() + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::<Option<_>>()?; - // Ignore functions that have different self type - if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { - return None; + let generics: Vec<_> = ty.type_arguments().collect(); + let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs) + .chain(param_exprs) + .multi_cartesian_product() + .map(|params| { + let mut params = params.into_iter(); + let target = Box::new(params.next().unwrap()); + Expr::Method { + func: it, + generics: generics.clone(), + target, + params: params.collect(), } - - let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup"); - - // Early exit if some param cannot be filled from lookup - let param_exprs: Vec<Vec<Expr>> = it - .params_without_self_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ) - .into_iter() - .map(|field| lookup.find_autoref(db, field.ty())) - .collect::<Option<_>>()?; - - let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs) - .chain(param_exprs) - .multi_cartesian_product() - .map(|params| { - let mut params = params.into_iter(); - let target = Box::new(params.next().unwrap()); - Expr::Method { - func: it, - generics: generics.clone(), - target, - params: params.collect(), - } - }) - .collect(); - - lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); - Some((ret_ty, fn_exprs)) }) .collect(); - Some(exprs) + + Some((ret_ty, fn_exprs)) }) - .flatten() .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .flatten() } @@ -773,9 +598,8 @@ pub(super) fn famous_types<'a, DB: HirDatabase>( Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" }, ] .into_iter() - .map(|exprs| { + .inspect(|exprs| { lookup.insert(exprs.ty(db), std::iter::once(exprs.clone())); - exprs }) .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal)) } @@ -805,6 +629,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( .clone() .into_iter() .chain(iter::once(ctx.goal.clone())) + .filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown())) .filter(|_| should_continue()) .flat_map(|ty| { Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) @@ -815,24 +640,11 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( AssocItem::Function(f) => Some((imp, ty, f)), _ => None, }) + .filter(|_| should_continue()) .filter_map(move |(imp, ty, it)| { let fn_generics = 
GenericDef::from(it); let imp_generics = GenericDef::from(imp); - // Ignore const params for now - let imp_type_params = imp_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::<Option<Vec<TypeParam>>>()?; - - // Ignore const params for now - let fn_type_params = fn_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::<Option<Vec<TypeParam>>>()?; - // Ignore all functions that have something to do with lifetimes as we don't check them if !fn_generics.lifetime_params(db).is_empty() || !imp_generics.lifetime_params(db).is_empty() @@ -850,104 +662,43 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( return None; } - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box<T, #[unstable] A: Allocator>` - if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) - || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) - { - return None; - } - - // Double check that we have fully known type - if ty.type_arguments().any(|it| it.contains_unknown()) { + // Ignore functions with generics for now as they kill the performance + // Also checking bounds for generics is problematic + if !fn_generics.type_or_const_params(db).is_empty() { return None; } - let non_default_fn_type_params_len = - fn_type_params.iter().filter(|it| it.default(db).is_none()).count(); - - // Ignore functions with generics for now as they kill the performance - // Also checking bounds for generics is problematic - if non_default_fn_type_params_len > 0 { + let ret_ty = it.ret_type_with_args(db, ty.type_arguments()); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr() + { return None; } - let generic_params = lookup - .iter_types() - .collect::<Vec<_>>() // Force take ownership + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec<Vec<Expr>> = it + .params_without_self_with_args(db, ty.type_arguments()) .into_iter() - .permutations(non_default_fn_type_params_len); - - let exprs: Vec<_> = generic_params - .filter(|_| should_continue()) - .filter_map(|generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = ty - .type_arguments() - .map(Some) - .chain(fn_type_params.iter().map(|it| match it.default(db) { - Some(ty) => Some(ty), - None => { - let generic = g.next().expect("Missing type param"); - it.trait_bounds(db) - .into_iter() - .all(|bound| generic.impls_trait(db, bound, &[])); - // Filter out generics that do not unify due to trait bounds - it.ty(db).could_unify_with(db, &generic).then_some(generic) - } - })) - .collect::<Option<_>>()?; - - let ret_ty = it.ret_type_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ); - // Filter out functions that return references - if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) - || ret_ty.is_raw_ptr() - { - return None; - } + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::<Option<_>>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let generics = ty.type_arguments().collect(); + let fn_exprs: Vec<Expr> = if param_exprs.is_empty() { + vec![Expr::Function { func: it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Function { func: it, 
generics: generics.clone(), params }) + .collect() + }; - // Ignore functions that do not change the type - // if ty.could_unify_with_deeply(db, &ret_ty) { - // return None; - // } - - // Early exit if some param cannot be filled from lookup - let param_exprs: Vec<Vec<Expr>> = it - .params_without_self_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ) - .into_iter() - .map(|field| lookup.find_autoref(db, field.ty())) - .collect::<Option<_>>()?; - - // Note that we need special case for 0 param constructors because of multi cartesian - // product - let fn_exprs: Vec<Expr> = if param_exprs.is_empty() { - vec![Expr::Function { func: it, generics, params: Vec::new() }] - } else { - param_exprs - .into_iter() - .multi_cartesian_product() - .map(|params| Expr::Function { - func: it, - generics: generics.clone(), - params, - }) - .collect() - }; + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); - lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); - Some((ret_ty, fn_exprs)) - }) - .collect(); - Some(exprs) + Some((ret_ty, fn_exprs)) }) - .flatten() .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .flatten() } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs index 5d76cb04323..f1de6aba05b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs @@ -15,6 +15,8 @@ pub struct AssistConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub prefer_prelude: bool, + pub prefer_absolute: bool, pub assist_emit_must_use: bool, pub term_search_fuel: u64, + pub term_search_borrowck: bool, } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 22a4674fd46..4eb29a2378a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -74,6 +74,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let module = ctx.sema.scope(expr.syntax())?.module(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs index 363aa142b25..327709b28a3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs @@ -1,7 +1,7 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ - ast::{self, make, HasArgList}, + ast::{self, make, HasArgList, HasGenericArgs}, ted, AstNode, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs index fe895eb2598..f17635972b7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs @@ -93,6 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, 
prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs index c95e24693d4..ab25e0167bf 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -1,3 +1,4 @@ +use either::Either; use hir::{ImportPathConfig, ModuleDef}; use ide_db::{ assists::{AssistId, AssistKind}, @@ -76,7 +77,11 @@ pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let usages = definition.usages(&ctx.sema).all(); add_enum_def(edit, ctx, &usages, target_node, &target_module); - replace_usages(edit, ctx, usages, definition, &target_module); + let mut delayed_mutations = Vec::new(); + replace_usages(edit, ctx, usages, definition, &target_module, &mut delayed_mutations); + for (scope, path) in delayed_mutations { + insert_use(&scope, path, &ctx.config.insert_use); + } }, ) } @@ -91,29 +96,32 @@ struct BoolNodeData { /// Attempts to find an appropriate node to apply the action to. fn find_bool_node(ctx: &AssistContext<'_>) -> Option<BoolNodeData> { - let name: ast::Name = ctx.find_node_at_offset()?; - - if let Some(let_stmt) = name.syntax().ancestors().find_map(ast::LetStmt::cast) { - let bind_pat = match let_stmt.pat()? { - ast::Pat::IdentPat(pat) => pat, - _ => { - cov_mark::hit!(not_applicable_in_non_ident_pat); - return None; - } - }; - let def = ctx.sema.to_def(&bind_pat)?; + let name = ctx.find_node_at_offset::<ast::Name>()?; + + if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) { + let def = ctx.sema.to_def(&ident_pat)?; if !def.ty(ctx.db()).is_bool() { cov_mark::hit!(not_applicable_non_bool_local); return None; } - Some(BoolNodeData { - target_node: let_stmt.syntax().clone(), - name, - ty_annotation: let_stmt.ty(), - initializer: let_stmt.initializer(), - definition: Definition::Local(def), - }) + let local_definition = Definition::Local(def); + match ident_pat.syntax().parent().and_then(Either::<ast::Param, ast::LetStmt>::cast)? 
{ + Either::Left(param) => Some(BoolNodeData { + target_node: param.syntax().clone(), + name, + ty_annotation: param.ty(), + initializer: None, + definition: local_definition, + }), + Either::Right(let_stmt) => Some(BoolNodeData { + target_node: let_stmt.syntax().clone(), + name, + ty_annotation: let_stmt.ty(), + initializer: let_stmt.initializer(), + definition: local_definition, + }), + } } else if let Some(const_) = name.syntax().parent().and_then(ast::Const::cast) { let def = ctx.sema.to_def(&const_)?; if !def.ty(ctx.db()).is_bool() { @@ -197,6 +205,7 @@ fn replace_usages( usages: UsageSearchResult, target_definition: Definition, target_module: &hir::Module, + delayed_mutations: &mut Vec<(ImportScope, ast::Path)>, ) { for (file_id, references) in usages { edit.edit_file(file_id); @@ -217,6 +226,7 @@ fn replace_usages( def.usages(&ctx.sema).all(), target_definition, target_module, + delayed_mutations, ) } } else if let Some(initializer) = find_assignment_usage(&name) { @@ -255,6 +265,7 @@ fn replace_usages( def.usages(&ctx.sema).all(), target_definition, target_module, + delayed_mutations, ) } } @@ -306,7 +317,7 @@ fn replace_usages( ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)), }; - insert_use(&scope, path, &ctx.config.insert_use); + delayed_mutations.push((scope, path)); } }, ) @@ -329,6 +340,7 @@ fn augment_references_with_imports( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; references @@ -449,7 +461,20 @@ fn add_enum_def( usages: &UsageSearchResult, target_node: SyntaxNode, target_module: &hir::Module, -) { +) -> Option<()> { + let insert_before = node_to_insert_before(target_node); + + if ctx + .sema + .scope(&insert_before)? + .module() + .scope(ctx.db(), Some(*target_module)) + .iter() + .any(|(name, _)| name.as_str() == Some("Bool")) + { + return None; + } + let make_enum_pub = usages .iter() .flat_map(|(_, refs)| refs) @@ -460,7 +485,6 @@ fn add_enum_def( .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); let enum_def = make_bool_enum(make_enum_pub); - let insert_before = node_to_insert_before(target_node); let indent = IndentLevel::from_node(&insert_before); enum_def.reindent_to(indent); @@ -468,6 +492,8 @@ fn add_enum_def( insert_before.text_range().start(), format!("{}\n\n{indent}", enum_def.syntax().text()), ); + + Some(()) } /// Finds where to put the new enum definition. 
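The `bool_to_enum` hunks above stop calling `insert_use` while `replace_usages` is still rewriting the tree; instead they queue `(ImportScope, ast::Path)` pairs in `delayed_mutations` and apply them once the traversal is finished. Below is a minimal, self-contained sketch of that defer-then-apply pattern. The `Scope`/`Path` types and the function bodies are stand-ins for illustration only, not rust-analyzer's real `ImportScope`, `ast::Path`, or edit machinery.

```rust
// Stand-in types: the real code queues (ImportScope, ast::Path) pairs.
struct Scope(String);
struct ImportPath(String);

// Walks usages and performs in-place edits; any import insertions are only
// recorded here, not applied, so the walk never mutates what it is iterating.
fn replace_usages(pending: &mut Vec<(Scope, ImportPath)>) {
    pending.push((Scope("fn main".into()), ImportPath("foo::Bool".into())));
}

// Applied strictly after the traversal, mirroring the loop over
// `delayed_mutations` in the hunk above.
fn insert_use(scope: &Scope, path: &ImportPath) {
    println!("insert `use {};` into `{}`", path.0, scope.0);
}

fn main() {
    let mut delayed_mutations = Vec::new();
    replace_usages(&mut delayed_mutations);
    for (scope, path) in delayed_mutations {
        insert_use(&scope, &path);
    }
}
```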
@@ -518,6 +544,125 @@ mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; #[test] + fn parameter_with_first_param_usage() { + check_assist( + bool_to_enum, + r#" +fn function($0foo: bool, bar: bool) { + if foo { + println!("foo"); + } +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: Bool, bar: bool) { + if foo == Bool::True { + println!("foo"); + } +} +"#, + ) + } + + #[test] + fn no_duplicate_enums() { + check_assist( + bool_to_enum, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: bool, $0bar: bool) { + if bar { + println!("bar"); + } +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: bool, bar: Bool) { + if bar == Bool::True { + println!("bar"); + } +} +"#, + ) + } + + #[test] + fn parameter_with_last_param_usage() { + check_assist( + bool_to_enum, + r#" +fn function(foo: bool, $0bar: bool) { + if bar { + println!("bar"); + } +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: bool, bar: Bool) { + if bar == Bool::True { + println!("bar"); + } +} +"#, + ) + } + + #[test] + fn parameter_with_middle_param_usage() { + check_assist( + bool_to_enum, + r#" +fn function(foo: bool, $0bar: bool, baz: bool) { + if bar { + println!("bar"); + } +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: bool, bar: Bool, baz: bool) { + if bar == Bool::True { + println!("bar"); + } +} +"#, + ) + } + + #[test] + fn parameter_with_closure_usage() { + check_assist( + bool_to_enum, + r#" +fn main() { + let foo = |$0bar: bool| bar; +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn main() { + let foo = |bar: Bool| bar == Bool::True; +} +"#, + ) + } + + #[test] fn local_variable_with_usage() { check_assist( bool_to_enum, @@ -784,7 +929,6 @@ fn main() { #[test] fn local_variable_non_ident_pat() { - cov_mark::check!(not_applicable_in_non_ident_pat); check_assist_not_applicable( bool_to_enum, r#" diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs index 5459bd334c8..67c72a93dad 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs @@ -1,7 +1,7 @@ use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; use itertools::Itertools; use syntax::{ - ast::{self, make, AstNode, HasName}, + ast::{self, make, AstNode, HasGenericArgs, HasName}, ted, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs index be433c33338..5349e86cf38 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -1,6 +1,6 @@ use hir::ImportPathConfig; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait}; -use syntax::ast::{self, AstNode, HasName}; +use syntax::ast::{self, AstNode, HasGenericArgs, HasName}; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -47,6 +47,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + 
prefer_absolute: ctx.config.prefer_absolute, }; let src_type_path = { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 241fc3b7a37..c55ff24ae38 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -186,6 +186,7 @@ fn augment_references_with_imports( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; references diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs index 76188715524..666e1a1496e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -90,6 +90,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let module = ctx.sema.scope(ident_pat.syntax())?.module(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index de447c1e338..20c37f92337 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -216,6 +216,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index 3c6d73b62e7..54323e2928e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -393,6 +393,7 @@ fn process_references( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ); if let Some(mut mod_path) = mod_path { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs index 3612eda7847..dcf16e89b2c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -1,7 +1,7 @@ use either::Either; use ide_db::syntax_helpers::node_ext::walk_ty; use syntax::{ - ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName}, + ast::{self, edit::IndentLevel, make, AstNode, HasGenericArgs, HasGenericParams, HasName}, ted, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 19521b8a4b7..78def51a4a9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -17,8 +17,9 @@ use syntax::{ self, edit::{self, AstNodeEdit}, edit_in_place::AttrsOwnerEdit, - make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericParams, HasName, - HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred, + make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericArgs, + HasGenericParams, HasName, HasTypeBounds, HasVisibility as astHasVisibility, Path, + WherePred, }, ted::{self, Position}, AstNode, NodeOrToken, SmolStr, SyntaxKind, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs index 9a441fc5ebc..cc33439dd59 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs @@ -64,6 +64,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; @@ -111,6 +112,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs index 38b24fd19ca..51dd4884547 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -4,7 +4,7 @@ use itertools::Itertools; use stdx::{format_to, to_lower_snake_case}; use syntax::{ algo::skip_whitespace_token, - ast::{self, edit::IndentLevel, HasDocComments, HasName}, + ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName}, match_ast, AstNode, AstToken, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 52007e0e297..6056c808880 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -65,6 +65,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index 88fa6dc745e..8c9fe23bb0b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -15,7 +15,9 @@ use ide_db::{ }; use itertools::{izip, Itertools}; use syntax::{ - ast::{self, edit::IndentLevel, edit_in_place::Indent, HasArgList, Pat, PathExpr}, + ast::{ + self, 
edit::IndentLevel, edit_in_place::Indent, HasArgList, HasGenericArgs, Pat, PathExpr, + }, ted, AstNode, NodeOrToken, SyntaxKind, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs index 797c5c06533..7f751c93e48 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs @@ -490,6 +490,25 @@ use foo::bar; } #[test] + fn test_merge_nested_empty_and_self_with_other() { + check_assist( + merge_imports, + r" +use foo::$0{bar}; +use foo::{bar::{self, other}}; +", + r" +use foo::bar::{self, other}; +", + ); + check_assist_import_one_variations!( + "foo::$0{bar}", + "foo::{bar::{self, other}}", + "use {foo::bar::{self, other}};" + ); + } + + #[test] fn test_merge_nested_list_self_and_glob() { check_assist( merge_imports, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs index 7d003efe721..0b91eb676df 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs @@ -120,9 +120,38 @@ mod tests { } #[test] + fn test_braces_kept() { + check_assist_not_applicable_variations!("foo::bar::{$0self}"); + + // This code compiles but transforming "bar::{self}" into "bar" causes a + // compilation error (the name `bar` is defined multiple times). + // Therefore, the normalize_input assist must not apply here. + check_assist_not_applicable( + normalize_import, + r" +mod foo { + + pub mod bar {} + + pub const bar: i32 = 8; +} + +use foo::bar::{$0self}; + +const bar: u32 = 99; + +fn main() { + let local_bar = bar; +} + +", + ); + } + + #[test] fn test_redundant_braces() { check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}"); - check_assist_variations!("foo::{bar::{self}}", "foo::bar"); + check_assist_variations!("foo::{bar::{self}}", "foo::bar::{self}"); check_assist_variations!("foo::{bar::{*}}", "foo::bar::*"); check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux"); check_assist_variations!( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs index 5d1140d57ac..89e24fafc55 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -53,6 +53,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs index 978b719c30a..4164a4c1024 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs @@ -6,6 +6,7 @@ use ide_db::{ helpers::mod_path_to_ast, imports::import_assets::{ImportCandidate, LocatedImport}, }; +use syntax::ast::HasGenericArgs; use syntax::{ ast, ast::{make, HasArgList}, @@ -40,6 +41,7 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: 
&AssistContext<'_>) -> Option let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let mut proposed_imports: Vec<_> = diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs index 799d36be93e..f74fc261128 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs @@ -239,4 +239,33 @@ mod tests { check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(return 2) + 2 }"#); } + + #[test] + fn remove_parens_indirect_calls() { + check_assist( + remove_parentheses, + r#"fn f(call: fn(usize), arg: usize) { $0(call)(arg); }"#, + r#"fn f(call: fn(usize), arg: usize) { call(arg); }"#, + ); + check_assist( + remove_parentheses, + r#"fn f<F>(call: F, arg: usize) where F: Fn(usize) { $0(call)(arg); }"#, + r#"fn f<F>(call: F, arg: usize) where F: Fn(usize) { call(arg); }"#, + ); + + // Parentheses are necessary when calling a function-like pointer that is a member of a struct or union. + check_assist_not_applicable( + remove_parentheses, + r#" +struct Foo<T> { + t: T, +} + +impl Foo<fn(usize)> { + fn foo(&self, arg: usize) { + $0(self.t)(arg); + } +}"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index e792debaa51..5582256a170 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -89,6 +89,7 @@ pub(crate) fn replace_derive_with_manual_impl( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .as_ref() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index 188165e7764..b4e1a49aab5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -4,7 +4,7 @@ use ide_db::{ imports::insert_use::{insert_use, ImportScope}, }; use syntax::{ - ast::{self, make}, + ast::{self, make, HasGenericArgs}, match_ast, ted, AstNode, SyntaxNode, }; @@ -70,6 +70,7 @@ pub(crate) fn replace_qualified_name_with_use( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs index 1794c887439..3a6391cd380 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs @@ -1,7 +1,6 @@ use hir::HirDisplay; use syntax::{ - ast::{Expr, GenericArg, GenericArgList}, - ast::{LetStmt, Type::InferType}, + ast::{Expr, GenericArg, GenericArgList, HasGenericArgs, 
LetStmt, Type::InferType}, AstNode, TextRange, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs index 8a9229c549f..7a911799757 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs @@ -37,7 +37,11 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< sema: &ctx.sema, scope: &scope, goal: target_ty, - config: TermSearchConfig { fuel: ctx.config.term_search_fuel, ..Default::default() }, + config: TermSearchConfig { + fuel: ctx.config.term_search_fuel, + enable_borrowcheck: ctx.config.term_search_borrowck, + ..Default::default() + }, }; let paths = hir::term_search::term_search(&term_search_ctx); @@ -56,6 +60,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .ok() @@ -144,7 +149,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, term_search, r#"//- minicore: todo, unimplemented, option fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#, - r#"fn f() { let a: i32 = 1; let b: Option<i32> = None; }"#, + r#"fn f() { let a: i32 = 1; let b: Option<i32> = Some(a); }"#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs index 30e09648ea1..31d18a60138 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs @@ -4,7 +4,7 @@ use ide_db::{ famous_defs::FamousDefs, }; use syntax::{ - ast::{self, HasVisibility}, + ast::{self, HasGenericArgs, HasVisibility}, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, }; @@ -142,6 +142,7 @@ pub(crate) fn desugar_async_into_impl_future( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; let trait_path = trait_path.display(ctx.db()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs index 8a9e669630b..a1987247cb6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs @@ -4,7 +4,7 @@ use ide_db::{ }; use itertools::Itertools; use syntax::{ - ast::{self, Expr}, + ast::{self, Expr, HasGenericArgs}, match_ast, AstNode, NodeOrToken, SyntaxKind, TextRange, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index abebdec1d18..685d230dc6f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -58,8 +58,6 @@ //! See also this post: //! 
<https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html> -#![warn(rust_2018_idioms, unused_lifetimes)] - mod assist_config; mod assist_context; #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index 3b6c9512511..2dcfda334b8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -30,8 +30,10 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, assist_emit_must_use: false, term_search_fuel: 400, + term_search_borrowck: true, }; pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { @@ -46,8 +48,10 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, assist_emit_must_use: false, term_search_fuel: 400, + term_search_borrowck: true, }; pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { @@ -62,8 +66,10 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, assist_emit_must_use: false, term_search_fuel: 400, + term_search_borrowck: true, }; pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index eec1087e2df..a2287b2977d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_assists_docs`, do not edit by hand. +//! Generated by `cargo codegen assists-doc-tests`, do not edit by hand. 
use super::check_doc_test; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 11ffc8bc441..995a4443edf 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -639,6 +639,7 @@ fn enum_variants_with_paths( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) { // Variants with trivial paths are already added by the existing completion logic, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs index 7281c607da4..01f9368aa4e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs @@ -177,6 +177,7 @@ pub(crate) fn complete_expr_path( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .filter(|it| it.len() > 1); @@ -202,6 +203,7 @@ pub(crate) fn complete_expr_path( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .filter(|it| it.len() > 1); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index 71d44a57cb9..3a8b9c0cb97 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -259,6 +259,7 @@ fn import_on_the_fly( let import_cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; import_assets @@ -309,6 +310,7 @@ fn import_on_the_fly_pat_( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; import_assets @@ -354,6 +356,7 @@ fn import_on_the_fly_method( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; import_assets diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs index d67c00c6c69..a59246229bb 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs @@ -173,7 +173,8 @@ fn should_add_self_completions( } fn comma_wrapper(ctx: &CompletionContext<'_>) -> Option<(impl Fn(&str) -> String, TextRange)> { - let param = ctx.token.parent_ancestors().find(|node| node.kind() == SyntaxKind::PARAM)?; + let param = + ctx.original_token.parent_ancestors().find(|node| node.kind() == SyntaxKind::PARAM)?; let next_token_kind = { let t = param.last_token()?.next_token()?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs index 02298b1e9b0..18833774083 100644 --- 
a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs @@ -82,17 +82,30 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option let no_vis_qualifiers = ctx.qualifier_ctx.vis_node.is_none(); let in_block = kind.is_none(); + let missing_qualifiers = [ + ctx.qualifier_ctx.unsafe_tok.is_none().then_some(("unsafe", "unsafe $0")), + ctx.qualifier_ctx.async_tok.is_none().then_some(("async", "async $0")), + ]; + if !in_trait_impl { - if ctx.qualifier_ctx.unsafe_tok.is_some() { + // handle qualifier tokens + if missing_qualifiers.iter().any(Option::is_none) { + // only complete missing qualifiers + missing_qualifiers.iter().filter_map(|x| *x).for_each(|(kw, snippet)| { + add_keyword(kw, snippet); + }); + if in_item_list || in_assoc_non_trait_impl { add_keyword("fn", "fn $1($2) {\n $0\n}"); } - if in_item_list { + + if ctx.qualifier_ctx.unsafe_tok.is_some() && in_item_list { add_keyword("trait", "trait $1 {\n $0\n}"); if no_vis_qualifiers { add_keyword("impl", "impl $1 {\n $0\n}"); } } + return; } @@ -100,7 +113,6 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option add_keyword("enum", "enum $1 {\n $0\n}"); add_keyword("mod", "mod $0"); add_keyword("static", "static $0"); - add_keyword("async", "async $0"); add_keyword("struct", "struct $0"); add_keyword("trait", "trait $1 {\n $0\n}"); add_keyword("union", "union $1 {\n $0\n}"); @@ -129,6 +141,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option add_keyword("fn", "fn $1($2) {\n $0\n}"); add_keyword("unsafe", "unsafe $0"); add_keyword("const", "const $0"); + add_keyword("async", "async $0"); } } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs index d79b5398828..3f50cd55cb3 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs @@ -57,6 +57,7 @@ mod tests { check( r"fn my_fn() { unsafe $0 }", expect![[r#" + kw async kw fn kw impl kw trait diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 5041ef8d8a1..d919609237a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -63,6 +63,7 @@ pub(crate) fn complete_postfix( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs index 809c305ed82..7d062cb23e5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs @@ -22,6 +22,7 @@ pub struct CompletionConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub prefer_prelude: bool, + pub prefer_absolute: bool, pub snippets: Vec<Snippet>, pub limit: Option<usize>, } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 992ca18bb06..5782a4423a6 
100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -46,13 +46,15 @@ pub(crate) enum Visible { /// Existing qualifiers for the thing we are currently completing. #[derive(Debug, Default)] pub(crate) struct QualifierCtx { + // TODO: Add try_tok and default_tok + pub(crate) async_tok: Option<SyntaxToken>, pub(crate) unsafe_tok: Option<SyntaxToken>, pub(crate) vis_node: Option<ast::Visibility>, } impl QualifierCtx { pub(crate) fn none(&self) -> bool { - self.unsafe_tok.is_none() && self.vis_node.is_none() + self.async_tok.is_none() && self.unsafe_tok.is_none() && self.vis_node.is_none() } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 80ce5bd4cf2..a14fe24fa75 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -6,7 +6,10 @@ use ide_db::{active_parameter::ActiveParameter, RootDatabase}; use itertools::Either; use syntax::{ algo::{ancestors_at_offset, find_node_at_offset, non_trivia_sibling}, - ast::{self, AttrKind, HasArgList, HasGenericParams, HasLoopBody, HasName, NameOrNameRef}, + ast::{ + self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName, + NameOrNameRef, + }, match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize, T, }; @@ -1287,10 +1290,15 @@ fn classify_name_ref( syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev) { if error_node.kind() == SyntaxKind::ERROR { - qualifier_ctx.unsafe_tok = error_node - .children_with_tokens() - .filter_map(NodeOrToken::into_token) - .find(|it| it.kind() == T![unsafe]); + for token in + error_node.children_with_tokens().filter_map(NodeOrToken::into_token) + { + match token.kind() { + SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token), + SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token), + _ => {} + } + } qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); } } @@ -1334,7 +1342,7 @@ fn pattern_context_for( .map_or((PatternRefutability::Irrefutable, false), |node| { let refutability = match_ast! { match node { - ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()), + ast::LetStmt(let_) => return (PatternRefutability::Refutable, let_.ty().is_some()), ast::Param(param) => { let has_type_ascription = param.ty().is_some(); param_ctx = (|| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 8323b8f9331..7d9c2c7c60d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -1,7 +1,5 @@ //! `completions` crate provides utilities for generating completions of user input. 
-#![warn(rust_2018_idioms, unused_lifetimes)] - mod completions; mod config; mod context; @@ -255,6 +253,7 @@ pub fn resolve_completion_edits( let cfg = ImportPathConfig { prefer_no_std: config.prefer_no_std, prefer_prelude: config.prefer_prelude, + prefer_absolute: config.prefer_absolute, }; imports.into_iter().for_each(|(full_import_path, imported_name)| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index ebdc813f3d7..fe9e2e5268a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -298,6 +298,7 @@ pub(crate) fn render_expr( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?; @@ -764,6 +765,7 @@ fn main() { "#, expect![[r#" st dep::test_mod_b::Struct {…} [type_could_unify] + ex dep::test_mod_b::Struct { } [type_could_unify] st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import] fn main() [] fn test(…) [] @@ -839,6 +841,7 @@ fn main() { "#, expect![[r#" ev dep::test_mod_b::Enum::variant [type_could_unify] + ex dep::test_mod_b::Enum::variant [type_could_unify] en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import] fn main() [] fn test(…) [] @@ -876,6 +879,7 @@ fn main() { "#, expect![[r#" ev dep::test_mod_b::Enum::Variant [type_could_unify] + ex dep::test_mod_b::Enum::Variant [type_could_unify] fn main() [] fn test(…) [] md dep [] @@ -1839,7 +1843,6 @@ fn f() { A { bar: b$0 }; } fn baz() [type] ex baz() [type] ex bar() [type] - ex A { bar: ... }.bar [type] st A [] fn f() [] "#]], @@ -1978,7 +1981,6 @@ fn main() { "#, expect![[r#" ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify] - ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] lc m [local] lc t [local] lc &t [type+local] @@ -2028,7 +2030,6 @@ fn main() { "#, expect![[r#" ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify] - ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify] lc m [local] lc t [local] lc &mut t [type+local] @@ -2132,7 +2133,6 @@ fn main() { } "#, expect![[r#" - ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify] st S [] st &S [type] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs index 07836040b48..5885b74e09d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs @@ -172,6 +172,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<V let import_cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let resolve = |import: &GreenNode| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index c1a67315b75..fcac6c7ce72 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -79,6 +79,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = 
CompletionConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, snippets: Vec::new(), limit: None, }; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index abffa73c3b4..7d9c1ed98ac 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -871,6 +871,38 @@ fn main() { } #[test] +fn config_prefer_absolute() { + let fixture = r#" +//- /lib.rs crate:dep +pub mod foo { + pub mod bar { + pub struct Item; + } +} + +//- /main.rs crate:main deps:dep +use ::dep::foo::bar; + +fn main() { + Ite$0 +}"#; + let mut config = TEST_CONFIG; + config.prefer_absolute = true; + + check_edit_with_config( + config.clone(), + "Item", + fixture, + r#" +use ::dep::foo::bar::{self, Item}; + +fn main() { + Item +}"#, + ); +} + +#[test] fn unresolved_qualifier() { let fixture = r#" mod foo { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs index c37900478e1..f138938b02b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs @@ -123,6 +123,7 @@ fn after_unsafe_token() { check( r#"unsafe $0"#, expect![[r#" + kw async kw fn kw impl kw trait @@ -131,6 +132,17 @@ fn after_unsafe_token() { } #[test] +fn after_async_token() { + check( + r#"async $0"#, + expect![[r#" + kw fn + kw unsafe + "#]], + ); +} + +#[test] fn after_visibility() { check( r#"pub $0"#, @@ -157,6 +169,7 @@ fn after_visibility_unsafe() { check( r#"pub unsafe $0"#, expect![[r#" + kw async kw fn kw trait "#]], @@ -170,6 +183,7 @@ fn in_impl_assoc_item_list() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -189,6 +203,7 @@ fn in_impl_assoc_item_list_after_attr() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -208,6 +223,7 @@ fn in_trait_assoc_item_list() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -225,6 +241,7 @@ fn in_trait_assoc_fn_missing_body() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -242,6 +259,7 @@ fn in_trait_assoc_const_missing_body() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -259,6 +277,7 @@ fn in_trait_assoc_type_aliases_missing_ty() { expect![[r#" ma makro!(…) macro_rules! 
makro md module + kw async kw const kw crate:: kw fn diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs index 8720cb555a1..6a0b67e291a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs @@ -146,7 +146,7 @@ enum SingleVariantEnum { } use SingleVariantEnum::Variant; fn foo() { - let a$0 + for a$0 } "#, expect![[r#" diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs index 088d2ec5e3f..42a80d63b1a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs @@ -79,8 +79,9 @@ pub fn generic_def_for_node( sema: &Semantics<'_, RootDatabase>, generic_arg_list: &ast::GenericArgList, token: &SyntaxToken, -) -> Option<(hir::GenericDef, usize, bool)> { +) -> Option<(hir::GenericDef, usize, bool, Option<hir::Variant>)> { let parent = generic_arg_list.syntax().parent()?; + let mut variant = None; let def = match_ast! { match parent { ast::PathSegment(ps) => { @@ -91,7 +92,10 @@ pub fn generic_def_for_node( hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::TraitAlias(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(), + hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => { + variant = Some(it); + it.parent_enum(sema.db).into() + }, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)) | hir::PathResolution::Def(hir::ModuleDef::Const(_)) | hir::PathResolution::Def(hir::ModuleDef::Macro(_)) @@ -134,5 +138,5 @@ pub fn generic_def_for_node( .next() .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_))); - Some((def, active_param, first_arg_is_non_lifetime)) + Some((def, active_param, first_arg_is_non_lifetime, variant)) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs index 7755a9b9748..0504f3caf5f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_lints`, do not edit by hand. +//! Generated by `cargo codegen lint-definitions`, do not edit by hand. 
#[derive(Clone)] pub struct Lint { @@ -50,6 +50,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"detects pattern bindings with the same name as one of the matched variants"##, }, Lint { + label: "boxed_slice_into_iter", + description: r##"detects calling `into_iter` on boxed slices in Rust 2015, 2018, and 2021"##, + }, + Lint { label: "break_with_label_and_loop", description: r##"`break` expression with label and unlabeled loop as value expression"##, }, @@ -78,6 +82,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"detects visually confusable pairs between identifiers"##, }, Lint { + label: "const_eval_mutable_ptr_in_final_value", + description: r##"detects a mutable pointer that has leaked into final value of a const expression"##, + }, + Lint { label: "const_evaluatable_unchecked", description: r##"detects a generic constant is used in a type without a emitting a warning"##, }, @@ -86,6 +94,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"detects attempts to mutate a `const` item"##, }, Lint { label: "dead_code", description: r##"detect unused, unexported items"## }, + Lint { + label: "dependency_on_unit_never_type_fallback", + description: r##"never type fallback affecting unsafe function calls"##, + }, Lint { label: "deprecated", description: r##"detects use of deprecated items"## }, Lint { label: "deprecated_cfg_attr_crate_type_name", @@ -96,6 +108,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"detects use of items that will be deprecated in a future version"##, }, Lint { + label: "deprecated_safe", + description: r##"detects unsafe functions being used as safe functions"##, + }, + Lint { label: "deprecated_where_clause_location", description: r##"deprecated where clause location"##, }, @@ -171,7 +187,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "future_incompatible", - description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-evaluatable-unchecked, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety, writes-through-immutable-pointer"##, + description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-eval-mutable-ptr-in-final-value, const-evaluatable-unchecked, dependency-on-unit-never-type-fallback, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, 
never-type-fallback-flowing-into-unsafe, order-dependent-trait-objects, patterns-in-fns-without-body, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, self-constructor-from-outer-item, semicolon-in-expressions-from-macros, soft-unstable, uncovered-param-in-projection, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, wasm-c-abi, writes-through-immutable-pointer"##, }, Lint { label: "fuzzy_provenance_casts", @@ -186,6 +202,14 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"ill-formed attribute inputs that were previously accepted and used in practice"##, }, Lint { + label: "impl_trait_overcaptures", + description: r##"`impl Trait` will capture more lifetimes than possibly intended in edition 2024"##, + }, + Lint { + label: "impl_trait_redundant_captures", + description: r##"redundant precise-capturing `use<...>` syntax on an `impl Trait`"##, + }, + Lint { label: "improper_ctypes", description: r##"proper use of libc types in foreign modules"##, }, @@ -199,10 +223,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "incomplete_include", description: r##"trailing content in included file"## }, Lint { - label: "indirect_structural_match", - description: r##"constant used in pattern contains value of non-structural-match type in a field or a variant"##, - }, - Lint { label: "ineffective_unstable_trait_impl", description: r##"detects `#[unstable]` on stable trait implementations for stable types"##, }, @@ -256,6 +276,14 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "keyword_idents", + description: r##"lint group for: keyword-idents-2018, keyword-idents-2024"##, + }, + Lint { + label: "keyword_idents_2018", + description: r##"detects edition keywords being used as an identifier"##, + }, + Lint { + label: "keyword_idents_2024", description: r##"detects edition keywords being used as an identifier"##, }, Lint { label: "large_assignments", description: r##"detects large moves or copies"## }, @@ -321,6 +349,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"detects missing fragment specifiers in unused `macro_rules!` patterns"##, }, Lint { + label: "missing_unsafe_on_extern", + description: r##"detects missing unsafe keyword on extern declarations"##, + }, + Lint { label: "mixed_script_confusables", description: r##"detects Unicode scripts whose mixed script confusables codepoints are solely used"##, }, @@ -342,6 +374,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "named_asm_labels", description: r##"named labels in inline assembly"## }, Lint { + label: "never_type_fallback_flowing_into_unsafe", + description: r##"never type fallback affecting unsafe function calls"##, + }, + Lint { label: "no_mangle_const_items", description: r##"const items will not have their symbols exported"##, }, @@ -352,6 +388,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"types, variants, traits and type parameters should have camel case names"##, }, Lint { + label: "non_contiguous_range_endpoints", + description: r##"detects off-by-one errors with exclusive range patterns"##, + }, + Lint { label: "non_exhaustive_omitted_patterns", description: r##"detect when patterns of types marked `non_exhaustive` are missed"##, }, @@ -399,10 +439,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"patterns in functions without body were erroneously allowed"##, }, Lint { - label: "pointer_structural_match", - description: r##"pointers are 
not structural-match"##, - }, - Lint { label: "private_bounds", description: r##"private type in secondary interface of an item"##, }, @@ -411,10 +447,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"private type in primary interface of an item"##, }, Lint { - label: "proc_macro_back_compat", - description: r##"detects usage of old versions of certain proc-macro crates"##, - }, - Lint { label: "proc_macro_derive_resolution_fallback", description: r##"detects proc macro derives using inaccessible names from parent modules"##, }, @@ -423,11 +455,23 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"detect public re-exports of private extern crates"##, }, Lint { + label: "redundant_lifetimes", + description: r##"detects lifetime parameters that are redundant because they are equal to some other named lifetime"##, + }, + Lint { label: "redundant_semicolons", description: r##"detects unnecessary trailing semicolons"##, }, Lint { label: "refining_impl_trait", + description: r##"lint group for: refining-impl-trait-reachable, refining-impl-trait-internal"##, + }, + Lint { + label: "refining_impl_trait_internal", + description: r##"impl trait in impl method signature does not match trait method signature"##, + }, + Lint { + label: "refining_impl_trait_reachable", description: r##"impl trait in impl method signature does not match trait method signature"##, }, Lint { @@ -440,7 +484,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "rust_2018_compatibility", - description: r##"lint group for: keyword-idents, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##, + description: r##"lint group for: keyword-idents-2018, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##, }, Lint { label: "rust_2018_idioms", @@ -468,7 +512,15 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "rust_2024_compatibility", - description: r##"lint group for: static-mut-refs, unsafe-op-in-unsafe-fn"##, + description: r##"lint group for: keyword-idents-2024, deprecated-safe, missing-unsafe-on-extern, static-mut-refs, unsafe-attr-outside-unsafe, unsafe-op-in-unsafe-fn, boxed-slice-into-iter"##, + }, + Lint { + label: "rust_2024_incompatible_pat", + description: r##"detects patterns whose meaning will change in Rust 2024"##, + }, + Lint { + label: "self_constructor_from_outer_item", + description: r##"detect unsupported use of `Self` from outer item"##, }, Lint { label: "semicolon_in_expressions_from_macros", @@ -547,6 +599,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"functions that cannot return without calling themselves"##, }, Lint { + label: "uncovered_param_in_projection", + description: r##"impl contains type parameters that are not covered"##, + }, + Lint { label: "undefined_naked_function_abi", description: r##"undefined naked function ABI"##, }, @@ -595,6 +651,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"`pub` items not reachable from crate root"##, }, Lint { + label: "unsafe_attr_outside_unsafe", + description: r##"detects unsafe attributes outside of unsafe"##, + }, + Lint { label: "unsafe_code", description: r##"usage of `unsafe` code and other potentially unsound constructs"##, }, @@ -714,8 +774,8 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"lint group for: all lints that are set to issue warnings"##, }, Lint { - label: "where_clauses_object_safety", - description: r##"checks the object safety of where clauses"##, + label: "wasm_c_abi", + description: r##"detects 
dependencies that are incompatible with the Wasm C ABI"##, }, Lint { label: "while_true", @@ -731,7 +791,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "future_incompatible", - description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-evaluatable-unchecked, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety, writes-through-immutable-pointer"##, + description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-eval-mutable-ptr-in-final-value, const-evaluatable-unchecked, dependency-on-unit-never-type-fallback, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, never-type-fallback-flowing-into-unsafe, order-dependent-trait-objects, patterns-in-fns-without-body, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, self-constructor-from-outer-item, semicolon-in-expressions-from-macros, soft-unstable, uncovered-param-in-projection, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, wasm-c-abi, writes-through-immutable-pointer"##, }, children: &[ "deref_into_dyn_supertrait", @@ -741,37 +801,45 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ "cenum_impl_drop_cast", "coherence_leak_check", "conflicting_repr_hints", + "const_eval_mutable_ptr_in_final_value", "const_evaluatable_unchecked", + "dependency_on_unit_never_type_fallback", "deprecated_cfg_attr_crate_type_name", "elided_lifetimes_in_associated_constant", "forbidden_lint_groups", "ill_formed_attribute_input", - "indirect_structural_match", - "invalid_doc_attributes", "invalid_type_param_default", "late_bound_lifetime_arguments", "legacy_derive_helpers", "macro_expanded_macro_exports_accessed_by_absolute_paths", "missing_fragment_specifier", + "never_type_fallback_flowing_into_unsafe", "order_dependent_trait_objects", "patterns_in_fns_without_body", - "pointer_structural_match", - "proc_macro_back_compat", "proc_macro_derive_resolution_fallback", "pub_use_of_private_extern_crate", "repr_transparent_external_private_fields", + "self_constructor_from_outer_item", "semicolon_in_expressions_from_macros", "soft_unstable", + "uncovered_param_in_projection", "uninhabited_static", "unstable_name_collisions", 
"unstable_syntax_pre_expansion", "unsupported_calling_conventions", - "where_clauses_object_safety", + "wasm_c_abi", "writes_through_immutable_pointer", ], }, LintGroup { lint: Lint { + label: "keyword_idents", + description: r##"lint group for: keyword-idents-2018, keyword-idents-2024"##, + }, + children: &["keyword_idents_2018", "keyword_idents_2024"], + }, + LintGroup { + lint: Lint { label: "let_underscore", description: r##"lint group for: let-underscore-drop, let-underscore-lock"##, }, @@ -786,11 +854,18 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ }, LintGroup { lint: Lint { + label: "refining_impl_trait", + description: r##"lint group for: refining-impl-trait-reachable, refining-impl-trait-internal"##, + }, + children: &["refining_impl_trait_reachable", "refining_impl_trait_internal"], + }, + LintGroup { + lint: Lint { label: "rust_2018_compatibility", - description: r##"lint group for: keyword-idents, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##, + description: r##"lint group for: keyword-idents-2018, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##, }, children: &[ - "keyword_idents", + "keyword_idents_2018", "anonymous_parameters", "absolute_paths_not_starting_with_crate", "tyvar_behind_raw_pointer", @@ -828,9 +903,17 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "rust_2024_compatibility", - description: r##"lint group for: static-mut-refs, unsafe-op-in-unsafe-fn"##, + description: r##"lint group for: keyword-idents-2024, deprecated-safe, missing-unsafe-on-extern, static-mut-refs, unsafe-attr-outside-unsafe, unsafe-op-in-unsafe-fn, boxed-slice-into-iter"##, }, - children: &["static_mut_refs", "unsafe_op_in_unsafe_fn"], + children: &[ + "keyword_idents_2024", + "deprecated_safe", + "missing_unsafe_on_extern", + "static_mut_refs", + "unsafe_attr_outside_unsafe", + "unsafe_op_in_unsafe_fn", + "boxed_slice_into_iter", + ], }, LintGroup { lint: Lint { @@ -1187,9 +1270,23 @@ This feature has no tracking issue, and is therefore likely internal to the comp label: "abi_vectorcall", description: r##"# `abi_vectorcall` -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. +The tracking issue for this feature is: [#124485] + +[#124485]: https://github.com/rust-lang/rust/issues/124485 ------------------------ + +Adds support for the Windows `"vectorcall"` ABI, the equivalent of `__vectorcall` in MSVC. + +```rust,ignore (only-windows-or-x86-or-x86-64) +extern "vectorcall" { + fn add_f64s(x: f64, y: f64) -> f64; +} + +fn main() { + println!("{}", add_f64s(2.0, 4.0)); +} +``` "##, }, Lint { @@ -1204,12 +1301,12 @@ The tracking issue for this feature is: [#40180] "##, }, Lint { - label: "absolute_path", - description: r##"# `absolute_path` + label: "acceptfilter", + description: r##"# `acceptfilter` -The tracking issue for this feature is: [#92750] +The tracking issue for this feature is: [#121891] -[#92750]: https://github.com/rust-lang/rust/issues/92750 +[#121891]: https://github.com/rust-lang/rust/issues/121891 ------------------------ "##, @@ -1234,6 +1331,34 @@ The tracking issue for this feature is: [#95174] [#95174]: https://github.com/rust-lang/rust/issues/95174 ------------------------ + +Allows for using more complex types for const parameters, such as structs or enums. 
+ +```rust +#![feature(adt_const_params)] +#![allow(incomplete_features)] + +use std::marker::ConstParamTy; + +#[derive(ConstParamTy, PartialEq, Eq)] +enum Foo { + A, + B, + C, +} + +#[derive(ConstParamTy, PartialEq, Eq)] +struct Bar { + flag: bool, +} + +fn is_foo_a_and_bar_true<const F: Foo, const B: Bar>() -> bool { + match (F, B.flag) { + (Foo::A, true) => true, + _ => false, + } +} +``` "##, }, Lint { @@ -1380,6 +1505,28 @@ The tracking issue for this feature is: [#91583] "##, }, Lint { + label: "array_ptr_get", + description: r##"# `array_ptr_get` + +The tracking issue for this feature is: [#119834] + +[#119834]: https://github.com/rust-lang/rust/issues/119834 + +------------------------ +"##, + }, + Lint { + label: "array_repeat", + description: r##"# `array_repeat` + +The tracking issue for this feature is: [#126695] + +[#126695]: https://github.com/rust-lang/rust/issues/126695 + +------------------------ +"##, + }, + Lint { label: "array_try_from_fn", description: r##"# `array_try_from_fn` @@ -1483,6 +1630,7 @@ This feature tracks `asm!` and `global_asm!` support for the following architect - M68k - CSKY - s390x +- Arm64EC ## Register classes @@ -1515,6 +1663,9 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | CSKY | `freg` | `f[0-31]` | `f` | | s390x | `reg` | `r[0-10]`, `r[12-14]` | `r` | | s390x | `freg` | `f[0-15]` | `f` | +| Arm64EC | `reg` | `x[0-12]`, `x[15-22]`, `x[25-27]`, `x30` | `r` | +| Arm64EC | `vreg` | `v[0-15]` | `w` | +| Arm64EC | `vreg_low16` | `v[0-15]` | `x` | > **Notes**: > - NVPTX doesn't have a fixed register set, so named registers are not supported. @@ -1550,6 +1701,8 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | CSKY | `freg` | None | `f32`, | | s390x | `reg`, `reg_addr` | None | `i8`, `i16`, `i32`, `i64` | | s390x | `freg` | None | `f32`, `f64` | +| Arm64EC | `reg` | None | `i8`, `i16`, `i32`, `f32`, `i64`, `f64` | +| Arm64EC | `vreg` | None | `i8`, `i16`, `i32`, `f32`, `i64`, `f64`, <br> `i8x8`, `i16x4`, `i32x2`, `i64x1`, `f32x2`, `f64x1`, <br> `i8x16`, `i16x8`, `i32x4`, `i64x2`, `f32x4`, `f64x2` | ## Register aliases @@ -1582,6 +1735,12 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | CSKY | `r29` | `rtb` | | CSKY | `r30` | `svbr` | | CSKY | `r31` | `tls` | +| Arm64EC | `x[0-30]` | `w[0-30]` | +| Arm64EC | `x29` | `fp` | +| Arm64EC | `x30` | `lr` | +| Arm64EC | `sp` | `wsp` | +| Arm64EC | `xzr` | `wzr` | +| Arm64EC | `v[0-15]` | `b[0-15]`, `h[0-15]`, `s[0-15]`, `d[0-15]`, `q[0-15]` | > **Notes**: > - TI does not mandate a frame pointer for MSP430, but toolchains are allowed @@ -1592,8 +1751,8 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | Architecture | Unsupported register | Reason | | ------------ | --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | All | `sp`, `r15` (s390x) | The stack pointer must be restored to its original value at the end of an asm code block. | -| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR), `r4` (MSP430), `a6` (M68k), `r11` (s390x) | The frame pointer cannot be used as an input or output. | -| All | `r19` (Hexagon) | This is used internally by LLVM as a "base pointer" for functions with complex stack frames. 
| +| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR), `r4` (MSP430), `a6` (M68k), `r11` (s390x), `x29` (Arm64EC) | The frame pointer cannot be used as an input or output. | +| All | `r19` (Hexagon), `x19` (Arm64EC) | This is used internally by LLVM as a "base pointer" for functions with complex stack frames. | | MIPS | `$0` or `$zero` | This is a constant zero register which can't be modified. | | MIPS | `$1` or `$at` | Reserved for assembler. | | MIPS | `$26`/`$k0`, `$27`/`$k1` | OS-reserved registers. | @@ -1609,6 +1768,9 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | CSKY | `r15` | This is the link register. | | CSKY | `r[26-30]` | Reserved by its ABI. | | CSKY | `r31` | This is the TLS register. | +| Arm64EC | `xzr` | This is a constant zero register which can't be modified. | +| Arm64EC | `x18` | This is an OS-reserved register. | +| Arm64EC | `x13`, `x14`, `x23`, `x24`, `x28`, `v[16-31]` | These are AArch64 registers that are not supported for Arm64EC. | ## Template modifiers @@ -1629,6 +1791,16 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | s390x | `freg` | None | `%f0` | None | | CSKY | `reg` | None | `r0` | None | | CSKY | `freg` | None | `f0` | None | +| Arm64EC | `reg` | None | `x0` | `x` | +| Arm64EC | `reg` | `w` | `w0` | `w` | +| Arm64EC | `reg` | `x` | `x0` | `x` | +| Arm64EC | `vreg` | None | `v0` | None | +| Arm64EC | `vreg` | `v` | `v0` | None | +| Arm64EC | `vreg` | `b` | `b0` | `b` | +| Arm64EC | `vreg` | `h` | `h0` | `h` | +| Arm64EC | `vreg` | `s` | `s0` | `s` | +| Arm64EC | `vreg` | `d` | `d0` | `d` | +| Arm64EC | `vreg` | `q` | `q0` | `q` | # Flags covered by `preserves_flags` @@ -1641,6 +1813,43 @@ These flags registers must be restored upon exiting the asm block if the `preser - The condition code register `ccr`. - s390x - The condition code register `cc`. +- Arm64EC + - Condition flags (`NZCV` register). + - Floating-point status (`FPSR` register). +"##, + }, + Lint { + label: "asm_goto", + description: r##"# `asm_goto` + +The tracking issue for this feature is: [#119364] + +[#119364]: https://github.com/rust-lang/rust/issues/119364 + +------------------------ + +This feature adds a `label <block>` operand type to `asm!`. + +Example: +```rust,ignore (partial-example, x86-only) + +unsafe { + asm!( + "jmp {}", + label { + println!("Jumped from asm!"); + } + ); +} +``` + +The block must have unit type or diverge. + +When `label <block>` is used together with `noreturn` option, it means that the +assembly will not fallthrough. It's allowed to jump to a label within the +assembly. In this case, the entire `asm!` expression will have an unit type as +opposed to diverging, if not all label blocks diverge. The `asm!` expression +still diverges if `noreturn` option is used and all label blocks diverge. 
"##, }, Lint { @@ -1679,17 +1888,6 @@ The tracking issue for this feature is: [#92827] "##, }, Lint { - label: "associated_type_bounds", - description: r##"# `associated_type_bounds` - -The tracking issue for this feature is: [#52662] - -[#52662]: https://github.com/rust-lang/rust/issues/52662 - ------------------------- -"##, - }, - Lint { label: "associated_type_defaults", description: r##"# `associated_type_defaults` @@ -1712,6 +1910,17 @@ The tracking issue for this feature is: [#62290] "##, }, Lint { + label: "async_drop", + description: r##"# `async_drop` + +The tracking issue for this feature is: [#126482] + +[#126482]: https://github.com/rust-lang/rust/issues/126482 + +------------------------ +"##, + }, + Lint { label: "async_fn_track_caller", description: r##"# `async_fn_track_caller` @@ -1736,7 +1945,7 @@ for creating custom closure-like types that return futures. [`AsyncFn*`]: ../../std/ops/trait.AsyncFn.html The main difference to the `Fn*` family of traits is that `AsyncFn` can return a future -that borrows from itself (`FnOnce::Output` has no lifetime parameters, while `AsyncFn::CallFuture` does). +that borrows from itself (`FnOnce::Output` has no lifetime parameters, while `AsyncFnMut::CallRefFuture` does). "##, }, Lint { @@ -1782,17 +1991,6 @@ The tracking issue for this feature is: [#79024] "##, }, Lint { - label: "atomic_bool_fetch_not", - description: r##"# `atomic_bool_fetch_not` - -The tracking issue for this feature is: [#98485] - -[#98485]: https://github.com/rust-lang/rust/issues/98485 - ------------------------- -"##, - }, - Lint { label: "atomic_from_mut", description: r##"# `atomic_from_mut` @@ -1947,17 +2145,6 @@ The tracking issue for this feature is: [#85532] "##, }, Lint { - label: "binary_heap_as_slice", - description: r##"# `binary_heap_as_slice` - -The tracking issue for this feature is: [#83659] - -[#83659]: https://github.com/rust-lang/rust/issues/83659 - ------------------------- -"##, - }, - Lint { label: "binary_heap_drain_sorted", description: r##"# `binary_heap_drain_sorted` @@ -2113,23 +2300,23 @@ The tracking issue for this feature is: [#111735] "##, }, Lint { - label: "builtin_syntax", - description: r##"# `builtin_syntax` + label: "build_hasher_default_const_new", + description: r##"# `build_hasher_default_const_new` -The tracking issue for this feature is: [#110680] +The tracking issue for this feature is: [#123197] -[#110680]: https://github.com/rust-lang/rust/issues/110680 +[#123197]: https://github.com/rust-lang/rust/issues/123197 ------------------------ "##, }, Lint { - label: "byte_slice_trim_ascii", - description: r##"# `byte_slice_trim_ascii` + label: "builtin_syntax", + description: r##"# `builtin_syntax` -The tracking issue for this feature is: [#94035] +The tracking issue for this feature is: [#110680] -[#94035]: https://github.com/rust-lang/rust/issues/94035 +[#110680]: https://github.com/rust-lang/rust/issues/110680 ------------------------ "##, @@ -2146,33 +2333,14 @@ The tracking issue for this feature is: [#88345] "##, }, Lint { - label: "c_unwind", - description: r##"# `c_unwind` + label: "c_str_module", + description: r##"# `c_str_module` -The tracking issue for this feature is: [#74990] +The tracking issue for this feature is: [#112134] -[#74990]: https://github.com/rust-lang/rust/issues/74990 +[#112134]: https://github.com/rust-lang/rust/issues/112134 ------------------------ - -Introduces new ABI strings: -- "C-unwind" -- "cdecl-unwind" -- "stdcall-unwind" -- "fastcall-unwind" -- "vectorcall-unwind" -- "thiscall-unwind" -- 
"aapcs-unwind" -- "win64-unwind" -- "sysv64-unwind" -- "system-unwind" - -These enable unwinding from other languages (such as C++) into Rust frames and -from Rust into other languages. - -See [RFC 2945] for more information. - -[RFC 2945]: https://github.com/rust-lang/rfcs/blob/master/text/2945-c-unwind-abi.md "##, }, Lint { @@ -2424,6 +2592,17 @@ The tracking issue for this feature is: [#29594] "##, }, Lint { + label: "cfg_ub_checks", + description: r##"# `cfg_ub_checks` + +The tracking issue for this feature is: [#123499] + +[#123499]: https://github.com/rust-lang/rust/issues/123499 + +------------------------ +"##, + }, + Lint { label: "cfg_version", description: r##"# `cfg_version` @@ -2523,6 +2702,17 @@ The tracking issue for this feature is: [#114298] "##, }, Lint { + label: "clone_to_uninit", + description: r##"# `clone_to_uninit` + +The tracking issue for this feature is: [#126799] + +[#126799]: https://github.com/rust-lang/rust/issues/126799 + +------------------------ +"##, + }, + Lint { label: "closure_lifetime_binder", description: r##"# `closure_lifetime_binder` @@ -2657,17 +2847,6 @@ The tracking issue for this feature is: [#18598] "##, }, Lint { - label: "collapse_debuginfo", - description: r##"# `collapse_debuginfo` - -The tracking issue for this feature is: [#100758] - -[#100758]: https://github.com/rust-lang/rust/issues/100758 - ------------------------- -"##, - }, - Lint { label: "compiler_builtins", description: r##"# `compiler_builtins` @@ -2835,8 +3014,8 @@ The tracking issue for this feature is: [#85532] "##, }, Lint { - label: "const_binary_heap_constructor", - description: r##"# `const_binary_heap_constructor` + label: "const_binary_heap_new_in", + description: r##"# `const_binary_heap_new_in` The tracking issue for this feature is: [#112353] @@ -2875,17 +3054,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "const_caller_location", - description: r##"# `const_caller_location` - -The tracking issue for this feature is: [#76156] - -[#76156]: https://github.com/rust-lang/rust/issues/76156 - ------------------------- -"##, - }, - Lint { label: "const_cell_into_inner", description: r##"# `const_cell_into_inner` @@ -2919,17 +3087,6 @@ The tracking issue for this feature is: [#106003] "##, }, Lint { - label: "const_cmp", - description: r##"# `const_cmp` - -The tracking issue for this feature is: [#92391] - -[#92391]: https://github.com/rust-lang/rust/issues/92391 - ------------------------- -"##, - }, - Lint { label: "const_collections_with_hasher", description: r##"# `const_collections_with_hasher` @@ -2966,7 +3123,9 @@ The tracking issue for this feature is: [#113219] label: "const_eval_select", description: r##"# `const_eval_select` -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+The tracking issue for this feature is: [#124625] + +[#124625]: https://github.com/rust-lang/rust/issues/124625 ------------------------ "##, @@ -3076,17 +3235,6 @@ The tracking issue for this feature is: [#79597] "##, }, Lint { - label: "const_hint_assert_unchecked", - description: r##"# `const_hint_assert_unchecked` - -The tracking issue for this feature is: [#119131] - -[#119131]: https://github.com/rust-lang/rust/issues/119131 - ------------------------- -"##, - }, - Lint { label: "const_index_range_slice_index", description: r##"# `const_index_range_slice_index` @@ -3096,10 +3244,12 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "const_int_unchecked_arith", - description: r##"# `const_int_unchecked_arith` + label: "const_int_from_str", + description: r##"# `const_int_from_str` -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. +The tracking issue for this feature is: [#59133] + +[#59133]: https://github.com/rust-lang/rust/issues/59133 ------------------------ "##, @@ -3154,17 +3304,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "const_io_structs", - description: r##"# `const_io_structs` - -The tracking issue for this feature is: [#78812] - -[#78812]: https://github.com/rust-lang/rust/issues/78812 - ------------------------- -"##, - }, - Lint { label: "const_ip", description: r##"# `const_ip` @@ -3207,17 +3346,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "const_location_fields", - description: r##"# `const_location_fields` - -The tracking issue for this feature is: [#102911] - -[#102911]: https://github.com/rust-lang/rust/issues/102911 - ------------------------- -"##, - }, - Lint { label: "const_maybe_uninit_array_assume_init", description: r##"# `const_maybe_uninit_array_assume_init` @@ -3304,17 +3432,6 @@ The tracking issue for this feature is: [#110840] "##, }, Lint { - label: "const_ops", - description: r##"# `const_ops` - -The tracking issue for this feature is: [#90080] - -[#90080]: https://github.com/rust-lang/rust/issues/90080 - ------------------------- -"##, - }, - Lint { label: "const_option", description: r##"# `const_option` @@ -3535,6 +3652,17 @@ The tracking issue for this feature is: [#111774] "##, }, Lint { + label: "const_slice_flatten", + description: r##"# `const_slice_flatten` + +The tracking issue for this feature is: [#95629] + +[#95629]: https://github.com/rust-lang/rust/issues/95629 + +------------------------ +"##, + }, + Lint { label: "const_slice_from_mut_ptr_range", description: r##"# `const_slice_from_mut_ptr_range` @@ -3588,17 +3716,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "const_slice_ptr_len", - description: r##"# `const_slice_ptr_len` - -The tracking issue for this feature is: [#71146] - -[#71146]: https://github.com/rust-lang/rust/issues/71146 - ------------------------- -"##, - }, - Lint { label: "const_slice_split_at_mut", description: r##"# `const_slice_split_at_mut` @@ -3665,6 +3782,15 @@ The tracking issue for this feature is: [#83163] "##, }, Lint { + label: "const_three_way_compare", + description: r##"# `const_three_way_compare` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+ +------------------------ +"##, + }, + Lint { label: "const_trait_impl", description: r##"# `const_trait_impl` @@ -3709,6 +3835,24 @@ The tracking issue for this feature is: [#63084] "##, }, Lint { + label: "const_typed_swap", + description: r##"# `const_typed_swap` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ +"##, + }, + Lint { + label: "const_ub_checks", + description: r##"# `const_ub_checks` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ +"##, + }, + Lint { label: "const_unicode_case_lookup", description: r##"# `const_unicode_case_lookup` @@ -3751,6 +3895,17 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { + label: "context_ext", + description: r##"# `context_ext` + +The tracking issue for this feature is: [#123392] + +[#123392]: https://github.com/rust-lang/rust/issues/123392 + +------------------------ +"##, + }, + Lint { label: "control_flow_enum", description: r##"# `control_flow_enum` @@ -3793,6 +3948,24 @@ The tracking issue for this feature is: [#117693] "##, }, Lint { + label: "core_pattern_type", + description: r##"# `core_pattern_type` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ +"##, + }, + Lint { + label: "core_pattern_types", + description: r##"# `core_pattern_types` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ +"##, + }, + Lint { label: "core_private_bignum", description: r##"# `core_private_bignum` @@ -3862,7 +4035,7 @@ tweaks to the overall design. A syntactical example of a coroutine is: ```rust -#![feature(coroutines, coroutine_trait)] +#![feature(coroutines, coroutine_trait, stmt_expr_attributes)] use std::ops::{Coroutine, CoroutineState}; use std::pin::Pin; @@ -3884,7 +4057,8 @@ fn main() { } ``` -Coroutines are closure-like literals which can contain a `yield` statement. The +Coroutines are closure-like literals which are annotated with `#[coroutine]` +and can contain a `yield` statement. The `yield` statement takes an optional expression of a value to yield out of the coroutine. All coroutine literals implement the `Coroutine` trait in the `std::ops` module. The `Coroutine` trait has one main method, `resume`, which @@ -3894,7 +4068,7 @@ An example of the control flow of coroutines is that the following example prints all numbers in order: ```rust -#![feature(coroutines, coroutine_trait)] +#![feature(coroutines, coroutine_trait, stmt_expr_attributes)] use std::ops::Coroutine; use std::pin::Pin; @@ -3914,9 +4088,9 @@ fn main() { } ``` -At this time the main intended use case of coroutines is an implementation -primitive for async/await syntax, but coroutines will likely be extended to -ergonomic implementations of iterators and other primitives in the future. +At this time the main use case of coroutines is an implementation +primitive for `async`/`await` and `gen` syntax, but coroutines +will likely be extended to other primitives in the future. Feedback on the design and usage is always appreciated! ### The `Coroutine` trait @@ -3999,7 +4173,7 @@ which point all state is saved off in the coroutine and a value is returned. 
Let's take a look at an example to see what's going on here: ```rust -#![feature(coroutines, coroutine_trait)] +#![feature(coroutines, coroutine_trait, stmt_expr_attributes)] use std::ops::Coroutine; use std::pin::Pin; @@ -4019,7 +4193,7 @@ fn main() { This coroutine literal will compile down to something similar to: ```rust -#![feature(arbitrary_self_types, coroutines, coroutine_trait)] +#![feature(arbitrary_self_types, coroutine_trait)] use std::ops::{Coroutine, CoroutineState}; use std::pin::Pin; @@ -4139,12 +4313,12 @@ The tracking issue for this feature is: [#44839] "##, }, Lint { - label: "cstr_count_bytes", - description: r##"# `cstr_count_bytes` + label: "cstr_bytes", + description: r##"# `cstr_bytes` -The tracking issue for this feature is: [#114441] +The tracking issue for this feature is: [#112115] -[#114441]: https://github.com/rust-lang/rust/issues/114441 +[#112115]: https://github.com/rust-lang/rust/issues/112115 ------------------------ "##, @@ -4161,17 +4335,6 @@ The tracking issue for this feature is: [#86369] "##, }, Lint { - label: "custom_code_classes_in_docs", - description: r##"# `custom_code_classes_in_docs` - -The tracking issue for this feature is: [#79483] - -[#79483]: https://github.com/rust-lang/rust/issues/79483 - ------------------------- -"##, - }, - Lint { label: "custom_inner_attributes", description: r##"# `custom_inner_attributes` @@ -4281,15 +4444,6 @@ The tracking issue for this feature is: [#27336] "##, }, Lint { - label: "delayed_debug_assertions", - description: r##"# `delayed_debug_assertions` - -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. - ------------------------- -"##, - }, - Lint { label: "deprecated_safe", description: r##"# `deprecated_safe` @@ -4312,6 +4466,28 @@ The tracking issue for this feature is: [#94785] "##, }, Lint { + label: "deref_patterns", + description: r##"# `deref_patterns` + +The tracking issue for this feature is: [#87121] + +[#87121]: https://github.com/rust-lang/rust/issues/87121 + +------------------------ +"##, + }, + Lint { + label: "deref_pure_trait", + description: r##"# `deref_pure_trait` + +The tracking issue for this feature is: [#87121] + +[#87121]: https://github.com/rust-lang/rust/issues/87121 + +------------------------ +"##, + }, + Lint { label: "derive_clone_copy", description: r##"# `derive_clone_copy` @@ -4339,91 +4515,14 @@ This feature is internal to the Rust compiler and is not intended for general us "##, }, Lint { - label: "diagnostic_namespace", - description: r##"# `diagnostic_namespace` + label: "derive_smart_pointer", + description: r##"# `derive_smart_pointer` -The tracking issue for this feature is: [#111996] +The tracking issue for this feature is: [#123430] -[#111996]: https://github.com/rust-lang/rust/issues/111996 +[#123430]: https://github.com/rust-lang/rust/issues/123430 ------------------------ - -The `diagnostic_namespace` feature permits customization of compilation errors. - -## diagnostic::on_unimplemented - -With [#114452] support for `diagnostic::on_unimplemented` was added. 
- -When used on a trait declaration, the following options are available: - -* `message` to customize the primary error message -* `note` to add a customized note message to an error message -* `label` to customize the label part of the error message - -The attribute will hint to the compiler to use these in error messages: -```rust -// some library -#![feature(diagnostic_namespace)] - -#[diagnostic::on_unimplemented( - message = "cannot insert element", - label = "cannot be put into a table", - note = "see <link> for more information about the Table api" -)] -pub trait Element { - // ... -} -``` - -```rust,compile_fail,E0277 -# #![feature(diagnostic_namespace)] -# -# #[diagnostic::on_unimplemented( -# message = "cannot insert element", -# label = "cannot be put into a table", -# note = "see <link> for more information about the Table api" -# )] -# pub trait Element { -# // ... -# } -# struct Table; -# impl Table { -# fn insert<T: Element>(&self, element: T) { -# // .. -# } -# } -# fn main() { -# let table = Table; -# let element = (); -// user code -table.insert(element); -# } -``` - -```text -error[E0277]: cannot insert element - --> src/main.rs:24:18 - | -24 | table.insert(element); - | ------ ^^^^^^^ cannot be put into a table - | | - | required by a bound introduced by this call - | - = help: the trait `Element` is not implemented for `<type>` - = note: see <link> for more information about the Table api -note: required by a bound in `Table::insert` - --> src/main.rs:15:18 - | -15 | fn insert<T: Element>(&self, element: T) { - | ^^^^^^^ required by this bound in `Table::insert` - -For more information about this error, try `rustc --explain E0277`. -``` - -See [RFC 3368] for more information. - -[#114452]: https://github.com/rust-lang/rust/pull/114452 -[RFC 3368]: https://github.com/rust-lang/rfcs/blob/master/text/3368-diagnostic-attribute-namespace.md "##, }, Lint { @@ -4456,17 +4555,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "div_duration", - description: r##"# `div_duration` - -The tracking issue for this feature is: [#63139] - -[#63139]: https://github.com/rust-lang/rust/issues/63139 - ------------------------- -"##, - }, - Lint { label: "do_not_recommend", description: r##"# `do_not_recommend` @@ -4648,17 +4736,6 @@ The tracking issue for this feature is: [#34761] "##, }, Lint { - label: "duration_abs_diff", - description: r##"# `duration_abs_diff` - -The tracking issue for this feature is: [#117618] - -[#117618]: https://github.com/rust-lang/rust/issues/117618 - ------------------------- -"##, - }, - Lint { label: "duration_constants", description: r##"# `duration_constants` @@ -4694,6 +4771,17 @@ The tracking issue for this feature is: [#72440] "##, }, Lint { + label: "duration_millis_float", + description: r##"# `duration_millis_float` + +The tracking issue for this feature is: [#122451] + +[#122451]: https://github.com/rust-lang/rust/issues/122451 + +------------------------ +"##, + }, + Lint { label: "duration_units", description: r##"# `duration_units` @@ -4725,6 +4813,15 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { + label: "effect_types", + description: r##"# `effect_types` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+ +------------------------ +"##, + }, + Lint { label: "effects", description: r##"# `effects` @@ -4769,17 +4866,6 @@ The tracking issue for this feature is: [#99301] "##, }, Lint { - label: "error_in_core", - description: r##"# `error_in_core` - -The tracking issue for this feature is: [#103765] - -[#103765]: https://github.com/rust-lang/rust/issues/103765 - ------------------------- -"##, - }, - Lint { label: "error_iter", description: r##"# `error_iter` @@ -4824,36 +4910,6 @@ The tracking issue for this feature is: [#35428] "##, }, Lint { - label: "exclusive_range_pattern", - description: r##"# `exclusive_range_pattern` - -The tracking issue for this feature is: [#37854]. - - -[#67264]: https://github.com/rust-lang/rust/issues/67264 -[#37854]: https://github.com/rust-lang/rust/issues/37854 ------ - -The `exclusive_range_pattern` feature allows non-inclusive range -patterns (`0..10`) to be used in appropriate pattern matching -contexts. It also can be combined with `#![feature(half_open_range_patterns]` -to be able to use RangeTo patterns (`..10`). - -It also enabled RangeFrom patterns but that has since been -stabilized. - -```rust -#![feature(exclusive_range_pattern)] - let x = 5; - match x { - 0..10 => println!("single digit"), - 10 => println!("ten isn't part of the above range"), - _ => println!("nor is everything else.") - } -``` -"##, - }, - Lint { label: "exclusive_wrapper", description: r##"# `exclusive_wrapper` @@ -4920,6 +4976,17 @@ The tracking issue for this feature is: [#95228] "##, }, Lint { + label: "expr_fragment_specifier_2024", + description: r##"# `expr_fragment_specifier_2024` + +The tracking issue for this feature is: [#123742] + +[#123742]: https://github.com/rust-lang/rust/issues/123742 + +------------------------ +"##, + }, + Lint { label: "extend_one", description: r##"# `extend_one` @@ -4967,6 +5034,32 @@ The tracking issue for this feature is: [#43244] "##, }, Lint { + label: "f128", + description: r##"# `f128` + +The tracking issue for this feature is: [#116909] + +[#116909]: https://github.com/rust-lang/rust/issues/116909 + +--- + +Enable the `f128` type for IEEE 128-bit floating numbers (quad precision). +"##, + }, + Lint { + label: "f16", + description: r##"# `f16` + +The tracking issue for this feature is: [#116909] + +[#116909]: https://github.com/rust-lang/rust/issues/116909 + +--- + +Enable the `f16` type for IEEE 16-bit floating numbers (half precision). 
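The two floating-point gates documented above can be exercised together. The following is a minimal sketch only: it assumes a nightly toolchain on which the unstable `f16` and `f128` gates (and basic operations on the new types) are available, and target support for these types still varies.

```rust
// Sketch: requires a nightly compiler with the unstable `f16`/`f128` gates.
#![feature(f16, f128)]

fn main() {
    let half: f16 = 1.5;   // IEEE 754 binary16 (half precision)
    let quad: f128 = 2.0;  // IEEE 754 binary128 (quad precision)
    // Formatting support for the new types is still limited, so cast before printing.
    println!("{} {}", half as f32, quad as f64);
}
```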
+"##, + }, + Lint { label: "fd", description: r##"# `fd` @@ -5249,12 +5342,23 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "fs_try_exists", - description: r##"# `fs_try_exists` + label: "freeze", + description: r##"# `freeze` + +The tracking issue for this feature is: [#121675] + +[#121675]: https://github.com/rust-lang/rust/issues/121675 + +------------------------ +"##, + }, + Lint { + label: "freeze_impls", + description: r##"# `freeze_impls` -The tracking issue for this feature is: [#83186] +The tracking issue for this feature is: [#121675] -[#83186]: https://github.com/rust-lang/rust/issues/83186 +[#121675]: https://github.com/rust-lang/rust/issues/121675 ------------------------ "##, @@ -5368,17 +5472,6 @@ The tracking issue for this feature is: [#113521] "##, }, Lint { - label: "generic_nonzero", - description: r##"# `generic_nonzero` - -The tracking issue for this feature is: [#120257] - -[#120257]: https://github.com/rust-lang/rust/issues/120257 - ------------------------- -"##, - }, - Lint { label: "get_many_mut", description: r##"# `get_many_mut` @@ -5401,11 +5494,22 @@ The tracking issue for this feature is: [#63292] "##, }, Lint { + label: "global_registration", + description: r##"# `global_registration` + +The tracking issue for this feature is: [#125119] + +[#125119]: https://github.com/rust-lang/rust/issues/125119 + +------------------------ +"##, + }, + Lint { label: "half_open_range_patterns_in_slices", description: r##"# `half_open_range_patterns_in_slices` The tracking issue for this feature is: [#67264] -It is part of the `exclusive_range_pattern` feature, +It is a future part of the `exclusive_range_pattern` feature, tracked at [#37854]. [#67264]: https://github.com/rust-lang/rust/issues/67264 @@ -5416,7 +5520,6 @@ This feature allow using top-level half-open range patterns in slices. ```rust #![feature(half_open_range_patterns_in_slices)] -#![feature(exclusive_range_pattern)] fn main() { let xs = [13, 1, 5, 2, 3, 1, 21, 8]; @@ -5499,17 +5602,6 @@ The tracking issue for this feature is: [#44839] "##, }, Lint { - label: "hint_assert_unchecked", - description: r##"# `hint_assert_unchecked` - -The tracking issue for this feature is: [#119131] - -[#119131]: https://github.com/rust-lang/rust/issues/119131 - ------------------------- -"##, - }, - Lint { label: "hint_must_use", description: r##"# `hint_must_use` @@ -5554,17 +5646,6 @@ The tracking issue for this feature is: [#99697] "##, }, Lint { - label: "imported_main", - description: r##"# `imported_main` - -The tracking issue for this feature is: [#28937] - -[#28937]: https://github.com/rust-lang/rust/issues/28937 - ------------------------- -"##, - }, - Lint { label: "inherent_associated_types", description: r##"# `inherent_associated_types` @@ -5576,49 +5657,11 @@ The tracking issue for this feature is: [#8995] "##, }, Lint { - label: "inline_const", - description: r##"# `inline_const` - -The tracking issue for this feature is: [#76001] - -See also [`inline_const_pat`](inline-const-pat.md) - ------- - -This feature allows you to use inline constant expressions. 
For example, you can -turn this code: - -```rust -# fn add_one(x: i32) -> i32 { x + 1 } -const MY_COMPUTATION: i32 = 1 + 2 * 3 / 4; - -fn main() { - let x = add_one(MY_COMPUTATION); -} -``` - -into this code: - -```rust -#![feature(inline_const)] - -# fn add_one(x: i32) -> i32 { x + 1 } -fn main() { - let x = add_one(const { 1 + 2 * 3 / 4 }); -} -``` - -[#76001]: https://github.com/rust-lang/rust/issues/76001 -"##, - }, - Lint { label: "inline_const_pat", description: r##"# `inline_const_pat` The tracking issue for this feature is: [#76001] -See also [`inline_const`](inline-const.md) - ------ This feature allows you to use inline constant expressions in pattern position: @@ -5671,6 +5714,17 @@ The tracking issue for this feature is: [#99069] "##, }, Lint { + label: "integer_sign_cast", + description: r##"# `integer_sign_cast` + +The tracking issue for this feature is: [#125882] + +[#125882]: https://github.com/rust-lang/rust/issues/125882 + +------------------------ +"##, + }, + Lint { label: "internal_impls_macro", description: r##"# `internal_impls_macro` @@ -5729,7 +5783,7 @@ All intrinsic fallback bodies are automatically made cross-crate inlineable (lik by the codegen backend, but not the MIR inliner. ```rust -#![feature(rustc_attrs, effects)] +#![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -5739,7 +5793,7 @@ const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {} Since these are just regular functions, it is perfectly ok to create the intrinsic twice: ```rust -#![feature(rustc_attrs, effects)] +#![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -5763,12 +5817,23 @@ with any regular function. Various intrinsics have native MIR operations that they correspond to. Instead of requiring backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass will convert the calls to the MIR operation. Backends do not need to know about these intrinsics -at all. +at all. These intrinsics only make sense without a body, and can either be declared as a "rust-intrinsic" +or as a `#[rustc_intrinsic]`. The body is never used, as calls to the intrinsic do not exist +anymore after MIR analyses. ## Intrinsics without fallback logic These must be implemented by all backends. +### `#[rustc_intrinsic]` declarations + +These are written like intrinsics with fallback bodies, but the body is irrelevant. +Use `loop {}` for the body or call the intrinsic recursively and add +`#[rustc_intrinsic_must_be_overridden]` to the function to ensure that backends don't +invoke the body. + +### Legacy extern ABI based intrinsics + These are imported as if they were FFI functions, with the special `rust-intrinsic` ABI. For example, if one was in a freestanding context, but wished to be able to `transmute` between types, and @@ -5792,17 +5857,6 @@ You can add `#[rustc_safe_intrinsic]` to the intrinsic to make it safe to call. 
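The body-less `#[rustc_intrinsic]` form described above can be sketched as follows. This is only an illustration of the declaration pattern the text describes (re-declaring the existing `unreachable` intrinsic), assuming a nightly compiler; these attributes are internal and their exact shape may differ between compiler versions.

```rust
// Sketch: internal, nightly-only attributes; not a stable interface.
#![feature(rustc_attrs)]
#![allow(internal_features)]

// `#[rustc_intrinsic_must_be_overridden]` tells the compiler that the backend
// must supply the real implementation, so the placeholder body below is never
// invoked; `loop {}` merely satisfies the `-> !` signature.
#[rustc_intrinsic]
#[rustc_intrinsic_must_be_overridden]
pub unsafe fn unreachable() -> ! {
    loop {}
}

fn main() {}
```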
"##, }, Lint { - label: "io_error_downcast", - description: r##"# `io_error_downcast` - -The tracking issue for this feature is: [#99262] - -[#99262]: https://github.com/rust-lang/rust/issues/99262 - ------------------------- -"##, - }, - Lint { label: "io_error_more", description: r##"# `io_error_more` @@ -5845,23 +5899,23 @@ The tracking issue for this feature is: [#27709] "##, }, Lint { - label: "ip_bits", - description: r##"# `ip_bits` + label: "is_ascii_octdigit", + description: r##"# `is_ascii_octdigit` -The tracking issue for this feature is: [#113744] +The tracking issue for this feature is: [#101288] -[#113744]: https://github.com/rust-lang/rust/issues/113744 +[#101288]: https://github.com/rust-lang/rust/issues/101288 ------------------------ "##, }, Lint { - label: "is_ascii_octdigit", - description: r##"# `is_ascii_octdigit` + label: "is_none_or", + description: r##"# `is_none_or` -The tracking issue for this feature is: [#101288] +The tracking issue for this feature is: [#126383] -[#101288]: https://github.com/rust-lang/rust/issues/101288 +[#126383]: https://github.com/rust-lang/rust/issues/126383 ------------------------ "##, @@ -5935,6 +5989,17 @@ The tracking issue for this feature is: [#100450] "##, }, Lint { + label: "iter_chain", + description: r##"# `iter_chain` + +The tracking issue for this feature is: [#125964] + +[#125964]: https://github.com/rust-lang/rust/issues/125964 + +------------------------ +"##, + }, + Lint { label: "iter_collect_into", description: r##"# `iter_collect_into` @@ -6056,6 +6121,17 @@ The tracking issue for this feature is: [#87053] "##, }, Lint { + label: "junction_point", + description: r##"# `junction_point` + +The tracking issue for this feature is: [#121709] + +[#121709]: https://github.com/rust-lang/rust/issues/121709 + +------------------------ +"##, + }, + Lint { label: "lahfsahf_target_feature", description: r##"# `lahfsahf_target_feature` @@ -6208,23 +6284,12 @@ The tracking issue for this feature is: [#69835] "##, }, Lint { - label: "lazy_cell", - description: r##"# `lazy_cell` - -The tracking issue for this feature is: [#109736] - -[#109736]: https://github.com/rust-lang/rust/issues/109736 - ------------------------- -"##, - }, - Lint { label: "lazy_cell_consume", description: r##"# `lazy_cell_consume` -The tracking issue for this feature is: [#109736] +The tracking issue for this feature is: [#125623] -[#109736]: https://github.com/rust-lang/rust/issues/109736 +[#125623]: https://github.com/rust-lang/rust/issues/125623 ------------------------ "##, @@ -6368,17 +6433,6 @@ The tracking issue for this feature is: [#114135] "##, }, Lint { - label: "lint_reasons", - description: r##"# `lint_reasons` - -The tracking issue for this feature is: [#54503] - -[#54503]: https://github.com/rust-lang/rust/issues/54503 - ------------------------- -"##, - }, - Lint { label: "linux_pidfd", description: r##"# `linux_pidfd` @@ -6434,6 +6488,17 @@ The tracking issue for this feature is: [#83527] "##, }, Lint { + label: "macro_metavar_expr_concat", + description: r##"# `macro_metavar_expr_concat` + +The tracking issue for this feature is: [#124225] + +[#124225]: https://github.com/rust-lang/rust/issues/124225 + +------------------------ +"##, + }, + Lint { label: "map_entry_replace", description: r##"# `map_entry_replace` @@ -6539,6 +6604,17 @@ The tracking issue for this feature is: [#93092] "##, }, Lint { + label: "maybe_uninit_fill", + description: r##"# `maybe_uninit_fill` + +The tracking issue for this feature is: [#117428] + +[#117428]: 
https://github.com/rust-lang/rust/issues/117428 + +------------------------ +"##, + }, + Lint { label: "maybe_uninit_slice", description: r##"# `maybe_uninit_slice` @@ -6691,6 +6767,17 @@ The tracking issue for this feature is: [#83310] "##, }, Lint { + label: "mut_ref", + description: r##"# `mut_ref` + +The tracking issue for this feature is: [#123076] + +[#123076]: https://github.com/rust-lang/rust/issues/123076 + +------------------------ +"##, + }, + Lint { label: "naked_functions", description: r##"# `naked_functions` @@ -6838,6 +6925,17 @@ The tracking issue for this feature is: [#65992] "##, }, Lint { + label: "new_range_api", + description: r##"# `new_range_api` + +The tracking issue for this feature is: [#125687] + +[#125687]: https://github.com/rust-lang/rust/issues/125687 + +------------------------ +"##, + }, + Lint { label: "new_uninit", description: r##"# `new_uninit` @@ -6915,17 +7013,6 @@ The tracking issue for this feature is: [#108185] "##, }, Lint { - label: "non_null_convenience", - description: r##"# `non_null_convenience` - -The tracking issue for this feature is: [#117691] - -[#117691]: https://github.com/rust-lang/rust/issues/117691 - ------------------------- -"##, - }, - Lint { label: "non_zero_count_ones", description: r##"# `non_zero_count_ones` @@ -7032,6 +7119,17 @@ The tracking issue for this feature is: [#120140] "##, }, Lint { + label: "offset_of_slice", + description: r##"# `offset_of_slice` + +The tracking issue for this feature is: [#126151] + +[#126151]: https://github.com/rust-lang/rust/issues/126151 + +------------------------ +"##, + }, + Lint { label: "omit_gdb_pretty_printer_section", description: r##"# `omit_gdb_pretty_printer_section` @@ -7041,6 +7139,17 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { + label: "once_cell_get_mut", + description: r##"# `once_cell_get_mut` + +The tracking issue for this feature is: [#121641] + +[#121641]: https://github.com/rust-lang/rust/issues/121641 + +------------------------ +"##, + }, + Lint { label: "once_cell_try", description: r##"# `once_cell_try` @@ -7096,17 +7205,6 @@ The tracking issue for this feature is: [#82901] "##, }, Lint { - label: "option_take_if", - description: r##"# `option_take_if` - -The tracking issue for this feature is: [#98934] - -[#98934]: https://github.com/rust-lang/rust/issues/98934 - ------------------------- -"##, - }, - Lint { label: "option_zip", description: r##"# `option_zip` @@ -7140,6 +7238,17 @@ The tracking issue for this feature is: [#118485] "##, }, Lint { + label: "os_string_pathbuf_leak", + description: r##"# `os_string_pathbuf_leak` + +The tracking issue for this feature is: [#125965] + +[#125965]: https://github.com/rust-lang/rust/issues/125965 + +------------------------ +"##, + }, + Lint { label: "panic_abort", description: r##"# `panic_abort` @@ -7184,21 +7293,21 @@ The tracking issue for this feature is: [#92988] "##, }, Lint { - label: "panic_info_message", - description: r##"# `panic_info_message` - -The tracking issue for this feature is: [#66745] + label: "panic_internals", + description: r##"# `panic_internals` -[#66745]: https://github.com/rust-lang/rust/issues/66745 +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
------------------------ "##, }, Lint { - label: "panic_internals", - description: r##"# `panic_internals` + label: "panic_payload_as_str", + description: r##"# `panic_payload_as_str` -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. +The tracking issue for this feature is: [#125175] + +[#125175]: https://github.com/rust-lang/rust/issues/125175 ------------------------ "##, @@ -7237,6 +7346,28 @@ The tracking issue for this feature is: [#92649] "##, }, Lint { + label: "patchable_function_entry", + description: r##"# `patchable_function_entry` + +The tracking issue for this feature is: [#123115] + +[#123115]: https://github.com/rust-lang/rust/issues/123115 + +------------------------ +"##, + }, + Lint { + label: "path_add_extension", + description: r##"# `path_add_extension` + +The tracking issue for this feature is: [#127292] + +[#127292]: https://github.com/rust-lang/rust/issues/127292 + +------------------------ +"##, + }, + Lint { label: "path_file_prefix", description: r##"# `path_file_prefix` @@ -7268,6 +7399,17 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { + label: "pattern_types", + description: r##"# `pattern_types` + +The tracking issue for this feature is: [#123646] + +[#123646]: https://github.com/rust-lang/rust/issues/123646 + +------------------------ +"##, + }, + Lint { label: "peer_credentials_unix_socket", description: r##"# `peer_credentials_unix_socket` @@ -7290,8 +7432,8 @@ The tracking issue for this feature is: [#86918] "##, }, Lint { - label: "pointer_is_aligned", - description: r##"# `pointer_is_aligned` + label: "pointer_is_aligned_to", + description: r##"# `pointer_is_aligned_to` The tracking issue for this feature is: [#96284] @@ -7321,6 +7463,32 @@ The tracking issue for this feature is: [#86656] "##, }, Lint { + label: "postfix_match", + description: r##"# `postfix-match` + +`postfix-match` adds the feature for matching upon values postfix +the expressions that generate the values. 
+ +```rust,edition2021 +#![feature(postfix_match)] + +enum Foo { + Bar, + Baz +} + +fn get_foo() -> Foo { + Foo::Bar +} + +get_foo().match { + Foo::Bar => {}, + Foo::Baz => panic!(), +} +``` +"##, + }, + Lint { label: "powerpc_target_feature", description: r##"# `powerpc_target_feature` @@ -7332,6 +7500,17 @@ The tracking issue for this feature is: [#44839] "##, }, Lint { + label: "precise_capturing", + description: r##"# `precise_capturing` + +The tracking issue for this feature is: [#123432] + +[#123432]: https://github.com/rust-lang/rust/issues/123432 + +------------------------ +"##, + }, + Lint { label: "prelude_2024", description: r##"# `prelude_2024` @@ -7372,28 +7551,6 @@ This feature is internal to the Rust compiler and is not intended for general us "##, }, Lint { - label: "proc_macro_byte_character", - description: r##"# `proc_macro_byte_character` - -The tracking issue for this feature is: [#115268] - -[#115268]: https://github.com/rust-lang/rust/issues/115268 - ------------------------- -"##, - }, - Lint { - label: "proc_macro_c_str_literals", - description: r##"# `proc_macro_c_str_literals` - -The tracking issue for this feature is: [#119750] - -[#119750]: https://github.com/rust-lang/rust/issues/119750 - ------------------------- -"##, - }, - Lint { label: "proc_macro_def_site", description: r##"# `proc_macro_def_site` @@ -7529,6 +7686,17 @@ The tracking issue for this feature is: [#102070] "##, }, Lint { + label: "ptr_as_ref_unchecked", + description: r##"# `ptr_as_ref_unchecked` + +The tracking issue for this feature is: [#122034] + +[#122034]: https://github.com/rust-lang/rust/issues/122034 + +------------------------ +"##, + }, + Lint { label: "ptr_as_uninit", description: r##"# `ptr_as_uninit` @@ -7582,17 +7750,6 @@ The tracking issue for this feature is: [#95892] "##, }, Lint { - label: "ptr_to_from_bits", - description: r##"# `ptr_to_from_bits` - -The tracking issue for this feature is: [#91126] - -[#91126]: https://github.com/rust-lang/rust/issues/91126 - ------------------------- -"##, - }, - Lint { label: "pub_crate_should_not_need_unstable_attr", description: r##"# `pub_crate_should_not_need_unstable_attr` @@ -7686,6 +7843,28 @@ The tracking issue for this feature is: [#121440] "##, }, Lint { + label: "ref_pat_eat_one_layer_2024", + description: r##"# `ref_pat_eat_one_layer_2024` + +The tracking issue for this feature is: [#123076] + +[#123076]: https://github.com/rust-lang/rust/issues/123076 + +------------------------ +"##, + }, + Lint { + label: "ref_pat_eat_one_layer_2024_structural", + description: r##"# `ref_pat_eat_one_layer_2024_structural` + +The tracking issue for this feature is: [#123076] + +[#123076]: https://github.com/rust-lang/rust/issues/123076 + +------------------------ +"##, + }, + Lint { label: "register_tool", description: r##"# `register_tool` @@ -7739,6 +7918,24 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { + label: "result_ffi_guarantees", + description: r##"# `result_ffi_guarantees` + +The tracking issue for this feature is: [#110503] + +[#110503]: https://github.com/rust-lang/rust/issues/110503 + +------------------------ + +This feature adds the possibility of using `Result<T, E>` in FFI if T's niche +value can be used to describe E or vise-versa. + +See [RFC 3391] for more information. 
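As a concrete illustration of what RFC 3391 covers, here is a small sketch. The `open_handle` function name and its zero-means-error convention are made up for this example; the layout claim in the comment is the one the RFC describes for `Result` types that pair a niche-carrying payload with a 1-aligned zero-sized error.

```rust
// Sketch: assumes a nightly toolchain with the `result_ffi_guarantees` gate.
#![feature(result_ffi_guarantees)]
use std::num::NonZeroI32;

// Per RFC 3391, `Result<NonZeroI32, ()>` has the same ABI as a plain `i32`,
// with 0 representing `Err(())`, which is what makes this signature usable from C.
#[no_mangle]
pub extern "C" fn open_handle(raw: i32) -> Result<NonZeroI32, ()> {
    NonZeroI32::new(raw).ok_or(())
}

fn main() {
    assert_eq!(open_handle(0), Err(()));
    assert!(open_handle(7).is_ok());
}
```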
+ +[RFC 3391]: https://github.com/rust-lang/rfcs/blob/master/text/3391-result_ffi_guarantees.md +"##, + }, + Lint { label: "result_flattening", description: r##"# `result_flattening` @@ -7882,6 +8079,15 @@ error: aborting due to 2 previous errors "##, }, Lint { + label: "rustc_encodable_decodable", + description: r##"# `rustc_encodable_decodable` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ +"##, + }, + Lint { label: "rustc_private", description: r##"# `rustc_private` @@ -7924,17 +8130,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "seek_seek_relative", - description: r##"# `seek_seek_relative` - -The tracking issue for this feature is: [#117374] - -[#117374]: https://github.com/rust-lang/rust/issues/117374 - ------------------------- -"##, - }, - Lint { label: "seek_stream_len", description: r##"# `seek_stream_len` @@ -7979,6 +8174,17 @@ The tracking issue for this feature is: [#56975] "##, }, Lint { + label: "shorter_tail_lifetimes", + description: r##"# `shorter_tail_lifetimes` + +The tracking issue for this feature is: [#123739] + +[#123739]: https://github.com/rust-lang/rust/issues/123739 + +------------------------ +"##, + }, + Lint { label: "simd_ffi", description: r##"# `simd_ffi` @@ -8032,17 +8238,6 @@ The tracking issue for this feature is: [#27747] "##, }, Lint { - label: "slice_flatten", - description: r##"# `slice_flatten` - -The tracking issue for this feature is: [#95629] - -[#95629]: https://github.com/rust-lang/rust/issues/95629 - ------------------------- -"##, - }, - Lint { label: "slice_from_ptr_range", description: r##"# `slice_from_ptr_range` @@ -8116,17 +8311,6 @@ The tracking issue for this feature is: [#74265] "##, }, Lint { - label: "slice_ptr_len", - description: r##"# `slice_ptr_len` - -The tracking issue for this feature is: [#71146] - -[#71146]: https://github.com/rust-lang/rust/issues/71146 - ------------------------- -"##, - }, - Lint { label: "slice_range", description: r##"# `slice_range` @@ -8138,17 +8322,6 @@ The tracking issue for this feature is: [#76393] "##, }, Lint { - label: "slice_split_at_unchecked", - description: r##"# `slice_split_at_unchecked` - -The tracking issue for this feature is: [#76014] - -[#76014]: https://github.com/rust-lang/rust/issues/76014 - ------------------------- -"##, - }, - Lint { label: "slice_split_once", description: r##"# `slice_split_once` @@ -8202,24 +8375,6 @@ The tracking issue for this feature is: [#93396] "##, }, Lint { - label: "sort_internals", - description: r##"# `sort_internals` - -This feature is internal to the Rust compiler and is not intended for general use. - ------------------------- -"##, - }, - Lint { - label: "spec_option_partial_eq", - description: r##"# `spec_option_partial_eq` - -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
- ------------------------- -"##, - }, - Lint { label: "specialization", description: r##"# `specialization` @@ -8253,17 +8408,6 @@ The tracking issue for this feature is: [#96137] "##, }, Lint { - label: "split_at_checked", - description: r##"# `split_at_checked` - -The tracking issue for this feature is: [#119128] - -[#119128]: https://github.com/rust-lang/rust/issues/119128 - ------------------------- -"##, - }, - Lint { label: "sse4a_target_feature", description: r##"# `sse4a_target_feature` @@ -8696,9 +8840,9 @@ The tracking issue for this feature is: [#96256] label: "tcplistener_into_incoming", description: r##"# `tcplistener_into_incoming` -The tracking issue for this feature is: [#88339] +The tracking issue for this feature is: [#88373] -[#88339]: https://github.com/rust-lang/rust/issues/88339 +[#88373]: https://github.com/rust-lang/rust/issues/88373 ------------------------ "##, @@ -9322,6 +9466,17 @@ The tracking issue for this feature is: [#96374] "##, }, Lint { + label: "try_with_capacity", + description: r##"# `try_with_capacity` + +The tracking issue for this feature is: [#91913] + +[#91913]: https://github.com/rust-lang/rust/issues/91913 + +------------------------ +"##, + }, + Lint { label: "tuple_trait", description: r##"# `tuple_trait` @@ -9390,12 +9545,10 @@ fn main () { "##, }, Lint { - label: "type_privacy_lints", - description: r##"# `type_privacy_lints` - -The tracking issue for this feature is: [#48054] + label: "ub_checks", + description: r##"# `ub_checks` -[#48054]: https://github.com/rust-lang/rust/issues/48054 +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. ------------------------ "##, @@ -9441,17 +9594,6 @@ fn main() {} "##, }, Lint { - label: "unchecked_math", - description: r##"# `unchecked_math` - -The tracking issue for this feature is: [#85122] - -[#85122]: https://github.com/rust-lang/rust/issues/85122 - ------------------------- -"##, - }, - Lint { label: "unchecked_neg", description: r##"# `unchecked_neg` @@ -9516,72 +9658,6 @@ The tracking issue for this feature is: [#96467] "##, }, Lint { - label: "unix_sigpipe", - description: r##"# `unix_sigpipe` - -The tracking issue for this feature is: [#97889] - -[#97889]: https://github.com/rust-lang/rust/issues/97889 - ---- - -The `#[unix_sigpipe = "..."]` attribute on `fn main()` can be used to specify how libstd shall setup `SIGPIPE` on Unix platforms before invoking `fn main()`. This attribute is ignored on non-Unix targets. There are three variants: -* `#[unix_sigpipe = "inherit"]` -* `#[unix_sigpipe = "sig_dfl"]` -* `#[unix_sigpipe = "sig_ign"]` - -## `#[unix_sigpipe = "inherit"]` - -Leave `SIGPIPE` untouched before entering `fn main()`. Unless the parent process has changed the default `SIGPIPE` handler from `SIG_DFL` to something else, this will behave the same as `#[unix_sigpipe = "sig_dfl"]`. - -## `#[unix_sigpipe = "sig_dfl"]` - -Set the `SIGPIPE` handler to `SIG_DFL`. This will result in your program getting killed if it tries to write to a closed pipe. This is normally what you want if your program produces textual output. - -### Example - -```rust,no_run -#![feature(unix_sigpipe)] -#[unix_sigpipe = "sig_dfl"] -fn main() { loop { println!("hello world"); } } -``` - -```bash -% ./main | head -n 1 -hello world -``` - -## `#[unix_sigpipe = "sig_ign"]` - -Set the `SIGPIPE` handler to `SIG_IGN` before invoking `fn main()`. This will result in `ErrorKind::BrokenPipe` errors if you program tries to write to a closed pipe. 
This is normally what you want if you for example write socket servers, socket clients, or pipe peers. - -This is what libstd has done by default since 2014. (However, see the note on child processes below.) - -### Example - -```rust,no_run -#![feature(unix_sigpipe)] -#[unix_sigpipe = "sig_ign"] -fn main() { loop { println!("hello world"); } } -``` - -```bash -% ./main | head -n 1 -hello world -thread 'main' panicked at 'failed printing to stdout: Broken pipe (os error 32)', library/std/src/io/stdio.rs:1016:9 -note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace -``` - -### Note on child processes - -When spawning child processes, the legacy Rust behavior if `#[unix_sigpipe]` is not specified is to -reset `SIGPIPE` to `SIG_DFL`. - -If `#[unix_sigpipe = "..."]` is specified, no matter what its value is, the signal disposition of -`SIGPIPE` is no longer reset. This means that the child inherits the parent's `SIGPIPE` behavior. -"##, - }, - Lint { label: "unix_socket_ancillary_data", description: r##"# `unix_socket_ancillary_data` @@ -9615,6 +9691,17 @@ The tracking issue for this feature is: [#49804] "##, }, Lint { + label: "unsafe_attributes", + description: r##"# `unsafe_attributes` + +The tracking issue for this feature is: [#123757] + +[#123757]: https://github.com/rust-lang/rust/issues/123757 + +------------------------ +"##, + }, + Lint { label: "unsafe_cell_from_mut", description: r##"# `unsafe_cell_from_mut` @@ -9626,6 +9713,17 @@ The tracking issue for this feature is: [#111645] "##, }, Lint { + label: "unsafe_extern_blocks", + description: r##"# `unsafe_extern_blocks` + +The tracking issue for this feature is: [#123743] + +[#123743]: https://github.com/rust-lang/rust/issues/123743 + +------------------------ +"##, + }, + Lint { label: "unsafe_pin_internals", description: r##"# `unsafe_pin_internals` @@ -9920,17 +10018,6 @@ The tracking issue for this feature is: [#94919] "##, }, Lint { - label: "utf8_chunks", - description: r##"# `utf8_chunks` - -The tracking issue for this feature is: [#99543] - -[#99543]: https://github.com/rust-lang/rust/issues/99543 - ------------------------- -"##, - }, - Lint { label: "variant_count", description: r##"# `variant_count` @@ -9953,6 +10040,17 @@ The tracking issue for this feature is: [#65816] "##, }, Lint { + label: "vec_pop_if", + description: r##"# `vec_pop_if` + +The tracking issue for this feature is: [#122741] + +[#122741]: https://github.com/rust-lang/rust/issues/122741 + +------------------------ +"##, + }, + Lint { label: "vec_push_within_capacity", description: r##"# `vec_push_within_capacity` @@ -10224,18 +10322,12 @@ checked."##, description: r##"Checks for usage of the `#[allow]` attribute and suggests replacing it with the `#[expect]` (See [RFC 2383](https://rust-lang.github.io/rfcs/2383-lint-reasons.html)) -The expect attribute is still unstable and requires the `lint_reasons` -on nightly. It can be enabled by adding `#![feature(lint_reasons)]` to -the crate root. - This lint only warns outer attributes (`#[allow]`), as inner attributes (`#![allow]`) are usually used to enable or disable lints on a global scale."##, }, Lint { label: "clippy::allow_attributes_without_reason", - description: r##"Checks for attributes that allow lints without a reason. - -(This requires the `lint_reasons` feature)"##, + description: r##"Checks for attributes that allow lints without a reason."##, }, Lint { label: "clippy::almost_complete_range", @@ -10309,6 +10401,10 @@ patterns."##, description: r##"Nothing. 
This lint has been deprecated."##, }, Lint { + label: "clippy::assigning_clones", + description: r##"Checks for code like `foo = bar.clone();`"##, + }, + Lint { label: "clippy::async_yields_async", description: r##"Checks for async blocks that yield values of types that can themselves be awaited."##, @@ -10401,8 +10497,8 @@ Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) fo }, Lint { label: "clippy::box_default", - description: r##"checks for `Box::new(T::default())`, which is better written as -`Box::<T>::default()`."##, + description: r##"checks for `Box::new(Default::default())`, which can be written as +`Box::default()`."##, }, Lint { label: "clippy::boxed_local", @@ -10419,6 +10515,11 @@ moved out of the blocks."##, description: r##"Warns if a generic shadows a built-in type."##, }, Lint { + label: "clippy::byte_char_slices", + description: r##"Checks for hard to read slices of byte characters, that could be more easily expressed as a +byte string."##, + }, + Lint { label: "clippy::bytes_count_to_len", description: r##"It checks for `str::bytes().count()` and suggests replacing it with `str::len()`."##, @@ -10507,6 +10608,10 @@ defined, this lint is `Allow` by default."##, description: r##"Checks for a raw slice being cast to a slice pointer"##, }, Lint { + label: "clippy::cfg_not_test", + description: r##"Checks for usage of `cfg` that excludes code from `test` builds. (i.e., `#{cfg(not(test))]`)"##, + }, + Lint { label: "clippy::char_lit_as_u8", description: r##"Checks for expressions where a character literal is cast to `u8` and suggests using a byte literal instead."##, @@ -10595,6 +10700,10 @@ rewritten with `match` and `cmp`."##, and suggests using `.is_empty()` where applicable."##, }, Lint { + label: "clippy::const_is_empty", + description: r##"It identifies calls to `.is_empty()` on constant values."##, + }, + Lint { label: "clippy::copy_iterator", description: r##"Checks for types that implement `Copy` as well as `Iterator`."##, @@ -10748,6 +10857,13 @@ types are defined in the clippy.toml file."##, statements."##, }, Lint { + label: "clippy::doc_lazy_continuation", + description: r##"In CommonMark Markdown, the language used to write doc comments, a +paragraph nested within a list or block quote does not need any line +after the first one to be indented or marked. The specification calls +this a lazy paragraph continuation."##, + }, + Lint { label: "clippy::doc_link_with_quotes", description: r##"Detects the syntax `['foo']` in documentation comments (notice quotes instead of backticks) outside of code blocks"##, @@ -10796,6 +10912,10 @@ marked as `#[must_use]`."##, differing by an underscore."##, }, Lint { + label: "clippy::duplicated_attributes", + description: r##"Checks for attributes that appear two or more times."##, + }, + Lint { label: "clippy::duration_subsec", description: r##"Checks for calculation of subsecond microseconds or milliseconds from other `Duration` methods."##, @@ -10817,11 +10937,11 @@ but without a final `else` branch."##, }, Lint { label: "clippy::empty_enum", - description: r##"Checks for `enum`s with no variants. + description: r##"Checks for `enum`s with no variants, which therefore are uninhabited types +(cannot be instantiated). -As of this writing, the `never_type` is still a -nightly-only experimental API. Therefore, this lint is only triggered -if the `never_type` is enabled."##, +As of this writing, the `never_type` is still a nightly-only experimental API. 
+Therefore, this lint is only triggered if `#![feature(never_type)]` is enabled."##, }, Lint { label: "clippy::empty_enum_variants_with_brackets", @@ -10890,7 +11010,7 @@ than that supported by the underlying type."##, }, Lint { label: "clippy::exhaustive_structs", - description: r##"Warns on any exported `structs`s that are not tagged `#[non_exhaustive]`"##, + description: r##"Warns on any exported `struct`s that are not tagged `#[non_exhaustive]`"##, }, Lint { label: "clippy::exit", @@ -10965,6 +11085,11 @@ anywhere else."##, with Default::default()."##, }, Lint { + label: "clippy::field_scoped_visibility_modifiers", + description: r##"Checks for usage of scoped visibility modifiers, like `pub(crate)`, on fields. These +make a field visible within a scope between public and private."##, + }, + Lint { label: "clippy::filetype_is_file", description: r##"Checks for `FileType::is_file()`."##, }, @@ -11297,6 +11422,11 @@ unless the annotated function is empty or simply panics."##, }, Lint { label: "clippy::integer_division", description: r##"Checks for division of integers"## }, Lint { + label: "clippy::integer_division_remainder_used", + description: r##"Checks for the usage of division (`/`) and remainder (`%`) operations +when performed on any integer types using the default `Div` and `Rem` trait implementations."##, + }, + Lint { label: "clippy::into_iter_on_ref", description: r##"Checks for `into_iter` calls on references which should be replaced by `iter` or `iter_mut`."##, @@ -11353,12 +11483,12 @@ create a `Vec`."##, Lint { label: "clippy::iter_filter_is_ok", description: r##"Checks for usage of `.filter(Result::is_ok)` that may be replaced with a `.flatten()` call. -This lint will require additional changes to the follow-up calls as it appects the type."##, +This lint will require additional changes to the follow-up calls as it affects the type."##, }, Lint { label: "clippy::iter_filter_is_some", description: r##"Checks for usage of `.filter(Option::is_some)` that may be replaced with a `.flatten()` call. 
-This lint will require additional changes to the follow-up calls as it appects the type."##, +This lint will require additional changes to the follow-up calls as it affects the type."##, }, Lint { label: "clippy::iter_kv_map", @@ -11376,8 +11506,8 @@ ignoring either the keys or values."##, }, Lint { label: "clippy::iter_nth", - description: r##"Checks for usage of `.iter().nth()` (and the related -`.iter_mut().nth()`) on standard library types with *O*(1) element access."##, + description: r##"Checks for usage of `.iter().nth()`/`.iter_mut().nth()` on standard library types that have +equivalent `.get()`/`.get_mut()` methods."##, }, Lint { label: "clippy::iter_nth_zero", @@ -11456,7 +11586,7 @@ are too large."##, Lint { label: "clippy::large_include_file", description: r##"Checks for the inclusion of large files via `include_bytes!()` -and `include_str!()`"##, +or `include_str!()`."##, }, Lint { label: "clippy::large_stack_arrays", @@ -11481,6 +11611,11 @@ because that might induce API breakage, if the parameter is declared as mutable, or if the argument is a `self`."##, }, Lint { + label: "clippy::legacy_numeric_constants", + description: r##"Checks for usage of `<integer>::max_value()`, `std::<integer>::MAX`, +`std::<float>::EPSILON`, etc."##, + }, + Lint { label: "clippy::len_without_is_empty", description: r##"Checks for items that implement `.len()` but not `.is_empty()`."##, @@ -11547,6 +11682,10 @@ is resolved."##, cannot be represented as the underlying type without loss."##, }, Lint { + label: "clippy::macro_metavars_in_unsafe", + description: r##"Looks for macros that expand metavariables in an unsafe block."##, + }, + Lint { label: "clippy::macro_use_imports", description: r##"Checks for `#[macro_use] use...`."##, }, @@ -11567,7 +11706,12 @@ cannot be represented as the underlying type without loss."##, description: r##"Checks for usage of `std::mem::size_of::<T>() * 8` when `T::BITS` is available."##, }, - Lint { label: "clippy::manual_c_str_literals", description: r##""## }, + Lint { + label: "clippy::manual_c_str_literals", + description: r##"Checks for the manual creation of C strings (a string with a `NUL` byte at the end), either +through one of the `CStr` constructor functions, or more plainly by calling `.as_ptr()` +on a (byte) string literal with a hardcoded `\\0` byte at the end."##, + }, Lint { label: "clippy::manual_clamp", description: r##"Identifies good opportunities for a clamp function from std or core, and suggests using it."##, @@ -11602,6 +11746,10 @@ where only the `Some` or `Ok` variant of the iterator element is used."##, [`BuildHasher::hash_one`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html#method.hash_one"##, }, Lint { + label: "clippy::manual_inspect", + description: r##"Checks for uses of `map` which return the original item."##, + }, + Lint { label: "clippy::manual_instant_elapsed", description: r##"Lints subtraction between `Instant::now()` and another `Instant`."##, }, @@ -11621,7 +11769,10 @@ ascii range"##, description: r##"Checks for manual `is_infinite` reimplementations (i.e., `x == <float>::INFINITY || x == <float>::NEG_INFINITY`)."##, }, - Lint { label: "clippy::manual_is_variant_and", description: r##""## }, + Lint { + label: "clippy::manual_is_variant_and", + description: r##"Checks for usage of `option.map(f).unwrap_or_default()` and `result.map(f).unwrap_or_default()` where f is a function or closure that returns the `bool` type."##, + }, Lint { label: "clippy::manual_let_else", description: r##"Warn of cases where 
`let...else` could be used"##, @@ -11653,6 +11804,10 @@ slices that could be optimized by having a memcpy."##, description: r##"Finds patterns that reimplement `Option::ok_or`."##, }, Lint { + label: "clippy::manual_pattern_char_comparison", + description: r##"Checks for manual `char` comparison in string patterns"##, + }, + Lint { label: "clippy::manual_range_contains", description: r##"Checks for expressions like `x >= 3 && x < 8` that could be more readably expressed as `(3..8).contains(x)`."##, @@ -11672,6 +11827,11 @@ of `x.rem_euclid(4)`."##, description: r##"Checks for code to be replaced by `.retain()`."##, }, Lint { + label: "clippy::manual_rotate", + description: r##"It detects manual bit rotations that could be rewritten using standard +functions `rotate_left` or `rotate_right`."##, + }, + Lint { label: "clippy::manual_saturating_arithmetic", description: r##"Checks for `.checked_add/sub(x).unwrap_or(MAX/MIN)`."##, }, @@ -11713,6 +11873,11 @@ Note that the lint will not be emitted in const blocks, as the suggestion would description: r##"Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`."##, }, Lint { + label: "clippy::manual_unwrap_or_default", + description: r##"Checks if a `match` or `if let` expression can be simplified using +`.unwrap_or_default()`."##, + }, + Lint { label: "clippy::manual_while_let_some", description: r##"Looks for loops that check for emptiness of a `Vec` in the condition and pop an element in the body as a separate operation."##, @@ -11817,10 +11982,7 @@ and take drastic actions like `panic!`."##, }, Lint { label: "clippy::maybe_misused_cfg", - description: r##"Checks for `#[cfg(features = ...)]` and suggests to replace it with -`#[cfg(feature = ...)]`. - -It also checks if `cfg(test)` was misspelled."##, + description: r##"Nothing. This lint has been deprecated."##, }, Lint { label: "clippy::mem_forget", @@ -11844,7 +12006,7 @@ and `mem::replace(&mut _, mem::zeroed())`."##, }, Lint { label: "clippy::min_ident_chars", - description: r##"Checks for idents which comprise of a single letter. + description: r##"Checks for identifiers which consist of a single character (or fewer than the configured threshold). Note: This lint can be very noisy when enabled; it may be desirable to only enable it temporarily."##, @@ -11860,7 +12022,7 @@ used to clamp values, but switched so that the result is constant."##, }, Lint { label: "clippy::mismatched_target_os", - description: r##"Checks for cfg attributes having operating systems used in target family position."##, + description: r##"Nothing. This lint has been deprecated."##, }, Lint { label: "clippy::mismatching_type_param_order", @@ -11892,8 +12054,12 @@ is greater than the largest index used to index into the slice."##, description: r##"Suggests the use of `const` in functions and methods where possible."##, }, Lint { + label: "clippy::missing_const_for_thread_local", + description: r##"Suggests to use `const` in `thread_local!` macro if possible."##, + }, + Lint { label: "clippy::missing_docs_in_private_items", - description: r##"Warns if there is missing doc for any private documentable item"##, + description: r##"Warns if there is missing documentation for any private documentable item."##, }, Lint { label: "clippy::missing_enforced_import_renames", @@ -11931,12 +12097,11 @@ unsafe functions and warns if there is no `# Safety` section."##, Lint { label: "clippy::missing_trait_methods", description: r##"Checks if a provided method is used implicitly by a trait -implementation. 
A usage example would be a wrapper where every method -should perform some operation before delegating to the inner type's -implementation. - -This lint should typically be enabled on a specific trait `impl` item -rather than globally."##, +implementation."##, + }, + Lint { + label: "clippy::missing_transmute_annotations", + description: r##"Checks if transmute calls have all generics specified."##, }, Lint { label: "clippy::mistyped_literal_suffixes", @@ -11944,7 +12109,7 @@ rather than globally."##, }, Lint { label: "clippy::mixed_attributes_style", - description: r##"Checks that an item has only one kind of attributes."##, + description: r##"Checks for items that have the same kind of attributes with mixed styles (inner/outer)."##, }, Lint { label: "clippy::mixed_case_hex_literals", @@ -11959,7 +12124,7 @@ order of sub-expressions."##, }, Lint { label: "clippy::mod_module_files", - description: r##"Checks that module layout uses only self named module files, bans `mod.rs` files."##, + description: r##"Checks that module layout uses only self named module files; bans `mod.rs` files."##, }, Lint { label: "clippy::module_inception", @@ -12025,7 +12190,7 @@ reference with the output lifetime, this lint will not trigger."##, }, Lint { label: "clippy::mut_range_bound", - description: r##"Checks for loops which have a range bound that is a mutable variable"##, + description: r##"Checks for loops with a range bound that is a mutable variable."##, }, Lint { label: "clippy::mutable_key_type", @@ -12074,10 +12239,14 @@ value with `&ref`."##, }, Lint { label: "clippy::needless_borrows_for_generic_args", - description: r##"Checks for borrow operations (`&`) that used as a generic argument to a + description: r##"Checks for borrow operations (`&`) that are used as a generic argument to a function when the borrowed value could be used."##, }, Lint { + label: "clippy::needless_character_iteration", + description: r##"Checks if an iterator is used to check if a string is ascii."##, + }, + Lint { label: "clippy::needless_collect", description: r##"Checks for functions collecting an iterator when collect is not needed."##, @@ -12119,6 +12288,10 @@ relying on lifetime elision."##, when function signatures are the same."##, }, Lint { + label: "clippy::needless_maybe_sized", + description: r##"Lints `?Sized` bounds applied to type parameters that cannot be unsized"##, + }, + Lint { label: "clippy::needless_option_as_deref", description: r##"Checks for no-op uses of `Option::{as_deref, as_deref_mut}`, for example, `Option<&T>::as_deref()` returns the same type."##, @@ -12137,7 +12310,7 @@ superfluous."##, description: r##"Check if a `&mut` function argument is actually used mutably. Be careful if the function is publicly reexported as it would break compatibility with -users of this function."##, +users of this function, when the users pass this function as an argument."##, }, Lint { label: "clippy::needless_pass_by_value", @@ -12369,7 +12542,7 @@ can be eliminated."##, Lint { label: "clippy::panic", description: r##"Checks for usage of `panic!`."## }, Lint { label: "clippy::panic_in_result_fn", - description: r##"Checks for usage of `panic!` or assertions in a function of type result."##, + description: r##"Checks for usage of `panic!` or assertions in a function whose return type is `Result`."##, }, Lint { label: "clippy::panicking_unwrap", @@ -12377,7 +12550,7 @@ can be eliminated."##, }, Lint { label: "clippy::partial_pub_fields", - description: r##"Checks whether partial fields of a struct are public. 
+ description: r##"Checks whether some but not all fields of a `struct` are public. Either make all fields of a type public, or make none of them public"##, }, @@ -12685,6 +12858,11 @@ and suggests `std::ptr::from_ref` and `std::ptr::from_mut` instead."##, description: r##"Nothing. This lint has been deprecated."##, }, Lint { + label: "clippy::renamed_function_params", + description: r##"Lints when the name of function parameters from trait impl is +different than its default implementation."##, + }, + Lint { label: "clippy::repeat_once", description: r##"Checks for usage of `.repeat(1)` and suggest the following method for each types. - `.to_string()` for `str` @@ -12765,8 +12943,8 @@ one from a trait, another not from trait."##, }, Lint { label: "clippy::seek_from_current", - description: r##"Checks an argument of `seek` method of `Seek` trait -and if it start seek from `SeekFrom::Current(0)`, suggests `stream_position` instead."##, + description: r##"Checks if the `seek` method of the `Seek` trait is called with `SeekFrom::Current(0)`, +and if it is, suggests using `stream_position` instead."##, }, Lint { label: "clippy::seek_to_start_instead_of_rewind", @@ -12810,6 +12988,11 @@ see the `unseparated_literal_suffix` lint."##, description: r##"Checks for misuses of the serde API."##, }, Lint { + label: "clippy::set_contains_or_insert", + description: r##"Checks for usage of `contains` to see if a value is not +present on `HashSet` followed by a `insert`."##, + }, + Lint { label: "clippy::shadow_reuse", description: r##"Checks for bindings that shadow other bindings already in scope, while reusing the original value."##, @@ -13100,10 +13283,6 @@ either `ignore`, `no_run` or `compile_fail`."##, (marked with `#[cfg(test)]`)."##, }, Lint { - label: "clippy::thread_local_initializer_can_be_made_const", - description: r##"Suggests to use `const` in `thread_local!` macro if possible."##, - }, - Lint { label: "clippy::to_digit_is_some", description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##, }, @@ -13162,7 +13341,7 @@ syntax specifications for trait bounds are used simultaneously."##, }, Lint { label: "clippy::transmute_int_to_non_zero", - description: r##"Checks for transmutes from integers to `NonZero*` types, and suggests their `new_unchecked` + description: r##"Checks for transmutes from `T` to `NonZero<T>`, and suggests the `new_unchecked` method instead."##, }, Lint { @@ -13222,7 +13401,7 @@ declarations above a certain complexity threshold."##, }, Lint { label: "clippy::type_id_on_box", - description: r##"Looks for calls to `<Box<dyn Any> as Any>::type_id`."##, + description: r##"Looks for calls to `.type_id()` on a `Box<dyn _>`."##, }, Lint { label: "clippy::type_repetition_in_bounds", @@ -13234,8 +13413,8 @@ declarations above a certain complexity threshold."##, }, Lint { label: "clippy::unconditional_recursion", - description: r##"Checks that there isn't an infinite recursion in `PartialEq` trait -implementation."##, + description: r##"Checks that there isn't an infinite recursion in trait +implementations."##, }, Lint { label: "clippy::undocumented_unsafe_blocks", @@ -13380,6 +13559,12 @@ simpler code: is being constructed."##, }, Lint { + label: "clippy::unnecessary_min_or_max", + description: r##"Checks for unnecessary calls to `min()` or `max()` in the following cases +- Either both side is constant +- One side is clearly larger than the other, like i32::MIN and an i32 variable"##, + }, + Lint { label: "clippy::unnecessary_mut_passed", description: r##"Detects passing 
a mutable reference to a function that only requires an immutable reference."##, @@ -13586,11 +13771,21 @@ lint attributes. This lint permits lint attributes for lints emitted on the items themself. For `use` items these lints are: +* ambiguous_glob_reexports +* dead_code * deprecated +* hidden_glob_reexports * unreachable_pub -* unused_imports +* unused +* unused_braces +* unused_import_braces +* clippy::disallowed_types * clippy::enum_glob_use * clippy::macro_use_imports +* clippy::module_name_repetitions +* clippy::redundant_pub_crate +* clippy::single_component_path_imports +* clippy::unsafe_removed_from_name * clippy::wildcard_imports For `extern crate` items these lints are: @@ -13655,6 +13850,10 @@ to `trailing_zeros`"##, description: r##"Checks for usage of `waker.clone().wake()`"##, }, Lint { + label: "clippy::while_float", + description: r##"Checks for while loops comparing floating point values."##, + }, + Lint { label: "clippy::while_immutable_condition", description: r##"Checks whether variables used within while loop condition can be (and are) mutated in the body."##, @@ -13746,6 +13945,11 @@ architecture."##, description: r##"Catch casts from `0` to some pointer type"##, }, Lint { + label: "clippy::zero_repeat_side_effects", + description: r##"Checks for array or vec initializations which call a function or method, +but which have a repeat count of zero."##, + }, + Lint { label: "clippy::zero_sized_map_values", description: r##"Checks for maps with zero-sized value types anywhere in the code."##, }, @@ -13772,7 +13976,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::complexity", - description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::implied_bounds_in_impls, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, 
clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_filter_map, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##, + description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::implied_bounds_in_impls, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_clamp, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_inspect, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, 
clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_filter_map, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_min_or_max, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##, }, children: &[ "clippy::bind_instead_of_map", @@ -13808,12 +14012,14 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::iter_count", "clippy::iter_kv_map", "clippy::let_with_type_underscore", + "clippy::manual_clamp", "clippy::manual_filter", "clippy::manual_filter_map", "clippy::manual_find", "clippy::manual_find_map", "clippy::manual_flatten", "clippy::manual_hash_one", + "clippy::manual_inspect", "clippy::manual_main_separator_str", "clippy::manual_range_patterns", "clippy::manual_rem_euclid", @@ -13887,6 +14093,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::unnecessary_find_map", "clippy::unnecessary_literal_unwrap", "clippy::unnecessary_map_on_constructor", + "clippy::unnecessary_min_or_max", "clippy::unnecessary_operation", "clippy::unnecessary_sort_by", "clippy::unnecessary_unwrap", @@ -13906,7 +14113,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::correctness", - description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::eager_transmute, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, 
clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::lint_groups_priority, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, + description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::eager_transmute, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::lint_groups_priority, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, }, children: &[ "clippy::absurd_extreme_comparisons", @@ -13942,7 +14149,6 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::match_str_case_mismatch", "clippy::mem_replace_with_uninit", "clippy::min_max", - "clippy::mismatched_target_os", "clippy::mistyped_literal_suffixes", 
"clippy::modulo_one", "clippy::mut_from_ref", @@ -13983,7 +14189,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::deprecated", - description: r##"lint group for: clippy::assign_ops, clippy::extend_from_slice, clippy::filter_map, clippy::find_map, clippy::if_let_redundant_pattern_matching, clippy::misaligned_transmute, clippy::pub_enum_variant_names, clippy::range_step_by_zero, clippy::regex_macro, clippy::replace_consts, clippy::should_assert_eq, clippy::unsafe_vector_initialization, clippy::unstable_as_mut_slice, clippy::unstable_as_slice, clippy::unused_collect, clippy::wrong_pub_self_convention"##, + description: r##"lint group for: clippy::assign_ops, clippy::extend_from_slice, clippy::filter_map, clippy::find_map, clippy::if_let_redundant_pattern_matching, clippy::maybe_misused_cfg, clippy::misaligned_transmute, clippy::mismatched_target_os, clippy::pub_enum_variant_names, clippy::range_step_by_zero, clippy::regex_macro, clippy::replace_consts, clippy::should_assert_eq, clippy::unsafe_vector_initialization, clippy::unstable_as_mut_slice, clippy::unstable_as_slice, clippy::unused_collect, clippy::wrong_pub_self_convention"##, }, children: &[ "clippy::assign_ops", @@ -13991,7 +14197,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::filter_map", "clippy::find_map", "clippy::if_let_redundant_pattern_matching", + "clippy::maybe_misused_cfg", "clippy::misaligned_transmute", + "clippy::mismatched_target_os", "clippy::pub_enum_variant_names", "clippy::range_step_by_zero", "clippy::regex_macro", @@ -14007,7 +14215,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::nursery", - description: r##"lint group for: clippy::as_ptr_cast_mut, clippy::branches_sharing_code, clippy::clear_with_drain, clippy::cognitive_complexity, clippy::collection_is_never_read, clippy::debug_assert_with_mut_call, clippy::derive_partial_eq_without_eq, clippy::empty_line_after_doc_comments, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::iter_on_empty_collections, clippy::iter_on_single_items, clippy::iter_with_drain, clippy::large_stack_frames, clippy::manual_clamp, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::needless_collect, clippy::needless_pass_by_ref_mut, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::or_fun_call, clippy::path_buf_push_overwrite, clippy::read_zero_byte_vec, clippy::readonly_write_lock, clippy::redundant_clone, clippy::redundant_pub_crate, clippy::significant_drop_in_scrutinee, clippy::significant_drop_tightening, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trait_duplication_in_bounds, clippy::transmute_undefined_repr, clippy::trivial_regex, clippy::tuple_array_conversions, clippy::type_repetition_in_bounds, clippy::uninhabited_references, clippy::unnecessary_struct_initialization, clippy::unused_peekable, clippy::unused_rounding, clippy::use_self, clippy::useless_let_if_seq"##, + description: r##"lint group for: clippy::as_ptr_cast_mut, clippy::branches_sharing_code, clippy::clear_with_drain, clippy::cognitive_complexity, clippy::collection_is_never_read, clippy::debug_assert_with_mut_call, clippy::derive_partial_eq_without_eq, clippy::empty_line_after_doc_comments, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, 
clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::iter_on_empty_collections, clippy::iter_on_single_items, clippy::iter_with_drain, clippy::large_stack_frames, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::needless_collect, clippy::needless_pass_by_ref_mut, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::or_fun_call, clippy::path_buf_push_overwrite, clippy::read_zero_byte_vec, clippy::redundant_clone, clippy::redundant_pub_crate, clippy::set_contains_or_insert, clippy::significant_drop_in_scrutinee, clippy::significant_drop_tightening, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trait_duplication_in_bounds, clippy::transmute_undefined_repr, clippy::trivial_regex, clippy::tuple_array_conversions, clippy::type_repetition_in_bounds, clippy::uninhabited_references, clippy::unnecessary_struct_initialization, clippy::unused_peekable, clippy::unused_rounding, clippy::use_self, clippy::useless_let_if_seq, clippy::while_float"##, }, children: &[ "clippy::as_ptr_cast_mut", @@ -14027,7 +14235,6 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::iter_on_single_items", "clippy::iter_with_drain", "clippy::large_stack_frames", - "clippy::manual_clamp", "clippy::missing_const_for_fn", "clippy::mutex_integer", "clippy::needless_collect", @@ -14038,9 +14245,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::or_fun_call", "clippy::path_buf_push_overwrite", "clippy::read_zero_byte_vec", - "clippy::readonly_write_lock", "clippy::redundant_clone", "clippy::redundant_pub_crate", + "clippy::set_contains_or_insert", "clippy::significant_drop_in_scrutinee", "clippy::significant_drop_tightening", "clippy::string_lit_as_bytes", @@ -14058,14 +14265,16 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::unused_rounding", "clippy::use_self", "clippy::useless_let_if_seq", + "clippy::while_float", ], }, LintGroup { lint: Lint { label: "clippy::pedantic", - description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_filter_is_ok, clippy::iter_filter_is_some, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_c_str_literals, 
clippy::manual_instant_elapsed, clippy::manual_is_variant_and, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_as_ref_cloned, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::pub_underscore_fields, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_as_ptr, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::str_split_at_newline, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##, + description: r##"lint group for: clippy::assigning_clones, clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_filter_is_ok, clippy::iter_filter_is_some, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_c_str_literals, clippy::manual_instant_elapsed, 
clippy::manual_is_variant_and, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_as_ref_cloned, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::pub_underscore_fields, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_as_ptr, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_char_pattern, clippy::single_match_else, clippy::stable_sort_primitive, clippy::str_split_at_newline, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##, }, children: &[ + "clippy::assigning_clones", "clippy::bool_to_int_with_if", "clippy::borrow_as_ptr", "clippy::case_sensitive_file_extension_comparisons", @@ -14160,6 +14369,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::semicolon_if_nothing_returned", "clippy::should_panic_without_expect", "clippy::similar_names", + "clippy::single_char_pattern", "clippy::single_match_else", "clippy::stable_sort_primitive", "clippy::str_split_at_newline", @@ -14189,11 +14399,10 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::perf", - description: r##"lint group for: clippy::box_collection, clippy::box_default, clippy::boxed_local, clippy::cmp_owned, clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_nth, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_spin_loop, clippy::redundant_allocation, clippy::result_large_err, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::thread_local_initializer_can_be_made_const, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push, clippy::waker_clone_wake"##, + description: r##"lint group for: clippy::box_collection, clippy::boxed_local, clippy::cmp_owned, 
clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_const_for_thread_local, clippy::missing_spin_loop, clippy::readonly_write_lock, clippy::redundant_allocation, clippy::result_large_err, clippy::slow_vector_initialization, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push, clippy::waker_clone_wake"##, }, children: &[ "clippy::box_collection", - "clippy::box_default", "clippy::boxed_local", "clippy::cmp_owned", "clippy::collapsible_str_replace", @@ -14202,7 +14411,6 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::extend_with_drain", "clippy::format_collect", "clippy::format_in_format_args", - "clippy::iter_nth", "clippy::iter_overeager_cloned", "clippy::large_const_arrays", "clippy::large_enum_variant", @@ -14211,12 +14419,12 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::manual_str_repeat", "clippy::manual_try_fold", "clippy::map_entry", + "clippy::missing_const_for_thread_local", "clippy::missing_spin_loop", + "clippy::readonly_write_lock", "clippy::redundant_allocation", "clippy::result_large_err", - "clippy::single_char_pattern", "clippy::slow_vector_initialization", - "clippy::thread_local_initializer_can_be_made_const", "clippy::to_string_in_format_args", "clippy::unnecessary_to_owned", "clippy::useless_vec", @@ -14227,7 +14435,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::restriction", - description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_enum_variants_with_brackets, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, 
clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, + description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::cfg_not_test, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_enum_variants_with_brackets, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::field_scoped_visibility_modifiers, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::integer_division_remainder_used, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, 
clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::renamed_function_params, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, }, children: &[ "clippy::absolute_paths", @@ -14239,6 +14447,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::as_underscore", "clippy::assertions_on_result_states", "clippy::big_endian_bytes", + "clippy::cfg_not_test", "clippy::clone_on_ref_ptr", "clippy::create_dir", "clippy::dbg_macro", @@ -14256,6 +14465,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::exhaustive_structs", "clippy::exit", "clippy::expect_used", + "clippy::field_scoped_visibility_modifiers", "clippy::filetype_is_file", "clippy::float_arithmetic", "clippy::float_cmp_const", @@ -14271,6 +14481,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::inline_asm_x86_att_syntax", "clippy::inline_asm_x86_intel_syntax", "clippy::integer_division", + "clippy::integer_division_remainder_used", "clippy::iter_over_hash_type", "clippy::large_include_file", "clippy::let_underscore_must_use", @@ -14307,6 +14518,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::rc_mutex", "clippy::redundant_type_annotations", "clippy::ref_patterns", + "clippy::renamed_function_params", "clippy::rest_pat_in_fully_bound_structs", "clippy::same_name_method", "clippy::self_named_module_files", @@ -14347,7 +14559,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::style", - description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, 
clippy::get_first, clippy::if_same_then_else, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_enforced_import_renames, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_err_ok, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::to_string_trait_impl, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fallible_conversions, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_enumerate_index, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##, + description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::box_default, clippy::builtin_type_shadow, clippy::byte_char_slices, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, 
clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::doc_lazy_continuation, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::get_first, clippy::if_same_then_else, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::legacy_numeric_constants, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_pattern_char_comparison, clippy::manual_range_contains, clippy::manual_rotate, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_enforced_import_renames, clippy::missing_safety_doc, clippy::mixed_attributes_style, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_err_ok, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::to_string_trait_impl, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fallible_conversions, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, 
clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_enumerate_index, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##, }, children: &[ "clippy::assertions_on_constants", @@ -14355,7 +14567,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::blocks_in_conditions", "clippy::bool_assert_comparison", "clippy::borrow_interior_mutable_const", + "clippy::box_default", "clippy::builtin_type_shadow", + "clippy::byte_char_slices", "clippy::bytes_nth", "clippy::chars_last_cmp", "clippy::chars_next_cmp", @@ -14371,6 +14585,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::disallowed_methods", "clippy::disallowed_names", "clippy::disallowed_types", + "clippy::doc_lazy_continuation", "clippy::double_must_use", "clippy::double_neg", "clippy::duplicate_underscore_argument", @@ -14397,9 +14612,11 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::items_after_test_module", "clippy::iter_cloned_collect", "clippy::iter_next_slice", + "clippy::iter_nth", "clippy::iter_nth_zero", "clippy::iter_skip_next", "clippy::just_underscores_and_digits", + "clippy::legacy_numeric_constants", "clippy::len_without_is_empty", "clippy::len_zero", "clippy::let_and_return", @@ -14413,7 +14630,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::manual_map", "clippy::manual_next_back", "clippy::manual_non_exhaustive", + "clippy::manual_pattern_char_comparison", "clippy::manual_range_contains", + "clippy::manual_rotate", "clippy::manual_saturating_arithmetic", "clippy::manual_while_let_some", "clippy::map_clone", @@ -14426,6 +14645,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::mem_replace_with_default", "clippy::missing_enforced_import_renames", "clippy::missing_safety_doc", + "clippy::mixed_attributes_style", "clippy::mixed_case_hex_literals", "clippy::module_inception", "clippy::must_use_unit", @@ -14497,7 +14717,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::suspicious", - description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::deprecated_clippy_cfg_attr, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_docs, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::incompatible_msrv, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::mixed_attributes_style, clippy::multi_assignments, clippy::multiple_bound_locations, clippy::mut_range_bound, clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, 
clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_open_options, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion, clippy::unnecessary_clippy_cfg, clippy::unnecessary_get_then_check, clippy::unnecessary_result_map_or_else"##, + description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::const_is_empty, clippy::crate_in_macro_def, clippy::deprecated_clippy_cfg_attr, clippy::drop_non_drop, clippy::duplicate_mod, clippy::duplicated_attributes, clippy::empty_docs, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::incompatible_msrv, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::macro_metavars_in_unsafe, clippy::manual_unwrap_or_default, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::missing_transmute_annotations, clippy::multi_assignments, clippy::multiple_bound_locations, clippy::mut_range_bound, clippy::mutable_key_type, clippy::needless_character_iteration, clippy::needless_maybe_sized, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_open_options, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion, clippy::unnecessary_clippy_cfg, clippy::unnecessary_get_then_check, clippy::unnecessary_result_map_or_else, clippy::zero_repeat_side_effects"##, }, children: &[ "clippy::almost_complete_range", @@ -14511,10 +14731,12 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::cast_enum_truncation", "clippy::cast_nan_to_int", "clippy::cast_slice_from_raw_parts", + "clippy::const_is_empty", "clippy::crate_in_macro_def", "clippy::deprecated_clippy_cfg_attr", "clippy::drop_non_drop", "clippy::duplicate_mod", + "clippy::duplicated_attributes", "clippy::empty_docs", "clippy::empty_loop", "clippy::float_equality_without_abs", @@ -14527,14 +14749,17 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::join_absolute_paths", "clippy::let_underscore_future", "clippy::lines_filter_map_ok", - "clippy::maybe_misused_cfg", + 
"clippy::macro_metavars_in_unsafe", + "clippy::manual_unwrap_or_default", "clippy::misnamed_getters", "clippy::misrefactored_assign_op", - "clippy::mixed_attributes_style", + "clippy::missing_transmute_annotations", "clippy::multi_assignments", "clippy::multiple_bound_locations", "clippy::mut_range_bound", "clippy::mutable_key_type", + "clippy::needless_character_iteration", + "clippy::needless_maybe_sized", "clippy::no_effect_replace", "clippy::non_canonical_clone_impl", "clippy::non_canonical_partial_ord_impl", @@ -14563,6 +14788,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::unnecessary_clippy_cfg", "clippy::unnecessary_get_then_check", "clippy::unnecessary_result_map_or_else", + "clippy::zero_repeat_side_effects", ], }, ]; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs index 9d1f1cc09c6..81604b55272 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs @@ -1222,6 +1222,26 @@ use self::foo::{self, Bar, Foo}; ); } +#[test] +fn insert_with_double_colon_prefixed_import_merge() { + check_with_config( + "use ::ext::foo::Foo", + r#" +use ::ext::foo::Foo as _; +"#, + r#" +use ::ext::foo::Foo; +"#, + &InsertUseConfig { + granularity: ImportGranularity::Crate, + prefix_kind: hir::PrefixKind::BySelf, + enforce_granularity: true, + group: true, + skip_glob_imports: true, + }, + ); +} + fn check_with_config( path: &str, ra_fixture_before: &str, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs index b153aafa0e1..9cacb6b1a60 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs @@ -157,10 +157,14 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) } match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) { - (Some(true), None) => continue, + (Some(true), None) => { + remove_subtree_if_only_self(lhs_t); + continue; + } (None, Some(true)) => { ted::replace(lhs_t.syntax(), rhs_t.syntax()); *lhs_t = rhs_t; + remove_subtree_if_only_self(lhs_t); continue; } _ => (), @@ -278,14 +282,20 @@ pub fn try_normalize_use_tree_mut( fn recursive_normalize(use_tree: &ast::UseTree, style: NormalizationStyle) -> Option<()> { let use_tree_list = use_tree.use_tree_list()?; let merge_subtree_into_parent_tree = |single_subtree: &ast::UseTree| { + let subtree_is_only_self = single_subtree.path().as_ref().map_or(false, path_is_self); + let merged_path = match (use_tree.path(), single_subtree.path()) { + // If the subtree is `{self}` then we cannot merge: `use + // foo::bar::{self}` is not equivalent to `use foo::bar`. See + // https://github.com/rust-lang/rust-analyzer/pull/17140#issuecomment-2079189725. 
+ _ if subtree_is_only_self => None, + (None, None) => None, (Some(outer), None) => Some(outer), - (None, Some(inner)) if path_is_self(&inner) => None, (None, Some(inner)) => Some(inner), - (Some(outer), Some(inner)) if path_is_self(&inner) => Some(outer), (Some(outer), Some(inner)) => Some(make::path_concat(outer, inner).clone_for_update()), }; + if merged_path.is_some() || single_subtree.use_tree_list().is_some() || single_subtree.star_token().is_some() @@ -706,3 +716,20 @@ fn path_is_self(path: &ast::Path) -> bool { fn path_len(path: ast::Path) -> usize { path.segments().count() } + +fn get_single_subtree(use_tree: &ast::UseTree) -> Option<ast::UseTree> { + use_tree + .use_tree_list() + .and_then(|tree_list| tree_list.use_trees().collect_tuple()) + .map(|(single_subtree,)| single_subtree) +} + +fn remove_subtree_if_only_self(use_tree: &ast::UseTree) { + let Some(single_subtree) = get_single_subtree(use_tree) else { return }; + match (use_tree.path(), single_subtree.path()) { + (Some(_), Some(inner)) if path_is_self(&inner) => { + ted::remove_all_iter(single_subtree.syntax().children_with_tokens()); + } + _ => (), + } +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index 357209ceb0b..8fac5baa57b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -2,8 +2,6 @@ //! //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod apply_change; pub mod active_parameter; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 7c11dd3e2a4..e21d54ccd0e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -6,7 +6,7 @@ use hir::{AsAssocItem, HirDisplay, ImportPathConfig, ModuleDef, SemanticsScope}; use itertools::Itertools; use rustc_hash::FxHashMap; use syntax::{ - ast::{self, make, AstNode}, + ast::{self, make, AstNode, HasGenericArgs}, ted, SyntaxNode, }; @@ -308,8 +308,11 @@ impl Ctx<'_> { parent.segment()?.name_ref()?, ) .and_then(|trait_ref| { - let cfg = - ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let found_path = self.target_module.find_path( self.source_scope.db.upcast(), hir::ModuleDef::Trait(trait_ref), @@ -348,7 +351,11 @@ impl Ctx<'_> { } } - let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let found_path = self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?; let res = mod_path_to_ast(&found_path).clone_for_update(); @@ -383,7 +390,11 @@ impl Ctx<'_> { if let Some(adt) = ty.as_adt() { if let ast::Type::PathType(path_ty) = &ast_ty { - let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let found_path = self.target_module.find_path( self.source_scope.db.upcast(), ModuleDef::from(adt), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs index ab2a250289c..eacd9b9b4d2 100644 --- 
a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs @@ -7,11 +7,7 @@ pub fn is_rust_fence(s: &str) -> bool { let mut seen_rust_tags = false; let mut seen_other_tags = false; - let tokens = s - .trim() - .split(|c| c == ',' || c == ' ' || c == '\t') - .map(str::trim) - .filter(|t| !t.is_empty()); + let tokens = s.trim().split([',', ' ', '\t']).map(str::trim).filter(|t| !t.is_empty()); for token in tokens { match token { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index b62f34f4157..e1cfe048983 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -320,7 +320,6 @@ impl Definition { hir::GenericDef::TraitAlias(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()), - hir::GenericDef::Variant(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()), }; return match def { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs index 9f56e104145..d3f30207752 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs @@ -1,16 +1,23 @@ use hir::InFile; +use syntax::{AstNode, TextRange}; -use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: incoherent-impl // // This diagnostic is triggered if the targe type of an impl is from a foreign crate. 
pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic { - Diagnostic::new_with_syntax_node_ptr( - ctx, + let display_range = adjusted_display_range(ctx, InFile::new(d.file_id, d.impl_), &|node| { + Some(TextRange::new( + node.syntax().text_range().start(), + node.self_ty()?.syntax().text_range().end(), + )) + }); + + Diagnostic::new( DiagnosticCode::RustcHardError("E0210"), "cannot define inherent `impl` for foreign type".to_owned(), - InFile::new(d.file_id, d.impl_.into()), + display_range, ) } @@ -23,7 +30,7 @@ mod change_case { check_diagnostics( r#" impl bool {} -//^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +//^^^^^^^^^ error: cannot define inherent `impl` for foreign type "#, ); } @@ -60,7 +67,7 @@ impl foo::S { pub struct S; //- /main.rs crate:main deps:foo impl foo::S { #[rustc_allow_incoherent_impl] fn func(self) {} } -//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +//^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type "#, ); check_diagnostics( @@ -70,7 +77,7 @@ pub struct S; pub struct S; //- /main.rs crate:main deps:foo impl foo::S { fn func(self) {} } -//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +//^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 2b8779044fb..a9c0e3b7319 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -146,6 +146,7 @@ pub(crate) fn json_in_items( let cfg = ImportPathConfig { prefer_no_std: config.prefer_no_std, prefer_prelude: config.prefer_prelude, + prefer_absolute: config.prefer_absolute, }; if !scope_has("Serialize") { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index f8780fc0da7..2cd6a71c001 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -13,7 +13,7 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> ) } -// Diagnostic: macro-error +// Diagnostic: macro-def-error // // This diagnostic is shown for macro expansion errors. 
pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefError) -> Diagnostic { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 9eff84b8987..6a809cb0cef 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -128,6 +128,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index be1e6ed5722..a470ce72fc3 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -28,10 +28,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( let function = id; ( format!("`fn {redundant_assoc_item_name}`"), - function - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), + function.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), format!("\n {};", function.display(db)), ) } @@ -39,10 +36,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( let constant = id; ( format!("`const {redundant_assoc_item_name}`"), - constant - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), + constant.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), format!("\n {};", constant.display(db)), ) } @@ -50,10 +44,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( let type_alias = id; ( format!("`type {redundant_assoc_item_name}`"), - type_alias - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), + type_alias.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs index 9651ce6106b..4f04267adb1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -47,7 +47,12 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist> sema: &ctx.sema, scope: &scope, goal: d.expected.clone(), - config: TermSearchConfig { fuel: ctx.config.term_search_fuel, ..Default::default() }, + config: TermSearchConfig { + fuel: ctx.config.term_search_fuel, + enable_borrowcheck: ctx.config.term_search_borrowck, + + ..Default::default() + }, }; let paths = term_search(&term_search_ctx); @@ -62,6 +67,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist> ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .ok() @@ -276,7 +282,7 @@ impl Foo for Baz { } fn asd() -> Bar { let a = Baz; - Foo::foo(_) + Foo::foo(a) } ", ); @@ -365,7 +371,7 @@ impl Foo for A { } fn main() { let a = A; - let c: Bar = Foo::foo(_); 
+ let c: Bar = Foo::foo(&a); }"#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index cbf50d13f58..77ffd0fd968 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -30,11 +30,13 @@ pub(crate) fn unlinked_file( // FIXME: This is a hack for the vscode extension to notice whether there is an autofix or not before having to resolve diagnostics. // This is to prevent project linking popups from appearing when there is an autofix. https://github.com/rust-lang/rust-analyzer/issues/14523 let message = if fixes.is_none() { - "file not included in crate hierarchy" + "This file is not included in any crates, so rust-analyzer can't offer IDE services." } else { - "file not included in module tree" + "This file is not included anywhere in the module tree, so rust-analyzer can't offer IDE services." }; + let message = format!("{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings."); + let mut range = ctx.sema.db.parse(file_id).syntax_node().text_range(); let mut unused = true; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs index 7aa3e16536c..9a81682aaeb 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs @@ -56,4 +56,20 @@ fn main() { "#, ); } + + #[test] + fn unresolved_self_val() { + check_diagnostics( + r#" +fn main() { + self.a; + //^^^^ error: no such value in this scope + let self: + self = + self; + //^^^^ error: no such value in this scope +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index a419f04bfae..6d1226d65c5 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -23,8 +23,6 @@ //! There are also a couple of ad-hoc diagnostics implemented directly here, we //! don't yet have a great pattern for how to do them properly. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod handlers { pub(crate) mod break_outside_of_loop; pub(crate) mod expected_function; @@ -233,7 +231,9 @@ pub struct DiagnosticsConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub prefer_prelude: bool, + pub prefer_absolute: bool, pub term_search_fuel: u64, + pub term_search_borrowck: bool, } impl DiagnosticsConfig { @@ -259,7 +259,9 @@ impl DiagnosticsConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, term_search_fuel: 400, + term_search_borrowck: true, } } } @@ -311,9 +313,13 @@ pub fn diagnostics( FileRange { file_id, range: err.range() }, ) })); + let parse_errors = res.len(); let parse = sema.parse(file_id); + // FIXME: This iterates the entire file which is a rather expensive operation. + // We should implement these differently in some form? 
+ // Salsa caching + incremental re-parse would be better here for node in parse.syntax().descendants() { handlers::useless_braces::useless_braces(&mut res, file_id, &node); handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); @@ -326,7 +332,10 @@ pub fn diagnostics( let mut diags = Vec::new(); match module { - Some(m) => m.diagnostics(db, &mut diags, config.style_lints), + // A bunch of parse errors in a file indicate some bigger structural parse changes in the + // file, so we skip semantic diagnostics so we can show these faster. + Some(m) if parse_errors < 16 => m.diagnostics(db, &mut diags, config.style_lints), + Some(_) => (), None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id), } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index b5bf510aeed..407433ed192 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -3,8 +3,6 @@ //! Allows searching the AST for code that matches one or more patterns and then replacing that code //! based on a template. -#![warn(rust_2018_idioms, unused_lifetimes)] - // Feature: Structural Search and Replace // // Search and replace with named wildcards that will match any expression, type, path, pattern or item. diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs index b29053c0c2d..7c357b3c217 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs @@ -10,7 +10,7 @@ use hir::{ImportPathConfig, Semantics}; use ide_db::{base_db::FileRange, FxHashMap}; use std::{cell::Cell, iter::Peekable}; use syntax::{ - ast::{self, AstNode, AstToken}, + ast::{self, AstNode, AstToken, HasGenericArgs}, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken, }; @@ -663,7 +663,11 @@ impl Match { .module(); for (path, resolved_path) in &template.resolved_paths { if let hir::PathResolution::Def(module_def) = resolved_path.resolution { - let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let mod_path = module.find_path(sema.db, module_def, cfg).ok_or_else(|| { match_error!("Failed to render template path `{}` at match location") })?; diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs index 4731f14f4e6..d3c1af1f31e 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs @@ -3,7 +3,10 @@ use hir::AsAssocItem; use ide_db::{base_db::FilePosition, FxHashMap}; use parsing::Placeholder; -use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken}; +use syntax::{ + ast::{self, HasGenericArgs}, + SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, +}; use crate::{errors::error, parsing, SsrError}; diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml index a535015a27f..9d8400ba3ad 100644 --- a/src/tools/rust-analyzer/crates/ide/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml @@ -25,6 +25,7 @@ dot.workspace = true smallvec.workspace = true triomphe.workspace = true nohash-hasher.workspace = true +rustc_apfloat = "0.2.0" # local deps cfg.workspace = true diff --git 
a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs index f7e5b40dde1..3c29f2f4276 100644 --- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs +++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs @@ -9,14 +9,15 @@ use ide_db::{ search::FileReference, FxIndexMap, RootDatabase, }; -use syntax::{ast, AstNode, SyntaxKind::IDENT, TextRange}; +use span::FileRange; +use syntax::{ast, AstNode, SyntaxKind::IDENT}; use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav}; #[derive(Debug, Clone)] pub struct CallItem { pub target: NavigationTarget, - pub ranges: Vec<TextRange>, + pub ranges: Vec<FileRange>, } pub(crate) fn call_hierarchy( @@ -61,9 +62,10 @@ pub(crate) fn incoming_calls( def.try_to_nav(sema.db) }); if let Some(nav) = nav { - calls.add(nav.call_site, sema.original_range(name.syntax()).range); + let range = sema.original_range(name.syntax()); + calls.add(nav.call_site, range); if let Some(other) = nav.def_site { - calls.add(other, sema.original_range(name.syntax()).range); + calls.add(other, range); } } } @@ -107,12 +109,13 @@ pub(crate) fn outgoing_calls( hir::CallableKind::TupleStruct(it) => it.try_to_nav(db), _ => None, } - .zip(Some(expr.syntax().text_range())) + .zip(Some(sema.original_range(expr.syntax()))) } ast::CallableExpr::MethodCall(expr) => { - let range = expr.name_ref()?.syntax().text_range(); let function = sema.resolve_method_call(&expr)?; - function.try_to_nav(db).zip(Some(range)) + function + .try_to_nav(db) + .zip(Some(sema.original_range(expr.name_ref()?.syntax()))) } }?; Some(nav_target.into_iter().zip(iter::repeat(range))) @@ -125,11 +128,11 @@ pub(crate) fn outgoing_calls( #[derive(Default)] struct CallLocations { - funcs: FxIndexMap<NavigationTarget, Vec<TextRange>>, + funcs: FxIndexMap<NavigationTarget, Vec<FileRange>>, } impl CallLocations { - fn add(&mut self, target: NavigationTarget, range: TextRange) { + fn add(&mut self, target: NavigationTarget, range: FileRange) { self.funcs.entry(target).or_default().push(range); } @@ -153,7 +156,14 @@ mod tests { expected_outgoing: Expect, ) { fn debug_render(item: crate::CallItem) -> String { - format!("{} : {:?}", item.target.debug_render(), item.ranges) + format!( + "{} : {}", + item.target.debug_render(), + item.ranges.iter().format_with(", ", |range, f| f(&format_args!( + "{:?}:{:?}", + range.file_id, range.range + ))) + ) } let (analysis, pos) = fixture::position(ra_fixture); @@ -183,7 +193,7 @@ fn caller() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]], + expect!["caller Function FileId(0) 15..44 18..24 : FileId(0):33..39"], expect![[]], ); } @@ -199,7 +209,7 @@ fn caller() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]], + expect!["caller Function FileId(0) 15..44 18..24 : FileId(0):33..39"], expect![[]], ); } @@ -216,7 +226,7 @@ fn caller() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - expect![["caller Function FileId(0) 15..58 18..24 : [33..39, 47..53]"]], + expect!["caller Function FileId(0) 15..58 18..24 : FileId(0):33..39, FileId(0):47..53"], expect![[]], ); } @@ -236,9 +246,9 @@ fn caller2() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - expect![[" - caller1 Function FileId(0) 15..45 18..25 : [34..40] - caller2 Function FileId(0) 47..77 50..57 : [66..72]"]], + expect![[r#" + caller1 Function FileId(0) 15..45 18..25 
: FileId(0):34..40 + caller2 Function FileId(0) 47..77 50..57 : FileId(0):66..72"#]], expect![[]], ); } @@ -265,8 +275,8 @@ mod tests { "#, expect![["callee Function FileId(0) 0..14 3..9"]], expect![[r#" - caller1 Function FileId(0) 15..45 18..25 : [34..40] - test_caller Function FileId(0) 95..149 110..121 tests : [134..140]"#]], + caller1 Function FileId(0) 15..45 18..25 : FileId(0):34..40 + test_caller Function FileId(0) 95..149 110..121 tests : FileId(0):134..140"#]], expect![[]], ); } @@ -287,7 +297,7 @@ fn caller() { pub fn callee() {} "#, expect!["callee Function FileId(1) 0..18 7..13 foo"], - expect![["caller Function FileId(0) 27..56 30..36 : [45..51]"]], + expect!["caller Function FileId(0) 27..56 30..36 : FileId(0):45..51"], expect![[]], ); } @@ -305,7 +315,7 @@ fn call$0er() { "#, expect![["caller Function FileId(0) 15..58 18..24"]], expect![[]], - expect![["callee Function FileId(0) 0..14 3..9 : [33..39, 47..53]"]], + expect!["callee Function FileId(0) 0..14 3..9 : FileId(0):33..39, FileId(0):47..53"], ); } @@ -326,7 +336,7 @@ pub fn callee() {} "#, expect![["caller Function FileId(0) 27..56 30..36"]], expect![[]], - expect!["callee Function FileId(1) 0..18 7..13 foo : [45..51]"], + expect!["callee Function FileId(1) 0..18 7..13 foo : FileId(0):45..51"], ); } @@ -348,8 +358,8 @@ fn caller3() { } "#, expect![["caller2 Function FileId(0) 33..64 36..43"]], - expect![["caller1 Function FileId(0) 0..31 3..10 : [19..26]"]], - expect![["caller3 Function FileId(0) 66..83 69..76 : [52..59]"]], + expect!["caller1 Function FileId(0) 0..31 3..10 : FileId(0):19..26"], + expect!["caller3 Function FileId(0) 66..83 69..76 : FileId(0):52..59"], ); } @@ -368,8 +378,8 @@ fn main() { } "#, expect![["a Function FileId(0) 0..18 3..4"]], - expect![["main Function FileId(0) 31..52 34..38 : [47..48]"]], - expect![["b Function FileId(0) 20..29 23..24 : [13..14]"]], + expect!["main Function FileId(0) 31..52 34..38 : FileId(0):47..48"], + expect!["b Function FileId(0) 20..29 23..24 : FileId(0):13..14"], ); check_hierarchy( @@ -385,7 +395,7 @@ fn main() { } "#, expect![["b Function FileId(0) 20..29 23..24"]], - expect![["a Function FileId(0) 0..18 3..4 : [13..14]"]], + expect!["a Function FileId(0) 0..18 3..4 : FileId(0):13..14"], expect![[]], ); } @@ -410,7 +420,7 @@ fn caller() { } "#, expect![[r#"callee Function FileId(0) 144..159 152..158"#]], - expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]], + expect!["caller Function FileId(0) 160..194 163..169 : FileId(0):184..190"], expect![[]], ); check_hierarchy( @@ -431,7 +441,7 @@ fn caller() { } "#, expect![[r#"callee Function FileId(0) 144..159 152..158"#]], - expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]], + expect!["caller Function FileId(0) 160..194 163..169 : FileId(0):184..190"], expect![[]], ); } @@ -461,6 +471,148 @@ fn caller$0() { expect![[]], ); } + #[test] + fn test_call_hierarchy_in_macros_incoming_different_files() { + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(callee) +fn caller() { + call!(call$0ee); +} +//- /foo.rs +macro_rules! define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! call { + ($ident:ident) => { + $ident() + } +} +"#, + expect!["callee Function FileId(0) 22..37 30..36"], + expect!["caller Function FileId(0) 38..72 41..47 : FileId(0):62..68"], + expect![[]], + ); + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(cal$0lee) +fn caller() { + call!(callee); +} +//- /foo.rs +macro_rules! 
define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! call { + ($ident:ident) => { + $ident() + } +} +"#, + expect!["callee Function FileId(0) 22..37 30..36"], + expect!["caller Function FileId(0) 38..72 41..47 : FileId(0):62..68"], + expect![[]], + ); + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(cal$0lee) +call!(callee); +//- /foo.rs +macro_rules! define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! call { + ($ident:ident) => { + fn caller() { + $ident() + } + fn $ident() { + $ident() + } + } +} +"#, + expect!["callee Function FileId(0) 22..37 30..36"], + expect![[r#" + callee Function FileId(0) 38..52 44..50 : FileId(0):44..50 + caller Function FileId(0) 38..52 : FileId(0):44..50 + caller Function FileId(1) 130..136 130..136 : FileId(0):44..50"#]], + expect![[]], + ); + } + + #[test] + fn test_call_hierarchy_in_macros_outgoing_different_files() { + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(callee) +fn caller$0() { + call!(callee); +} +//- /foo.rs +macro_rules! define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! call { + ($ident:ident) => { + $ident() + callee() + } +} +"#, + expect!["caller Function FileId(0) 38..72 41..47"], + expect![[]], + // FIXME + expect![[]], + ); + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(callee) +fn caller$0() { + call!(callee); +} +//- /foo.rs +macro_rules! define { + () => { + fn callee {} + } +} +macro_rules! call { + ($ident:ident) => { + $ident() + callee() + } +} +"#, + expect!["caller Function FileId(0) 38..72 41..47"], + expect![[]], + // FIXME + expect![[]], + ); + } #[test] fn test_trait_method_call_hierarchy() { @@ -481,7 +633,7 @@ fn caller() { } "#, expect!["callee Function FileId(0) 15..27 18..24 T1"], - expect![["caller Function FileId(0) 82..115 85..91 : [104..110]"]], + expect!["caller Function FileId(0) 82..115 85..91 : FileId(0):104..110"], expect![[]], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 99568c9922b..7091b15b8a4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -16,6 +16,10 @@ use ide_db::{ RootDatabase, }; use itertools::Itertools; +use rustc_apfloat::{ + ieee::{Half as f16, Quad as f128}, + Float, +}; use stdx::format_to; use syntax::{algo, ast, match_ast, AstNode, AstToken, Direction, SyntaxToken, T}; @@ -540,13 +544,22 @@ pub(super) fn literal(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> ast::Char(char) => char .value().as_ref().map_err(|e| format!("{e:?}")).map(ToString::to_string), ast::Byte(byte) => byte .value().as_ref().map_err(|e| format!("{e:?}")).map(|it| format!("0x{it:X}")), ast::FloatNumber(num) => { - let (text, _) = num.split_into_parts(); - let text = text.replace('_', ""); - if ty.as_builtin().map(|it| it.is_f32()).unwrap_or(false) { + let text = num.value_string(); + if ty.as_builtin().map(|it| it.is_f16()).unwrap_or(false) { + match text.parse::<f16>() { + Ok(num) => Ok(format!("{num} (bits: 0x{:X})", num.to_bits())), + Err(e) => Err(e.0.to_owned()), + } + } else if ty.as_builtin().map(|it| it.is_f32()).unwrap_or(false) { match text.parse::<f32>() { Ok(num) => Ok(format!("{num} (bits: 0x{:X})", num.to_bits())), Err(e) => Err(e.to_string()), } + } else if ty.as_builtin().map(|it| it.is_f128()).unwrap_or(false) { + match text.parse::<f128>() { + Ok(num) => Ok(format!("{num} (bits: 0x{:X})", num.to_bits())), + 
Err(e) => Err(e.0.to_owned()), + } } else { match text.parse::<f64>() { Ok(num) => Ok(format!("{num} (bits: 0x{:X})", num.to_bits())), diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 20d07bf9919..5036770fec8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -5310,6 +5310,71 @@ const FOO$0: f64 = expf64(1.2); ``` "#]], ); + // check `f32` isn't double rounded via `f64` + check( + r#" +/// This is a doc +const FOO$0: f32 = 1.9999999403953552_f32; +"#, + expect![[r#" + *FOO* + + ```rust + test + ``` + + ```rust + const FOO: f32 = 1.9999999 + ``` + + --- + + This is a doc + "#]], + ); + // Check `f16` and `f128` + check( + r#" +/// This is a doc +const FOO$0: f16 = -1.0f16; +"#, + expect![[r#" + *FOO* + + ```rust + test + ``` + + ```rust + const FOO: f16 = -1.0 + ``` + + --- + + This is a doc + "#]], + ); + check( + r#" +/// This is a doc +const FOO$0: f128 = -1.0f128; +"#, + expect![[r#" + *FOO* + + ```rust + test + ``` + + ```rust + const FOO: f128 = -1.0 + ``` + + --- + + This is a doc + "#]], + ); } #[test] @@ -6138,7 +6203,7 @@ fn hover_feature() { by the codegen backend, but not the MIR inliner. ```rust - #![feature(rustc_attrs, effects)] + #![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -6148,7 +6213,7 @@ fn hover_feature() { Since these are just regular functions, it is perfectly ok to create the intrinsic twice: ```rust - #![feature(rustc_attrs, effects)] + #![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -6172,12 +6237,23 @@ fn hover_feature() { Various intrinsics have native MIR operations that they correspond to. Instead of requiring backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass will convert the calls to the MIR operation. Backends do not need to know about these intrinsics - at all. + at all. These intrinsics only make sense without a body, and can either be declared as a "rust-intrinsic" + or as a `#[rustc_intrinsic]`. The body is never used, as calls to the intrinsic do not exist + anymore after MIR analyses. ## Intrinsics without fallback logic These must be implemented by all backends. + ### `#[rustc_intrinsic]` declarations + + These are written like intrinsics with fallback bodies, but the body is irrelevant. + Use `loop {}` for the body or call the intrinsic recursively and add + `#[rustc_intrinsic_must_be_overridden]` to the function to ensure that backends don't + invoke the body. + + ### Legacy extern ABI based intrinsics + These are imported as if they were FFI functions, with the special `rust-intrinsic` ABI. 
For example, if one was in a freestanding context, but wished to be able to `transmute` between types, and @@ -8013,6 +8089,22 @@ fn main() { check( r#" fn main() { + $01.0f16; +} +"#, + expect![[r#" + *1.0f16* + ```rust + f16 + ``` + ___ + + value of literal: 1 (bits: 0x3C00) + "#]], + ); + check( + r#" +fn main() { $01.0f32; } "#, @@ -8029,6 +8121,22 @@ fn main() { check( r#" fn main() { + $01.0f128; +} +"#, + expect![[r#" + *1.0f128* + ```rust + f128 + ``` + ___ + + value of literal: 1 (bits: 0x3FFF0000000000000000000000000000) + "#]], + ); + check( + r#" +fn main() { $0134e12; } "#, @@ -8357,8 +8465,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 7791..7999, - focus_range: 7856..7862, + full_range: 7800..8008, + focus_range: 7865..7871, name: "Future", kind: Trait, container_name: "future", @@ -8371,8 +8479,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 8629..9095, - focus_range: 8673..8681, + full_range: 8638..9104, + focus_range: 8682..8690, name: "Iterator", kind: Trait, container_name: "iterator", diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 3f10bed5110..944951f26a2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -29,6 +29,7 @@ mod closure_captures; mod closure_ret; mod discriminant; mod fn_lifetime_fn; +mod generic_param; mod implicit_drop; mod implicit_static; mod param_name; @@ -40,6 +41,7 @@ pub struct InlayHintsConfig { pub type_hints: bool, pub discriminant_hints: DiscriminantHints, pub parameter_hints: bool, + pub generic_parameter_hints: GenericParameterHints, pub chaining_hints: bool, pub adjustment_hints: AdjustmentHints, pub adjustment_hints_mode: AdjustmentHintsMode, @@ -95,6 +97,13 @@ pub enum DiscriminantHints { } #[derive(Clone, Debug, PartialEq, Eq)] +pub struct GenericParameterHints { + pub type_hints: bool, + pub lifetime_hints: bool, + pub const_hints: bool, +} + +#[derive(Clone, Debug, PartialEq, Eq)] pub enum LifetimeElisionHints { Always, SkipTrivial, @@ -127,6 +136,7 @@ pub enum InlayKind { GenericParamList, Lifetime, Parameter, + GenericParameter, Type, Drop, RangeExclusive, @@ -447,6 +457,7 @@ fn ty_to_text_edit( // // * types of local variables // * names of function arguments +// * names of const generic parameters // * types of chained expressions // // Optionally, one can enable additional hints for @@ -454,6 +465,7 @@ fn ty_to_text_edit( // * return types of closure expressions // * elided lifetimes // * compiler inserted reborrows +// * names of generic type and lifetime parameters // // Note: inlay hints for function argument names are heuristically omitted to reduce noise and will not appear if // any of the @@ -543,6 +555,9 @@ fn hints( node: SyntaxNode, ) { closing_brace::hints(hints, sema, config, file_id, node.clone()); + if let Some(any_has_generic_args) = ast::AnyHasGenericArgs::cast(node.clone()) { + generic_param::hints(hints, sema, config, any_has_generic_args); + } match_ast! 
{ match node { ast::Expr(expr) => { @@ -645,13 +660,18 @@ mod tests { use crate::DiscriminantHints; use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints}; - use super::{ClosureReturnTypeHints, InlayFieldsToResolve}; + use super::{ClosureReturnTypeHints, GenericParameterHints, InlayFieldsToResolve}; pub(super) const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig { discriminant_hints: DiscriminantHints::Never, render_colons: false, type_hints: false, parameter_hints: false, + generic_parameter_hints: GenericParameterHints { + type_hints: false, + lifetime_hints: false, + const_hints: false, + }, chaining_hints: false, lifetime_elision_hints: LifetimeElisionHints::Never, closure_return_type_hints: ClosureReturnTypeHints::Never, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 3311bb48ad6..1118f11d99d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -8,7 +8,7 @@ use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase}; use itertools::Itertools; use syntax::{ - ast::{self, AstNode, HasName}, + ast::{self, AstNode, HasGenericArgs, HasName}, match_ast, }; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs new file mode 100644 index 00000000000..51855eeae23 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs @@ -0,0 +1,315 @@ +//! Implementation of inlay hints for generic parameters. +use ide_db::{active_parameter::generic_def_for_node, RootDatabase}; +use syntax::{ + ast::{self, AnyHasGenericArgs, HasGenericArgs, HasName}, + AstNode, +}; + +use crate::{inlay_hints::GenericParameterHints, InlayHint, InlayHintsConfig, InlayKind}; + +use super::param_name::{is_argument_similar_to_param_name, render_label}; + +pub(crate) fn hints( + acc: &mut Vec<InlayHint>, + sema: &hir::Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + node: AnyHasGenericArgs, +) -> Option<()> { + let GenericParameterHints { type_hints, lifetime_hints, const_hints } = + config.generic_parameter_hints; + if !(type_hints || lifetime_hints || const_hints) { + return None; + } + + let generic_arg_list = node.generic_arg_list()?; + + let (generic_def, _, _, _) = + generic_def_for_node(sema, &generic_arg_list, &node.syntax().first_token()?)?; + + let mut args = generic_arg_list.generic_args().peekable(); + let start_with_lifetime = matches!(args.peek()?, ast::GenericArg::LifetimeArg(_)); + let params = generic_def.params(sema.db).into_iter().filter(|p| { + if let hir::GenericParam::TypeParam(it) = p { + if it.is_implicit(sema.db) { + return false; + } + } + if !start_with_lifetime { + return !matches!(p, hir::GenericParam::LifetimeParam(_)); + } + true + }); + + let hints = params.zip(args).filter_map(|(param, arg)| { + if matches!(arg, ast::GenericArg::AssocTypeArg(_)) { + return None; + } + + let name = param.name(sema.db); + let param_name = name.as_str()?; + + let should_hide = { + let argument = get_string_representation(&arg)?; + is_argument_similar_to_param_name(&argument, param_name) + }; + + if should_hide { + return None; + } + + let range = sema.original_range_opt(arg.syntax())?.range; + + let source_syntax = match param { + hir::GenericParam::TypeParam(it) => { + if !type_hints || !matches!(arg, ast::GenericArg::TypeArg(_)) { + return None; + } + 
sema.source(it.merge())?.value.syntax().clone() + } + hir::GenericParam::ConstParam(it) => { + if !const_hints || !matches!(arg, ast::GenericArg::ConstArg(_)) { + return None; + } + let syntax = sema.source(it.merge())?.value.syntax().clone(); + let const_param = ast::ConstParam::cast(syntax)?; + const_param.name()?.syntax().clone() + } + hir::GenericParam::LifetimeParam(it) => { + if !lifetime_hints || !matches!(arg, ast::GenericArg::LifetimeArg(_)) { + return None; + } + sema.source(it)?.value.syntax().clone() + } + }; + let linked_location = sema.original_range_opt(&source_syntax); + let label = render_label(param_name, config, linked_location); + + Some(InlayHint { + range, + position: crate::InlayHintPosition::Before, + pad_left: false, + pad_right: true, + kind: InlayKind::GenericParameter, + label, + text_edit: None, + }) + }); + + acc.extend(hints); + Some(()) +} + +fn get_string_representation(arg: &ast::GenericArg) -> Option<String> { + return match arg { + ast::GenericArg::AssocTypeArg(_) => None, + ast::GenericArg::ConstArg(const_arg) => Some(const_arg.to_string()), + ast::GenericArg::LifetimeArg(lifetime_arg) => { + let lifetime = lifetime_arg.lifetime()?; + Some(lifetime.to_string()) + } + ast::GenericArg::TypeArg(type_arg) => { + let ty = type_arg.ty()?; + Some( + type_path_segment(&ty) + .map_or_else(|| type_arg.to_string(), |segment| segment.to_string()), + ) + } + }; + + fn type_path_segment(ty: &ast::Type) -> Option<ast::PathSegment> { + match ty { + ast::Type::ArrayType(it) => type_path_segment(&it.ty()?), + ast::Type::ForType(it) => type_path_segment(&it.ty()?), + ast::Type::ParenType(it) => type_path_segment(&it.ty()?), + ast::Type::PathType(path_type) => path_type.path()?.segment(), + ast::Type::PtrType(it) => type_path_segment(&it.ty()?), + ast::Type::RefType(it) => type_path_segment(&it.ty()?), + ast::Type::SliceType(it) => type_path_segment(&it.ty()?), + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::{ + tests::{check_with_config, DISABLED_CONFIG}, + GenericParameterHints, + }, + InlayHintsConfig, + }; + + #[track_caller] + fn generic_param_name_hints_always(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { + generic_parameter_hints: GenericParameterHints { + type_hints: true, + lifetime_hints: true, + const_hints: true, + }, + ..DISABLED_CONFIG + }, + ra_fixture, + ); + } + + #[track_caller] + fn generic_param_name_hints_const_only(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { + generic_parameter_hints: GenericParameterHints { + type_hints: false, + lifetime_hints: false, + const_hints: true, + }, + ..DISABLED_CONFIG + }, + ra_fixture, + ); + } + + #[test] + fn type_only() { + generic_param_name_hints_always( + r#" +struct A<X, Y> { + x: X, + y: Y, +} + +fn foo(a: A<usize, u32>) {} + //^^^^^ X ^^^ Y +"#, + ) + } + + #[test] + fn lifetime_and_type() { + generic_param_name_hints_always( + r#" +struct A<'a, X> { + x: &'a X +} + +fn foo<'b>(a: A<'b, u32>) {} + //^^ 'a^^^ X +"#, + ) + } + + #[test] + fn omit_lifetime() { + generic_param_name_hints_always( + r#" +struct A<'a, X> { + x: &'a X +} + +fn foo() { + let x: i32 = 1; + let a: A<i32> = A { x: &x }; + // ^^^ X +} +"#, + ) + } + + #[test] + fn const_only() { + generic_param_name_hints_always( + r#" +struct A<const X: usize, const Y: usize> {}; + +fn foo(a: A<12, 2>) {} + //^^ X^ Y +"#, + ) + } + + #[test] + fn lifetime_and_type_and_const() { + generic_param_name_hints_always( + r#" +struct A<'a, X, const LEN: usize> { + x: &'a [X; LEN], +} + +fn 
foo<'b>(a: A< + 'b, + // ^^ 'a + u32, + // ^^^ X + 3 + // ^ LEN + >) {} +"#, + ) + } + + #[test] + fn const_only_config() { + generic_param_name_hints_const_only( + r#" +struct A<'a, X, const LEN: usize> { + x: &'a [X; LEN], +} + +fn foo<'b>(a: A< + 'b, + u32, + 3 + // ^ LEN + >) {} +"#, + ) + } + + #[test] + fn assoc_type() { + generic_param_name_hints_always( + r#" +trait Trait<T> { + type Assoc1; + type Assoc2; +} + +fn foo() -> impl Trait<i32, Assoc1 = u32, Assoc2 = u32> {} + // ^^^ T +"#, + ) + } + + #[test] + fn hide_similar() { + generic_param_name_hints_always( + r#" +struct A<'a, X, const N: usize> { + x: &'a [X; N], +} + +const N: usize = 3; + +mod m { + type X = u32; +} + +fn foo<'a>(a: A<'a, m::X, N>) {} +"#, + ) + } + + #[test] + fn mismatching_args() { + generic_param_name_hints_always( + r#" +struct A<X, const N: usize> { + x: [X; N] +} + +type InvalidType = A<3, i32>; +"#, + ) + } +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 9819d0e3fbb..2e2a64c5520 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -3,6 +3,8 @@ //! fn max(x: i32, y: i32) -> i32 { x + y } //! _ = max(/*x*/4, /*y*/4); //! ``` +use std::fmt::Display; + use either::Either; use hir::{Callable, Semantics}; use ide_db::{base_db::FileRange, RootDatabase}; @@ -46,9 +48,7 @@ pub(super) fn hints( .map(|(param, param_name, _, FileRange { range, .. })| { let linked_location = param.and_then(|name| sema.original_range_opt(name.syntax())); - let colon = if config.render_colons { ":" } else { "" }; - let label = - InlayHintLabel::simple(format!("{param_name}{colon}"), None, linked_location); + let label = render_label(¶m_name, config, linked_location); InlayHint { range, kind: InlayKind::Parameter, @@ -64,6 +64,16 @@ pub(super) fn hints( Some(()) } +pub(super) fn render_label( + param_name: impl Display, + config: &InlayHintsConfig, + linked_location: Option<FileRange>, +) -> InlayHintLabel { + let colon = if config.render_colons { ":" } else { "" }; + + InlayHintLabel::simple(format!("{param_name}{colon}"), None, linked_location) +} + fn get_callable( sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr, @@ -113,7 +123,7 @@ fn should_hide_param_name_hint( }; let fn_name = fn_name.as_deref(); is_param_name_suffix_of_fn_name(param_name, callable, fn_name) - || is_argument_similar_to_param_name(argument, param_name) + || is_argument_expr_similar_to_param_name(argument, param_name) || param_name.starts_with("ra_fixture") || (callable.n_params() == 1 && is_obvious_param(param_name)) || is_adt_constructor_similar_to_param_name(sema, argument, param_name) @@ -143,14 +153,17 @@ fn is_param_name_suffix_of_fn_name( } } -fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool { - // check whether param_name and argument are the same or - // whether param_name is a prefix/suffix of argument(split at `_`) +fn is_argument_expr_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool { let argument = match get_string_representation(argument) { Some(argument) => argument, None => return false, }; + is_argument_similar_to_param_name(&argument, param_name) +} +/// Check whether param_name and argument are the same or +/// whether param_name is a prefix/suffix of argument(split at `_`). 
+pub(super) fn is_argument_similar_to_param_name(argument: &str, param_name: &str) -> bool { // std is honestly too panic happy... let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at)); diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index a2ac62341df..f0b35903f38 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -8,7 +8,7 @@ //! in this crate. // For proving that RootDatabase is RefUnwindSafe. -#![warn(rust_2018_idioms, unused_lifetimes)] + #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "128"] @@ -89,8 +89,8 @@ pub use crate::{ }, inlay_hints::{ AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, - InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition, - InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, + GenericParameterHints, InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, + InlayHintPosition, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, }, join_lines::JoinLinesConfig, markup::Markup, diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 2feea09840f..a68ee4f8671 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -78,7 +78,6 @@ impl RunnableKind { } impl Runnable { - // test package::module::testname pub fn label(&self, target: Option<&str>) -> String { match &self.kind { RunnableKind::Test { test_id, .. } => format!("test {test_id}"), @@ -86,7 +85,7 @@ impl Runnable { RunnableKind::Bench { test_id } => format!("bench {test_id}"), RunnableKind::DocTest { test_id, .. 
} => format!("doctest {test_id}"), RunnableKind::Bin => { - target.map_or_else(|| "run binary".to_owned(), |t| format!("run {t}")) + format!("run {}", target.unwrap_or("binary")) } } } @@ -513,11 +512,11 @@ impl TestAttr { } } -const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; -const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] = - &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"]; - fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool { + const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; + const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] = + &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"]; + docs_from_attrs(attrs).map_or(false, |doc| { let mut in_code_block = false; diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index 89c725a6c47..c5eaacdb10d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -272,7 +272,7 @@ fn signature_help_for_generics( arg_list: ast::GenericArgList, token: SyntaxToken, ) -> Option<SignatureHelp> { - let (mut generics_def, mut active_parameter, first_arg_is_non_lifetime) = + let (generics_def, mut active_parameter, first_arg_is_non_lifetime, variant) = generic_def_for_node(sema, &arg_list, &token)?; let mut res = SignatureHelp { doc: None, @@ -290,6 +290,12 @@ fn signature_help_for_generics( hir::GenericDef::Adt(hir::Adt::Enum(it)) => { res.doc = it.docs(db); format_to!(res.signature, "enum {}", it.name(db).display(db)); + if let Some(variant) = variant { + // In paths, generics of an enum can be specified *after* one of its variants. + // eg. `None::<u8>` + // We'll use the signature of the enum, but include the docs of the variant. + res.doc = variant.docs(db); + } } hir::GenericDef::Adt(hir::Adt::Struct(it)) => { res.doc = it.docs(db); @@ -311,15 +317,6 @@ fn signature_help_for_generics( res.doc = it.docs(db); format_to!(res.signature, "type {}", it.name(db).display(db)); } - hir::GenericDef::Variant(it) => { - // In paths, generics of an enum can be specified *after* one of its variants. - // eg. `None::<u8>` - // We'll use the signature of the enum, but include the docs of the variant. 
- res.doc = it.docs(db); - let enum_ = it.parent_enum(db); - format_to!(res.signature, "enum {}", enum_.name(db).display(db)); - generics_def = enum_.into(); - } // These don't have generic args that can be specified hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None, } diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 0d2311b6e98..5eb5c87f13e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -131,6 +131,11 @@ impl StaticIndex<'_> { discriminant_hints: crate::DiscriminantHints::Fieldless, type_hints: true, parameter_hints: true, + generic_parameter_hints: crate::GenericParameterHints { + type_hints: false, + lifetime_hints: false, + const_hints: true, + }, chaining_hints: true, closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock, lifetime_elision_hints: crate::LifetimeElisionHints::Never, diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index 8e7767c8e5d..b998c0bfc65 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -20,7 +20,6 @@ use ide_db::{ }; use itertools::Itertools; use profile::{memory_usage, Bytes}; -use std::env; use stdx::format_to; use syntax::{ast, Parse, SyntaxNode}; use triomphe::Arc; @@ -44,9 +43,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String { format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db))); format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db))); format_to!(buf, "{} in total\n", memory_usage()); - if env::var("RA_COUNT").is_ok() { - format_to!(buf, "\nCounts:\n{}", profile::countme::get_all()); - } format_to!(buf, "\nDebug info:\n"); format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db))); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 573b3d4bd52..17411fefbd9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -94,10 +94,20 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd <span class="brace">}</span> <span class="brace">}</span> +<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">id</span> <span class="brace">{</span> + <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">></span> <span class="brace">{</span> + <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span> + <span class="brace">}</span><span class="semicolon">;</span> +<span class="brace">}</span> <span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro default_library library macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis 
macro">(</span><span class="string_literal macro">"foo/"</span><span class="string_literal macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> +<span class="keyword">struct</span> <span class="struct declaration">S</span><span class="angle"><</span><span class="type_param declaration">T</span><span class="angle">></span><span class="parenthesis">(</span><span class="type_param">T</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span> + <span class="keyword">struct</span> <span class="struct declaration">TestLocal</span><span class="semicolon">;</span> + <span class="comment">// regression test, TestLocal here used to not resolve</span> + <span class="keyword">let</span> <span class="punctuation">_</span><span class="colon">:</span> <span class="struct">S</span><span class="angle"><</span><span class="macro">id</span><span class="macro_bang">!</span><span class="bracket macro">[</span><span class="struct macro">TestLocal</span><span class="bracket macro">]</span><span class="angle">></span><span class="semicolon">;</span> + <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="parenthesis macro">(</span><span class="numeric_literal macro">92</span><span class="comma macro">,</span><span class="parenthesis macro">)</span><span class="operator macro">.</span><span class="field library macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">noop</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index 08acfca2cb6..5f711600a29 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -102,10 +102,20 @@ macro without_args { } } +macro_rules! id { + ($($tt:tt)*) => { + $($tt)* + }; +} include!(concat!("foo/", "foo.rs")); +struct S<T>(T); fn main() { + struct TestLocal; + // regression test, TestLocal here used to not resolve + let _: S<id![TestLocal]>; + format_args!("Hello, {}!", (92,).0); dont_color_me_braces!(); noop!(noop!(1)); diff --git a/src/tools/rust-analyzer/crates/limit/src/lib.rs b/src/tools/rust-analyzer/crates/limit/src/lib.rs index 27471db6a34..c1caeed2f87 100644 --- a/src/tools/rust-analyzer/crates/limit/src/lib.rs +++ b/src/tools/rust-analyzer/crates/limit/src/lib.rs @@ -1,7 +1,5 @@ //! 
limit defines a struct to enforce limits. -#![warn(rust_2018_idioms, unused_lifetimes)] - #[cfg(feature = "tracking")] use std::sync::atomic::AtomicUsize; diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml index 2046fa943a8..18444018e1b 100644 --- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml +++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml @@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1" rustc-hash.workspace = true smallvec.workspace = true tracing.workspace = true +arrayvec.workspace = true # local deps syntax.workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index 19ba5c7a156..27dbc84a2b1 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -170,7 +170,7 @@ fn invocation_fixtures( Op::Literal(it) => token_trees.push(tt::Leaf::from(it.clone()).into()), Op::Ident(it) => token_trees.push(tt::Leaf::from(it.clone()).into()), Op::Punct(puncts) => { - for punct in puncts { + for punct in puncts.as_slice() { token_trees.push(tt::Leaf::from(*punct).into()); } } @@ -187,7 +187,7 @@ fn invocation_fixtures( } if i + 1 != cnt { if let Some(sep) = separator { - match sep { + match &**sep { Separator::Literal(it) => { token_trees.push(tt::Leaf::Literal(it.clone()).into()) } diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index 0cec4e70daa..b20d5579ca6 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -59,17 +59,17 @@ //! eof: [a $( a )* a b ·] //! ``` -use std::rc::Rc; +use std::{rc::Rc, sync::Arc}; use smallvec::{smallvec, SmallVec}; use span::{Edition, Span}; use syntax::SmolStr; -use tt::DelimSpan; +use tt::{iter::TtIter, DelimSpan}; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, + expect_fragment, parser::{MetaVarKind, Op, RepeatKind, Separator}, - tt_iter::TtIter, ExpandError, MetaTemplate, ValueResult, }; @@ -315,7 +315,7 @@ struct MatchState<'t> { up: Option<Box<MatchState<'t>>>, /// The separator if we are in a repetition. - sep: Option<Separator>, + sep: Option<Arc<Separator>>, /// The KleeneOp of this sequence if we are in a repetition. sep_kind: Option<RepeatKind>, @@ -406,7 +406,7 @@ fn match_loop_inner<'t>( if item.sep.is_some() && !item.sep_matched { let sep = item.sep.as_ref().unwrap(); let mut fork = src.clone(); - if fork.expect_separator(sep) { + if expect_separator(&mut fork, sep) { // HACK: here we use `meta_result` to pass `TtIter` back to caller because // it might have been advanced multiple times. `ValueResult` is // insignificant. 
@@ -746,7 +746,7 @@ fn match_meta_var( ) -> ExpandResult<Option<Fragment>> { let fragment = match kind { MetaVarKind::Path => { - return input.expect_fragment(parser::PrefixEntryPoint::Path, edition).map(|it| { + return expect_fragment(input, parser::PrefixEntryPoint::Path, edition).map(|it| { it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) }); } @@ -765,7 +765,7 @@ fn match_meta_var( } _ => {} }; - return input.expect_fragment(parser::PrefixEntryPoint::Expr, edition).map(|tt| { + return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition).map(|tt| { tt.map(|tt| match tt { tt::TokenTree::Leaf(leaf) => tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), @@ -787,14 +787,13 @@ fn match_meta_var( .expect_ident() .map(|ident| tt::Leaf::from(ident.clone()).into()) .map_err(|()| ExpandError::binding_error("expected ident")), - MetaVarKind::Tt => input - .expect_tt() - .map_err(|()| ExpandError::binding_error("expected token tree")), - MetaVarKind::Lifetime => input - .expect_lifetime() + MetaVarKind::Tt => { + expect_tt(input).map_err(|()| ExpandError::binding_error("expected token tree")) + } + MetaVarKind::Lifetime => expect_lifetime(input) .map_err(|()| ExpandError::binding_error("expected lifetime")), MetaVarKind::Literal => { - let neg = input.eat_char('-'); + let neg = eat_char(input, '-'); input .expect_literal() .map(|literal| { @@ -822,7 +821,7 @@ fn match_meta_var( MetaVarKind::Item => parser::PrefixEntryPoint::Item, MetaVarKind::Vis => parser::PrefixEntryPoint::Vis, }; - input.expect_fragment(fragment, edition).map(|it| it.map(Fragment::Tokens)) + expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens)) } fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { @@ -905,86 +904,84 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } } -impl TtIter<'_, Span> { - fn expect_separator(&mut self, separator: &Separator) -> bool { - let mut fork = self.clone(); - let ok = match separator { - Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { - Ok(rhs) => rhs.text == lhs.text, - Err(_) => false, - }, - Separator::Literal(lhs) => match fork.expect_literal() { - Ok(rhs) => match rhs { - tt::Leaf::Literal(rhs) => rhs.text == lhs.text, - tt::Leaf::Ident(rhs) => rhs.text == lhs.text, - tt::Leaf::Punct(_) => false, - }, - Err(_) => false, +fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) -> bool { + let mut fork = iter.clone(); + let ok = match separator { + Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { + Ok(rhs) => rhs.text == lhs.text, + Err(_) => false, + }, + Separator::Literal(lhs) => match fork.expect_literal() { + Ok(rhs) => match rhs { + tt::Leaf::Literal(rhs) => rhs.text == lhs.text, + tt::Leaf::Ident(rhs) => rhs.text == lhs.text, + tt::Leaf::Punct(_) => false, }, - Separator::Puncts(lhs) => match fork.expect_glued_punct() { - Ok(rhs) => { - let lhs = lhs.iter().map(|it| it.char); - let rhs = rhs.iter().map(|it| it.char); - lhs.eq(rhs) - } - Err(_) => false, - }, - }; - if ok { - *self = fork; - } - ok - } - - fn expect_tt(&mut self) -> Result<tt::TokenTree<Span>, ()> { - if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) { - if punct.char == '\'' { - self.expect_lifetime() - } else { - let puncts = self.expect_glued_punct()?; - let delimiter = tt::Delimiter { - open: puncts.first().unwrap().span, - close: puncts.last().unwrap().span, - kind: tt::DelimiterKind::Invisible, - }; - let token_trees = 
puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); - Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees })) + Err(_) => false, + }, + Separator::Puncts(lhs) => match fork.expect_glued_punct() { + Ok(rhs) => { + let lhs = lhs.iter().map(|it| it.char); + let rhs = rhs.iter().map(|it| it.char); + lhs.eq(rhs) } - } else { - self.next().ok_or(()).cloned() - } + Err(_) => false, + }, + }; + if ok { + *iter = fork; } + ok +} - fn expect_lifetime(&mut self) -> Result<tt::TokenTree<Span>, ()> { - let punct = self.expect_single_punct()?; - if punct.char != '\'' { - return Err(()); - } - let ident = self.expect_ident_or_underscore()?; - - Ok(tt::Subtree { - delimiter: tt::Delimiter { - open: punct.span, - close: ident.span, +fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<tt::TokenTree<S>, ()> { + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = iter.peek_n(0) { + if punct.char == '\'' { + expect_lifetime(iter) + } else { + let puncts = iter.expect_glued_punct()?; + let delimiter = tt::Delimiter { + open: puncts.first().unwrap().span, + close: puncts.last().unwrap().span, kind: tt::DelimiterKind::Invisible, - }, - token_trees: Box::new([ - tt::Leaf::Punct(*punct).into(), - tt::Leaf::Ident(ident.clone()).into(), - ]), + }; + let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); + Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees })) } - .into()) + } else { + iter.next().ok_or(()).cloned() } +} - fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<Span>> { - let mut fork = self.clone(); - match fork.expect_char(c) { - Ok(_) => { - let tt = self.next().cloned(); - *self = fork; - tt - } - Err(_) => None, +fn expect_lifetime<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<tt::TokenTree<S>, ()> { + let punct = iter.expect_single_punct()?; + if punct.char != '\'' { + return Err(()); + } + let ident = iter.expect_ident_or_underscore()?; + + Ok(tt::Subtree { + delimiter: tt::Delimiter { + open: punct.span, + close: ident.span, + kind: tt::DelimiterKind::Invisible, + }, + token_trees: Box::new([ + tt::Leaf::Punct(*punct).into(), + tt::Leaf::Ident(ident.clone()).into(), + ]), + } + .into()) +} + +fn eat_char<S: Copy>(iter: &mut TtIter<'_, S>, c: char) -> Option<tt::TokenTree<S>> { + let mut fork = iter.clone(); + match fork.expect_char(c) { + Ok(_) => { + let tt = iter.next().cloned(); + *iter = fork; + tt } + Err(_) => None, } } diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs index 0f689a2692c..c09cbd1d071 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs @@ -195,7 +195,7 @@ fn expand_subtree( .into(), ), Op::Punct(puncts) => { - for punct in puncts { + for punct in puncts.as_slice() { arena.push( tt::Leaf::from({ let mut it = *punct; @@ -222,7 +222,7 @@ fn expand_subtree( } Op::Repeat { tokens: subtree, kind, separator } => { let ExpandResult { value: fragment, err: e } = - expand_repeat(ctx, subtree, *kind, separator, arena, marker); + expand_repeat(ctx, subtree, *kind, separator.as_deref(), arena, marker); err = err.or(e); push_fragment(ctx, arena, fragment) } @@ -383,7 +383,7 @@ fn expand_repeat( ctx: &mut ExpandCtx<'_>, template: &MetaTemplate, kind: RepeatKind, - separator: &Option<Separator>, + separator: Option<&Separator>, arena: &mut Vec<tt::TokenTree<Span>>, marker: impl Fn(&mut Span) + Copy, ) -> ExpandResult<Fragment> { diff 
--git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs index ed3200964df..b06c6cee12d 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs @@ -6,26 +6,21 @@ //! The tests for this functionality live in another crate: //! `hir_def::macro_expansion_tests::mbe`. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod expander; mod parser; mod syntax_bridge; mod to_parser_input; -mod tt_iter; #[cfg(test)] mod benchmark; use span::{Edition, Span, SyntaxContextId}; use stdx::impl_from; +use tt::iter::TtIter; use std::fmt; -use crate::{ - parser::{MetaTemplate, MetaVarKind, Op}, - tt_iter::TtIter, -}; +use crate::parser::{MetaTemplate, MetaVarKind, Op}; // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces pub use ::parser::TopEntryPoint; @@ -161,7 +156,7 @@ impl DeclarativeMacro { let mut err = None; while src.len() > 0 { - let rule = match Rule::parse(edition, &mut src, true, new_meta_vars) { + let rule = match Rule::parse(edition, &mut src, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -189,19 +184,34 @@ impl DeclarativeMacro { /// The new, unstable `macro m {}` flavor. pub fn parse_macro2( - tt: &tt::Subtree<Span>, + args: Option<&tt::Subtree<Span>>, + body: &tt::Subtree<Span>, edition: impl Copy + Fn(SyntaxContextId) -> Edition, // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) new_meta_vars: bool, ) -> DeclarativeMacro { - let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; - if tt::DelimiterKind::Brace == tt.delimiter.kind { + if let Some(args) = args { + cov_mark::hit!(parse_macro_def_simple); + + let rule = (|| { + let lhs = MetaTemplate::parse_pattern(edition, args)?; + let rhs = MetaTemplate::parse_template(edition, body, new_meta_vars)?; + + Ok(crate::Rule { lhs, rhs }) + })(); + + match rule { + Ok(rule) => rules.push(rule), + Err(e) => err = Some(Box::new(e)), + } + } else { cov_mark::hit!(parse_macro_def_rules); + let mut src = TtIter::new(body); while src.len() > 0 { - let rule = match Rule::parse(edition, &mut src, true, new_meta_vars) { + let rule = match Rule::parse(edition, &mut src, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -218,19 +228,6 @@ impl DeclarativeMacro { break; } } - } else { - cov_mark::hit!(parse_macro_def_simple); - match Rule::parse(edition, &mut src, false, new_meta_vars) { - Ok(rule) => { - if src.len() != 0 { - err = Some(Box::new(ParseError::expected("remaining tokens in macro def"))); - } - rules.push(rule); - } - Err(e) => { - err = Some(Box::new(e)); - } - } } for Rule { lhs, .. 
} in &rules { @@ -247,6 +244,10 @@ impl DeclarativeMacro { self.err.as_deref() } + pub fn num_rules(&self) -> usize { + self.rules.len() + } + pub fn expand( &self, tt: &tt::Subtree<Span>, @@ -263,14 +264,11 @@ impl Rule { fn parse( edition: impl Copy + Fn(SyntaxContextId) -> Edition, src: &mut TtIter<'_, Span>, - expect_arrow: bool, new_meta_vars: bool, ) -> Result<Self, ParseError> { let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; - if expect_arrow { - src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; - src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?; - } + src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; + src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?; let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; let lhs = MetaTemplate::parse_pattern(edition, lhs)?; @@ -361,3 +359,60 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> { result.map_or_else(Self::only_err, Self::ok) } } + +fn expect_fragment<S: Copy + fmt::Debug>( + tt_iter: &mut TtIter<'_, S>, + entry_point: ::parser::PrefixEntryPoint, + edition: ::parser::Edition, +) -> ExpandResult<Option<tt::TokenTree<S>>> { + use ::parser; + let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice()); + let parser_input = to_parser_input::to_parser_input(&buffer); + let tree_traversal = entry_point.parse(&parser_input, edition); + let mut cursor = buffer.begin(); + let mut error = false; + for step in tree_traversal.iter() { + match step { + parser::Step::Token { kind, mut n_input_tokens } => { + if kind == ::parser::SyntaxKind::LIFETIME_IDENT { + n_input_tokens = 2; + } + for _ in 0..n_input_tokens { + cursor = cursor.bump_subtree(); + } + } + parser::Step::FloatSplit { .. } => { + // FIXME: We need to split the tree properly here, but mutating the token trees + // in the buffer is somewhat tricky to pull off. + cursor = cursor.bump_subtree(); + } + parser::Step::Enter { .. } | parser::Step::Exit => (), + parser::Step::Error { .. } => error = true, + } + } + + let err = if error || !cursor.is_root() { + Some(ExpandError::binding_error(format!("expected {entry_point:?}"))) + } else { + None + }; + + let mut curr = buffer.begin(); + let mut res = vec![]; + + while curr != cursor { + let Some(token) = curr.token_tree() else { break }; + res.push(token.cloned()); + curr = curr.bump(); + } + + *tt_iter = TtIter::new_iter(tt_iter.as_slice()[res.len()..].iter()); + let res = match &*res { + [] | [_] => res.pop(), + [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree { + delimiter: Delimiter::invisible_spanned(first.first_span()), + token_trees: res.into_boxed_slice(), + })), + }; + ExpandResult { value: res, err } +} diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs index bbe00f0afca..5c499c06b15 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs @@ -1,11 +1,14 @@ //! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token //! trees. 
-use smallvec::{smallvec, SmallVec}; +use std::sync::Arc; + +use arrayvec::ArrayVec; use span::{Edition, Span, SyntaxContextId}; use syntax::SmolStr; +use tt::iter::TtIter; -use crate::{tt_iter::TtIter, ParseError}; +use crate::ParseError; /// Consider /// @@ -86,14 +89,14 @@ pub(crate) enum Op { Repeat { tokens: MetaTemplate, kind: RepeatKind, - separator: Option<Separator>, + separator: Option<Arc<Separator>>, }, Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter<Span>, }, Literal(tt::Literal<Span>), - Punct(SmallVec<[tt::Punct<Span>; 3]>), + Punct(Box<ArrayVec<tt::Punct<Span>, 3>>), Ident(tt::Ident<Span>), } @@ -126,7 +129,7 @@ pub(crate) enum MetaVarKind { pub(crate) enum Separator { Literal(tt::Literal<Span>), Ident(tt::Ident<Span>), - Puncts(SmallVec<[tt::Punct<Span>; 3]>), + Puncts(ArrayVec<tt::Punct<Span>, 3>), } // Note that when we compare a Separator, we just care about its textual value. @@ -165,7 +168,13 @@ fn next_op( src.next().expect("first token already peeked"); // Note that the '$' itself is a valid token inside macro_rules. let second = match src.next() { - None => return Ok(Op::Punct(smallvec![*p])), + None => { + return Ok(Op::Punct({ + let mut res = ArrayVec::new(); + res.push(*p); + Box::new(res) + })) + } Some(it) => it, }; match second { @@ -173,7 +182,7 @@ fn next_op( tt::DelimiterKind::Parenthesis => { let (separator, kind) = parse_repeat(src)?; let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?; - Op::Repeat { tokens, separator, kind } + Op::Repeat { tokens, separator: separator.map(Arc::new), kind } } tt::DelimiterKind::Brace => match mode { Mode::Template => { @@ -216,7 +225,11 @@ fn next_op( "`$$` is not allowed on the pattern side", )) } - Mode::Template => Op::Punct(smallvec![*punct]), + Mode::Template => Op::Punct({ + let mut res = ArrayVec::new(); + res.push(*punct); + Box::new(res) + }), }, tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => { return Err(ParseError::expected("expected ident")) @@ -238,7 +251,7 @@ fn next_op( tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => { // There's at least one punct so this shouldn't fail. let puncts = src.expect_glued_punct().unwrap(); - Op::Punct(puncts) + Op::Punct(Box::new(puncts)) } tt::TokenTree::Subtree(subtree) => { @@ -290,7 +303,7 @@ fn is_boolean_literal(lit: &tt::Literal<Span>) -> bool { } fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, RepeatKind), ParseError> { - let mut separator = Separator::Puncts(SmallVec::new()); + let mut separator = Separator::Puncts(ArrayVec::new()); for tt in src { let tt = match tt { tt::TokenTree::Leaf(leaf) => leaf, @@ -312,7 +325,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat '+' => RepeatKind::OneOrMore, '?' 
=> RepeatKind::ZeroOrOne, _ => match &mut separator { - Separator::Puncts(puncts) if puncts.len() != 3 => { + Separator::Puncts(puncts) if puncts.len() < 3 => { puncts.push(*punct); continue; } diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs index c8ff8c35e93..73a04f00d93 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs @@ -11,9 +11,12 @@ use syntax::{ SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; -use tt::buffer::{Cursor, TokenBuffer}; +use tt::{ + buffer::{Cursor, TokenBuffer}, + iter::TtIter, +}; -use crate::{to_parser_input::to_parser_input, tt_iter::TtIter}; +use crate::to_parser_input::to_parser_input; #[cfg(test)] mod tests; @@ -213,7 +216,7 @@ where let mut res = Vec::new(); while iter.peek_n(0).is_some() { - let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr, edition); + let expanded = crate::expect_fragment(&mut iter, parser::PrefixEntryPoint::Expr, edition); res.push(match expanded.value { None => break, diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs deleted file mode 100644 index 9c7d7af7b14..00000000000 --- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs +++ /dev/null @@ -1,208 +0,0 @@ -//! A "Parser" structure for token trees. We use this when parsing a declarative -//! macro definition into a list of patterns and templates. - -use core::fmt; - -use smallvec::{smallvec, SmallVec}; -use syntax::SyntaxKind; - -use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult}; - -#[derive(Debug, Clone)] -pub(crate) struct TtIter<'a, S> { - pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>, -} - -impl<'a, S: Copy> TtIter<'a, S> { - pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> { - TtIter { inner: subtree.token_trees.iter() } - } - - pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ()> { - match self.next() { - Some(&tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) if c == char => { - Ok(()) - } - _ => Err(()), - } - } - - pub(crate) fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> { - match self.next() { - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) - if chars.contains(c) => - { - Ok(()) - } - _ => Err(()), - } - } - - pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree<S>, ()> { - match self.next() { - Some(tt::TokenTree::Subtree(it)) => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf<S>, ()> { - match self.next() { - Some(tt::TokenTree::Leaf(it)) => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_dollar(&mut self) -> Result<(), ()> { - match self.expect_leaf()? { - tt::Leaf::Punct(tt::Punct { char: '$', .. }) => Ok(()), - _ => Err(()), - } - } - - pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident<S>, ()> { - match self.expect_leaf()? { - tt::Leaf::Ident(it) if it.text != "_" => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident<S>, ()> { - match self.expect_leaf()? 
{ - tt::Leaf::Ident(it) => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf<S>, ()> { - let it = self.expect_leaf()?; - match it { - tt::Leaf::Literal(_) => Ok(it), - tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct<S>, ()> { - match self.expect_leaf()? { - tt::Leaf::Punct(it) => Ok(it), - _ => Err(()), - } - } - - /// Returns consecutive `Punct`s that can be glued together. - /// - /// This method currently may return a single quotation, which is part of lifetime ident and - /// conceptually not a punct in the context of mbe. Callers should handle this. - pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct<S>; 3]>, ()> { - let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { - return Err(()); - }; - - if first.spacing == tt::Spacing::Alone { - return Ok(smallvec![first]); - } - - let (second, third) = match (self.peek_n(0), self.peek_n(1)) { - ( - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), - ) if p2.spacing == tt::Spacing::Joint => (p2, Some(p3)), - (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2, None), - _ => return Ok(smallvec![first]), - }; - - match (first.char, second.char, third.map(|it| it.char)) { - ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { - let _ = self.next().unwrap(); - let _ = self.next().unwrap(); - Ok(smallvec![first, *second, *third.unwrap()]) - } - ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) - | ('-' | '=' | '>', '>', _) - | ('<', '-', _) - | (':', ':', _) - | ('.', '.', _) - | ('&', '&', _) - | ('<', '<', _) - | ('|', '|', _) => { - let _ = self.next().unwrap(); - Ok(smallvec![first, *second]) - } - _ => Ok(smallvec![first]), - } - } - pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> { - self.inner.as_slice().get(n) - } -} - -impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> { - pub(crate) fn expect_fragment( - &mut self, - entry_point: parser::PrefixEntryPoint, - edition: parser::Edition, - ) -> ExpandResult<Option<tt::TokenTree<S>>> { - let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice()); - let parser_input = to_parser_input(&buffer); - let tree_traversal = entry_point.parse(&parser_input, edition); - let mut cursor = buffer.begin(); - let mut error = false; - for step in tree_traversal.iter() { - match step { - parser::Step::Token { kind, mut n_input_tokens } => { - if kind == SyntaxKind::LIFETIME_IDENT { - n_input_tokens = 2; - } - for _ in 0..n_input_tokens { - cursor = cursor.bump_subtree(); - } - } - parser::Step::FloatSplit { .. } => { - // FIXME: We need to split the tree properly here, but mutating the token trees - // in the buffer is somewhat tricky to pull off. - cursor = cursor.bump_subtree(); - } - parser::Step::Enter { .. } | parser::Step::Exit => (), - parser::Step::Error { .. 
} => error = true, - } - } - - let err = if error || !cursor.is_root() { - Some(ExpandError::binding_error(format!("expected {entry_point:?}"))) - } else { - None - }; - - let mut curr = buffer.begin(); - let mut res = vec![]; - - while curr != cursor { - let Some(token) = curr.token_tree() else { break }; - res.push(token.cloned()); - curr = curr.bump(); - } - - self.inner = self.inner.as_slice()[res.len()..].iter(); - let res = match &*res { - [] | [_] => res.pop(), - [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(first.first_span()), - token_trees: res.into_boxed_slice(), - })), - }; - ExpandResult { value: res, err } - } -} - -impl<'a, S> Iterator for TtIter<'a, S> { - type Item = &'a tt::TokenTree<S>; - fn next(&mut self) -> Option<Self::Item> { - self.inner.next() - } - - fn size_hint(&self) -> (usize, Option<usize>) { - self.inner.size_hint() - } -} - -impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {} diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml index 1f84e3f3af3..54b57c201be 100644 --- a/src/tools/rust-analyzer/crates/parser/Cargo.toml +++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml @@ -21,7 +21,6 @@ tracing = { workspace = true, optional = true } expect-test = "1.4.0" stdx.workspace = true -sourcegen.workspace = true [features] default = ["tracing"] diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs index 25c00ccf5f3..99bbf47654b 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs @@ -372,13 +372,11 @@ fn macro_def(p: &mut Parser<'_>, m: Marker) { // macro m { ($i:ident) => {} } token_tree(p); } else if p.at(T!['(']) { - let m = p.start(); token_tree(p); match p.current() { T!['{'] | T!['['] | T!['('] => token_tree(p), _ => p.error("expected `{`, `[`, `(`"), } - m.complete(p, TOKEN_TREE); } else { p.error("unmatched `(`"); } diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs index c2f9ea47728..738ed239a7c 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lib.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs @@ -17,7 +17,6 @@ //! //! [`Parser`]: crate::parser::Parser -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(rustdoc::private_intra_doc_links)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index ef83420c523..ad3398453be 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_ast`, do not edit by hand. +//! Generated by `cargo codegen grammar`, do not edit by hand. #![allow(bad_style, missing_docs, unreachable_pub)] #[doc = r" The kind of syntax node, e.g. 
`IDENT`, `USE_KW`, or `STRUCT`."] diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs index 0e040965261..a38689791c4 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests.rs +++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs @@ -1,5 +1,4 @@ mod prefix_entries; -mod sourcegen_inline_tests; mod top_entries; use std::{ diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast index 01de13a907d..f73229b2e30 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast @@ -5,15 +5,14 @@ SOURCE_FILE NAME IDENT "m" TOKEN_TREE - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "i" - COLON ":" - IDENT "ident" - R_PAREN ")" - WHITESPACE " " - TOKEN_TREE - L_CURLY "{" - R_CURLY "}" + L_PAREN "(" + DOLLAR "$" + IDENT "i" + COLON ":" + IDENT "ident" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + R_CURLY "}" WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast index a95bc23016b..3d9322947b3 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast @@ -39,16 +39,15 @@ SOURCE_FILE NAME IDENT "m" TOKEN_TREE - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - COLON ":" - IDENT "ident" - R_PAREN ")" - WHITESPACE " " - TOKEN_TREE - L_CURLY "{" - R_CURLY "}" + L_PAREN "(" + DOLLAR "$" + COLON ":" + IDENT "ident" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + R_CURLY "}" WHITESPACE "\n" FN VISIBILITY diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast index 3915ed75064..1415a866b69 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast @@ -5,51 +5,50 @@ SOURCE_FILE NAME IDENT "parse_use_trees" TOKEN_TREE + L_PAREN "(" + DOLLAR "$" TOKEN_TREE L_PAREN "(" DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "s" - COLON ":" - IDENT "expr" - R_PAREN ")" + IDENT "s" + COLON ":" + IDENT "expr" + R_PAREN ")" + COMMA "," + STAR "*" + WHITESPACE " " + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" COMMA "," - STAR "*" - WHITESPACE " " + R_PAREN ")" + STAR "*" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + WHITESPACE "\n " + IDENT "vec" + BANG "!" + TOKEN_TREE + L_BRACK "[" + WHITESPACE "\n " DOLLAR "$" TOKEN_TREE L_PAREN "(" + IDENT "parse_use_tree" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + IDENT "s" + R_PAREN ")" COMMA "," R_PAREN ")" STAR "*" - R_PAREN ")" - WHITESPACE " " - TOKEN_TREE - L_CURLY "{" WHITESPACE "\n " - IDENT "vec" - BANG "!" 
- TOKEN_TREE - L_BRACK "[" - WHITESPACE "\n " - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - IDENT "parse_use_tree" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "s" - R_PAREN ")" - COMMA "," - R_PAREN ")" - STAR "*" - WHITESPACE "\n " - R_BRACK "]" - WHITESPACE "\n" - R_CURLY "}" + R_BRACK "]" + WHITESPACE "\n" + R_CURLY "}" WHITESPACE "\n\n" FN ATTR @@ -80,79 +79,62 @@ SOURCE_FILE NAME IDENT "test_merge" TOKEN_TREE + L_PAREN "(" TOKEN_TREE - L_PAREN "(" + L_BRACK "[" + DOLLAR "$" TOKEN_TREE - L_BRACK "[" - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "input" - COLON ":" - IDENT "expr" - R_PAREN ")" - COMMA "," - STAR "*" - WHITESPACE " " + L_PAREN "(" DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - COMMA "," - R_PAREN ")" - STAR "*" - R_BRACK "]" + IDENT "input" + COLON ":" + IDENT "expr" + R_PAREN ")" COMMA "," + STAR "*" WHITESPACE " " + DOLLAR "$" TOKEN_TREE - L_BRACK "[" - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "output" - COLON ":" - IDENT "expr" - R_PAREN ")" + L_PAREN "(" COMMA "," - STAR "*" - WHITESPACE " " - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - COMMA "," - R_PAREN ")" - STAR "*" - R_BRACK "]" - R_PAREN ")" + R_PAREN ")" + STAR "*" + R_BRACK "]" + COMMA "," WHITESPACE " " TOKEN_TREE - L_CURLY "{" - WHITESPACE "\n " - IDENT "assert_eq" - BANG "!" + L_BRACK "[" + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + IDENT "output" + COLON ":" + IDENT "expr" + R_PAREN ")" + COMMA "," + STAR "*" + WHITESPACE " " + DOLLAR "$" TOKEN_TREE L_PAREN "(" - WHITESPACE "\n " - IDENT "merge_use_trees" - TOKEN_TREE - L_PAREN "(" - IDENT "parse_use_trees" - BANG "!" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "input" - COMMA "," - R_PAREN ")" - STAR "*" - R_PAREN ")" - R_PAREN ")" COMMA "," - WHITESPACE "\n " + R_PAREN ")" + STAR "*" + R_BRACK "]" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + WHITESPACE "\n " + IDENT "assert_eq" + BANG "!" + TOKEN_TREE + L_PAREN "(" + WHITESPACE "\n " + IDENT "merge_use_trees" + TOKEN_TREE + L_PAREN "(" IDENT "parse_use_trees" BANG "!" TOKEN_TREE @@ -161,17 +143,33 @@ SOURCE_FILE TOKEN_TREE L_PAREN "(" DOLLAR "$" - IDENT "output" + IDENT "input" COMMA "," R_PAREN ")" STAR "*" R_PAREN ")" - COMMA "," - WHITESPACE "\n " R_PAREN ")" - SEMICOLON ";" - WHITESPACE "\n " - R_CURLY "}" + COMMA "," + WHITESPACE "\n " + IDENT "parse_use_trees" + BANG "!" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + IDENT "output" + COMMA "," + R_PAREN ")" + STAR "*" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n " + R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs index 33c3f83db50..885f071889e 100644 --- a/src/tools/rust-analyzer/crates/paths/src/lib.rs +++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs @@ -1,8 +1,6 @@ //! Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and //! relative paths. 
-#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{ borrow::Borrow, ffi::OsStr, diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml index f30f6a0f23b..7f633d91ecc 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml @@ -12,15 +12,11 @@ rust-version.workspace = true doctest = false [dependencies] -object.workspace = true serde.workspace = true serde_json = { workspace = true, features = ["unbounded_depth"] } tracing.workspace = true -triomphe.workspace = true rustc-hash.workspace = true -memmap2 = "0.5.4" -snap = "1.1.0" -indexmap = "2.1.0" +indexmap.workspace = true # local deps paths = { workspace = true, features = ["serde1"] } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/json.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/json.rs new file mode 100644 index 00000000000..ec89f6a9e65 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/json.rs @@ -0,0 +1,35 @@ +//! Protocol functions for json. +use std::io::{self, BufRead, Write}; + +pub fn read_json<'a>( + inp: &mut impl BufRead, + buf: &'a mut String, +) -> io::Result<Option<&'a String>> { + loop { + buf.clear(); + + inp.read_line(buf)?; + buf.pop(); // Remove trailing '\n' + + if buf.is_empty() { + return Ok(None); + } + + // Some ill behaved macro try to use stdout for debugging + // We ignore it here + if !buf.starts_with('{') { + tracing::error!("proc-macro tried to print : {}", buf); + continue; + } + + return Ok(Some(buf)); + } +} + +pub fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { + tracing::debug!("> {}", msg); + out.write_all(msg.as_bytes())?; + out.write_all(b"\n")?; + out.flush()?; + Ok(()) +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 8970bdd0121..3a915e668bb 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -5,34 +5,27 @@ //! is used to provide basic infrastructure for communication between two //! processes: Client (RA itself), Server (the external program) -#![warn(rust_2018_idioms, unused_lifetimes)] - +pub mod json; pub mod msg; mod process; -mod version; use base_db::Env; -use indexmap::IndexSet; use paths::{AbsPath, AbsPathBuf}; -use rustc_hash::FxHashMap; use span::Span; -use std::{ - fmt, io, - sync::{Arc, Mutex}, -}; +use std::{fmt, io, sync::Arc}; +use tt::SmolStr; use serde::{Deserialize, Serialize}; use crate::{ msg::{ deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro, - ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT, + ExpnGlobals, FlatTree, PanicMessage, SpanDataIndexMap, HAS_GLOBAL_SPANS, + RUST_ANALYZER_SPAN_SUPPORT, }, process::ProcMacroProcessSrv, }; -pub use version::{read_dylib_info, read_version, RustCInfo}; - #[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] pub enum ProcMacroKind { CustomDerive, @@ -51,9 +44,7 @@ pub struct ProcMacroServer { /// /// That means that concurrent salsa requests may block each other when expanding proc macros, /// which is unfortunate, but simple and good enough for the time being. - /// - /// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here. 
- process: Arc<Mutex<ProcMacroProcessSrv>>, + process: Arc<ProcMacroProcessSrv>, path: AbsPathBuf, } @@ -73,9 +64,9 @@ impl MacroDylib { /// we share a single expander process for all macros. #[derive(Debug, Clone)] pub struct ProcMacro { - process: Arc<Mutex<ProcMacroProcessSrv>>, - dylib_path: AbsPathBuf, - name: String, + process: Arc<ProcMacroProcessSrv>, + dylib_path: Arc<AbsPathBuf>, + name: SmolStr, kind: ProcMacroKind, } @@ -84,7 +75,7 @@ impl PartialEq for ProcMacro { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.kind == other.kind - && self.dylib_path == other.dylib_path + && Arc::ptr_eq(&self.dylib_path, &other.dylib_path) && Arc::ptr_eq(&self.process, &other.process) } } @@ -92,7 +83,6 @@ impl PartialEq for ProcMacro { #[derive(Clone, Debug)] pub struct ServerError { pub message: String, - // io::Error isn't Clone for some reason pub io: Option<Arc<io::Error>>, } @@ -107,21 +97,15 @@ impl fmt::Display for ServerError { } } -pub struct MacroPanic { - pub message: String, -} - impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. pub fn spawn( process_path: &AbsPath, - env: &FxHashMap<String, String>, + env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)> + + Clone, ) -> io::Result<ProcMacroServer> { let process = ProcMacroProcessSrv::run(process_path, env)?; - Ok(ProcMacroServer { - process: Arc::new(Mutex::new(process)), - path: process_path.to_owned(), - }) + Ok(ProcMacroServer { process: Arc::new(process), path: process_path.to_owned() }) } pub fn path(&self) -> &AbsPath { @@ -130,22 +114,26 @@ impl ProcMacroServer { pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> { let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered(); - let macros = - self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?; + let macros = self.process.find_proc_macros(&dylib.path)?; + let dylib_path = Arc::new(dylib.path); match macros { Ok(macros) => Ok(macros .into_iter() .map(|(name, kind)| ProcMacro { process: self.process.clone(), - name, + name: name.into(), kind, - dylib_path: dylib.path.clone(), + dylib_path: dylib_path.clone(), }) .collect()), Err(message) => Err(ServerError { message, io: None }), } } + + pub fn exited(&self) -> Option<&ServerError> { + self.process.exited() + } } impl ProcMacro { @@ -166,38 +154,37 @@ impl ProcMacro { call_site: Span, mixed_site: Span, ) -> Result<Result<tt::Subtree<Span>, PanicMessage>, ServerError> { - let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); + let version = self.process.version(); let current_dir = env.get("CARGO_MANIFEST_DIR"); - let mut span_data_table = IndexSet::default(); + let mut span_data_table = SpanDataIndexMap::default(); let def_site = span_data_table.insert_full(def_site).0; let call_site = span_data_table.insert_full(call_site).0; let mixed_site = span_data_table.insert_full(mixed_site).0; let task = ExpandMacro { - macro_body: FlatTree::new(subtree, version, &mut span_data_table), - macro_name: self.name.to_string(), - attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), + data: msg::ExpandMacroData { + macro_body: FlatTree::new(subtree, version, &mut span_data_table), + macro_name: self.name.to_string(), + attributes: attr + .map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), + has_global_spans: ExpnGlobals { + serialize: version >= HAS_GLOBAL_SPANS, + 
def_site, + call_site, + mixed_site, + }, + span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { + serialize_span_data_index_map(&span_data_table) + } else { + Vec::new() + }, + }, lib: self.dylib_path.to_path_buf().into(), env: env.into(), current_dir, - has_global_spans: ExpnGlobals { - serialize: version >= HAS_GLOBAL_SPANS, - def_site, - call_site, - mixed_site, - }, - span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { - serialize_span_data_index_map(&span_data_table) - } else { - Vec::new() - }, }; - let response = self - .process - .lock() - .unwrap_or_else(|e| e.into_inner()) - .send_task(msg::Request::ExpandMacro(Box::new(task)))?; + let response = self.process.send_task(msg::Request::ExpandMacro(Box::new(task)))?; match response { msg::Response::ExpandMacro(it) => { diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs index ad0e1f187b6..fa3ba9bbfcd 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs @@ -72,6 +72,16 @@ pub struct PanicMessage(pub String); #[derive(Debug, Serialize, Deserialize)] pub struct ExpandMacro { + pub lib: Utf8PathBuf, + /// Environment variables to set during macro expansion. + pub env: Vec<(String, String)>, + pub current_dir: Option<String>, + #[serde(flatten)] + pub data: ExpandMacroData, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ExpandMacroData { /// Argument of macro call. /// /// In custom derive this will be a struct or enum; in attribute-like macro - underlying @@ -86,13 +96,6 @@ pub struct ExpandMacro { /// Possible attributes for the attribute-like macros. pub attributes: Option<FlatTree>, - - pub lib: Utf8PathBuf, - - /// Environment variables to set during macro expansion. - pub env: Vec<(String, String)>, - - pub current_dir: Option<String>, /// marker for serde skip stuff #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] #[serde(default)] @@ -119,8 +122,12 @@ impl ExpnGlobals { } pub trait Message: Serialize + DeserializeOwned { - fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result<Option<Self>> { - Ok(match read_json(inp, buf)? { + fn read<R: BufRead>( + from_proto: ProtocolRead<R>, + inp: &mut R, + buf: &mut String, + ) -> io::Result<Option<Self>> { + Ok(match from_proto(inp, buf)? 
{ None => None, Some(text) => { let mut deserializer = serde_json::Deserializer::from_str(text); @@ -131,44 +138,20 @@ pub trait Message: Serialize + DeserializeOwned { } }) } - fn write(self, out: &mut impl Write) -> io::Result<()> { + fn write<W: Write>(self, to_proto: ProtocolWrite<W>, out: &mut W) -> io::Result<()> { let text = serde_json::to_string(&self)?; - write_json(out, &text) + to_proto(out, &text) } } impl Message for Request {} impl Message for Response {} -fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> { - loop { - buf.clear(); - - inp.read_line(buf)?; - buf.pop(); // Remove trailing '\n' - - if buf.is_empty() { - return Ok(None); - } - - // Some ill behaved macro try to use stdout for debugging - // We ignore it here - if !buf.starts_with('{') { - tracing::error!("proc-macro tried to print : {}", buf); - continue; - } - - return Ok(Some(buf)); - } -} - -fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { - tracing::debug!("> {}", msg); - out.write_all(msg.as_bytes())?; - out.write_all(b"\n")?; - out.flush()?; - Ok(()) -} +#[allow(type_alias_bounds)] +type ProtocolRead<R: BufRead> = + for<'i, 'buf> fn(inp: &'i mut R, buf: &'buf mut String) -> io::Result<Option<&'buf String>>; +#[allow(type_alias_bounds)] +type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) -> io::Result<()>; #[cfg(test)] mod tests { @@ -268,25 +251,30 @@ mod tests { let tt = fixture_token_tree(); let mut span_data_table = Default::default(); let task = ExpandMacro { - macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), - macro_name: Default::default(), - attributes: None, + data: ExpandMacroData { + macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), + macro_name: Default::default(), + attributes: None, + has_global_spans: ExpnGlobals { + serialize: true, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, + span_data_table: Vec::new(), + }, lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(), env: Default::default(), current_dir: Default::default(), - has_global_spans: ExpnGlobals { - serialize: true, - def_site: 0, - call_site: 0, - mixed_site: 0, - }, - span_data_table: Vec::new(), }; let json = serde_json::to_string(&task).unwrap(); // println!("{}", json); let back: ExpandMacro = serde_json::from_str(&json).unwrap(); - assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + assert_eq!( + tt, + back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table) + ); } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs index 99cdbd930e1..11fd7596f2b 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs @@ -37,7 +37,6 @@ use std::collections::VecDeque; -use indexmap::IndexSet; use la_arena::RawIdx; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; @@ -46,7 +45,8 @@ use text_size::TextRange; use crate::msg::ENCODE_CLOSE_SPAN_VERSION; -pub type SpanDataIndexMap = IndexSet<Span>; +pub type SpanDataIndexMap = + indexmap::IndexSet<Span, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>; pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> { map.iter() @@ -328,7 +328,7 @@ impl InternableSpan for TokenId { } } impl InternableSpan for Span { - type Table = IndexSet<Span>; + type Table = 
SpanDataIndexMap; fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { TokenId(table.insert_full(span).0 as u32) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index 718a96dc80f..c965257a5c5 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -2,46 +2,53 @@ use std::{ io::{self, BufRead, BufReader, Read, Write}, + panic::AssertUnwindSafe, process::{Child, ChildStdin, ChildStdout, Command, Stdio}, - sync::Arc, + sync::{Arc, Mutex, OnceLock}, }; use paths::AbsPath; -use rustc_hash::FxHashMap; use stdx::JodChild; use crate::{ + json::{read_json, write_json}, msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT}, ProcMacroKind, ServerError, }; #[derive(Debug)] pub(crate) struct ProcMacroProcessSrv { + /// The state of the proc-macro server process, the protocol is currently strictly sequential + /// hence the lock on the state. + state: Mutex<ProcessSrvState>, + version: u32, + mode: SpanMode, + /// Populated when the server exits. + exited: OnceLock<AssertUnwindSafe<ServerError>>, +} + +#[derive(Debug)] +struct ProcessSrvState { process: Process, stdin: ChildStdin, stdout: BufReader<ChildStdout>, - /// Populated when the server exits. - server_exited: Option<ServerError>, - version: u32, - mode: SpanMode, } impl ProcMacroProcessSrv { pub(crate) fn run( process_path: &AbsPath, - env: &FxHashMap<String, String>, + env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)> + + Clone, ) -> io::Result<ProcMacroProcessSrv> { let create_srv = |null_stderr| { - let mut process = Process::run(process_path, env, null_stderr)?; + let mut process = Process::run(process_path, env.clone(), null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { - process, - stdin, - stdout, - server_exited: None, + state: Mutex::new(ProcessSrvState { process, stdin, stdout }), version: 0, mode: SpanMode::Id, + exited: OnceLock::new(), }) }; let mut srv = create_srv(true)?; @@ -72,11 +79,15 @@ impl ProcMacroProcessSrv { } } + pub(crate) fn exited(&self) -> Option<&ServerError> { + self.exited.get().map(|it| &it.0) + } + pub(crate) fn version(&self) -> u32 { self.version } - pub(crate) fn version_check(&mut self) -> Result<u32, ServerError> { + fn version_check(&self) -> Result<u32, ServerError> { let request = Request::ApiVersionCheck {}; let response = self.send_task(request)?; @@ -86,7 +97,7 @@ impl ProcMacroProcessSrv { } } - fn enable_rust_analyzer_spans(&mut self) -> Result<SpanMode, ServerError> { + fn enable_rust_analyzer_spans(&self) -> Result<SpanMode, ServerError> { let request = Request::SetConfig(crate::msg::ServerConfig { span_mode: crate::msg::SpanMode::RustAnalyzer, }); @@ -99,7 +110,7 @@ impl ProcMacroProcessSrv { } pub(crate) fn find_proc_macros( - &mut self, + &self, dylib_path: &AbsPath, ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> { let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() }; @@ -112,36 +123,53 @@ impl ProcMacroProcessSrv { } } - pub(crate) fn send_task(&mut self, req: Request) -> Result<Response, ServerError> { - if let Some(server_error) = &self.server_exited { - return Err(server_error.clone()); + pub(crate) fn send_task(&self, req: Request) -> Result<Response, ServerError> { + if let Some(server_error) = 
self.exited.get() { + return Err(server_error.0.clone()); } + let state = &mut *self.state.lock().unwrap(); let mut buf = String::new(); - send_request(&mut self.stdin, &mut self.stdout, req, &mut buf).map_err(|e| { - if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) { - match self.process.child.try_wait() { - Ok(None) => e, - Ok(Some(status)) => { - let mut msg = String::new(); - if !status.success() { - if let Some(stderr) = self.process.child.stderr.as_mut() { - _ = stderr.read_to_string(&mut msg); + send_request(&mut state.stdin, &mut state.stdout, req, &mut buf) + .and_then(|res| { + res.ok_or_else(|| { + let message = "proc-macro server did not respond with data".to_owned(); + ServerError { + io: Some(Arc::new(io::Error::new( + io::ErrorKind::BrokenPipe, + message.clone(), + ))), + message, + } + }) + }) + .map_err(|e| { + if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) { + match state.process.child.try_wait() { + Ok(None) | Err(_) => e, + Ok(Some(status)) => { + let mut msg = String::new(); + if !status.success() { + if let Some(stderr) = state.process.child.stderr.as_mut() { + _ = stderr.read_to_string(&mut msg); + } } + let server_error = ServerError { + message: format!( + "proc-macro server exited with {status}{}{msg}", + if msg.is_empty() { "" } else { ": " } + ), + io: None, + }; + // `AssertUnwindSafe` is fine here, we already correct initialized + // server_error at this point. + self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone() } - let server_error = ServerError { - message: format!("server exited with {status}: {msg}"), - io: None, - }; - self.server_exited = Some(server_error.clone()); - server_error } - Err(_) => e, + } else { + e } - } else { - e - } - }) + }) } } @@ -153,7 +181,7 @@ struct Process { impl Process { fn run( path: &AbsPath, - env: &FxHashMap<String, String>, + env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>, null_stderr: bool, ) -> io::Result<Process> { let child = JodChild(mk_child(path, env, null_stderr)?); @@ -171,7 +199,7 @@ impl Process { fn mk_child( path: &AbsPath, - env: &FxHashMap<String, String>, + env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>, null_stderr: bool, ) -> io::Result<Child> { let mut cmd = Command::new(path); @@ -195,14 +223,14 @@ fn send_request( mut reader: &mut impl BufRead, req: Request, buf: &mut String, -) -> Result<Response, ServerError> { - req.write(&mut writer).map_err(|err| ServerError { +) -> Result<Option<Response>, ServerError> { + req.write(write_json, &mut writer).map_err(|err| ServerError { message: "failed to write request".into(), io: Some(Arc::new(err)), })?; - let res = Response::read(&mut reader, buf).map_err(|err| ServerError { + let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError { message: "failed to read response".into(), io: Some(Arc::new(err)), })?; - res.ok_or_else(|| ServerError { message: "server exited".into(), io: None }) + Ok(res) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index df0ae3171f5..174f9c52462 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -6,6 +6,8 @@ #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +use proc_macro_api::json::{read_json, write_json}; + use std::io; fn main() -> std::io::Result<()> { @@ 
-26,19 +28,49 @@ fn main() -> std::io::Result<()> { #[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))] fn run() -> io::Result<()> { - eprintln!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled"); - std::process::exit(70); + let err = "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function"; + eprintln!("{err}"); + use proc_macro_api::msg::{self, Message}; + + let read_request = + |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf); + + let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock()); + + let mut buf = String::new(); + + while let Some(req) = read_request(&mut buf)? { + let res = match req { + msg::Request::ListMacros { .. } => msg::Response::ListMacros(Err(err.to_owned())), + msg::Request::ExpandMacro(_) => { + msg::Response::ExpandMacro(Err(msg::PanicMessage(err.to_owned()))) + } + msg::Request::ApiVersionCheck {} => { + msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION) + } + msg::Request::SetConfig(_) => { + msg::Response::SetConfig(proc_macro_api::msg::ServerConfig { + span_mode: msg::SpanMode::Id, + }) + } + }; + write_response(res)? + } + Ok(()) } #[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn run() -> io::Result<()> { use proc_macro_api::msg::{self, Message}; + use proc_macro_srv::EnvSnapshot; - let read_request = |buf: &mut String| msg::Request::read(&mut io::stdin().lock(), buf); + let read_request = + |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf); - let write_response = |msg: msg::Response| msg.write(&mut io::stdout().lock()); + let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock()); - let mut srv = proc_macro_srv::ProcMacroSrv::default(); + let env = EnvSnapshot::new(); + let mut srv = proc_macro_srv::ProcMacroSrv::new(&env); let mut buf = String::new(); while let Some(req) = read_request(&mut buf)? { diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index f8db1c6a30b..735f781c439 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -13,8 +13,9 @@ doctest = false [dependencies] object.workspace = true -libloading = "0.8.0" -memmap2 = "0.5.4" +libloading.workspace = true +memmap2.workspace = true +snap.workspace = true stdx.workspace = true tt.workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs index 874d1c6cd38..9a17cfc9f36 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs @@ -1,27 +1,15 @@ //! Determine rustc version `proc-macro-srv` (and thus the sysroot ABI) is //! build with and make it accessible at runtime for ABI selection. 
-use std::{env, fs::File, io::Write, path::PathBuf, process::Command}; +use std::{env, process::Command}; fn main() { - println!("cargo:rustc-check-cfg=cfg(rust_analyzer)"); - - let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); - path.push("rustc_version.rs"); - let mut f = File::create(&path).unwrap(); + println!("cargo::rustc-check-cfg=cfg(rust_analyzer)"); let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set"); let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run"); let version_string = std::str::from_utf8(&output.stdout[..]) .expect("rustc --version output must be UTF-8") .trim(); - - write!( - f, - " - #[allow(dead_code)] - pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?}; - " - ) - .unwrap(); + println!("cargo::rustc-env=RUSTC_VERSION={}", version_string); } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs index 6a0ae362d88..ff2f5d18639 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs @@ -8,7 +8,7 @@ //! 1.58) and future ABIs (stage1, nightly) use std::{ - env, fs, + env, path::{Path, PathBuf}, process::Command, }; @@ -30,8 +30,7 @@ fn main() { if !has_features { println!("proc-macro-test testing only works on nightly toolchains"); - let info_path = out_dir.join("proc_macro_test_location.txt"); - fs::File::create(info_path).unwrap(); + println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION=\"\""); return; } @@ -121,6 +120,5 @@ fn main() { // This file is under `target_dir` and is already under `OUT_DIR`. let artifact_path = artifact_path.expect("no dylib for proc-macro-test-impl found"); - let info_path = out_dir.join("proc_macro_test_location.txt"); - fs::write(info_path, artifact_path.to_str().unwrap()).unwrap(); + println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION={}", artifact_path.display()); } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index 5f8530d08c4..a1707364f3c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -1,6 +1,6 @@ //! Exports a few trivial procedural macros for testing. -#![warn(rust_2018_idioms, unused_lifetimes)] + #![feature(proc_macro_span, proc_macro_def_site)] #![allow(clippy::all)] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/src/lib.rs index 739c6ec6f44..6464adb2ca7 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/src/lib.rs @@ -1,6 +1,3 @@ //! Exports a few trivial procedural macros for testing. 
-#![warn(rust_2018_idioms, unused_lifetimes)] - -pub static PROC_MACRO_TEST_LOCATION: &str = - include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt")); +pub static PROC_MACRO_TEST_LOCATION: &str = env!("PROC_MACRO_TEST_LOCATION"); diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs index 22c34ff1678..78ae4574c40 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs @@ -1,13 +1,15 @@ //! Handles dynamic library loading for proc macro +mod version; + +use proc_macro::bridge; use std::{fmt, fs::File, io}; use libloading::Library; use memmap2::Mmap; use object::Object; use paths::{AbsPath, Utf8Path, Utf8PathBuf}; -use proc_macro::bridge; -use proc_macro_api::{read_dylib_info, ProcMacroKind}; +use proc_macro_api::ProcMacroKind; use crate::ProcMacroSrvSpan; @@ -119,33 +121,45 @@ impl ProcMacroLibraryLibloading { let abs_file: &AbsPath = file .try_into() .map_err(|_| invalid_data_err(format!("expected an absolute path, got {file}")))?; - let version_info = read_dylib_info(abs_file)?; + let version_info = version::read_dylib_info(abs_file)?; let lib = load_library(file).map_err(invalid_data_err)?; - let proc_macros = - crate::proc_macros::ProcMacros::from_lib(&lib, symbol_name, version_info)?; + let proc_macros = crate::proc_macros::ProcMacros::from_lib( + &lib, + symbol_name, + &version_info.version_string, + )?; Ok(ProcMacroLibraryLibloading { _lib: lib, proc_macros }) } } -pub struct Expander { +pub(crate) struct Expander { inner: ProcMacroLibraryLibloading, + path: Utf8PathBuf, +} + +impl Drop for Expander { + fn drop(&mut self) { + #[cfg(windows)] + std::fs::remove_file(&self.path).ok(); + _ = self.path; + } } impl Expander { - pub fn new(lib: &Utf8Path) -> Result<Expander, LoadProcMacroDylibError> { + pub(crate) fn new(lib: &Utf8Path) -> Result<Expander, LoadProcMacroDylibError> { // Some libraries for dynamic loading require canonicalized path even when it is // already absolute let lib = lib.canonicalize_utf8()?; - let lib = ensure_file_with_lock_free_access(&lib)?; + let path = ensure_file_with_lock_free_access(&lib)?; - let library = ProcMacroLibraryLibloading::open(lib.as_ref())?; + let library = ProcMacroLibraryLibloading::open(path.as_ref())?; - Ok(Expander { inner: library }) + Ok(Expander { inner: library, path }) } - pub fn expand<S: ProcMacroSrvSpan>( + pub(crate) fn expand<S: ProcMacroSrvSpan>( &self, macro_name: &str, macro_body: tt::Subtree<S>, @@ -164,7 +178,7 @@ impl Expander { result.map_err(|e| e.into_string().unwrap_or_default()) } - pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { + pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { self.inner.proc_macros.list_macros() } } @@ -193,7 +207,7 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result<Utf8PathBuf> unique_name.push_str(file_name); to.push(unique_name); - std::fs::copy(path, &to).unwrap(); + std::fs::copy(path, &to)?; Ok(to) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs index 09b81250715..1f7ef7914ba 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs @@ -11,6 +11,7 @@ use paths::AbsPath; use snap::read::FrameDecoder as SnapDecoder; #[derive(Debug)] +#[allow(dead_code)] pub struct RustCInfo { pub version: 
(usize, usize, usize), pub channel: String, @@ -164,3 +165,16 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> { let version_string = String::from_utf8(version_string_utf8); version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) } + +#[test] +fn test_version_check() { + let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path()); + let info = read_dylib_info(&path).unwrap(); + assert_eq!( + info.version_string, + crate::RUSTC_VERSION_STRING, + "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}", + info.version_string, + crate::RUSTC_VERSION_STRING, + ); +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index 2472c1e3119..e6281035e1a 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -13,7 +13,6 @@ #![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unreachable_pub, internal_features)] extern crate proc_macro; @@ -27,13 +26,15 @@ extern crate rustc_lexer; mod dylib; mod proc_macros; -mod server; +mod server_impl; use std::{ collections::{hash_map::Entry, HashMap}, env, ffi::OsString, - fs, thread, + fs, + path::{Path, PathBuf}, + thread, time::SystemTime, }; @@ -47,46 +48,25 @@ use proc_macro_api::{ }; use span::Span; -use crate::server::TokenStream; +use crate::server_impl::TokenStream; -// see `build.rs` -include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); +pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION"); -trait ProcMacroSrvSpan: Copy { - type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>; - fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; +pub struct ProcMacroSrv<'env> { + expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>, + span_mode: SpanMode, + env: &'env EnvSnapshot, } -impl ProcMacroSrvSpan for TokenId { - type Server = server::token_id::TokenIdServer; - - fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { - Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site } +impl<'env> ProcMacroSrv<'env> { + pub fn new(env: &'env EnvSnapshot) -> Self { + Self { expanders: Default::default(), span_mode: Default::default(), env } } } -impl ProcMacroSrvSpan for Span { - type Server = server::rust_analyzer_span::RaSpanServer; - fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { - Self::Server { - interner: &server::SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - tracked_env_vars: Default::default(), - tracked_paths: Default::default(), - } - } -} - -#[derive(Default)] -pub struct ProcMacroSrv { - expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>, - span_mode: SpanMode, -} const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; -impl ProcMacroSrv { +impl<'env> ProcMacroSrv<'env> { pub fn set_span_mode(&mut self, span_mode: SpanMode) { self.span_mode = span_mode; } @@ -97,52 +77,24 @@ impl ProcMacroSrv { pub fn expand( &mut self, - task: msg::ExpandMacro, + msg::ExpandMacro { lib, env, current_dir, data }: msg::ExpandMacro, ) -> Result<(msg::FlatTree, Vec<u32>), msg::PanicMessage> { let span_mode = 
self.span_mode; - let expander = self.expander(task.lib.as_ref()).map_err(|err| { + let snapped_env = self.env; + let expander = self.expander(lib.as_ref()).map_err(|err| { debug_assert!(false, "should list macros before asking to expand"); msg::PanicMessage(format!("failed to load macro: {err}")) })?; - let prev_env = EnvSnapshot::new(); - for (k, v) in &task.env { - env::set_var(k, v); - } - let prev_working_dir = match &task.current_dir { - Some(dir) => { - let prev_working_dir = std::env::current_dir().ok(); - if let Err(err) = std::env::set_current_dir(dir) { - eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}") - } - prev_working_dir - } - None => None, - }; - - let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans; + let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref)); let result = match span_mode { - SpanMode::Id => { - expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![])) - } - SpanMode::RustAnalyzer => { - expand_ra_span(task, expander, def_site, call_site, mixed_site) - } + SpanMode::Id => expand_id(data, expander).map(|it| (it, vec![])), + SpanMode::RustAnalyzer => expand_ra_span(data, expander), }; prev_env.rollback(); - if let Some(dir) = prev_working_dir { - if let Err(err) = std::env::set_current_dir(&dir) { - eprintln!( - "Failed to set the current working dir to {}. Error: {:?}", - dir.display(), - err - ) - } - } - result.map_err(msg::PanicMessage) } @@ -169,33 +121,55 @@ impl ProcMacroSrv { } } +trait ProcMacroSrvSpan: Copy { + type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; +} + +impl ProcMacroSrvSpan for TokenId { + type Server = server_impl::token_id::TokenIdServer; + + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { interner: &server_impl::SYMBOL_INTERNER, call_site, def_site, mixed_site } + } +} +impl ProcMacroSrvSpan for Span { + type Server = server_impl::rust_analyzer_span::RaSpanServer; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { + interner: &server_impl::SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + tracked_env_vars: Default::default(), + tracked_paths: Default::default(), + } + } +} + fn expand_id( - task: msg::ExpandMacro, + msg::ExpandMacroData { + macro_body, + macro_name, + attributes, + has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site }, + span_data_table: _, + }: msg::ExpandMacroData, expander: &dylib::Expander, - def_site: usize, - call_site: usize, - mixed_site: usize, ) -> Result<msg::FlatTree, String> { let def_site = TokenId(def_site as u32); let call_site = TokenId(call_site as u32); let mixed_site = TokenId(mixed_site as u32); - let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); - let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); + let macro_body = macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) - .name(task.macro_name.clone()) + .name(macro_name.clone()) .spawn_scoped(s, || { expander - .expand( - &task.macro_name, - macro_body, - attributes, - def_site, - call_site, - mixed_site, - ) + .expand(&macro_name,
macro_body, attributes, def_site, call_site, mixed_site) .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) }); let res = match thread { @@ -212,35 +186,33 @@ fn expand_id( } fn expand_ra_span( - task: msg::ExpandMacro, + msg::ExpandMacroData { + macro_body, + macro_name, + attributes, + has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site }, + span_data_table, + }: msg::ExpandMacroData, expander: &dylib::Expander, - def_site: usize, - call_site: usize, - mixed_site: usize, ) -> Result<(msg::FlatTree, Vec<u32>), String> { - let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table); + let mut span_data_table = deserialize_span_data_index_map(&span_data_table); let def_site = span_data_table[def_site]; let call_site = span_data_table[call_site]; let mixed_site = span_data_table[mixed_site]; - let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); + let macro_body = macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); let attributes = - task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + // Note: we spawn a new thread here so that thread-local allocations don't accumulate (this + // includes the proc-macro symbol interner) let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) - .name(task.macro_name.clone()) + .name(macro_name.clone()) .spawn_scoped(s, || { expander - .expand( - &task.macro_name, - macro_body, - attributes, - def_site, - call_site, - mixed_site, - ) + .expand(&macro_name, macro_body, attributes, def_site, call_site, mixed_site) .map(|it| { ( msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table), @@ -271,31 +243,74 @@ impl PanicMessage { } } -struct EnvSnapshot { +pub struct EnvSnapshot { vars: HashMap<OsString, OsString>, } impl EnvSnapshot { - fn new() -> EnvSnapshot { + pub fn new() -> EnvSnapshot { EnvSnapshot { vars: env::vars_os().collect() } } +} + +struct EnvChange<'snap> { + changed_vars: Vec<String>, + prev_working_dir: Option<PathBuf>, + snap: &'snap EnvSnapshot, +} + +impl<'snap> EnvChange<'snap> { + fn apply( + snap: &'snap EnvSnapshot, + new_vars: Vec<(String, String)>, + current_dir: Option<&Path>, + ) -> EnvChange<'snap> { + let prev_working_dir = match current_dir { + Some(dir) => { + let prev_working_dir = std::env::current_dir().ok(); + if let Err(err) = std::env::set_current_dir(dir) { + eprintln!( + "Failed to set the current working dir to {}. Error: {err:?}", + dir.display() + ) + } + prev_working_dir + } + None => None, + }; + EnvChange { + snap, + changed_vars: new_vars + .into_iter() + .map(|(k, v)| { + env::set_var(&k, v); + k + }) + .collect(), + prev_working_dir, + } + } fn rollback(self) {} } -impl Drop for EnvSnapshot { +impl Drop for EnvChange<'_> { fn drop(&mut self) { - for (name, value) in env::vars_os() { - let old_value = self.vars.remove(&name); - if old_value != Some(value) { - match old_value { - None => env::remove_var(name), - Some(old_value) => env::set_var(name, old_value), - } + for name in self.changed_vars.drain(..)
{ + match self.snap.vars.get::<std::ffi::OsStr>(name.as_ref()) { + Some(prev_val) => env::set_var(name, prev_val), + None => env::remove_var(name), } } - for (name, old_value) in self.vars.drain() { - env::set_var(name, old_value) + + if let Some(dir) = &self.prev_working_dir { + if let Err(err) = std::env::set_current_dir(&dir) { + eprintln!( + "Failed to set the current working dir to {}. Error: {:?}", + dir.display(), + err + ) + } } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs index 631fd84aa24..d48c5b30dee 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs @@ -1,8 +1,9 @@ //! Proc macro ABI -use libloading::Library; use proc_macro::bridge; -use proc_macro_api::{ProcMacroKind, RustCInfo}; +use proc_macro_api::ProcMacroKind; + +use libloading::Library; use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan}; @@ -29,15 +30,15 @@ impl ProcMacros { pub(crate) fn from_lib( lib: &Library, symbol_name: String, - info: RustCInfo, + version_string: &str, ) -> Result<ProcMacros, LoadProcMacroDylibError> { - if info.version_string == crate::RUSTC_VERSION_STRING { + if version_string == crate::RUSTC_VERSION_STRING { let macros = unsafe { lib.get::<&&[bridge::client::ProcMacro]>(symbol_name.as_bytes()) }?; return Ok(Self { exported_macros: macros.to_vec() }); } - Err(LoadProcMacroDylibError::AbiMismatch(info.version_string)) + Err(LoadProcMacroDylibError::AbiMismatch(version_string.to_owned())) } pub(crate) fn expand<S: ProcMacroSrvSpan>( @@ -49,11 +50,12 @@ impl ProcMacros { call_site: S, mixed_site: S, ) -> Result<tt::Subtree<S>, crate::PanicMessage> { - let parsed_body = crate::server::TokenStream::with_subtree(macro_body); + let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body); - let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| { - crate::server::TokenStream::with_subtree(attr) - }); + let parsed_attributes = attributes + .map_or_else(crate::server_impl::TokenStream::new, |attr| { + crate::server_impl::TokenStream::with_subtree(attr) + }); for proc_macro in &self.exported_macros { match proc_macro { @@ -117,16 +119,3 @@ impl ProcMacros { .collect() } } - -#[test] -fn test_version_check() { - let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path()); - let info = proc_macro_api::read_dylib_info(&path).unwrap(); - assert_eq!( - info.version_string, - crate::RUSTC_VERSION_STRING, - "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}", - info.version_string, - crate::RUSTC_VERSION_STRING, - ); -} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs index e8b340a43d3..e8b340a43d3 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 0350bde4122..bb174ba1b22 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -14,7 +14,7 @@ use 
proc_macro::bridge::{self, server}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; use tt::{TextRange, TextSize}; -use crate::server::{ +use crate::server_impl::{ delim_to_external, delim_to_internal, literal_with_stringify_parts, token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; @@ -29,7 +29,7 @@ mod tt { pub type Ident = ::tt::Ident<super::Span>; } -type TokenStream = crate::server::TokenStream<Span>; +type TokenStream = crate::server_impl::TokenStream<Span>; #[derive(Clone)] pub struct SourceFile; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/symbol.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/symbol.rs index 540d06457f2..540d06457f2 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/symbol.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/symbol.rs diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs index ad7bd954cf1..12edacbe39d 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_id.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs @@ -7,7 +7,7 @@ use std::{ use proc_macro::bridge::{self, server}; -use crate::server::{ +use crate::server_impl::{ delim_to_external, delim_to_internal, literal_with_stringify_parts, token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; @@ -31,7 +31,7 @@ type Spacing = tt::Spacing; #[allow(unused)] type Literal = tt::Literal; type Span = tt::TokenId; -type TokenStream = crate::server::TokenStream<Span>; +type TokenStream = crate::server_impl::TokenStream<Span>; #[derive(Clone)] pub struct SourceFile; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs index b1a448427c6..b1a448427c6 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 6050bc9e36e..03b1117a5bd 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -5,10 +5,10 @@ use proc_macro_api::msg::TokenId; use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; use tt::TextRange; -use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv}; +use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv}; -fn parse_string(call_site: TokenId, src: &str) -> crate::server::TokenStream<TokenId> { - crate::server::TokenStream::with_subtree( +fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> { + crate::server_impl::TokenStream::with_subtree( mbe::parse_to_token_tree_static_span(call_site, src).unwrap(), ) } @@ -17,8 +17,8 @@ fn parse_string_spanned( anchor: SpanAnchor, call_site: SyntaxContextId, src: &str, -) -> crate::server::TokenStream<Span> { - crate::server::TokenStream::with_subtree( +) -> crate::server_impl::TokenStream<Span> { + crate::server_impl::TokenStream::with_subtree( mbe::parse_to_token_tree(anchor, call_site, src).unwrap(), ) } @@ -96,7 +96,8 @@ fn assert_expand_impl( pub(crate) fn list() -> Vec<String> { let dylib_path = 
proc_macro_test_dylib_path(); - let mut srv = ProcMacroSrv::default(); + let env = EnvSnapshot::new(); + let mut srv = ProcMacroSrv::new(&env); let res = srv.list_macros(&dylib_path).unwrap(); res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect() } diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml index 11a8e7af56a..5989dc6c962 100644 --- a/src/tools/rust-analyzer/crates/profile/Cargo.toml +++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml @@ -12,12 +12,8 @@ rust-version.workspace = true doctest = false [dependencies] -once_cell = "1.17.0" -tracing.workspace = true cfg-if = "1.0.0" -la-arena.workspace = true libc.workspace = true -countme = { version = "3.0.1", features = ["enable"] } jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true } [target.'cfg(target_os = "linux")'.dependencies] diff --git a/src/tools/rust-analyzer/crates/profile/src/lib.rs b/src/tools/rust-analyzer/crates/profile/src/lib.rs index 2ccb1cd06ae..205341f162d 100644 --- a/src/tools/rust-analyzer/crates/profile/src/lib.rs +++ b/src/tools/rust-analyzer/crates/profile/src/lib.rs @@ -1,7 +1,5 @@ //! A collection of tools for profiling rust-analyzer. -#![warn(rust_2018_idioms, unused_lifetimes)] - #[cfg(feature = "cpu_profiler")] mod google_cpu_profiler; mod memory_usage; @@ -14,13 +12,6 @@ pub use crate::{ stop_watch::{StopWatch, StopWatchSpan}, }; -pub use countme; -/// Include `_c: Count<Self>` field in important structs to count them. -/// -/// To view the counts, run with `RA_COUNT=1`. The overhead of disabled count is -/// almost zero. -pub use countme::Count; - thread_local!(static IN_SCOPE: RefCell<bool> = const { RefCell::new(false) }); /// A wrapper around google_cpu_profiler. diff --git a/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs index 990b59cad42..0a803959eed 100644 --- a/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs +++ b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs @@ -29,11 +29,10 @@ impl StopWatch { // When debugging rust-analyzer using rr, the perf-related syscalls cause it to abort. // We allow disabling perf by setting the env var `RA_DISABLE_PERF`. 
- use once_cell::sync::Lazy; - static PERF_ENABLED: Lazy<bool> = - Lazy::new(|| std::env::var_os("RA_DISABLE_PERF").is_none()); + use std::sync::OnceLock; + static PERF_ENABLED: OnceLock<bool> = OnceLock::new(); - if *PERF_ENABLED { + if *PERF_ENABLED.get_or_init(|| std::env::var_os("RA_DISABLE_PERF").is_none()) { let mut counter = perf_event::Builder::new() .build() .map_err(|err| eprintln!("Failed to create perf counter: {err}")) diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs index d2f423590e2..839d8e569fe 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs @@ -18,7 +18,6 @@ use itertools::Itertools; use la_arena::ArenaMap; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; -use semver::Version; use serde::Deserialize; use toolchain::Tool; @@ -64,10 +63,8 @@ impl WorkspaceBuildScripts { config: &CargoConfig, allowed_features: &FxHashSet<String>, manifest_path: &ManifestPath, - toolchain: Option<&Version>, sysroot: &Sysroot, ) -> io::Result<Command> { - const RUST_1_75: Version = Version::new(1, 75, 0); let mut cmd = match config.run_build_script_command.as_deref() { Some([program, args @ ..]) => { let mut cmd = Command::new(program); @@ -122,9 +119,7 @@ impl WorkspaceBuildScripts { cmd.arg("-Zscript"); } - if toolchain.map_or(false, |it| *it >= RUST_1_75) { - cmd.arg("--keep-going"); - } + cmd.arg("--keep-going"); cmd } @@ -148,7 +143,6 @@ impl WorkspaceBuildScripts { config: &CargoConfig, workspace: &CargoWorkspace, progress: &dyn Fn(String), - toolchain: Option<&Version>, sysroot: &Sysroot, ) -> io::Result<WorkspaceBuildScripts> { let current_dir = match &config.invocation_location { @@ -160,13 +154,8 @@ impl WorkspaceBuildScripts { .as_ref(); let allowed_features = workspace.workspace_features(); - let cmd = Self::build_command( - config, - &allowed_features, - workspace.manifest_path(), - toolchain, - sysroot, - )?; + let cmd = + Self::build_command(config, &allowed_features, workspace.manifest_path(), sysroot)?; Self::run_per_ws(cmd, workspace, current_dir, progress) } @@ -194,7 +183,6 @@ impl WorkspaceBuildScripts { &Default::default(), // This is not gonna be used anyways, so just construct a dummy here &ManifestPath::try_from(workspace_root.clone()).unwrap(), - None, &Sysroot::empty(), )?; // NB: Cargo.toml could have been modified between `cargo metadata` and diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 35643dcc028..92bf6a08f87 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -15,8 +15,6 @@ //! procedural macros). //! * Lowering of concrete model to a [`base_db::CrateGraph`] -#![warn(rust_2018_idioms, unused_lifetimes)] - mod build_scripts; mod cargo_workspace; mod cfg; diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 17e40e74de3..5e27ce29873 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -450,16 +450,10 @@ impl ProjectWorkspace { match &self.kind { ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } | ProjectWorkspaceKind::Cargo { cargo, .. 
} => { - WorkspaceBuildScripts::run_for_workspace( - config, - cargo, - progress, - self.toolchain.as_ref(), - &self.sysroot, - ) - .with_context(|| { - format!("Failed to run build scripts for {}", cargo.workspace_root()) - }) + WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, &self.sysroot) + .with_context(|| { + format!("Failed to run build scripts for {}", cargo.workspace_root()) + }) } ProjectWorkspaceKind::DetachedFile { cargo: None, .. } | ProjectWorkspaceKind::Json { .. } => Ok(WorkspaceBuildScripts::default()), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index 8ff7235b8fa..93fb55ede8e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -47,7 +47,6 @@ always-assert = "0.2.0" walkdir = "2.3.2" semver.workspace = true memchr = "2.7.1" -indexmap = { workspace = true, features = ["serde"] } cfg.workspace = true flycheck.workspace = true @@ -82,7 +81,6 @@ xshell.workspace = true test-utils.workspace = true test-fixture.workspace = true -sourcegen.workspace = true mbe.workspace = true [features] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 774784f37b0..1985093bc5c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -2,7 +2,6 @@ //! //! Based on cli flags, either spawns an LSP server, or runs a batch analysis -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(clippy::print_stdout, clippy::print_stderr)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs new file mode 100644 index 00000000000..212294b5d32 --- /dev/null +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs @@ -0,0 +1,493 @@ +//! Advertises the capabilities of the LSP Server. 
+use ide_db::{line_index::WideEncoding, FxHashSet}; +use lsp_types::{ + CallHierarchyServerCapability, CodeActionKind, CodeActionOptions, CodeActionProviderCapability, + CodeLensOptions, CompletionOptions, CompletionOptionsCompletionItem, DeclarationCapability, + DocumentOnTypeFormattingOptions, FileOperationFilter, FileOperationPattern, + FileOperationPatternKind, FileOperationRegistrationOptions, FoldingRangeProviderCapability, + HoverProviderCapability, ImplementationProviderCapability, InlayHintOptions, + InlayHintServerCapabilities, OneOf, PositionEncodingKind, RenameOptions, SaveOptions, + SelectionRangeProviderCapability, SemanticTokensFullOptions, SemanticTokensLegend, + SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability, + TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability, + WorkDoneProgressOptions, WorkspaceFileOperationsServerCapabilities, + WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities, +}; +use serde_json::json; + +use crate::{ + config::{Config, RustfmtConfig}, + line_index::PositionEncoding, + lsp::{ext, semantic_tokens}, +}; + +pub fn server_capabilities(config: &Config) -> ServerCapabilities { + ServerCapabilities { + position_encoding: match config.caps().negotiated_encoding() { + PositionEncoding::Utf8 => Some(PositionEncodingKind::UTF8), + PositionEncoding::Wide(wide) => match wide { + WideEncoding::Utf16 => Some(PositionEncodingKind::UTF16), + WideEncoding::Utf32 => Some(PositionEncodingKind::UTF32), + _ => None, + }, + }, + text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { + open_close: Some(true), + change: Some(TextDocumentSyncKind::INCREMENTAL), + will_save: None, + will_save_wait_until: None, + save: Some(SaveOptions::default().into()), + })), + hover_provider: Some(HoverProviderCapability::Simple(true)), + completion_provider: Some(CompletionOptions { + resolve_provider: config.caps().completions_resolve_provider(), + trigger_characters: Some(vec![ + ":".to_owned(), + ".".to_owned(), + "'".to_owned(), + "(".to_owned(), + ]), + all_commit_characters: None, + completion_item: config.caps().completion_item(), + work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, + }), + signature_help_provider: Some(SignatureHelpOptions { + trigger_characters: Some(vec!["(".to_owned(), ",".to_owned(), "<".to_owned()]), + retrigger_characters: None, + work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, + }), + declaration_provider: Some(DeclarationCapability::Simple(true)), + definition_provider: Some(OneOf::Left(true)), + type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)), + implementation_provider: Some(ImplementationProviderCapability::Simple(true)), + references_provider: Some(OneOf::Left(true)), + document_highlight_provider: Some(OneOf::Left(true)), + document_symbol_provider: Some(OneOf::Left(true)), + workspace_symbol_provider: Some(OneOf::Left(true)), + code_action_provider: Some(config.caps().code_action_capabilities()), + code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), + document_formatting_provider: Some(OneOf::Left(true)), + document_range_formatting_provider: match config.rustfmt() { + RustfmtConfig::Rustfmt { enable_range_formatting: true, .. 
} => Some(OneOf::Left(true)), + _ => Some(OneOf::Left(false)), + }, + document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { + first_trigger_character: "=".to_owned(), + more_trigger_character: Some(more_trigger_character(config)), + }), + selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), + folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)), + rename_provider: Some(OneOf::Right(RenameOptions { + prepare_provider: Some(true), + work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, + })), + linked_editing_range_provider: None, + document_link_provider: None, + color_provider: None, + execute_command_provider: None, + workspace: Some(WorkspaceServerCapabilities { + workspace_folders: Some(WorkspaceFoldersServerCapabilities { + supported: Some(true), + change_notifications: Some(OneOf::Left(true)), + }), + file_operations: Some(WorkspaceFileOperationsServerCapabilities { + did_create: None, + will_create: None, + did_rename: None, + will_rename: Some(FileOperationRegistrationOptions { + filters: vec![ + FileOperationFilter { + scheme: Some(String::from("file")), + pattern: FileOperationPattern { + glob: String::from("**/*.rs"), + matches: Some(FileOperationPatternKind::File), + options: None, + }, + }, + FileOperationFilter { + scheme: Some(String::from("file")), + pattern: FileOperationPattern { + glob: String::from("**"), + matches: Some(FileOperationPatternKind::Folder), + options: None, + }, + }, + ], + }), + did_delete: None, + will_delete: None, + }), + }), + call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)), + semantic_tokens_provider: Some( + SemanticTokensOptions { + legend: SemanticTokensLegend { + token_types: semantic_tokens::SUPPORTED_TYPES.to_vec(), + token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(), + }, + + full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }), + range: Some(true), + work_done_progress_options: Default::default(), + } + .into(), + ), + moniker_provider: None, + inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options( + InlayHintOptions { + work_done_progress_options: Default::default(), + resolve_provider: Some(true), + }, + ))), + inline_value_provider: None, + experimental: Some(json!({ + "externalDocs": true, + "hoverRange": true, + "joinLines": true, + "matchingBrace": true, + "moveItem": true, + "onEnter": true, + "openCargoToml": true, + "parentModule": true, + "runnables": { + "kinds": [ "cargo" ], + }, + "ssr": true, + "workspaceSymbolScopeKindFiltering": true, + })), + diagnostic_provider: None, + inline_completion_provider: None, + } +} + +#[derive(Debug, PartialEq, Clone, Default)] +pub struct ClientCapabilities(lsp_types::ClientCapabilities); + +impl ClientCapabilities { + pub fn new(caps: lsp_types::ClientCapabilities) -> Self { + Self(caps) + } + + fn completions_resolve_provider(&self) -> Option<bool> { + self.completion_item_edit_resolve().then_some(true) + } + + fn experimental_bool(&self, index: &'static str) -> bool { + || -> _ { self.0.experimental.as_ref()?.get(index)?.as_bool() }().unwrap_or_default() + } + + fn experimental<T: serde::de::DeserializeOwned>(&self, index: &'static str) -> Option<T> { + serde_json::from_value(self.0.experimental.as_ref()?.get(index)?.clone()).ok() + } + + /// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports. 
+ pub fn completion_item_edit_resolve(&self) -> bool { + (|| { + Some( + self.0 + .text_document + .as_ref()? + .completion + .as_ref()? + .completion_item + .as_ref()? + .resolve_support + .as_ref()? + .properties + .iter() + .any(|cap_string| cap_string.as_str() == "additionalTextEdits"), + ) + })() == Some(true) + } + + pub fn completion_label_details_support(&self) -> bool { + (|| -> _ { + self.0 + .text_document + .as_ref()? + .completion + .as_ref()? + .completion_item + .as_ref()? + .label_details_support + .as_ref() + })() + .is_some() + } + + fn completion_item(&self) -> Option<CompletionOptionsCompletionItem> { + Some(CompletionOptionsCompletionItem { + label_details_support: Some(self.completion_label_details_support()), + }) + } + + fn code_action_capabilities(&self) -> CodeActionProviderCapability { + self.0 + .text_document + .as_ref() + .and_then(|it| it.code_action.as_ref()) + .and_then(|it| it.code_action_literal_support.as_ref()) + .map_or(CodeActionProviderCapability::Simple(true), |_| { + CodeActionProviderCapability::Options(CodeActionOptions { + // Advertise support for all built-in CodeActionKinds. + // Ideally we would base this off of the client capabilities + // but the client is supposed to fall back gracefully for unknown values. + code_action_kinds: Some(vec![ + CodeActionKind::EMPTY, + CodeActionKind::QUICKFIX, + CodeActionKind::REFACTOR, + CodeActionKind::REFACTOR_EXTRACT, + CodeActionKind::REFACTOR_INLINE, + CodeActionKind::REFACTOR_REWRITE, + ]), + resolve_provider: Some(true), + work_done_progress_options: Default::default(), + }) + }) + } + + pub fn negotiated_encoding(&self) -> PositionEncoding { + let client_encodings = match &self.0.general { + Some(general) => general.position_encodings.as_deref().unwrap_or_default(), + None => &[], + }; + + for enc in client_encodings { + if enc == &PositionEncodingKind::UTF8 { + return PositionEncoding::Utf8; + } else if enc == &PositionEncodingKind::UTF32 { + return PositionEncoding::Wide(WideEncoding::Utf32); + } + // NB: intentionally prefer just about anything else to utf-16. 
+ } + + PositionEncoding::Wide(WideEncoding::Utf16) + } + + pub fn workspace_edit_resource_operations( + &self, + ) -> Option<&[lsp_types::ResourceOperationKind]> { + self.0.workspace.as_ref()?.workspace_edit.as_ref()?.resource_operations.as_deref() + } + + pub fn semantics_tokens_augments_syntax_tokens(&self) -> bool { + (|| -> _ { + self.0.text_document.as_ref()?.semantic_tokens.as_ref()?.augments_syntax_tokens + })() + .unwrap_or(false) + } + + pub fn did_save_text_document_dynamic_registration(&self) -> bool { + let caps = (|| -> _ { self.0.text_document.as_ref()?.synchronization.clone() })() + .unwrap_or_default(); + caps.did_save == Some(true) && caps.dynamic_registration == Some(true) + } + + pub fn did_change_watched_files_dynamic_registration(&self) -> bool { + (|| -> _ { + self.0.workspace.as_ref()?.did_change_watched_files.as_ref()?.dynamic_registration + })() + .unwrap_or_default() + } + + pub fn did_change_watched_files_relative_pattern_support(&self) -> bool { + (|| -> _ { + self.0.workspace.as_ref()?.did_change_watched_files.as_ref()?.relative_pattern_support + })() + .unwrap_or_default() + } + + pub fn location_link(&self) -> bool { + (|| -> _ { self.0.text_document.as_ref()?.definition?.link_support })().unwrap_or_default() + } + + pub fn line_folding_only(&self) -> bool { + (|| -> _ { self.0.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only })() + .unwrap_or_default() + } + + pub fn hierarchical_symbols(&self) -> bool { + (|| -> _ { + self.0 + .text_document + .as_ref()? + .document_symbol + .as_ref()? + .hierarchical_document_symbol_support + })() + .unwrap_or_default() + } + + pub fn code_action_literals(&self) -> bool { + (|| -> _ { + self.0 + .text_document + .as_ref()? + .code_action + .as_ref()? + .code_action_literal_support + .as_ref() + })() + .is_some() + } + + pub fn work_done_progress(&self) -> bool { + (|| -> _ { self.0.window.as_ref()?.work_done_progress })().unwrap_or_default() + } + + pub fn will_rename(&self) -> bool { + (|| -> _ { self.0.workspace.as_ref()?.file_operations.as_ref()?.will_rename })() + .unwrap_or_default() + } + + pub fn change_annotation_support(&self) -> bool { + (|| -> _ { + self.0.workspace.as_ref()?.workspace_edit.as_ref()?.change_annotation_support.as_ref() + })() + .is_some() + } + + pub fn code_action_resolve(&self) -> bool { + (|| -> _ { + Some( + self.0 + .text_document + .as_ref()? + .code_action + .as_ref()? + .resolve_support + .as_ref()? + .properties + .as_slice(), + ) + })() + .unwrap_or_default() + .iter() + .any(|it| it == "edit") + } + + pub fn signature_help_label_offsets(&self) -> bool { + (|| -> _ { + self.0 + .text_document + .as_ref()? + .signature_help + .as_ref()? + .signature_information + .as_ref()? + .parameter_information + .as_ref()? 
+ .label_offset_support + })() + .unwrap_or_default() + } + + pub fn code_action_group(&self) -> bool { + self.experimental_bool("codeActionGroup") + } + + pub fn commands(&self) -> Option<ext::ClientCommandOptions> { + self.experimental("commands") + } + + pub fn local_docs(&self) -> bool { + self.experimental_bool("localDocs") + } + + pub fn open_server_logs(&self) -> bool { + self.experimental_bool("openServerLogs") + } + + pub fn server_status_notification(&self) -> bool { + self.experimental_bool("serverStatusNotification") + } + + pub fn snippet_text_edit(&self) -> bool { + self.experimental_bool("snippetTextEdit") + } + + pub fn hover_actions(&self) -> bool { + self.experimental_bool("hoverActions") + } + + /// Whether the client supports colored output for full diagnostics from `checkOnSave`. + pub fn color_diagnostic_output(&self) -> bool { + self.experimental_bool("colorDiagnosticOutput") + } + + pub fn test_explorer(&self) -> bool { + self.experimental_bool("testExplorer") + } + + pub fn completion_snippet(&self) -> bool { + (|| -> _ { + self.0 + .text_document + .as_ref()? + .completion + .as_ref()? + .completion_item + .as_ref()? + .snippet_support + })() + .unwrap_or_default() + } + + pub fn semantic_tokens_refresh(&self) -> bool { + (|| -> _ { self.0.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support })() + .unwrap_or_default() + } + + pub fn code_lens_refresh(&self) -> bool { + (|| -> _ { self.0.workspace.as_ref()?.code_lens.as_ref()?.refresh_support })() + .unwrap_or_default() + } + + pub fn inlay_hints_refresh(&self) -> bool { + (|| -> _ { self.0.workspace.as_ref()?.inlay_hint.as_ref()?.refresh_support })() + .unwrap_or_default() + } + + pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<String> { + self.0 + .text_document + .as_ref() + .and_then(|text| text.inlay_hint.as_ref()) + .and_then(|inlay_hint_caps| inlay_hint_caps.resolve_support.as_ref()) + .map(|inlay_resolve| inlay_resolve.properties.iter()) + .into_iter() + .flatten() + .cloned() + .collect::<FxHashSet<_>>() + } + + pub fn hover_markdown_support(&self) -> bool { + (|| -> _ { + Some(self.0.text_document.as_ref()?.hover.as_ref()?.content_format.as_ref()?.as_slice()) + })() + .unwrap_or_default() + .contains(&lsp_types::MarkupKind::Markdown) + } + + pub fn insert_replace_support(&self) -> bool { + (|| -> _ { + self.0 + .text_document + .as_ref()? + .completion + .as_ref()? + .completion_item + .as_ref()? + .insert_replace_support + })() + .unwrap_or_default() + } +} + +fn more_trigger_character(config: &Config) -> Vec<String> { + let mut res = vec![".".to_owned(), ">".to_owned(), "{".to_owned(), "(".to_owned()]; + if config.snippet_cap().is_some() { + res.push("<".to_owned()); + } + res +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs deleted file mode 100644 index a207be3cac3..00000000000 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs +++ /dev/null @@ -1,230 +0,0 @@ -//! Advertises the capabilities of the LSP Server. 
-use ide_db::line_index::WideEncoding; -use lsp_types::{ - CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions, - CodeActionProviderCapability, CodeLensOptions, CompletionOptions, - CompletionOptionsCompletionItem, DeclarationCapability, DocumentOnTypeFormattingOptions, - FileOperationFilter, FileOperationPattern, FileOperationPatternKind, - FileOperationRegistrationOptions, FoldingRangeProviderCapability, HoverProviderCapability, - ImplementationProviderCapability, InlayHintOptions, InlayHintServerCapabilities, OneOf, - PositionEncodingKind, RenameOptions, SaveOptions, SelectionRangeProviderCapability, - SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, - SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind, - TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions, - WorkspaceFileOperationsServerCapabilities, WorkspaceFoldersServerCapabilities, - WorkspaceServerCapabilities, -}; -use serde_json::json; - -use crate::{ - config::{Config, RustfmtConfig}, - line_index::PositionEncoding, - lsp::semantic_tokens, - lsp_ext::negotiated_encoding, -}; - -pub fn server_capabilities(config: &Config) -> ServerCapabilities { - ServerCapabilities { - position_encoding: match negotiated_encoding(config.caps()) { - PositionEncoding::Utf8 => Some(PositionEncodingKind::UTF8), - PositionEncoding::Wide(wide) => match wide { - WideEncoding::Utf16 => Some(PositionEncodingKind::UTF16), - WideEncoding::Utf32 => Some(PositionEncodingKind::UTF32), - _ => None, - }, - }, - text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { - open_close: Some(true), - change: Some(TextDocumentSyncKind::INCREMENTAL), - will_save: None, - will_save_wait_until: None, - save: Some(SaveOptions::default().into()), - })), - hover_provider: Some(HoverProviderCapability::Simple(true)), - completion_provider: Some(CompletionOptions { - resolve_provider: completions_resolve_provider(config.caps()), - trigger_characters: Some(vec![ - ":".to_owned(), - ".".to_owned(), - "'".to_owned(), - "(".to_owned(), - ]), - all_commit_characters: None, - completion_item: completion_item(config), - work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, - }), - signature_help_provider: Some(SignatureHelpOptions { - trigger_characters: Some(vec!["(".to_owned(), ",".to_owned(), "<".to_owned()]), - retrigger_characters: None, - work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, - }), - declaration_provider: Some(DeclarationCapability::Simple(true)), - definition_provider: Some(OneOf::Left(true)), - type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)), - implementation_provider: Some(ImplementationProviderCapability::Simple(true)), - references_provider: Some(OneOf::Left(true)), - document_highlight_provider: Some(OneOf::Left(true)), - document_symbol_provider: Some(OneOf::Left(true)), - workspace_symbol_provider: Some(OneOf::Left(true)), - code_action_provider: Some(code_action_capabilities(config.caps())), - code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), - document_formatting_provider: Some(OneOf::Left(true)), - document_range_formatting_provider: match config.rustfmt() { - RustfmtConfig::Rustfmt { enable_range_formatting: true, .. 
} => Some(OneOf::Left(true)), - _ => Some(OneOf::Left(false)), - }, - document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { - first_trigger_character: "=".to_owned(), - more_trigger_character: Some(more_trigger_character(config)), - }), - selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), - folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)), - rename_provider: Some(OneOf::Right(RenameOptions { - prepare_provider: Some(true), - work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, - })), - linked_editing_range_provider: None, - document_link_provider: None, - color_provider: None, - execute_command_provider: None, - workspace: Some(WorkspaceServerCapabilities { - workspace_folders: Some(WorkspaceFoldersServerCapabilities { - supported: Some(true), - change_notifications: Some(OneOf::Left(true)), - }), - file_operations: Some(WorkspaceFileOperationsServerCapabilities { - did_create: None, - will_create: None, - did_rename: None, - will_rename: Some(FileOperationRegistrationOptions { - filters: vec![ - FileOperationFilter { - scheme: Some(String::from("file")), - pattern: FileOperationPattern { - glob: String::from("**/*.rs"), - matches: Some(FileOperationPatternKind::File), - options: None, - }, - }, - FileOperationFilter { - scheme: Some(String::from("file")), - pattern: FileOperationPattern { - glob: String::from("**"), - matches: Some(FileOperationPatternKind::Folder), - options: None, - }, - }, - ], - }), - did_delete: None, - will_delete: None, - }), - }), - call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)), - semantic_tokens_provider: Some( - SemanticTokensOptions { - legend: SemanticTokensLegend { - token_types: semantic_tokens::SUPPORTED_TYPES.to_vec(), - token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(), - }, - - full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }), - range: Some(true), - work_done_progress_options: Default::default(), - } - .into(), - ), - moniker_provider: None, - inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options( - InlayHintOptions { - work_done_progress_options: Default::default(), - resolve_provider: Some(true), - }, - ))), - inline_value_provider: None, - experimental: Some(json!({ - "externalDocs": true, - "hoverRange": true, - "joinLines": true, - "matchingBrace": true, - "moveItem": true, - "onEnter": true, - "openCargoToml": true, - "parentModule": true, - "runnables": { - "kinds": [ "cargo" ], - }, - "ssr": true, - "workspaceSymbolScopeKindFiltering": true, - })), - diagnostic_provider: None, - inline_completion_provider: None, - } -} - -fn completions_resolve_provider(client_caps: &ClientCapabilities) -> Option<bool> { - if completion_item_edit_resolve(client_caps) { - Some(true) - } else { - tracing::info!("No `additionalTextEdits` completion resolve capability was found in the client capabilities, autoimport completion is disabled"); - None - } -} - -/// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports. -pub(crate) fn completion_item_edit_resolve(caps: &ClientCapabilities) -> bool { - (|| { - Some( - caps.text_document - .as_ref()? - .completion - .as_ref()? - .completion_item - .as_ref()? - .resolve_support - .as_ref()? 
- .properties - .iter() - .any(|cap_string| cap_string.as_str() == "additionalTextEdits"), - ) - })() == Some(true) -} - -fn completion_item(config: &Config) -> Option<CompletionOptionsCompletionItem> { - Some(CompletionOptionsCompletionItem { - label_details_support: Some(config.completion_label_details_support()), - }) -} - -fn code_action_capabilities(client_caps: &ClientCapabilities) -> CodeActionProviderCapability { - client_caps - .text_document - .as_ref() - .and_then(|it| it.code_action.as_ref()) - .and_then(|it| it.code_action_literal_support.as_ref()) - .map_or(CodeActionProviderCapability::Simple(true), |_| { - CodeActionProviderCapability::Options(CodeActionOptions { - // Advertise support for all built-in CodeActionKinds. - // Ideally we would base this off of the client capabilities - // but the client is supposed to fall back gracefully for unknown values. - code_action_kinds: Some(vec![ - CodeActionKind::EMPTY, - CodeActionKind::QUICKFIX, - CodeActionKind::REFACTOR, - CodeActionKind::REFACTOR_EXTRACT, - CodeActionKind::REFACTOR_INLINE, - CodeActionKind::REFACTOR_REWRITE, - ]), - resolve_provider: Some(true), - work_done_progress_options: Default::default(), - }) - }) -} - -fn more_trigger_character(config: &Config) -> Vec<String> { - let mut res = vec![".".to_owned(), ">".to_owned(), "{".to_owned(), "(".to_owned()]; - if config.snippet_cap().is_some() { - res.push("<".to_owned()); - } - res -} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 90b81d0a80d..90316f3b89d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -250,10 +250,6 @@ impl flags::AnalysisStats { } report_metric("total memory", total_span.memory.allocated.megabytes() as u64, "MB"); - if env::var("RA_COUNT").is_ok() { - eprintln!("{}", profile::countme::get_all()); - } - if self.source_stats { let mut total_file_size = Bytes::default(); for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() { @@ -443,7 +439,11 @@ impl flags::AnalysisStats { .gen_source_code( &scope, &mut formatter, - ImportPathConfig { prefer_no_std: false, prefer_prelude: true }, + ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }, ) .unwrap(); syntax_hit_found |= trim(&original_text) == trim(&generated); @@ -651,7 +651,7 @@ impl flags::AnalysisStats { if let Some(src) = source { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file); - let syntax_range = src.value.text_range(); + let syntax_range = src.text_range(); format!("processing: {} ({} {:?})", full_name(), path, syntax_range) } else { format!("processing: {}", full_name()) @@ -945,7 +945,7 @@ impl flags::AnalysisStats { if let Some(src) = source { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file); - let syntax_range = src.value.text_range(); + let syntax_range = src.text_range(); format!("processing: {} ({} {:?})", full_name(), path, syntax_range) } else { format!("processing: {}", full_name()) @@ -992,8 +992,10 @@ impl flags::AnalysisStats { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, style_lints: false, term_search_fuel: 400, + term_search_borrowck: true, }, ide::AssistResolveStrategy::All, file_id, @@ -1006,6 +1008,11 @@ impl flags::AnalysisStats { type_hints: true, discriminant_hints: 
ide::DiscriminantHints::Always, parameter_hints: true, + generic_parameter_hints: ide::GenericParameterHints { + type_hints: true, + lifetime_hints: true, + const_hints: true, + }, chaining_hints: true, adjustment_hints: ide::AdjustmentHints::Always, adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index e8504979bed..3594cdda2e9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -10,17 +10,15 @@ use dirs::config_dir; use flycheck::{CargoOptions, FlycheckConfig}; use ide::{ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, - HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayFieldsToResolve, - InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, - Snippet, SnippetScope, SourceRootId, + GenericParameterHints, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, + InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig, + MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId, }; use ide_db::{ imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind}, SnippetCap, }; -use indexmap::IndexMap; use itertools::Itertools; -use lsp_types::{ClientCapabilities, MarkupKind}; use paths::{Utf8Path, Utf8PathBuf}; use project_model::{ CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustLibSource, @@ -36,10 +34,9 @@ use triomphe::Arc; use vfs::{AbsPath, AbsPathBuf, VfsPath}; use crate::{ - caps::completion_item_edit_resolve, + capabilities::ClientCapabilities, diagnostics::DiagnosticsMapConfig, - line_index::PositionEncoding, - lsp_ext::{self, negotiated_encoding, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope}, + lsp_ext::{WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope}, }; mod patch_old_style; @@ -341,8 +338,10 @@ config_data! { assist_emitMustUse: bool = false, /// Placeholder expression to use for missing expressions in assists. assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, - /// Term search fuel in "units of work" for assists (Defaults to 400). - assist_termSearch_fuel: usize = 400, + /// Enable borrow checking for term search code assists. If set to false, there will be more suggestions, but some of them may not borrow-check. + assist_termSearch_borrowcheck: bool = true, + /// Term search fuel in "units of work" for assists (Defaults to 1800). + assist_termSearch_fuel: usize = 1800, /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. imports_granularity_enforce: bool = false, @@ -358,6 +357,8 @@ config_data! { imports_preferPrelude: bool = false, /// The path structure for newly inserted paths to use. imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain, + /// Whether to prefix external (including std, core) crate imports with `::`, e.g. "use ::std::io::Read;". + imports_prefixExternPrelude: bool = false, } } @@ -382,8 +383,7 @@ config_data! { /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. completion_privateEditable_enable: bool = false, /// Custom completion snippets.
- // NOTE: we use IndexMap for deterministic serialization ordering - completion_snippets_custom: IndexMap<String, SnippetDef> = serde_json::from_str(r#"{ + completion_snippets_custom: FxHashMap<String, SnippetDef> = serde_json::from_str(r#"{ "Arc::new": { "postfix": "arc", "body": "Arc::new(${receiver})", @@ -426,8 +426,8 @@ config_data! { }"#).unwrap(), /// Whether to enable term search based snippets like `Some(foo.bar().baz())`. completion_termSearch_enable: bool = false, - /// Term search fuel in "units of work" for autocompletion (Defaults to 200). - completion_termSearch_fuel: usize = 200, + /// Term search fuel in "units of work" for autocompletion (Defaults to 1000). + completion_termSearch_fuel: usize = 1000, /// Controls file watching implementation. files_watcher: FilesWatcherDef = FilesWatcherDef::Client, @@ -509,6 +509,12 @@ config_data! { inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = false, /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = AdjustmentHintsModeDef::Prefix, + /// Whether to show const generic parameter name inlay hints. + inlayHints_genericParameterHints_const_enable: bool = false, + /// Whether to show generic lifetime parameter name inlay hints. + inlayHints_genericParameterHints_lifetime_enable: bool = true, + /// Whether to show generic type parameter name inlay hints. + inlayHints_genericParameterHints_type_enable: bool = false, /// Whether to show implicit drop hints. inlayHints_implicitDrops_enable: bool = false, /// Whether to show inlay type hints for elided lifetimes in function signatures. @@ -659,7 +665,7 @@ pub struct Config { discovered_projects: Vec<ProjectManifest>, /// The workspace roots as registered by the LSP client workspace_roots: Vec<AbsPathBuf>, - caps: lsp_types::ClientCapabilities, + caps: ClientCapabilities, root_path: AbsPathBuf, snippets: Vec<Snippet>, visual_studio_code_version: Option<Version>, @@ -698,6 +704,15 @@ pub struct Config { detached_files: Vec<AbsPathBuf>, } +// Delegate capability fetching methods +impl std::ops::Deref for Config { + type Target = ClientCapabilities; + + fn deref(&self) -> &Self::Target { + &self.caps + } +} + impl Config { pub fn user_config_path(&self) -> &VfsPath { &self.user_config_path @@ -844,6 +859,9 @@ impl Config { config.source_root_parent_map = source_root_map; } + // IMPORTANT: This holds as long as `completion_snippets_custom` is declared `client`. + config.snippets.clear(); + let snips = self.completion_snippets_custom().to_owned(); for (name, def) in snips.iter() { @@ -951,23 +969,6 @@ impl ConfigChange { } } -macro_rules! try_ { - ($expr:expr) => { - || -> _ { Some($expr) }() - }; -} -macro_rules! try_or { - ($expr:expr, $or:expr) => { - try_!($expr).unwrap_or($or) - }; -} - -macro_rules!
try_or_def { - ($expr:expr) => { - try_!($expr).unwrap_or_default() - }; -} - #[derive(Debug, Clone, Eq, PartialEq)] pub enum LinkedProject { ProjectManifest(ProjectManifest), @@ -1174,7 +1175,7 @@ impl std::error::Error for ConfigErrors {} impl Config { pub fn new( root_path: AbsPathBuf, - caps: ClientCapabilities, + caps: lsp_types::ClientCapabilities, workspace_roots: Vec<AbsPathBuf>, visual_studio_code_version: Option<Version>, user_config_path: Option<Utf8PathBuf>, @@ -1202,7 +1203,7 @@ impl Config { }; Config { - caps, + caps: ClientCapabilities::new(caps), discovered_projects: Vec::new(), root_path, snippets: Default::default(), @@ -1240,7 +1241,19 @@ impl Config { } pub fn json_schema() -> serde_json::Value { - FullConfigInput::json_schema() + let mut s = FullConfigInput::json_schema(); + + fn sort_objects_by_field(json: &mut serde_json::Value) { + if let serde_json::Value::Object(object) = json { + let old = std::mem::take(object); + old.into_iter().sorted_by(|(k, _), (k2, _)| k.cmp(k2)).for_each(|(k, mut v)| { + sort_objects_by_field(&mut v); + object.insert(k, v); + }); + } + } + sort_objects_by_field(&mut s); + s } pub fn root_path(&self) -> &AbsPathBuf { @@ -1251,7 +1264,7 @@ impl Config { &self.root_ratoml_path } - pub fn caps(&self) -> &lsp_types::ClientCapabilities { + pub fn caps(&self) -> &ClientCapabilities { &self.caps } } @@ -1265,7 +1278,9 @@ impl Config { prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), assist_emit_must_use: self.assist_emitMustUse(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), + prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, + term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), } } @@ -1273,7 +1288,7 @@ impl Config { CompletionConfig { enable_postfix_completions: self.completion_postfix_enable().to_owned(), enable_imports_on_the_fly: self.completion_autoimport_enable().to_owned() - && completion_item_edit_resolve(&self.caps), + && self.caps.completion_item_edit_resolve(), enable_self_on_the_fly: self.completion_autoself_enable().to_owned(), enable_private_editable: self.completion_privateEditable_enable().to_owned(), full_function_signatures: self.completion_fullFunctionSignatures_enable().to_owned(), @@ -1282,19 +1297,11 @@ impl Config { CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses), CallableCompletionDef::None => None, }, - snippet_cap: SnippetCap::new(try_or_def!( - self.caps - .text_document - .as_ref()? - .completion - .as_ref()? - .completion_item - .as_ref()? - .snippet_support? 
- )), + snippet_cap: SnippetCap::new(self.completion_snippet()), insert_use: self.insert_use_config(source_root), prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), + prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), snippets: self.snippets.clone().to_vec(), limit: self.completion_limit().to_owned(), enable_term_search: self.completion_termSearch_enable().to_owned(), @@ -1323,8 +1330,10 @@ impl Config { insert_use: self.insert_use_config(source_root), prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), + prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), style_lints: self.diagnostics_styleLints_enable().to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, + term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), } } pub fn expand_proc_attr_macros(&self) -> bool { @@ -1342,7 +1351,7 @@ impl Config { } pub fn hover_actions(&self) -> HoverActionsConfig { - let enable = self.experimental("hoverActions") && self.hover_actions_enable().to_owned(); + let enable = self.caps.hover_actions() && self.hover_actions_enable().to_owned(); HoverActionsConfig { implementations: enable && self.hover_actions_implementations_enable().to_owned(), references: enable && self.hover_actions_references_enable().to_owned(), @@ -1368,17 +1377,7 @@ impl Config { }), documentation: self.hover_documentation_enable().to_owned(), format: { - let is_markdown = try_or_def!(self - .caps - .text_document - .as_ref()? - .hover - .as_ref()? - .content_format - .as_ref()? - .as_slice()) - .contains(&MarkupKind::Markdown); - if is_markdown { + if self.caps.hover_markdown_support() { HoverDocFormat::Markdown } else { HoverDocFormat::PlainText @@ -1392,22 +1391,17 @@ impl Config { } pub fn inlay_hints(&self) -> InlayHintsConfig { - let client_capability_fields = self - .caps - .text_document - .as_ref() - .and_then(|text| text.inlay_hint.as_ref()) - .and_then(|inlay_hint_caps| inlay_hint_caps.resolve_support.as_ref()) - .map(|inlay_resolve| inlay_resolve.properties.iter()) - .into_iter() - .flatten() - .cloned() - .collect::<FxHashSet<_>>(); + let client_capability_fields = self.inlay_hint_resolve_support_properties(); InlayHintsConfig { render_colons: self.inlayHints_renderColons().to_owned(), type_hints: self.inlayHints_typeHints_enable().to_owned(), parameter_hints: self.inlayHints_parameterHints_enable().to_owned(), + generic_parameter_hints: GenericParameterHints { + type_hints: self.inlayHints_genericParameterHints_type_enable().to_owned(), + lifetime_hints: self.inlayHints_genericParameterHints_lifetime_enable().to_owned(), + const_hints: self.inlayHints_genericParameterHints_const_enable().to_owned(), + }, chaining_hints: self.inlayHints_chainingHints_enable().to_owned(), discriminant_hints: match self.inlayHints_discriminantHints_enable() { DiscriminantHintsDef::Always => ide::DiscriminantHints::Always, @@ -1573,165 +1567,10 @@ impl Config { } } - pub fn did_save_text_document_dynamic_registration(&self) -> bool { - let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?); - caps.did_save == Some(true) && caps.dynamic_registration == Some(true) - } - - pub fn did_change_watched_files_dynamic_registration(&self) -> bool { - try_or_def!( - self.caps.workspace.as_ref()?.did_change_watched_files.as_ref()?.dynamic_registration? 
- ) - } - - pub fn did_change_watched_files_relative_pattern_support(&self) -> bool { - try_or_def!( - self.caps - .workspace - .as_ref()? - .did_change_watched_files - .as_ref()? - .relative_pattern_support? - ) - } - pub fn prefill_caches(&self) -> bool { self.cachePriming_enable().to_owned() } - pub fn location_link(&self) -> bool { - try_or_def!(self.caps.text_document.as_ref()?.definition?.link_support?) - } - - pub fn line_folding_only(&self) -> bool { - try_or_def!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?) - } - - pub fn hierarchical_symbols(&self) -> bool { - try_or_def!( - self.caps - .text_document - .as_ref()? - .document_symbol - .as_ref()? - .hierarchical_document_symbol_support? - ) - } - - pub fn code_action_literals(&self) -> bool { - try_!(self - .caps - .text_document - .as_ref()? - .code_action - .as_ref()? - .code_action_literal_support - .as_ref()?) - .is_some() - } - - pub fn work_done_progress(&self) -> bool { - try_or_def!(self.caps.window.as_ref()?.work_done_progress?) - } - - pub fn will_rename(&self) -> bool { - try_or_def!(self.caps.workspace.as_ref()?.file_operations.as_ref()?.will_rename?) - } - - pub fn change_annotation_support(&self) -> bool { - try_!(self - .caps - .workspace - .as_ref()? - .workspace_edit - .as_ref()? - .change_annotation_support - .as_ref()?) - .is_some() - } - - pub fn code_action_resolve(&self) -> bool { - try_or_def!(self - .caps - .text_document - .as_ref()? - .code_action - .as_ref()? - .resolve_support - .as_ref()? - .properties - .as_slice()) - .iter() - .any(|it| it == "edit") - } - - pub fn signature_help_label_offsets(&self) -> bool { - try_or_def!( - self.caps - .text_document - .as_ref()? - .signature_help - .as_ref()? - .signature_information - .as_ref()? - .parameter_information - .as_ref()? - .label_offset_support? - ) - } - - pub fn completion_label_details_support(&self) -> bool { - try_!(self - .caps - .text_document - .as_ref()? - .completion - .as_ref()? - .completion_item - .as_ref()? - .label_details_support - .as_ref()?) - .is_some() - } - - pub fn semantics_tokens_augments_syntax_tokens(&self) -> bool { - try_!(self.caps.text_document.as_ref()?.semantic_tokens.as_ref()?.augments_syntax_tokens?) - .unwrap_or(false) - } - - pub fn position_encoding(&self) -> PositionEncoding { - negotiated_encoding(&self.caps) - } - - fn experimental(&self, index: &'static str) -> bool { - try_or_def!(self.caps.experimental.as_ref()?.get(index)?.as_bool()?) - } - - pub fn code_action_group(&self) -> bool { - self.experimental("codeActionGroup") - } - - pub fn local_docs(&self) -> bool { - self.experimental("localDocs") - } - - pub fn open_server_logs(&self) -> bool { - self.experimental("openServerLogs") - } - - pub fn server_status_notification(&self) -> bool { - self.experimental("serverStatusNotification") - } - - /// Whether the client supports colored output for full diagnostics from `checkOnSave`. - pub fn color_diagnostic_output(&self) -> bool { - self.experimental("colorDiagnosticOutput") - } - - pub fn test_explorer(&self) -> bool { - self.experimental("testExplorer") - } - pub fn publish_diagnostics(&self) -> bool { self.diagnostics_enable().to_owned() } @@ -2009,7 +1848,7 @@ impl Config { pub fn snippet_cap(&self) -> Option<SnippetCap> { // FIXME: Also detect the proposed lsp version at caps.workspace.workspaceEdit.snippetEditSupport // once lsp-types has it. 
- SnippetCap::new(self.experimental("snippetTextEdit")) + SnippetCap::new(self.snippet_text_edit()) } pub fn call_info(&self) -> CallInfoConfig { @@ -2049,36 +1888,8 @@ impl Config { } } - pub fn semantic_tokens_refresh(&self) -> bool { - try_or_def!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?) - } - - pub fn code_lens_refresh(&self) -> bool { - try_or_def!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?) - } - - pub fn inlay_hints_refresh(&self) -> bool { - try_or_def!(self.caps.workspace.as_ref()?.inlay_hint.as_ref()?.refresh_support?) - } - - pub fn insert_replace_support(&self) -> bool { - try_or_def!( - self.caps - .text_document - .as_ref()? - .completion - .as_ref()? - .completion_item - .as_ref()? - .insert_replace_support? - ) - } - pub fn client_commands(&self) -> ClientCommandsConfig { - let commands = - try_or!(self.caps.experimental.as_ref()?.get("commands")?, &serde_json::Value::Null); - let commands: Option<lsp_ext::ClientCommandOptions> = - serde_json::from_value(commands.clone()).ok(); + let commands = self.commands(); let force = commands.is_none() && *self.lens_forceCustomCommands(); let commands = commands.map(|it| it.commands).unwrap_or_default(); @@ -2637,9 +2448,8 @@ macro_rules! _config_data { /// All fields `Option<T>`, `None` representing fields not set in a particular JSON/TOML blob. #[allow(non_snake_case)] - #[derive(Clone, Serialize, Default)] + #[derive(Clone, Default)] struct $input { $( - #[serde(skip_serializing_if = "Option::is_none")] $field: Option<$ty>, )* } @@ -2722,7 +2532,7 @@ struct DefaultConfigData { /// All of the config levels, all fields `Option<T>`, to describe fields that are actually set by /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to /// all fields being None. -#[derive(Debug, Clone, Default, Serialize)] +#[derive(Debug, Clone, Default)] struct FullConfigInput { global: GlobalConfigInput, local: LocalConfigInput, @@ -2767,7 +2577,7 @@ impl FullConfigInput { /// All of the config levels, all fields `Option<T>`, to describe fields that are actually set by /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to /// all fields being None. -#[derive(Debug, Clone, Default, Serialize)] +#[derive(Debug, Clone, Default)] struct GlobalLocalConfigInput { global: GlobalConfigInput, local: LocalConfigInput, @@ -2929,7 +2739,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "FxHashMap<Box<str>, Box<[Box<str>]>>" => set! { "type": "object", }, - "IndexMap<String, SnippetDef>" => set! { + "FxHashMap<String, SnippetDef>" => set! { "type": "object", }, "FxHashMap<String, String>" => set! 
{ @@ -3344,6 +3154,7 @@ mod tests { #[test] fn generate_package_json_config() { let s = Config::json_schema(); + let schema = format!("{s:#}"); let mut schema = schema .trim_start_matches('[') @@ -3364,7 +3175,7 @@ mod tests { for idx in url_offsets { let link = &schema[idx..]; // matching on whitespace to ignore normal links - if let Some(link_end) = link.find(|c| c == ' ' || c == '[') { + if let Some(link_end) = link.find([' ', '[']) { if link.chars().nth(link_end) == Some('[') { if let Some(link_text_end) = link.find(']') { let link_text = link[link_end..(link_text_end + 1)].to_string(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs index 4832e8cab43..defa464f2ba 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -66,7 +66,7 @@ fn location( let uri = url_from_abs_path(&file_name); let range = { - let position_encoding = snap.config.position_encoding(); + let position_encoding = snap.config.negotiated_encoding(); lsp_types::Range::new( position( &position_encoding, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 717d8a632c3..de4c9586dfd 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -529,7 +529,7 @@ impl GlobalStateSnapshot { pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> { let endings = self.vfs.read().1[&file_id]; let index = self.analysis.file_line_index(file_id)?; - let res = LineIndex { index, endings, encoding: self.config.position_encoding() }; + let res = LineIndex { index, endings, encoding: self.config.caps().negotiated_encoding() }; Ok(res) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 2dbc297ea6c..095d7c941c1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -100,7 +100,7 @@ pub(crate) fn handle_did_change_text_document( *version = params.text_document.version; let new_contents = apply_document_changes( - state.config.position_encoding(), + state.config.negotiated_encoding(), std::str::from_utf8(data).unwrap(), params.content_changes, ) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index 8e39b15da3d..e19f7a4898b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -847,7 +847,7 @@ pub(crate) fn handle_runnables( if expect_test { if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args { runnable.label = format!("{} + expect", runnable.label); - r.expect_test = Some(true); + r.environment.insert("UPDATE_EXPECT".to_owned(), "1".to_owned()); } } res.push(runnable); @@ -874,6 +874,7 @@ pub(crate) fn handle_runnables( if all_targets { cargo_args.push("--all-targets".to_owned()); } + cargo_args.extend(config.cargo_extra_args.iter().cloned()); res.push(lsp_ext::Runnable { label: format!( "cargo {cmd} -p {}{all_targets}", @@ -884,12 +885,11 @@ pub(crate) fn 
handle_runnables( kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { workspace_root: Some(spec.workspace_root.clone().into()), - cwd: Some(cwd.into()), + cwd: cwd.into(), override_cargo: config.override_cargo.clone(), cargo_args, - cargo_extra_args: config.cargo_extra_args.clone(), executable_args: Vec::new(), - expect_test: None, + environment: Default::default(), }), }) } @@ -897,20 +897,23 @@ pub(crate) fn handle_runnables( Some(TargetSpec::ProjectJson(_)) => {} None => { if !snap.config.linked_or_discovered_projects().is_empty() { - res.push(lsp_ext::Runnable { - label: "cargo check --workspace".to_owned(), - location: None, - kind: lsp_ext::RunnableKind::Cargo, - args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { - workspace_root: None, - cwd: None, - override_cargo: config.override_cargo, - cargo_args: vec!["check".to_owned(), "--workspace".to_owned()], - cargo_extra_args: config.cargo_extra_args, - executable_args: Vec::new(), - expect_test: None, - }), - }); + if let Some(path) = snap.file_id_to_file_path(file_id).parent() { + let mut cargo_args = vec!["check".to_owned(), "--workspace".to_owned()]; + cargo_args.extend(config.cargo_extra_args.iter().cloned()); + res.push(lsp_ext::Runnable { + label: "cargo check --workspace".to_owned(), + location: None, + kind: lsp_ext::RunnableKind::Cargo, + args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { + workspace_root: None, + cwd: path.as_path().unwrap().to_path_buf().into(), + override_cargo: config.override_cargo, + cargo_args, + executable_args: Vec::new(), + environment: Default::default(), + }), + }); + }; } } } @@ -1637,7 +1640,9 @@ pub(crate) fn handle_call_hierarchy_incoming( from_ranges: call_item .ranges .into_iter() - .map(|it| to_proto::range(&line_index, it)) + // This is the range relative to the item + .filter(|it| it.file_id == file_id) + .map(|it| to_proto::range(&line_index, it.range)) .collect(), }); } @@ -1672,7 +1677,9 @@ pub(crate) fn handle_call_hierarchy_outgoing( from_ranges: call_item .ranges .into_iter() - .map(|it| to_proto::range(&line_index, it)) + // This is the range relative to the caller + .filter(|it| it.file_id == fpos.file_id) + .map(|it| to_proto::range(&line_index, it.range)) .collect(), }); } @@ -2291,19 +2298,8 @@ fn to_url(path: VfsPath) -> Option<Url> { } fn resource_ops_supported(config: &Config, kind: ResourceOperationKind) -> anyhow::Result<()> { - #[rustfmt::skip] - let resops = (|| { - config - .caps() - .workspace - .as_ref()? - .workspace_edit - .as_ref()? 
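// ---------------------------------------------------------------------------
// Editor's note: a self-contained sketch of the runnable-args shape after the
// handle_runnables changes above: `expectTest` and `cargoExtraArgs` are gone,
// `cwd` is now required, and an expect-test re-run is expressed through the
// generic `environment` map (UPDATE_EXPECT=1), matching the hunk that replaces
// `r.expect_test = Some(true)`. The struct below is a local stand-in mirroring
// the fields of `lsp_ext::CargoRunnableArgs` shown later in this diff; it is
// not the crate's real type.
use std::collections::HashMap;
use std::path::{Path, PathBuf};

#[derive(Debug)]
struct CargoRunnableArgsSketch {
    environment: HashMap<String, String>,
    cwd: PathBuf,
    override_cargo: Option<String>,
    workspace_root: Option<PathBuf>,
    cargo_args: Vec<String>,
    executable_args: Vec<String>,
}

fn expect_test_runnable(pkg: &str, root: &Path) -> CargoRunnableArgsSketch {
    let mut environment = HashMap::new();
    // What the removed `expect_test: Some(true)` flag used to signal is now a
    // plain environment variable picked up by expect-test at run time.
    environment.insert("UPDATE_EXPECT".to_owned(), "1".to_owned());
    CargoRunnableArgsSketch {
        environment,
        cwd: root.to_path_buf(),
        override_cargo: None,
        workspace_root: Some(root.to_path_buf()),
        cargo_args: vec!["test".to_owned(), "--package".to_owned(), pkg.to_owned()],
        executable_args: Vec::new(),
    }
}
// ---------------------------------------------------------------------------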
- .resource_operations - .as_ref() - })(); - - if !matches!(resops, Some(resops) if resops.contains(&kind)) { + if !matches!(config.workspace_edit_resource_operations(), Some(resops) if resops.contains(&kind)) + { return Err(LspError::new( ErrorCode::RequestFailed as i32, format!( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index 1e2cd4339b3..ff8eb6c8612 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -152,6 +152,7 @@ fn integrated_completion_benchmark() { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, snippets: Vec::new(), limit: None, }; @@ -197,6 +198,7 @@ fn integrated_completion_benchmark() { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, snippets: Vec::new(), limit: None, }; @@ -240,6 +242,7 @@ fn integrated_completion_benchmark() { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, snippets: Vec::new(), limit: None, }; @@ -299,7 +302,9 @@ fn integrated_diagnostics_benchmark() { }, prefer_no_std: false, prefer_prelude: false, + prefer_absolute: false, term_search_fuel: 400, + term_search_borrowck: true, }; host.analysis() .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index a398e98f093..174979edede 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -9,11 +9,9 @@ //! The `cli` submodule implements some batch-processing analysis, primarily as //! a debugging aid. 
-#![warn(rust_2018_idioms, unused_lifetimes)] - pub mod cli; -mod caps; +mod capabilities; mod diagnostics; mod diff; mod dispatch; @@ -49,7 +47,8 @@ mod integrated_benchmarks; use serde::de::DeserializeOwned; pub use crate::{ - caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version, + capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, + version::version, }; pub fn from_json<T: DeserializeOwned>( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index b82ba441904..9a852067f2e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -4,19 +4,16 @@ use std::ops; -use ide_db::line_index::WideEncoding; use lsp_types::request::Request; +use lsp_types::Url; use lsp_types::{ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams, PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; -use lsp_types::{PositionEncodingKind, Url}; use paths::Utf8PathBuf; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; -use crate::line_index::PositionEncoding; - pub enum InternalTestingFetchConfig {} impl Request for InternalTestingFetchConfig { @@ -460,28 +457,27 @@ pub enum RunnableKind { #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct CargoRunnableArgs { - // command to be executed instead of cargo + #[serde(skip_serializing_if = "FxHashMap::is_empty")] + pub environment: FxHashMap<String, String>, + pub cwd: Utf8PathBuf, + /// Command to be executed instead of cargo pub override_cargo: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] pub workspace_root: Option<Utf8PathBuf>, - #[serde(skip_serializing_if = "Option::is_none")] - pub cwd: Option<Utf8PathBuf>, // command, --package and --lib stuff pub cargo_args: Vec<String>, - // user-specified additional cargo args, like `--release`. - pub cargo_extra_args: Vec<String>, // stuff after -- pub executable_args: Vec<String>, - #[serde(skip_serializing_if = "Option::is_none")] - pub expect_test: Option<bool>, } #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct ShellRunnableArgs { + #[serde(skip_serializing_if = "FxHashMap::is_empty")] + pub environment: FxHashMap<String, String>, + pub cwd: Utf8PathBuf, pub program: String, pub args: Vec<String>, - pub cwd: Utf8PathBuf, } pub enum RelatedTests {} @@ -738,24 +734,6 @@ pub enum CodeLensResolveDataKind { References(lsp_types::TextDocumentPositionParams), } -pub fn negotiated_encoding(caps: &lsp_types::ClientCapabilities) -> PositionEncoding { - let client_encodings = match &caps.general { - Some(general) => general.position_encodings.as_deref().unwrap_or_default(), - None => &[], - }; - - for enc in client_encodings { - if enc == &PositionEncodingKind::UTF8 { - return PositionEncoding::Utf8; - } else if enc == &PositionEncodingKind::UTF32 { - return PositionEncoding::Wide(WideEncoding::Utf32); - } - // NB: intentionally prefer just about anything else to utf-16. 
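// ---------------------------------------------------------------------------
// Editor's note: the helper being removed here performed the client/server
// position-encoding negotiation; per the rest of this diff that logic now lives
// behind the new `capabilities` module (callers use `negotiated_encoding()`).
// Below is a minimal standalone sketch of the preference order it encodes,
// with a local enum standing in for `PositionEncoding`; the strings match the
// LSP encoding kind identifiers.
#[derive(Debug, PartialEq)]
enum EncodingSketch {
    Utf8,
    Utf16,
    Utf32,
}

fn negotiate(client_encodings: &[&str]) -> EncodingSketch {
    // First supported entry in the client's advertised order wins.
    for enc in client_encodings {
        match *enc {
            "utf-8" => return EncodingSketch::Utf8,
            "utf-32" => return EncodingSketch::Utf32,
            // Skip everything else: UTF-16 is the LSP default but forces the
            // server to convert offsets, hence the "anything but utf-16"
            // preference noted in the removed comment above.
            _ => {}
        }
    }
    EncodingSketch::Utf16
}
// ---------------------------------------------------------------------------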
- } - - PositionEncoding::Wide(WideEncoding::Utf16) -} - pub enum MoveItem {} impl Request for MoveItem { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index db5f666a5b9..de394d3d118 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -501,7 +501,9 @@ pub(crate) fn inlay_hint( padding_left: Some(inlay_hint.pad_left), padding_right: Some(inlay_hint.pad_right), kind: match inlay_hint.kind { - InlayKind::Parameter => Some(lsp_types::InlayHintKind::PARAMETER), + InlayKind::Parameter | InlayKind::GenericParameter => { + Some(lsp_types::InlayHintKind::PARAMETER) + } InlayKind::Type | InlayKind::Chaining => Some(lsp_types::InlayHintKind::TYPE), _ => None, }, @@ -1390,10 +1392,9 @@ pub(crate) fn runnable( workspace_root: Some(workspace_root.into()), override_cargo: config.override_cargo, cargo_args, - cwd: Some(cwd.into()), - cargo_extra_args: config.cargo_extra_args, + cwd: cwd.into(), executable_args, - expect_test: None, + environment: Default::default(), }), })) } @@ -1407,6 +1408,7 @@ pub(crate) fn runnable( program: json_shell_runnable_args.program, args: json_shell_runnable_args.args, cwd: json_shell_runnable_args.cwd, + environment: Default::default(), }; Ok(Some(lsp_ext::Runnable { label, @@ -1419,6 +1421,9 @@ pub(crate) fn runnable( } } None => { + let Some(path) = snap.file_id_to_file_path(runnable.nav.file_id).parent() else { + return Ok(None); + }; let (cargo_args, executable_args) = CargoTargetSpec::runnable_args(snap, None, &runnable.kind, &runnable.cfg); @@ -1433,10 +1438,9 @@ pub(crate) fn runnable( workspace_root: None, override_cargo: config.override_cargo, cargo_args, - cwd: None, - cargo_extra_args: config.cargo_extra_args, + cwd: path.as_path().unwrap().to_path_buf().into(), executable_args, - expect_test: None, + environment: Default::default(), }), })) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index bd0f733ef39..1039daf850c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -184,14 +184,28 @@ impl GlobalState { message.push_str(err); message.push_str("\n\n"); }; - if let Some(Err(err)) = proc_macro_client { - status.health |= lsp_ext::Health::Warning; - format_to!( - message, - "Failed spawning proc-macro server for workspace `{}`: {err}", - ws.manifest_or_root() - ); - message.push_str("\n\n"); + match proc_macro_client { + Some(Err(err)) => { + status.health |= lsp_ext::Health::Warning; + format_to!( + message, + "Failed spawning proc-macro server for workspace `{}`: {err}", + ws.manifest_or_root() + ); + message.push_str("\n\n"); + } + Some(Ok(client)) => { + if let Some(err) = client.exited() { + status.health |= lsp_ext::Health::Warning; + format_to!( + message, + "proc-macro server for workspace `{}` exited: {err}", + ws.manifest_or_root() + ); + message.push_str("\n\n"); + } + } + _ => (), } } } @@ -529,7 +543,7 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; - let env = match &ws.kind { + let env: FxHashMap<_, _> = match &ws.kind { ProjectWorkspaceKind::Cargo { cargo_config_extra_env, .. 
} | ProjectWorkspaceKind::DetachedFile { cargo: Some(_), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs index 6145f7e05f9..863ff064399 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs @@ -110,7 +110,8 @@ impl CargoTargetSpec { kind: &RunnableKind, cfg: &Option<CfgExpr>, ) -> (Vec<String>, Vec<String>) { - let extra_test_binary_args = snap.config.runnables().extra_test_binary_args; + let config = snap.config.runnables(); + let extra_test_binary_args = config.extra_test_binary_args; let mut cargo_args = Vec::new(); let mut executable_args = Vec::new(); @@ -196,6 +197,7 @@ impl CargoTargetSpec { } } } + cargo_args.extend(config.cargo_extra_args.iter().cloned()); (cargo_args, executable_args) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index f886df60e68..56f416a0b6e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -8,15 +8,11 @@ //! specific JSON shapes here -- there's little value in such tests, as we can't //! be sure without a real client anyway. -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(clippy::disallowed_types)] mod ratoml; -#[cfg(not(feature = "in-rust-tree"))] -mod sourcegen; mod support; mod testdir; -mod tidy; use std::{collections::HashMap, path::PathBuf, time::Instant}; @@ -259,7 +255,6 @@ fn main() {} "args": { "cargoArgs": ["test", "--package", "foo", "--test", "spam"], "executableArgs": ["test_eggs", "--exact", "--show-output"], - "cargoExtraArgs": [], "overrideCargo": null, "cwd": server.path().join("foo"), "workspaceRoot": server.path().join("foo") @@ -290,7 +285,6 @@ fn main() {} "--test", "spam" ], - "cargoExtraArgs": [], "executableArgs": [ "", "--show-output" @@ -326,7 +320,6 @@ fn main() {} "args": { "cargoArgs": ["check", "--package", "foo", "--all-targets"], "executableArgs": [], - "cargoExtraArgs": [], "overrideCargo": null, "cwd": server.path().join("foo"), "workspaceRoot": server.path().join("foo") @@ -338,7 +331,6 @@ fn main() {} "args": { "cargoArgs": ["test", "--package", "foo", "--all-targets"], "executableArgs": [], - "cargoExtraArgs": [], "overrideCargo": null, "cwd": server.path().join("foo"), "workspaceRoot": server.path().join("foo") @@ -427,7 +419,6 @@ mod tests { runnable, "--all-targets" ], - "cargoExtraArgs": [], "executableArgs": [] }, }, @@ -490,7 +481,6 @@ fn otherpkg() {} "mainpkg", "--all-targets" ], - "cargoExtraArgs": [], "executableArgs": [] }, }, @@ -516,7 +506,6 @@ fn otherpkg() {} "otherpkg", "--all-targets" ], - "cargoExtraArgs": [], "executableArgs": [] }, }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index c4383255323..66100971fbf 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -388,9 +388,8 @@ impl Server { } fn recv(&self) -> Result<Option<Message>, Timeout> { let msg = recv_timeout(&self.client.receiver)?; - let msg = msg.map(|msg| { + let msg = msg.inspect(|msg| { self.messages.borrow_mut().push(msg.clone()); - msg }); Ok(msg) } diff --git 
a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml deleted file mode 100644 index d5ea4c39aa1..00000000000 --- a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "sourcegen" -version = "0.0.0" -description = "TBD" -publish = false - -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true - -[lib] -doctest = false - -[dependencies] -xshell.workspace = true - -[lints] -workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs deleted file mode 100644 index 829b4d5b0ff..00000000000 --- a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs +++ /dev/null @@ -1,203 +0,0 @@ -//! rust-analyzer relies heavily on source code generation. -//! -//! Things like feature documentation or assist tests are implemented by -//! processing rust-analyzer's own source code and generating the appropriate -//! output. See `sourcegen_` tests in various crates. -//! -//! This crate contains utilities to make this kind of source-gen easy. - -#![warn(rust_2018_idioms, unused_lifetimes)] - -use std::{ - fmt, fs, mem, - path::{Path, PathBuf}, -}; - -use xshell::{cmd, Shell}; - -pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> { - let mut res = list_files(dir); - res.retain(|it| { - it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs") - }); - res -} - -pub fn list_files(dir: &Path) -> Vec<PathBuf> { - let mut res = Vec::new(); - let mut work = vec![dir.to_path_buf()]; - while let Some(dir) = work.pop() { - for entry in dir.read_dir().unwrap() { - let entry = entry.unwrap(); - let file_type = entry.file_type().unwrap(); - let path = entry.path(); - let is_hidden = - path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.'); - if !is_hidden { - if file_type.is_dir() { - work.push(path); - } else if file_type.is_file() { - res.push(path); - } - } - } - } - res -} - -#[derive(Clone)] -pub struct CommentBlock { - pub id: String, - pub line: usize, - pub contents: Vec<String>, - is_doc: bool, -} - -impl CommentBlock { - pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> { - assert!(tag.starts_with(char::is_uppercase)); - - let tag = format!("{tag}:"); - let mut blocks = CommentBlock::extract_untagged(text); - blocks.retain_mut(|block| { - let first = block.contents.remove(0); - let Some(id) = first.strip_prefix(&tag) else { - return false; - }; - - if block.is_doc { - panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}"); - } - - id.trim().clone_into(&mut block.id); - true - }); - blocks - } - - pub fn extract_untagged(text: &str) -> Vec<CommentBlock> { - let mut res = Vec::new(); - - let lines = text.lines().map(str::trim_start); - - let dummy_block = - CommentBlock { id: String::new(), line: 0, contents: Vec::new(), is_doc: false }; - let mut block = dummy_block.clone(); - for (line_num, line) in lines.enumerate() { - match line.strip_prefix("//") { - Some(mut contents) => { - if let Some('/' | '!') = contents.chars().next() { - contents = &contents[1..]; - block.is_doc = true; - } - if let Some(' ') = contents.chars().next() { - contents = &contents[1..]; - } - block.contents.push(contents.to_owned()); - } - None => { - if !block.contents.is_empty() { - let block = mem::replace(&mut block, dummy_block.clone()); - res.push(block); - } - block.line = line_num + 2; - } - } - } - if 
!block.contents.is_empty() { - res.push(block); - } - res - } -} - -#[derive(Debug)] -pub struct Location { - pub file: PathBuf, - pub line: usize, -} - -impl fmt::Display for Location { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let path = self.file.strip_prefix(project_root()).unwrap().display().to_string(); - let path = path.replace('\\', "/"); - let name = self.file.file_name().unwrap(); - write!( - f, - "https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}[{}]", - path, - self.line, - name.to_str().unwrap() - ) - } -} - -fn ensure_rustfmt(sh: &Shell) { - let version = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap_or_default(); - if !version.contains("stable") { - panic!( - "Failed to run rustfmt from toolchain 'stable'. \ - Please run `rustup component add rustfmt --toolchain stable` to install it.", - ); - } -} - -pub fn reformat(text: String) -> String { - let sh = Shell::new().unwrap(); - ensure_rustfmt(&sh); - let rustfmt_toml = project_root().join("rustfmt.toml"); - let mut stdout = cmd!( - sh, - "rustup run stable rustfmt --config-path {rustfmt_toml} --config fn_single_line=true" - ) - .stdin(text) - .read() - .unwrap(); - if !stdout.ends_with('\n') { - stdout.push('\n'); - } - stdout -} - -pub fn add_preamble(generator: &'static str, mut text: String) -> String { - let preamble = format!("//! Generated by `{generator}`, do not edit by hand.\n\n"); - text.insert_str(0, &preamble); - text -} - -/// Checks that the `file` has the specified `contents`. If that is not the -/// case, updates the file and then fails the test. -#[allow(clippy::print_stderr)] -pub fn ensure_file_contents(file: &Path, contents: &str) { - if let Ok(old_contents) = fs::read_to_string(file) { - if normalize_newlines(&old_contents) == normalize_newlines(contents) { - // File is already up to date. - return; - } - } - - let display_path = file.strip_prefix(project_root()).unwrap_or(file); - eprintln!( - "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", - display_path.display() - ); - if std::env::var("CI").is_ok() { - eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n"); - } - if let Some(parent) = file.parent() { - let _ = fs::create_dir_all(parent); - } - fs::write(file, contents).unwrap(); - panic!("some file was not up to date and has been updated, simply re-run the tests"); -} - -fn normalize_newlines(s: &str) -> String { - s.replace("\r\n", "\n") -} - -pub fn project_root() -> PathBuf { - let dir = env!("CARGO_MANIFEST_DIR"); - let res = PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned(); - assert!(res.join("triagebot.toml").exists()); - res -} diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs index 54f10df42a8..76dbd42ff6b 100644 --- a/src/tools/rust-analyzer/crates/stdx/src/lib.rs +++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs @@ -1,7 +1,5 @@ //! Missing batteries for standard libraries. 
-#![warn(rust_2018_idioms, unused_lifetimes)] - use std::io as sio; use std::process::Command; use std::{cmp::Ordering, ops, time::Instant}; diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index 1809ca7dea5..b371ec6ebd5 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -33,6 +33,7 @@ text-edit.workspace = true [dev-dependencies] rayon.workspace = true expect-test = "1.4.0" +rustc_apfloat = "0.2.0" test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs index 168ca9f1328..3282bd6eff2 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs @@ -31,8 +31,8 @@ pub use self::{ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}, token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix}, traits::{ - AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams, - HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility, + AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericArgs, + HasGenericParams, HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility, }, }; @@ -149,14 +149,17 @@ pub trait RangeItem { mod support { use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken}; + #[inline] pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> { parent.children().find_map(N::cast) } + #[inline] pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> { AstChildren::new(parent) } + #[inline] pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> { parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind) } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index 2445e4f1a32..f1286e7aa21 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -6,7 +6,7 @@ use parser::{SyntaxKind, T}; use crate::{ algo::{self, neighbor}, - ast::{self, edit::IndentLevel, make, HasGenericParams}, + ast::{self, edit::IndentLevel, make, HasGenericArgs, HasGenericParams}, ted::{self, Position}, AstNode, AstToken, Direction, SyntaxElement, SyntaxKind::{ATTR, COMMENT, WHITESPACE}, diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs index 28a9dadacef..b0ee9dfd507 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs @@ -6,7 +6,8 @@ use crate::{ ast::{ self, operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}, - support, AstChildren, AstNode, + support, ArgList, AstChildren, AstNode, BlockExpr, ClosureExpr, Const, Expr, Fn, + FormatArgsArg, FormatArgsExpr, MacroDef, Static, TokenTree, }, AstToken, SyntaxKind::*, @@ -435,3 +436,57 @@ impl AstNode for CallableExpr { } } } + +impl MacroDef { + fn tts(&self) -> (Option<ast::TokenTree>, Option<ast::TokenTree>) { + let mut types = support::children(self.syntax()); + let first = types.next(); + let second = types.next(); + (first, second) + } + + pub fn args(&self) -> Option<TokenTree> { + match self.tts() { + 
(Some(args), Some(_)) => Some(args), + _ => None, + } + } + + pub fn body(&self) -> Option<TokenTree> { + match self.tts() { + (Some(body), None) | (_, Some(body)) => Some(body), + _ => None, + } + } +} + +impl ClosureExpr { + pub fn body(&self) -> Option<Expr> { + support::child(&self.syntax) + } +} +impl Const { + pub fn body(&self) -> Option<Expr> { + support::child(&self.syntax) + } +} +impl Fn { + pub fn body(&self) -> Option<BlockExpr> { + support::child(&self.syntax) + } +} +impl Static { + pub fn body(&self) -> Option<Expr> { + support::child(&self.syntax) + } +} +impl FormatArgsExpr { + pub fn args(&self) -> AstChildren<FormatArgsArg> { + support::children(&self.syntax) + } +} +impl ArgList { + pub fn args(&self) -> AstChildren<Expr> { + support::children(&self.syntax) + } +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 98186c5473d..0373e7c5529 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_ast`, do not edit by hand. +//! Generated by `cargo codegen grammar`, do not edit by hand. #![allow(non_snake_case)] use crate::{ @@ -12,6 +12,7 @@ pub struct Abi { pub(crate) syntax: SyntaxNode, } impl Abi { + #[inline] pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) } } @@ -20,8 +21,9 @@ pub struct ArgList { pub(crate) syntax: SyntaxNode, } impl ArgList { - pub fn args(&self) -> AstChildren<Expr> { support::children(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -31,10 +33,15 @@ pub struct ArrayExpr { } impl ast::HasAttrs for ArrayExpr {} impl ArrayExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) } + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } } @@ -43,10 +50,15 @@ pub struct ArrayType { pub(crate) syntax: SyntaxNode, } impl ArrayType { + #[inline] pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } } @@ -56,11 +68,17 @@ pub struct AsmExpr { } impl ast::HasAttrs for AsmExpr {} impl AsmExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn asm_token(&self) -> 
Option<SyntaxToken> { support::token(&self.syntax, T![asm]) } + #[inline] pub fn builtin_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![builtin]) } } @@ -70,8 +88,11 @@ pub struct AssocItemList { } impl ast::HasAttrs for AssocItemList {} impl AssocItemList { + #[inline] pub fn assoc_items(&self) -> AstChildren<AssocItem> { support::children(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -79,14 +100,20 @@ impl AssocItemList { pub struct AssocTypeArg { pub(crate) syntax: SyntaxNode, } +impl ast::HasGenericArgs for AssocTypeArg {} impl ast::HasTypeBounds for AssocTypeArg {} impl AssocTypeArg { + #[inline] pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) } - pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) } + #[inline] pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } + #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } + #[inline] pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } } @@ -95,10 +122,15 @@ pub struct Attr { pub(crate) syntax: SyntaxNode, } impl Attr { + #[inline] pub fn meta(&self) -> Option<Meta> { support::child(&self.syntax) } + #[inline] pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) } + #[inline] pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) } + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } } @@ -108,8 +140,11 @@ pub struct AwaitExpr { } impl ast::HasAttrs for AwaitExpr {} impl AwaitExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) } + #[inline] pub fn await_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![await]) } } @@ -119,7 +154,9 @@ pub struct BecomeExpr { } impl ast::HasAttrs for BecomeExpr {} impl BecomeExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn become_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![become]) } } @@ -136,11 +173,17 @@ pub struct BlockExpr { } impl ast::HasAttrs for BlockExpr {} impl BlockExpr { + #[inline] pub fn label(&self) -> Option<Label> { support::child(&self.syntax) } + #[inline] pub fn stmt_list(&self) -> Option<StmtList> { support::child(&self.syntax) } + #[inline] pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) } + #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } @@ -149,7 +192,9 @@ pub struct BoxPat { pub(crate) syntax: SyntaxNode, } impl BoxPat { + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub 
fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) } } @@ -159,8 +204,11 @@ pub struct BreakExpr { } impl ast::HasAttrs for BreakExpr {} impl BreakExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } + #[inline] pub fn break_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![break]) } } @@ -171,6 +219,7 @@ pub struct CallExpr { impl ast::HasArgList for CallExpr {} impl ast::HasAttrs for CallExpr {} impl CallExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } } @@ -180,8 +229,11 @@ pub struct CastExpr { } impl ast::HasAttrs for CastExpr {} impl CastExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) } } @@ -191,14 +243,21 @@ pub struct ClosureExpr { } impl ast::HasAttrs for ClosureExpr {} impl ClosureExpr { - pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } + #[inline] pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) } + #[inline] pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } + #[inline] pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) } + #[inline] pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) } } @@ -211,13 +270,19 @@ impl ast::HasDocComments for Const {} impl ast::HasName for Const {} impl ast::HasVisibility for Const {} impl Const { - pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) } } @@ -226,6 +291,7 @@ pub struct ConstArg { pub(crate) syntax: SyntaxNode, } impl ConstArg { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } } @@ -234,7 +300,9 @@ pub struct ConstBlockPat { pub(crate) syntax: SyntaxNode, } impl ConstBlockPat { + #[inline] pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } } @@ -245,10 +313,15 @@ pub struct ConstParam { impl ast::HasAttrs for ConstParam {} impl ast::HasName for ConstParam {} impl ConstParam { + #[inline] pub fn 
default_val(&self) -> Option<ConstArg> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } } @@ -258,7 +331,9 @@ pub struct ContinueExpr { } impl ast::HasAttrs for ContinueExpr {} impl ContinueExpr { + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } + #[inline] pub fn continue_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![continue]) } @@ -269,7 +344,9 @@ pub struct DynTraitType { pub(crate) syntax: SyntaxNode, } impl DynTraitType { + #[inline] pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) } + #[inline] pub fn dyn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![dyn]) } } @@ -283,7 +360,9 @@ impl ast::HasGenericParams for Enum {} impl ast::HasName for Enum {} impl ast::HasVisibility for Enum {} impl Enum { + #[inline] pub fn variant_list(&self) -> Option<VariantList> { support::child(&self.syntax) } + #[inline] pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![enum]) } } @@ -292,7 +371,9 @@ pub struct ExprStmt { pub(crate) syntax: SyntaxNode, } impl ExprStmt { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } } @@ -303,8 +384,11 @@ pub struct ExternBlock { impl ast::HasAttrs for ExternBlock {} impl ast::HasDocComments for ExternBlock {} impl ExternBlock { + #[inline] pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) } + #[inline] pub fn extern_item_list(&self) -> Option<ExternItemList> { support::child(&self.syntax) } + #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } @@ -316,10 +400,15 @@ impl ast::HasAttrs for ExternCrate {} impl ast::HasDocComments for ExternCrate {} impl ast::HasVisibility for ExternCrate {} impl ExternCrate { + #[inline] pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } + #[inline] pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) } + #[inline] pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) } } @@ -329,8 +418,11 @@ pub struct ExternItemList { } impl ast::HasAttrs for ExternItemList {} impl ExternItemList { + #[inline] pub fn extern_items(&self) -> AstChildren<ExternItem> { support::children(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -340,8 +432,11 @@ pub struct FieldExpr { } impl ast::HasAttrs for FieldExpr {} impl FieldExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } + #[inline] pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, 
T![.]) } } @@ -355,15 +450,23 @@ impl ast::HasGenericParams for Fn {} impl ast::HasName for Fn {} impl ast::HasVisibility for Fn {} impl Fn { + #[inline] pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) } - pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) } + #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } + #[inline] pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) } + #[inline] pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) } + #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } @@ -372,12 +475,19 @@ pub struct FnPtrType { pub(crate) syntax: SyntaxNode, } impl FnPtrType { + #[inline] pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) } + #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } + #[inline] pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) } + #[inline] pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) } + #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } @@ -387,8 +497,11 @@ pub struct ForExpr { } impl ast::HasAttrs for ForExpr {} impl ForExpr { + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } + #[inline] pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) } } @@ -397,8 +510,11 @@ pub struct ForType { pub(crate) syntax: SyntaxNode, } impl ForType { + #[inline] pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } } @@ -408,7 +524,9 @@ pub struct FormatArgsArg { } impl ast::HasName for FormatArgsArg {} impl FormatArgsArg { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } } @@ -418,13 +536,19 @@ pub struct FormatArgsExpr { } impl ast::HasAttrs for FormatArgsExpr {} impl FormatArgsExpr { - pub fn args(&self) -> AstChildren<FormatArgsArg> { support::children(&self.syntax) } + #[inline] pub fn template(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn comma_token(&self) -> 
Option<SyntaxToken> { support::token(&self.syntax, T![,]) } + #[inline] pub fn builtin_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![builtin]) } + #[inline] pub fn format_args_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![format_args]) } @@ -435,9 +559,13 @@ pub struct GenericArgList { pub(crate) syntax: SyntaxNode, } impl GenericArgList { + #[inline] pub fn generic_args(&self) -> AstChildren<GenericArg> { support::children(&self.syntax) } + #[inline] pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } + #[inline] pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) } + #[inline] pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) } } @@ -446,8 +574,11 @@ pub struct GenericParamList { pub(crate) syntax: SyntaxNode, } impl GenericParamList { + #[inline] pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) } + #[inline] pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) } + #[inline] pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) } } @@ -458,9 +589,13 @@ pub struct IdentPat { impl ast::HasAttrs for IdentPat {} impl ast::HasName for IdentPat {} impl IdentPat { + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn at_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![@]) } + #[inline] pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) } + #[inline] pub fn ref_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ref]) } } @@ -470,7 +605,9 @@ pub struct IfExpr { } impl ast::HasAttrs for IfExpr {} impl IfExpr { + #[inline] pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) } + #[inline] pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) } } @@ -483,12 +620,19 @@ impl ast::HasDocComments for Impl {} impl ast::HasGenericParams for Impl {} impl ast::HasVisibility for Impl {} impl Impl { + #[inline] pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) } + #[inline] pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) } + #[inline] pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } + #[inline] pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) } + #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } @@ -497,7 +641,9 @@ pub struct ImplTraitType { pub(crate) syntax: SyntaxNode, } impl ImplTraitType { + #[inline] pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) } + #[inline] pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) } } @@ -507,7 +653,9 @@ pub struct IndexExpr { } impl ast::HasAttrs for IndexExpr {} impl IndexExpr { + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } } @@ -516,6 +664,7 @@ pub struct 
InferType { pub(crate) syntax: SyntaxNode, } impl InferType { + #[inline] pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) } } @@ -526,7 +675,9 @@ pub struct ItemList { impl ast::HasAttrs for ItemList {} impl ast::HasModuleItem for ItemList {} impl ItemList { + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -535,7 +686,9 @@ pub struct Label { pub(crate) syntax: SyntaxNode, } impl Label { + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } } @@ -544,7 +697,9 @@ pub struct LetElse { pub(crate) syntax: SyntaxNode, } impl LetElse { + #[inline] pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) } + #[inline] pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) } } @@ -554,9 +709,13 @@ pub struct LetExpr { } impl ast::HasAttrs for LetExpr {} impl LetExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) } } @@ -566,13 +725,21 @@ pub struct LetStmt { } impl ast::HasAttrs for LetStmt {} impl LetStmt { + #[inline] pub fn initializer(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn let_else(&self) -> Option<LetElse> { support::child(&self.syntax) } + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) } } @@ -581,6 +748,7 @@ pub struct Lifetime { pub(crate) syntax: SyntaxNode, } impl Lifetime { + #[inline] pub fn lifetime_ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![lifetime_ident]) } @@ -591,6 +759,7 @@ pub struct LifetimeArg { pub(crate) syntax: SyntaxNode, } impl LifetimeArg { + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } } @@ -601,6 +770,7 @@ pub struct LifetimeParam { impl ast::HasAttrs for LifetimeParam {} impl ast::HasTypeBounds for LifetimeParam {} impl LifetimeParam { + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } } @@ -616,7 +786,9 @@ pub struct LiteralPat { pub(crate) syntax: SyntaxNode, } impl LiteralPat { + #[inline] pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) } + #[inline] pub fn minus_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![-]) } } @@ -627,6 +799,7 @@ pub struct LoopExpr { impl ast::HasAttrs for LoopExpr {} impl ast::HasLoopBody for LoopExpr {} impl LoopExpr { + #[inline] pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) } } @@ -637,9 +810,13 @@ pub struct 
MacroCall { impl ast::HasAttrs for MacroCall {} impl ast::HasDocComments for MacroCall {} impl MacroCall { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) } + #[inline] pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } } @@ -652,8 +829,7 @@ impl ast::HasDocComments for MacroDef {} impl ast::HasName for MacroDef {} impl ast::HasVisibility for MacroDef {} impl MacroDef { - pub fn args(&self) -> Option<TokenTree> { support::child(&self.syntax) } - pub fn body(&self) -> Option<TokenTree> { support::child(&self.syntax) } + #[inline] pub fn macro_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![macro]) } } @@ -662,12 +838,19 @@ pub struct MacroEagerInput { pub(crate) syntax: SyntaxNode, } impl MacroEagerInput { + #[inline] pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -676,6 +859,7 @@ pub struct MacroExpr { pub(crate) syntax: SyntaxNode, } impl MacroExpr { + #[inline] pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) } } @@ -691,6 +875,7 @@ pub struct MacroPat { pub(crate) syntax: SyntaxNode, } impl MacroPat { + #[inline] pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) } } @@ -703,8 +888,11 @@ impl ast::HasDocComments for MacroRules {} impl ast::HasName for MacroRules {} impl ast::HasVisibility for MacroRules {} impl MacroRules { + #[inline] pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) } + #[inline] pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) } + #[inline] pub fn macro_rules_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![macro_rules]) } @@ -715,7 +903,9 @@ pub struct MacroStmts { pub(crate) syntax: SyntaxNode, } impl MacroStmts { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) } } @@ -724,6 +914,7 @@ pub struct MacroType { pub(crate) syntax: SyntaxNode, } impl MacroType { + #[inline] pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) } } @@ -733,10 +924,15 @@ pub struct MatchArm { } impl ast::HasAttrs for MatchArm {} impl MatchArm { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn guard(&self) -> Option<MatchGuard> { support::child(&self.syntax) } + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) } + #[inline] pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, 
T![=>]) } } @@ -746,8 +942,11 @@ pub struct MatchArmList { } impl ast::HasAttrs for MatchArmList {} impl MatchArmList { + #[inline] pub fn arms(&self) -> AstChildren<MatchArm> { support::children(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -757,8 +956,11 @@ pub struct MatchExpr { } impl ast::HasAttrs for MatchExpr {} impl MatchExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn match_arm_list(&self) -> Option<MatchArmList> { support::child(&self.syntax) } + #[inline] pub fn match_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![match]) } } @@ -767,6 +969,7 @@ pub struct MatchGuard { pub(crate) syntax: SyntaxNode, } impl MatchGuard { + #[inline] pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) } } @@ -775,12 +978,19 @@ pub struct Meta { pub(crate) syntax: SyntaxNode, } impl Meta { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } @@ -790,10 +1000,13 @@ pub struct MethodCallExpr { } impl ast::HasArgList for MethodCallExpr {} impl ast::HasAttrs for MethodCallExpr {} +impl ast::HasGenericArgs for MethodCallExpr {} impl MethodCallExpr { - pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) } + #[inline] pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } + #[inline] pub fn receiver(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) } } @@ -806,8 +1019,11 @@ impl ast::HasDocComments for Module {} impl ast::HasName for Module {} impl ast::HasVisibility for Module {} impl Module { + #[inline] pub fn item_list(&self) -> Option<ItemList> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn mod_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mod]) } } @@ -816,7 +1032,9 @@ pub struct Name { pub(crate) syntax: SyntaxNode, } impl Name { + #[inline] pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) } + #[inline] pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) } } @@ -825,10 +1043,15 @@ pub struct NameRef { pub(crate) syntax: SyntaxNode, } impl NameRef { + #[inline] pub fn Self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![Self]) } + #[inline] pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) } + #[inline] pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) } + #[inline] pub fn self_token(&self) -> Option<SyntaxToken> { 
support::token(&self.syntax, T![self]) } + #[inline] pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) } } @@ -837,6 +1060,7 @@ pub struct NeverType { pub(crate) syntax: SyntaxNode, } impl NeverType { + #[inline] pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) } } @@ -846,13 +1070,21 @@ pub struct OffsetOfExpr { } impl ast::HasAttrs for OffsetOfExpr {} impl OffsetOfExpr { + #[inline] pub fn fields(&self) -> AstChildren<NameRef> { support::children(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) } + #[inline] pub fn builtin_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![builtin]) } + #[inline] pub fn offset_of_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![offset_of]) } @@ -863,6 +1095,7 @@ pub struct OrPat { pub(crate) syntax: SyntaxNode, } impl OrPat { + #[inline] pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) } } @@ -872,9 +1105,13 @@ pub struct Param { } impl ast::HasAttrs for Param {} impl Param { + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![...]) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } } @@ -883,11 +1120,17 @@ pub struct ParamList { pub(crate) syntax: SyntaxNode, } impl ParamList { + #[inline] pub fn params(&self) -> AstChildren<Param> { support::children(&self.syntax) } + #[inline] pub fn self_param(&self) -> Option<SelfParam> { support::child(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) } + #[inline] pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) } } @@ -897,8 +1140,11 @@ pub struct ParenExpr { } impl ast::HasAttrs for ParenExpr {} impl ParenExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -907,8 +1153,11 @@ pub struct ParenPat { pub(crate) syntax: SyntaxNode, } impl ParenPat { + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -917,8 +1166,11 @@ pub struct ParenType { pub(crate) syntax: SyntaxNode, } impl ParenType { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn 
l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -927,8 +1179,11 @@ pub struct Path { pub(crate) syntax: SyntaxNode, } impl Path { + #[inline] pub fn qualifier(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn segment(&self) -> Option<PathSegment> { support::child(&self.syntax) } + #[inline] pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } } @@ -938,6 +1193,7 @@ pub struct PathExpr { } impl ast::HasAttrs for PathExpr {} impl PathExpr { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } } @@ -946,6 +1202,7 @@ pub struct PathPat { pub(crate) syntax: SyntaxNode, } impl PathPat { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } } @@ -953,16 +1210,25 @@ impl PathPat { pub struct PathSegment { pub(crate) syntax: SyntaxNode, } +impl ast::HasGenericArgs for PathSegment {} impl PathSegment { - pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) } + #[inline] pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } + #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } + #[inline] pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) } + #[inline] pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } + #[inline] pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) } + #[inline] pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) } + #[inline] pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) } } @@ -971,6 +1237,7 @@ pub struct PathType { pub(crate) syntax: SyntaxNode, } impl PathType { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } } @@ -980,6 +1247,7 @@ pub struct PrefixExpr { } impl ast::HasAttrs for PrefixExpr {} impl PrefixExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } } @@ -988,9 +1256,13 @@ pub struct PtrType { pub(crate) syntax: SyntaxNode, } impl PtrType { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) } } @@ -1012,7 +1284,9 @@ pub struct RecordExpr { pub(crate) syntax: SyntaxNode, } impl RecordExpr { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn record_expr_field_list(&self) -> Option<RecordExprFieldList> { support::child(&self.syntax) } @@ -1024,8 +1298,11 @@ pub struct RecordExprField { } impl ast::HasAttrs for RecordExprField {} impl RecordExprField { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } } @@ -1035,10 +1312,15 @@ pub 
struct RecordExprFieldList { } impl ast::HasAttrs for RecordExprFieldList {} impl RecordExprFieldList { + #[inline] pub fn fields(&self) -> AstChildren<RecordExprField> { support::children(&self.syntax) } + #[inline] pub fn spread(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } + #[inline] pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) } } @@ -1051,7 +1333,9 @@ impl ast::HasDocComments for RecordField {} impl ast::HasName for RecordField {} impl ast::HasVisibility for RecordField {} impl RecordField { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } } @@ -1060,8 +1344,11 @@ pub struct RecordFieldList { pub(crate) syntax: SyntaxNode, } impl RecordFieldList { + #[inline] pub fn fields(&self) -> AstChildren<RecordField> { support::children(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -1070,7 +1357,9 @@ pub struct RecordPat { pub(crate) syntax: SyntaxNode, } impl RecordPat { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn record_pat_field_list(&self) -> Option<RecordPatFieldList> { support::child(&self.syntax) } @@ -1082,8 +1371,11 @@ pub struct RecordPatField { } impl ast::HasAttrs for RecordPatField {} impl RecordPatField { + #[inline] pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } } @@ -1092,9 +1384,13 @@ pub struct RecordPatFieldList { pub(crate) syntax: SyntaxNode, } impl RecordPatFieldList { + #[inline] pub fn fields(&self) -> AstChildren<RecordPatField> { support::children(&self.syntax) } + #[inline] pub fn rest_pat(&self) -> Option<RestPat> { support::child(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -1104,10 +1400,15 @@ pub struct RefExpr { } impl ast::HasAttrs for RefExpr {} impl RefExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) } + #[inline] pub fn raw_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![raw]) } } @@ -1116,8 +1417,11 @@ pub struct RefPat { pub(crate) syntax: SyntaxNode, } impl RefPat { + #[inline] pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) } + #[inline] pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) } + #[inline] pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) } } @@ -1126,9 +1430,13 @@ pub struct RefType { 
pub(crate) syntax: SyntaxNode, } impl RefType { + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) } + #[inline] pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) } } @@ -1138,7 +1446,9 @@ pub struct Rename { } impl ast::HasName for Rename {} impl Rename { + #[inline] pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) } + #[inline] pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) } } @@ -1148,6 +1458,7 @@ pub struct RestPat { } impl ast::HasAttrs for RestPat {} impl RestPat { + #[inline] pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) } } @@ -1156,7 +1467,9 @@ pub struct RetType { pub(crate) syntax: SyntaxNode, } impl RetType { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn thin_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![->]) } } @@ -1166,7 +1479,9 @@ pub struct ReturnExpr { } impl ast::HasAttrs for ReturnExpr {} impl ReturnExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn return_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![return]) } } @@ -1177,10 +1492,15 @@ pub struct SelfParam { impl ast::HasAttrs for SelfParam {} impl ast::HasName for SelfParam {} impl SelfParam { + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } + #[inline] pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) } } @@ -1189,8 +1509,11 @@ pub struct SlicePat { pub(crate) syntax: SyntaxNode, } impl SlicePat { + #[inline] pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) } + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } } @@ -1199,8 +1522,11 @@ pub struct SliceType { pub(crate) syntax: SyntaxNode, } impl SliceType { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } } @@ -1212,6 +1538,7 @@ impl ast::HasAttrs for SourceFile {} impl ast::HasDocComments for SourceFile {} impl ast::HasModuleItem for SourceFile {} impl SourceFile { + #[inline] pub fn shebang_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![shebang]) } } @@ -1224,12 +1551,17 @@ impl ast::HasDocComments for Static {} impl ast::HasName for Static {} impl ast::HasVisibility for Static {} impl Static { - pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } + #[inline] pub fn 
semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) } + #[inline] pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) } } @@ -1239,9 +1571,13 @@ pub struct StmtList { } impl ast::HasAttrs for StmtList {} impl StmtList { + #[inline] pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) } + #[inline] pub fn tail_expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -1255,8 +1591,11 @@ impl ast::HasGenericParams for Struct {} impl ast::HasName for Struct {} impl ast::HasVisibility for Struct {} impl Struct { + #[inline] pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) } } @@ -1265,11 +1604,17 @@ pub struct TokenTree { pub(crate) syntax: SyntaxNode, } impl TokenTree { + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -1284,9 +1629,13 @@ impl ast::HasName for Trait {} impl ast::HasTypeBounds for Trait {} impl ast::HasVisibility for Trait {} impl Trait { + #[inline] pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) } + #[inline] pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) } + #[inline] pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) } + #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } @@ -1300,9 +1649,13 @@ impl ast::HasGenericParams for TraitAlias {} impl ast::HasName for TraitAlias {} impl ast::HasVisibility for TraitAlias {} impl TraitAlias { + #[inline] pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) } } @@ -1312,7 +1665,9 @@ pub struct TryExpr { } impl ast::HasAttrs for TryExpr {} impl TryExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) } } @@ -1322,8 +1677,11 @@ pub struct TupleExpr { } impl ast::HasAttrs for 
TupleExpr {} impl TupleExpr { + #[inline] pub fn fields(&self) -> AstChildren<Expr> { support::children(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -1335,6 +1693,7 @@ impl ast::HasAttrs for TupleField {} impl ast::HasDocComments for TupleField {} impl ast::HasVisibility for TupleField {} impl TupleField { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } } @@ -1343,8 +1702,11 @@ pub struct TupleFieldList { pub(crate) syntax: SyntaxNode, } impl TupleFieldList { + #[inline] pub fn fields(&self) -> AstChildren<TupleField> { support::children(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -1353,8 +1715,11 @@ pub struct TuplePat { pub(crate) syntax: SyntaxNode, } impl TuplePat { + #[inline] pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -1363,9 +1728,13 @@ pub struct TupleStructPat { pub(crate) syntax: SyntaxNode, } impl TupleStructPat { + #[inline] pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) } + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -1374,8 +1743,11 @@ pub struct TupleType { pub(crate) syntax: SyntaxNode, } impl TupleType { + #[inline] pub fn fields(&self) -> AstChildren<Type> { support::children(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } } @@ -1390,10 +1762,15 @@ impl ast::HasName for TypeAlias {} impl ast::HasTypeBounds for TypeAlias {} impl ast::HasVisibility for TypeAlias {} impl TypeAlias { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } + #[inline] pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) } + #[inline] pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) } } @@ -1402,6 +1779,7 @@ pub struct TypeArg { pub(crate) syntax: SyntaxNode, } impl TypeArg { + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } } @@ -1410,11 +1788,17 @@ pub struct TypeBound { pub(crate) syntax: SyntaxNode, } impl TypeBound { + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) } + #[inline] pub fn async_token(&self) -> Option<SyntaxToken> { 
support::token(&self.syntax, T![async]) } + #[inline] pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + #[inline] pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) } } @@ -1423,6 +1807,7 @@ pub struct TypeBoundList { pub(crate) syntax: SyntaxNode, } impl TypeBoundList { + #[inline] pub fn bounds(&self) -> AstChildren<TypeBound> { support::children(&self.syntax) } } @@ -1434,7 +1819,9 @@ impl ast::HasAttrs for TypeParam {} impl ast::HasName for TypeParam {} impl ast::HasTypeBounds for TypeParam {} impl TypeParam { + #[inline] pub fn default_type(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } } @@ -1444,6 +1831,7 @@ pub struct UnderscoreExpr { } impl ast::HasAttrs for UnderscoreExpr {} impl UnderscoreExpr { + #[inline] pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) } } @@ -1457,7 +1845,9 @@ impl ast::HasGenericParams for Union {} impl ast::HasName for Union {} impl ast::HasVisibility for Union {} impl Union { + #[inline] pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) } + #[inline] pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) } } @@ -1469,8 +1859,11 @@ impl ast::HasAttrs for Use {} impl ast::HasDocComments for Use {} impl ast::HasVisibility for Use {} impl Use { + #[inline] pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) } + #[inline] pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) } + #[inline] pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) } } @@ -1479,10 +1872,15 @@ pub struct UseTree { pub(crate) syntax: SyntaxNode, } impl UseTree { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) } + #[inline] pub fn use_tree_list(&self) -> Option<UseTreeList> { support::child(&self.syntax) } + #[inline] pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) } + #[inline] pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } } @@ -1491,8 +1889,11 @@ pub struct UseTreeList { pub(crate) syntax: SyntaxNode, } impl UseTreeList { + #[inline] pub fn use_trees(&self) -> AstChildren<UseTree> { support::children(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) } } @@ -1505,8 +1906,11 @@ impl ast::HasDocComments for Variant {} impl ast::HasName for Variant {} impl ast::HasVisibility for Variant {} impl Variant { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) } + #[inline] pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } } @@ -1515,8 +1919,11 @@ pub struct VariantList { pub(crate) syntax: SyntaxNode, } impl VariantList { + #[inline] pub fn variants(&self) -> AstChildren<Variant> { support::children(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option<SyntaxToken> { 
support::token(&self.syntax, T!['}']) } } @@ -1525,10 +1932,15 @@ pub struct Visibility { pub(crate) syntax: SyntaxNode, } impl Visibility { + #[inline] pub fn path(&self) -> Option<Path> { support::child(&self.syntax) } + #[inline] pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) } + #[inline] pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) } + #[inline] pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) } } @@ -1537,7 +1949,9 @@ pub struct WhereClause { pub(crate) syntax: SyntaxNode, } impl WhereClause { + #[inline] pub fn predicates(&self) -> AstChildren<WherePred> { support::children(&self.syntax) } + #[inline] pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) } } @@ -1547,9 +1961,13 @@ pub struct WherePred { } impl ast::HasTypeBounds for WherePred {} impl WherePred { + #[inline] pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } + #[inline] pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } } @@ -1559,6 +1977,7 @@ pub struct WhileExpr { } impl ast::HasAttrs for WhileExpr {} impl WhileExpr { + #[inline] pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) } } @@ -1567,6 +1986,7 @@ pub struct WildcardPat { pub(crate) syntax: SyntaxNode, } impl WildcardPat { + #[inline] pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) } } @@ -1576,8 +1996,11 @@ pub struct YeetExpr { } impl ast::HasAttrs for YeetExpr {} impl YeetExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn do_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![do]) } + #[inline] pub fn yeet_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![yeet]) } } @@ -1587,7 +2010,9 @@ pub struct YieldExpr { } impl ast::HasAttrs for YieldExpr {} impl YieldExpr { + #[inline] pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn yield_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![yield]) } } @@ -1772,6 +2197,12 @@ pub struct AnyHasDocComments { impl ast::HasDocComments for AnyHasDocComments {} #[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AnyHasGenericArgs { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasGenericArgs for AnyHasGenericArgs {} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct AnyHasGenericParams { pub(crate) syntax: SyntaxNode, } @@ -1807,7 +2238,9 @@ pub struct AnyHasVisibility { } impl ast::HasVisibility for AnyHasVisibility {} impl AstNode for Abi { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ABI } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1815,10 +2248,13 @@ impl AstNode for Abi { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ArgList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ARG_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1826,10 +2262,13 @@ impl 
AstNode for ArgList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ArrayExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1837,10 +2276,13 @@ impl AstNode for ArrayExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ArrayType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1848,10 +2290,13 @@ impl AstNode for ArrayType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for AsmExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1859,10 +2304,13 @@ impl AstNode for AsmExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for AssocItemList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_ITEM_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1870,10 +2318,13 @@ impl AstNode for AssocItemList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for AssocTypeArg { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_TYPE_ARG } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1881,10 +2332,13 @@ impl AstNode for AssocTypeArg { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Attr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ATTR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1892,10 +2346,13 @@ impl AstNode for Attr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for AwaitExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == AWAIT_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1903,10 +2360,13 @@ impl AstNode for AwaitExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for BecomeExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == BECOME_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1914,10 +2374,13 @@ impl AstNode for BecomeExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for BinExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == BIN_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1925,10 +2388,13 @@ impl AstNode for BinExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for BlockExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1936,10 +2402,13 @@ impl AstNode for BlockExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for BoxPat { + #[inline] fn 
can_cast(kind: SyntaxKind) -> bool { kind == BOX_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1947,10 +2416,13 @@ impl AstNode for BoxPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for BreakExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == BREAK_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1958,10 +2430,13 @@ impl AstNode for BreakExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for CallExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CALL_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1969,10 +2444,13 @@ impl AstNode for CallExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for CastExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CAST_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1980,10 +2458,13 @@ impl AstNode for CastExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ClosureExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -1991,10 +2472,13 @@ impl AstNode for ClosureExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Const { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CONST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2002,10 +2486,13 @@ impl AstNode for Const { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ConstArg { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_ARG } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2013,10 +2500,13 @@ impl AstNode for ConstArg { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ConstBlockPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_BLOCK_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2024,10 +2514,13 @@ impl AstNode for ConstBlockPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ConstParam { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_PARAM } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2035,10 +2528,13 @@ impl AstNode for ConstParam { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ContinueExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == CONTINUE_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2046,10 +2542,13 @@ impl AstNode for ContinueExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for DynTraitType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == DYN_TRAIT_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if 
Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2057,10 +2556,13 @@ impl AstNode for DynTraitType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Enum { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == ENUM } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2068,10 +2570,13 @@ impl AstNode for Enum { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ExprStmt { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2079,10 +2584,13 @@ impl AstNode for ExprStmt { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ExternBlock { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_BLOCK } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2090,10 +2598,13 @@ impl AstNode for ExternBlock { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ExternCrate { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_CRATE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2101,10 +2612,13 @@ impl AstNode for ExternCrate { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ExternItemList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_ITEM_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2112,10 +2626,13 @@ impl AstNode for ExternItemList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for FieldExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == FIELD_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2123,10 +2640,13 @@ impl AstNode for FieldExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Fn { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == FN } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2134,10 +2654,13 @@ impl AstNode for Fn { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for FnPtrType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == FN_PTR_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2145,10 +2668,13 @@ impl AstNode for FnPtrType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ForExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2156,10 +2682,13 @@ impl AstNode for ForExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ForType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2167,10 +2696,13 @@ impl AstNode for ForType { None } } + #[inline] fn syntax(&self) -> 
&SyntaxNode { &self.syntax } } impl AstNode for FormatArgsArg { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == FORMAT_ARGS_ARG } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2178,10 +2710,13 @@ impl AstNode for FormatArgsArg { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for FormatArgsExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == FORMAT_ARGS_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2189,10 +2724,13 @@ impl AstNode for FormatArgsExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for GenericArgList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_ARG_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2200,10 +2738,13 @@ impl AstNode for GenericArgList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for GenericParamList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2211,10 +2752,13 @@ impl AstNode for GenericParamList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for IdentPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2222,10 +2766,13 @@ impl AstNode for IdentPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for IfExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == IF_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2233,10 +2780,13 @@ impl AstNode for IfExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Impl { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2244,10 +2794,13 @@ impl AstNode for Impl { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ImplTraitType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL_TRAIT_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2255,10 +2808,13 @@ impl AstNode for ImplTraitType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for IndexExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == INDEX_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2266,10 +2822,13 @@ impl AstNode for IndexExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for InferType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == INFER_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2277,10 +2836,13 @@ impl AstNode for InferType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ItemList { + #[inline] fn 
can_cast(kind: SyntaxKind) -> bool { kind == ITEM_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2288,10 +2850,13 @@ impl AstNode for ItemList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Label { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LABEL } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2299,10 +2864,13 @@ impl AstNode for Label { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for LetElse { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LET_ELSE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2310,10 +2878,13 @@ impl AstNode for LetElse { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for LetExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2321,10 +2892,13 @@ impl AstNode for LetExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for LetStmt { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LET_STMT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2332,10 +2906,13 @@ impl AstNode for LetStmt { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Lifetime { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2343,10 +2920,13 @@ impl AstNode for Lifetime { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for LifetimeArg { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_ARG } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2354,10 +2934,13 @@ impl AstNode for LifetimeArg { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for LifetimeParam { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_PARAM } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2365,10 +2948,13 @@ impl AstNode for LifetimeParam { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Literal { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2376,10 +2962,13 @@ impl AstNode for Literal { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for LiteralPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2387,10 +2976,13 @@ impl AstNode for LiteralPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for LoopExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == LOOP_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { 
Some(Self { syntax }) @@ -2398,10 +2990,13 @@ impl AstNode for LoopExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroCall { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_CALL } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2409,10 +3004,13 @@ impl AstNode for MacroCall { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroDef { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_DEF } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2420,10 +3018,13 @@ impl AstNode for MacroDef { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroEagerInput { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EAGER_INPUT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2431,10 +3032,13 @@ impl AstNode for MacroEagerInput { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2442,10 +3046,13 @@ impl AstNode for MacroExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroItems { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_ITEMS } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2453,10 +3060,13 @@ impl AstNode for MacroItems { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2464,10 +3074,13 @@ impl AstNode for MacroPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroRules { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_RULES } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2475,10 +3088,13 @@ impl AstNode for MacroRules { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroStmts { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2486,10 +3102,13 @@ impl AstNode for MacroStmts { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MacroType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2497,10 +3116,13 @@ impl AstNode for MacroType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MatchArm { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2508,10 +3130,13 @@ impl AstNode for MatchArm { None } } + #[inline] fn syntax(&self) -> 
&SyntaxNode { &self.syntax } } impl AstNode for MatchArmList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2519,10 +3144,13 @@ impl AstNode for MatchArmList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MatchExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2530,10 +3158,13 @@ impl AstNode for MatchExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MatchGuard { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_GUARD } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2541,10 +3172,13 @@ impl AstNode for MatchGuard { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Meta { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == META } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2552,10 +3186,13 @@ impl AstNode for Meta { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for MethodCallExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == METHOD_CALL_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2563,10 +3200,13 @@ impl AstNode for MethodCallExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Module { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == MODULE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2574,10 +3214,13 @@ impl AstNode for Module { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Name { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == NAME } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2585,10 +3228,13 @@ impl AstNode for Name { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for NameRef { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == NAME_REF } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2596,10 +3242,13 @@ impl AstNode for NameRef { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for NeverType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == NEVER_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2607,10 +3256,13 @@ impl AstNode for NeverType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for OffsetOfExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == OFFSET_OF_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2618,10 +3270,13 @@ impl AstNode for OffsetOfExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for OrPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == OR_PAT } + #[inline] fn 
cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2629,10 +3284,13 @@ impl AstNode for OrPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Param { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2640,10 +3298,13 @@ impl AstNode for Param { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ParamList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2651,10 +3312,13 @@ impl AstNode for ParamList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ParenExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2662,10 +3326,13 @@ impl AstNode for ParenExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ParenPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2673,10 +3340,13 @@ impl AstNode for ParenPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ParenType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2684,10 +3354,13 @@ impl AstNode for ParenType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Path { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PATH } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2695,10 +3368,13 @@ impl AstNode for Path { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for PathExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2706,10 +3382,13 @@ impl AstNode for PathExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for PathPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2717,10 +3396,13 @@ impl AstNode for PathPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for PathSegment { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_SEGMENT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2728,10 +3410,13 @@ impl AstNode for PathSegment { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for PathType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2739,10 +3424,13 @@ impl AstNode for PathType { None } } + #[inline] fn 
syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for PrefixExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PREFIX_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2750,10 +3438,13 @@ impl AstNode for PrefixExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for PtrType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == PTR_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2761,10 +3452,13 @@ impl AstNode for PtrType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RangeExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2772,10 +3466,13 @@ impl AstNode for RangeExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RangePat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2783,10 +3480,13 @@ impl AstNode for RangePat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RecordExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2794,10 +3494,13 @@ impl AstNode for RecordExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RecordExprField { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2805,10 +3508,13 @@ impl AstNode for RecordExprField { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RecordExprFieldList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2816,10 +3522,13 @@ impl AstNode for RecordExprFieldList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RecordField { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2827,10 +3536,13 @@ impl AstNode for RecordField { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RecordFieldList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2838,10 +3550,13 @@ impl AstNode for RecordFieldList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RecordPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2849,10 +3564,13 @@ impl AstNode for RecordPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode 
for RecordPatField { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2860,10 +3578,13 @@ impl AstNode for RecordPatField { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RecordPatFieldList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2871,10 +3592,13 @@ impl AstNode for RecordPatFieldList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RefExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == REF_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2882,10 +3606,13 @@ impl AstNode for RefExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RefPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == REF_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2893,10 +3620,13 @@ impl AstNode for RefPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RefType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == REF_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2904,10 +3634,13 @@ impl AstNode for RefType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Rename { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RENAME } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2915,10 +3648,13 @@ impl AstNode for Rename { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RestPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == REST_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2926,10 +3662,13 @@ impl AstNode for RestPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for RetType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RET_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2937,10 +3676,13 @@ impl AstNode for RetType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for ReturnExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2948,10 +3690,13 @@ impl AstNode for ReturnExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for SelfParam { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2959,10 +3704,13 @@ impl AstNode for SelfParam { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for SlicePat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> 
Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2970,10 +3718,13 @@ impl AstNode for SlicePat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for SliceType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2981,10 +3732,13 @@ impl AstNode for SliceType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for SourceFile { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == SOURCE_FILE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -2992,10 +3746,13 @@ impl AstNode for SourceFile { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Static { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == STATIC } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3003,10 +3760,13 @@ impl AstNode for Static { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for StmtList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3014,10 +3774,13 @@ impl AstNode for StmtList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Struct { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == STRUCT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3025,10 +3788,13 @@ impl AstNode for Struct { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TokenTree { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TOKEN_TREE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3036,10 +3802,13 @@ impl AstNode for TokenTree { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Trait { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3047,10 +3816,13 @@ impl AstNode for Trait { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TraitAlias { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT_ALIAS } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3058,10 +3830,13 @@ impl AstNode for TraitAlias { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TryExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3069,10 +3844,13 @@ impl AstNode for TryExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TupleExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3080,10 +3858,13 @@ impl AstNode for TupleExpr { None } } + #[inline] fn syntax(&self) -> 
&SyntaxNode { &self.syntax } } impl AstNode for TupleField { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3091,10 +3872,13 @@ impl AstNode for TupleField { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TupleFieldList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3102,10 +3886,13 @@ impl AstNode for TupleFieldList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TuplePat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3113,10 +3900,13 @@ impl AstNode for TuplePat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TupleStructPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_STRUCT_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3124,10 +3914,13 @@ impl AstNode for TupleStructPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TupleType { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_TYPE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3135,10 +3928,13 @@ impl AstNode for TupleType { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TypeAlias { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3146,10 +3942,13 @@ impl AstNode for TypeAlias { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TypeArg { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ARG } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3157,10 +3956,13 @@ impl AstNode for TypeArg { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TypeBound { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3168,10 +3970,13 @@ impl AstNode for TypeBound { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TypeBoundList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3179,10 +3984,13 @@ impl AstNode for TypeBoundList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for TypeParam { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_PARAM } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3190,10 +3998,13 @@ impl AstNode for TypeParam { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for UnderscoreExpr { + #[inline] fn can_cast(kind: 
SyntaxKind) -> bool { kind == UNDERSCORE_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3201,10 +4012,13 @@ impl AstNode for UnderscoreExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Union { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == UNION } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3212,10 +4026,13 @@ impl AstNode for Union { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Use { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == USE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3223,10 +4040,13 @@ impl AstNode for Use { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for UseTree { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3234,10 +4054,13 @@ impl AstNode for UseTree { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for UseTreeList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3245,10 +4068,13 @@ impl AstNode for UseTreeList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Variant { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3256,10 +4082,13 @@ impl AstNode for Variant { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for VariantList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT_LIST } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3267,10 +4096,13 @@ impl AstNode for VariantList { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for Visibility { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == VISIBILITY } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3278,10 +4110,13 @@ impl AstNode for Visibility { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for WhereClause { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3289,10 +4124,13 @@ impl AstNode for WhereClause { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for WherePred { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_PRED } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3300,10 +4138,13 @@ impl AstNode for WherePred { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for WhileExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == WHILE_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { 
syntax }) @@ -3311,10 +4152,13 @@ impl AstNode for WhileExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for WildcardPat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == WILDCARD_PAT } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3322,10 +4166,13 @@ impl AstNode for WildcardPat { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for YeetExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == YEET_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3333,10 +4180,13 @@ impl AstNode for YeetExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AstNode for YieldExpr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == YIELD_EXPR } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) @@ -3344,19 +4194,25 @@ impl AstNode for YieldExpr { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl From<Enum> for Adt { + #[inline] fn from(node: Enum) -> Adt { Adt::Enum(node) } } impl From<Struct> for Adt { + #[inline] fn from(node: Struct) -> Adt { Adt::Struct(node) } } impl From<Union> for Adt { + #[inline] fn from(node: Union) -> Adt { Adt::Union(node) } } impl AstNode for Adt { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ENUM | STRUCT | UNION) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { ENUM => Adt::Enum(Enum { syntax }), @@ -3366,6 +4222,7 @@ impl AstNode for Adt { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { Adt::Enum(it) => &it.syntax, @@ -3375,19 +4232,25 @@ impl AstNode for Adt { } } impl From<Const> for AssocItem { + #[inline] fn from(node: Const) -> AssocItem { AssocItem::Const(node) } } impl From<Fn> for AssocItem { + #[inline] fn from(node: Fn) -> AssocItem { AssocItem::Fn(node) } } impl From<MacroCall> for AssocItem { + #[inline] fn from(node: MacroCall) -> AssocItem { AssocItem::MacroCall(node) } } impl From<TypeAlias> for AssocItem { + #[inline] fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) } } impl AstNode for AssocItem { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CONST | FN | MACRO_CALL | TYPE_ALIAS) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { CONST => AssocItem::Const(Const { syntax }), @@ -3398,6 +4261,7 @@ impl AstNode for AssocItem { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { AssocItem::Const(it) => &it.syntax, @@ -3408,114 +4272,151 @@ impl AstNode for AssocItem { } } impl From<ArrayExpr> for Expr { + #[inline] fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) } } impl From<AsmExpr> for Expr { + #[inline] fn from(node: AsmExpr) -> Expr { Expr::AsmExpr(node) } } impl From<AwaitExpr> for Expr { + #[inline] fn from(node: AwaitExpr) -> Expr { Expr::AwaitExpr(node) } } impl From<BecomeExpr> for Expr { + #[inline] fn from(node: BecomeExpr) -> Expr { Expr::BecomeExpr(node) } } impl From<BinExpr> for Expr { + #[inline] fn from(node: BinExpr) -> Expr { Expr::BinExpr(node) } } impl From<BlockExpr> for Expr { + #[inline] fn from(node: BlockExpr) -> Expr { Expr::BlockExpr(node) } } impl From<BreakExpr> for Expr { + #[inline] fn from(node: BreakExpr) -> Expr { Expr::BreakExpr(node) } } impl 
From<CallExpr> for Expr { + #[inline] fn from(node: CallExpr) -> Expr { Expr::CallExpr(node) } } impl From<CastExpr> for Expr { + #[inline] fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) } } impl From<ClosureExpr> for Expr { + #[inline] fn from(node: ClosureExpr) -> Expr { Expr::ClosureExpr(node) } } impl From<ContinueExpr> for Expr { + #[inline] fn from(node: ContinueExpr) -> Expr { Expr::ContinueExpr(node) } } impl From<FieldExpr> for Expr { + #[inline] fn from(node: FieldExpr) -> Expr { Expr::FieldExpr(node) } } impl From<ForExpr> for Expr { + #[inline] fn from(node: ForExpr) -> Expr { Expr::ForExpr(node) } } impl From<FormatArgsExpr> for Expr { + #[inline] fn from(node: FormatArgsExpr) -> Expr { Expr::FormatArgsExpr(node) } } impl From<IfExpr> for Expr { + #[inline] fn from(node: IfExpr) -> Expr { Expr::IfExpr(node) } } impl From<IndexExpr> for Expr { + #[inline] fn from(node: IndexExpr) -> Expr { Expr::IndexExpr(node) } } impl From<LetExpr> for Expr { + #[inline] fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) } } impl From<Literal> for Expr { + #[inline] fn from(node: Literal) -> Expr { Expr::Literal(node) } } impl From<LoopExpr> for Expr { + #[inline] fn from(node: LoopExpr) -> Expr { Expr::LoopExpr(node) } } impl From<MacroExpr> for Expr { + #[inline] fn from(node: MacroExpr) -> Expr { Expr::MacroExpr(node) } } impl From<MatchExpr> for Expr { + #[inline] fn from(node: MatchExpr) -> Expr { Expr::MatchExpr(node) } } impl From<MethodCallExpr> for Expr { + #[inline] fn from(node: MethodCallExpr) -> Expr { Expr::MethodCallExpr(node) } } impl From<OffsetOfExpr> for Expr { + #[inline] fn from(node: OffsetOfExpr) -> Expr { Expr::OffsetOfExpr(node) } } impl From<ParenExpr> for Expr { + #[inline] fn from(node: ParenExpr) -> Expr { Expr::ParenExpr(node) } } impl From<PathExpr> for Expr { + #[inline] fn from(node: PathExpr) -> Expr { Expr::PathExpr(node) } } impl From<PrefixExpr> for Expr { + #[inline] fn from(node: PrefixExpr) -> Expr { Expr::PrefixExpr(node) } } impl From<RangeExpr> for Expr { + #[inline] fn from(node: RangeExpr) -> Expr { Expr::RangeExpr(node) } } impl From<RecordExpr> for Expr { + #[inline] fn from(node: RecordExpr) -> Expr { Expr::RecordExpr(node) } } impl From<RefExpr> for Expr { + #[inline] fn from(node: RefExpr) -> Expr { Expr::RefExpr(node) } } impl From<ReturnExpr> for Expr { + #[inline] fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) } } impl From<TryExpr> for Expr { + #[inline] fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } } impl From<TupleExpr> for Expr { + #[inline] fn from(node: TupleExpr) -> Expr { Expr::TupleExpr(node) } } impl From<UnderscoreExpr> for Expr { + #[inline] fn from(node: UnderscoreExpr) -> Expr { Expr::UnderscoreExpr(node) } } impl From<WhileExpr> for Expr { + #[inline] fn from(node: WhileExpr) -> Expr { Expr::WhileExpr(node) } } impl From<YeetExpr> for Expr { + #[inline] fn from(node: YeetExpr) -> Expr { Expr::YeetExpr(node) } } impl From<YieldExpr> for Expr { + #[inline] fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) } } impl AstNode for Expr { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -3557,6 +4458,7 @@ impl AstNode for Expr { | YIELD_EXPR ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { ARRAY_EXPR => Expr::ArrayExpr(ArrayExpr { syntax }), @@ -3599,6 +4501,7 @@ impl AstNode for Expr { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { Expr::ArrayExpr(it) => &it.syntax, @@ -3641,19 +4544,25 @@ impl AstNode 
for Expr { } } impl From<Fn> for ExternItem { + #[inline] fn from(node: Fn) -> ExternItem { ExternItem::Fn(node) } } impl From<MacroCall> for ExternItem { + #[inline] fn from(node: MacroCall) -> ExternItem { ExternItem::MacroCall(node) } } impl From<Static> for ExternItem { + #[inline] fn from(node: Static) -> ExternItem { ExternItem::Static(node) } } impl From<TypeAlias> for ExternItem { + #[inline] fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) } } impl AstNode for ExternItem { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FN | MACRO_CALL | STATIC | TYPE_ALIAS) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { FN => ExternItem::Fn(Fn { syntax }), @@ -3664,6 +4573,7 @@ impl AstNode for ExternItem { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { ExternItem::Fn(it) => &it.syntax, @@ -3674,13 +4584,17 @@ impl AstNode for ExternItem { } } impl From<RecordFieldList> for FieldList { + #[inline] fn from(node: RecordFieldList) -> FieldList { FieldList::RecordFieldList(node) } } impl From<TupleFieldList> for FieldList { + #[inline] fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) } } impl AstNode for FieldList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, RECORD_FIELD_LIST | TUPLE_FIELD_LIST) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }), @@ -3689,6 +4603,7 @@ impl AstNode for FieldList { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { FieldList::RecordFieldList(it) => &it.syntax, @@ -3697,21 +4612,27 @@ impl AstNode for FieldList { } } impl From<AssocTypeArg> for GenericArg { + #[inline] fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) } } impl From<ConstArg> for GenericArg { + #[inline] fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) } } impl From<LifetimeArg> for GenericArg { + #[inline] fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) } } impl From<TypeArg> for GenericArg { + #[inline] fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) } } impl AstNode for GenericArg { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ASSOC_TYPE_ARG | CONST_ARG | LIFETIME_ARG | TYPE_ARG) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }), @@ -3722,6 +4643,7 @@ impl AstNode for GenericArg { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { GenericArg::AssocTypeArg(it) => &it.syntax, @@ -3732,18 +4654,23 @@ impl AstNode for GenericArg { } } impl From<ConstParam> for GenericParam { + #[inline] fn from(node: ConstParam) -> GenericParam { GenericParam::ConstParam(node) } } impl From<LifetimeParam> for GenericParam { + #[inline] fn from(node: LifetimeParam) -> GenericParam { GenericParam::LifetimeParam(node) } } impl From<TypeParam> for GenericParam { + #[inline] fn from(node: TypeParam) -> GenericParam { GenericParam::TypeParam(node) } } impl AstNode for GenericParam { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { CONST_PARAM => GenericParam::ConstParam(ConstParam { syntax }), @@ -3753,6 +4680,7 @@ impl AstNode for GenericParam 
{ }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { GenericParam::ConstParam(it) => &it.syntax, @@ -3762,57 +4690,75 @@ impl AstNode for GenericParam { } } impl From<Const> for Item { + #[inline] fn from(node: Const) -> Item { Item::Const(node) } } impl From<Enum> for Item { + #[inline] fn from(node: Enum) -> Item { Item::Enum(node) } } impl From<ExternBlock> for Item { + #[inline] fn from(node: ExternBlock) -> Item { Item::ExternBlock(node) } } impl From<ExternCrate> for Item { + #[inline] fn from(node: ExternCrate) -> Item { Item::ExternCrate(node) } } impl From<Fn> for Item { + #[inline] fn from(node: Fn) -> Item { Item::Fn(node) } } impl From<Impl> for Item { + #[inline] fn from(node: Impl) -> Item { Item::Impl(node) } } impl From<MacroCall> for Item { + #[inline] fn from(node: MacroCall) -> Item { Item::MacroCall(node) } } impl From<MacroDef> for Item { + #[inline] fn from(node: MacroDef) -> Item { Item::MacroDef(node) } } impl From<MacroRules> for Item { + #[inline] fn from(node: MacroRules) -> Item { Item::MacroRules(node) } } impl From<Module> for Item { + #[inline] fn from(node: Module) -> Item { Item::Module(node) } } impl From<Static> for Item { + #[inline] fn from(node: Static) -> Item { Item::Static(node) } } impl From<Struct> for Item { + #[inline] fn from(node: Struct) -> Item { Item::Struct(node) } } impl From<Trait> for Item { + #[inline] fn from(node: Trait) -> Item { Item::Trait(node) } } impl From<TraitAlias> for Item { + #[inline] fn from(node: TraitAlias) -> Item { Item::TraitAlias(node) } } impl From<TypeAlias> for Item { + #[inline] fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) } } impl From<Union> for Item { + #[inline] fn from(node: Union) -> Item { Item::Union(node) } } impl From<Use> for Item { + #[inline] fn from(node: Use) -> Item { Item::Use(node) } } impl AstNode for Item { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -3835,6 +4781,7 @@ impl AstNode for Item { | USE ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { CONST => Item::Const(Const { syntax }), @@ -3858,6 +4805,7 @@ impl AstNode for Item { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { Item::Const(it) => &it.syntax, @@ -3881,54 +4829,71 @@ impl AstNode for Item { } } impl From<BoxPat> for Pat { + #[inline] fn from(node: BoxPat) -> Pat { Pat::BoxPat(node) } } impl From<ConstBlockPat> for Pat { + #[inline] fn from(node: ConstBlockPat) -> Pat { Pat::ConstBlockPat(node) } } impl From<IdentPat> for Pat { + #[inline] fn from(node: IdentPat) -> Pat { Pat::IdentPat(node) } } impl From<LiteralPat> for Pat { + #[inline] fn from(node: LiteralPat) -> Pat { Pat::LiteralPat(node) } } impl From<MacroPat> for Pat { + #[inline] fn from(node: MacroPat) -> Pat { Pat::MacroPat(node) } } impl From<OrPat> for Pat { + #[inline] fn from(node: OrPat) -> Pat { Pat::OrPat(node) } } impl From<ParenPat> for Pat { + #[inline] fn from(node: ParenPat) -> Pat { Pat::ParenPat(node) } } impl From<PathPat> for Pat { + #[inline] fn from(node: PathPat) -> Pat { Pat::PathPat(node) } } impl From<RangePat> for Pat { + #[inline] fn from(node: RangePat) -> Pat { Pat::RangePat(node) } } impl From<RecordPat> for Pat { + #[inline] fn from(node: RecordPat) -> Pat { Pat::RecordPat(node) } } impl From<RefPat> for Pat { + #[inline] fn from(node: RefPat) -> Pat { Pat::RefPat(node) } } impl From<RestPat> for Pat { + #[inline] fn from(node: RestPat) -> Pat { Pat::RestPat(node) } } impl From<SlicePat> for Pat { + 
#[inline] fn from(node: SlicePat) -> Pat { Pat::SlicePat(node) } } impl From<TuplePat> for Pat { + #[inline] fn from(node: TuplePat) -> Pat { Pat::TuplePat(node) } } impl From<TupleStructPat> for Pat { + #[inline] fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) } } impl From<WildcardPat> for Pat { + #[inline] fn from(node: WildcardPat) -> Pat { Pat::WildcardPat(node) } } impl AstNode for Pat { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -3950,6 +4915,7 @@ impl AstNode for Pat { | WILDCARD_PAT ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { BOX_PAT => Pat::BoxPat(BoxPat { syntax }), @@ -3972,6 +4938,7 @@ impl AstNode for Pat { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { Pat::BoxPat(it) => &it.syntax, @@ -3994,57 +4961,75 @@ impl AstNode for Pat { } } impl From<ExprStmt> for Stmt { + #[inline] fn from(node: ExprStmt) -> Stmt { Stmt::ExprStmt(node) } } impl From<Item> for Stmt { + #[inline] fn from(node: Item) -> Stmt { Stmt::Item(node) } } impl From<LetStmt> for Stmt { + #[inline] fn from(node: LetStmt) -> Stmt { Stmt::LetStmt(node) } } impl From<ArrayType> for Type { + #[inline] fn from(node: ArrayType) -> Type { Type::ArrayType(node) } } impl From<DynTraitType> for Type { + #[inline] fn from(node: DynTraitType) -> Type { Type::DynTraitType(node) } } impl From<FnPtrType> for Type { + #[inline] fn from(node: FnPtrType) -> Type { Type::FnPtrType(node) } } impl From<ForType> for Type { + #[inline] fn from(node: ForType) -> Type { Type::ForType(node) } } impl From<ImplTraitType> for Type { + #[inline] fn from(node: ImplTraitType) -> Type { Type::ImplTraitType(node) } } impl From<InferType> for Type { + #[inline] fn from(node: InferType) -> Type { Type::InferType(node) } } impl From<MacroType> for Type { + #[inline] fn from(node: MacroType) -> Type { Type::MacroType(node) } } impl From<NeverType> for Type { + #[inline] fn from(node: NeverType) -> Type { Type::NeverType(node) } } impl From<ParenType> for Type { + #[inline] fn from(node: ParenType) -> Type { Type::ParenType(node) } } impl From<PathType> for Type { + #[inline] fn from(node: PathType) -> Type { Type::PathType(node) } } impl From<PtrType> for Type { + #[inline] fn from(node: PtrType) -> Type { Type::PtrType(node) } } impl From<RefType> for Type { + #[inline] fn from(node: RefType) -> Type { Type::RefType(node) } } impl From<SliceType> for Type { + #[inline] fn from(node: SliceType) -> Type { Type::SliceType(node) } } impl From<TupleType> for Type { + #[inline] fn from(node: TupleType) -> Type { Type::TupleType(node) } } impl AstNode for Type { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -4064,6 +5049,7 @@ impl AstNode for Type { | TUPLE_TYPE ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { ARRAY_TYPE => Type::ArrayType(ArrayType { syntax }), @@ -4084,6 +5070,7 @@ impl AstNode for Type { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { Type::ArrayType(it) => &it.syntax, @@ -4110,10 +5097,13 @@ impl AnyHasArgList { } } impl AstNode for AnyHasArgList { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasArgList { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasAttrs { @@ -4123,6 +5113,7 @@ impl AnyHasAttrs { } } impl AstNode for 
AnyHasAttrs { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -4200,9 +5191,11 @@ impl AstNode for AnyHasAttrs { | YIELD_EXPR ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasAttrs { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasDocComments { @@ -4212,6 +5205,7 @@ impl AnyHasDocComments { } } impl AstNode for AnyHasDocComments { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -4238,9 +5232,29 @@ impl AstNode for AnyHasDocComments { | VARIANT ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasDocComments { syntax }) } + #[inline] + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} +impl AnyHasGenericArgs { + #[inline] + pub fn new<T: ast::HasGenericArgs>(node: T) -> AnyHasGenericArgs { + AnyHasGenericArgs { syntax: node.syntax().clone() } + } +} +impl AstNode for AnyHasGenericArgs { + #[inline] + fn can_cast(kind: SyntaxKind) -> bool { + matches!(kind, ASSOC_TYPE_ARG | METHOD_CALL_EXPR | PATH_SEGMENT) + } + #[inline] + fn cast(syntax: SyntaxNode) -> Option<Self> { + Self::can_cast(syntax.kind()).then_some(AnyHasGenericArgs { syntax }) + } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasGenericParams { @@ -4250,12 +5264,15 @@ impl AnyHasGenericParams { } } impl AstNode for AnyHasGenericParams { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TRAIT_ALIAS | TYPE_ALIAS | UNION) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasGenericParams { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasLoopBody { @@ -4265,10 +5282,13 @@ impl AnyHasLoopBody { } } impl AstNode for AnyHasLoopBody { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasLoopBody { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasModuleItem { @@ -4278,10 +5298,13 @@ impl AnyHasModuleItem { } } impl AstNode for AnyHasModuleItem { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ITEM_LIST | MACRO_ITEMS | SOURCE_FILE) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasModuleItem { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasName { @@ -4291,6 +5314,7 @@ impl AnyHasName { } } impl AstNode for AnyHasName { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -4316,9 +5340,11 @@ impl AstNode for AnyHasName { | VARIANT ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasName { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasTypeBounds { @@ -4328,15 +5354,18 @@ impl AnyHasTypeBounds { } } impl AstNode for AnyHasTypeBounds { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, ASSOC_TYPE_ARG | LIFETIME_PARAM | TRAIT | TYPE_ALIAS | TYPE_PARAM | WHERE_PRED ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasTypeBounds { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl AnyHasVisibility { @@ -4346,6 +5375,7 @@ impl AnyHasVisibility { } } impl AstNode for 
AnyHasVisibility { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, @@ -4369,9 +5399,11 @@ impl AstNode for AnyHasVisibility { | VARIANT ) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(AnyHasVisibility { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } impl std::fmt::Display for Adt { diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs index 651a8ebbf79..85d20c2bd8c 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_ast`, do not edit by hand. +//! Generated by `cargo codegen grammar`, do not edit by hand. use crate::{ ast::AstToken, diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index b0fbe7101c1..911e3d823de 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -10,7 +10,10 @@ use parser::SyntaxKind; use rowan::{GreenNodeData, GreenTokenData}; use crate::{ - ast::{self, support, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, SyntaxNode}, + ast::{ + self, support, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName, + SyntaxNode, + }, ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, }; diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs index 9131cd2f179..28089ffb377 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs @@ -27,6 +27,14 @@ impl Expr { } fn needs_parens_in_expr(&self, parent: &Expr) -> bool { + // Parentheses are necessary when calling a function-like pointer that is a member of a struct or union + // (e.g. `(a.f)()`). + let is_parent_call_expr = matches!(parent, ast::Expr::CallExpr(_)); + let is_field_expr = matches!(self, ast::Expr::FieldExpr(_)); + if is_parent_call_expr && is_field_expr { + return true; + } + // Special-case block weirdness if parent.child_is_followed_by_a_block() { use Expr::*; diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs index 1ce548f8fc7..df017ddde64 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs @@ -1,9 +1,6 @@ //! There are many AstNodes, but only a few tokens, so we hand-write them here. 
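The generated `AstNode` impls above all share the same three-method shape (`can_cast`, `cast`, `syntax`), which is why the blanket `#[inline]` additions touch every node type and enum wrapper. A minimal sketch of how these impls are typically consumed when walking a tree; the trait and node types are the ones from the diff, but the snippet assumes the syntax crate is pulled in under its crates.io name `ra_ap_syntax`:

```rust
// Illustrative helper, assuming the `ra_ap_syntax` crate as a dependency.
use ra_ap_syntax::{ast, AstNode, SyntaxNode};

/// Collect every `match` expression underneath `root`.
/// `cast` succeeds exactly when `can_cast(node.kind())` holds, and `syntax()`
/// hands the untyped node back, so the round-trip is cheap; these are the hot
/// calls the `#[inline]` annotations target.
fn match_exprs(root: &SyntaxNode) -> Vec<ast::MatchExpr> {
    root.descendants().filter_map(ast::MatchExpr::cast).collect()
}
```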
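The `prec.rs` hunk just above adds a rule that a field expression used as the callee of a call must keep its parentheses. A small standalone example of the language behaviour it preserves (the struct and names are made up for illustration):

```rust
// Why `(a.f)()` needs parentheses: without them, `a.f()` parses as a method
// call on the struct rather than a call of the function pointer stored in the
// field, and fails to compile.
struct Callbacks {
    on_tick: fn() -> u32,
}

fn tick() -> u32 {
    42
}

fn main() {
    let cb = Callbacks { on_tick: tick };
    // `cb.on_tick()` would be rejected: `on_tick` is a field, not a method.
    let value = (cb.on_tick)();
    assert_eq!(value, 42);
}
```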
-use std::{ - borrow::Cow, - num::{ParseFloatError, ParseIntError}, -}; +use std::{borrow::Cow, num::ParseIntError}; use rustc_lexer::unescape::{ unescape_byte, unescape_char, unescape_mixed, unescape_unicode, EscapeError, MixedUnit, Mode, @@ -393,9 +390,9 @@ impl ast::IntNumber { } } - pub fn float_value(&self) -> Option<f64> { + pub fn value_string(&self) -> String { let (_, text, _) = self.split_into_parts(); - text.replace('_', "").parse::<f64>().ok() + text.replace('_', "") } } @@ -432,14 +429,9 @@ impl ast::FloatNumber { } } - pub fn value(&self) -> Result<f64, ParseFloatError> { - let (text, _) = self.split_into_parts(); - text.replace('_', "").parse::<f64>() - } - - pub fn value_f32(&self) -> Result<f32, ParseFloatError> { + pub fn value_string(&self) -> String { let (text, _) = self.split_into_parts(); - text.replace('_', "").parse::<f32>() + text.replace('_', "") } } @@ -497,6 +489,8 @@ impl ast::Byte { #[cfg(test)] mod tests { + use rustc_apfloat::ieee::Quad as f128; + use crate::ast::{self, make, FloatNumber, IntNumber}; fn check_float_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) { @@ -507,12 +501,17 @@ mod tests { assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into()); } - fn check_float_value(lit: &str, expected: impl Into<Option<f64>> + Copy) { + // FIXME(#17451) Use `expected: f128` once `f128` is stabilised. + fn check_float_value(lit: &str, expected: &str) { + let expected = Some(expected.parse::<f128>().unwrap()); assert_eq!( - FloatNumber { syntax: make::tokens::literal(lit) }.value().ok(), - expected.into() + FloatNumber { syntax: make::tokens::literal(lit) }.value_string().parse::<f128>().ok(), + expected + ); + assert_eq!( + IntNumber { syntax: make::tokens::literal(lit) }.value_string().parse::<f128>().ok(), + expected ); - assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.float_value(), expected.into()); } fn check_int_value(lit: &str, expected: impl Into<Option<u128>>) { @@ -525,9 +524,9 @@ mod tests { check_float_suffix("123f32", "f32"); check_float_suffix("123.0e", None); check_float_suffix("123.0e4", None); - check_float_suffix("123.0ef32", "f32"); + check_float_suffix("123.0ef16", "f16"); check_float_suffix("123.0E4f32", "f32"); - check_float_suffix("1_2_3.0_f32", "f32"); + check_float_suffix("1_2_3.0_f128", "f128"); } #[test] @@ -594,8 +593,10 @@ bcde", b"abcde", #[test] fn test_value_underscores() { - check_float_value("1.234567891011121_f64", 1.234567891011121_f64); - check_float_value("1__0.__0__f32", 10.0); + check_float_value("1.3_4665449586950493453___6_f128", "1.346654495869504934536"); + check_float_value("1.234567891011121_f64", "1.234567891011121"); + check_float_value("1__0.__0__f32", "10.0"); + check_float_value("3._0_f16", "3.0"); check_int_value("0b__1_0_", 2); check_int_value("1_1_1_1_1_1", 111111); } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs index 16f7356b1e3..152b0cb98c2 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs @@ -52,6 +52,11 @@ pub trait HasGenericParams: AstNode { support::child(self.syntax()) } } +pub trait HasGenericArgs: AstNode { + fn generic_arg_list(&self) -> Option<ast::GenericArgList> { + support::child(self.syntax()) + } +} pub trait HasTypeBounds: AstNode { fn type_bound_list(&self) -> Option<ast::TypeBoundList> { diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs 
b/src/tools/rust-analyzer/crates/syntax/src/lib.rs index b5d816b0ce1..177f48b986a 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs @@ -20,7 +20,6 @@ //! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md> #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] -#![warn(rust_2018_idioms, unused_lifetimes)] #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_lexer as rustc_lexer; diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs index 43f62d0d1e0..088817b8357 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs @@ -6,7 +6,6 @@ //! * Extracting markup (mainly, `$0` markers) out of fixture strings. //! * marks (see the eponymous module). -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(clippy::print_stderr)] mod assert_linear; @@ -305,7 +304,7 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> { } let range = TextRange::at(offset, len.try_into().unwrap()); let line_no_caret = &line[len..]; - let end_marker = line_no_caret.find(|c| c == '$'); + let end_marker = line_no_caret.find('$'); let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len); let cond = |end_marker| { diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 0257ed9ab41..d1862f7d738 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -123,7 +123,7 @@ pub mod marker { impl_copy! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 - f32 f64 + f16 f32 f64 f128 bool char } @@ -180,7 +180,7 @@ pub mod default { 0; usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } impl_default! { - 0.0; f32 f64 + 0.0; f16 f32 f64 f128 } // endregion:builtin_impls } @@ -276,7 +276,7 @@ pub mod clone { impl_clone! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 - f32 f64 + f16 f32 f64 f128 bool char } @@ -796,7 +796,7 @@ pub mod ops { )*) } - add_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } + add_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f16 f32 f64 f128 } // endregion:builtin_impls // endregion:add @@ -1043,7 +1043,7 @@ pub mod fmt { impl_debug! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 - f32 f64 + f16 f32 f64 f128 bool char } diff --git a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs index e2ff373c1ba..3efe0850d88 100644 --- a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs +++ b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs @@ -4,8 +4,6 @@ //! so `TextEdit` is the ultimate representation of the work done by //! rust-analyzer. -#![warn(rust_2018_idioms, unused_lifetimes)] - use itertools::Itertools; use std::cmp::max; pub use text_size::{TextRange, TextSize}; diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs index 2591ed16916..a0603e35a09 100644 --- a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs +++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs @@ -1,7 +1,5 @@ //! Discovery of `cargo` & `rustc` executables. 
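The `minicore.rs` hunks extend several `impl_*!` lists with `f16` and `f128`. Those helpers are ordinary declarative macros that stamp out one trait impl per listed type, so supporting a new primitive is just another token in each list. A standalone sketch of that pattern with assumed names (the real macros live in minicore and differ in detail):

```rust
// Hypothetical trait and macro mirroring the minicore `impl_default!`-style
// pattern: one invocation generates an impl for every type in the list.
trait Zeroed {
    fn zeroed() -> Self;
}

macro_rules! impl_zeroed {
    ($v:literal; $($t:ty)*) => {
        $(impl Zeroed for $t {
            fn zeroed() -> Self { $v }
        })*
    };
}

// On a nightly toolchain with `feature(f16, f128)` the list could also name
// `f16` and `f128`, which is exactly the one-token kind of change in the diff.
impl_zeroed! { 0.0; f32 f64 }

fn main() {
    assert_eq!(<f64 as Zeroed>::zeroed(), 0.0);
}
```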
-#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{env, iter, path::PathBuf}; use camino::{Utf8Path, Utf8PathBuf}; diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml index c96f088cdc5..1311e2ddf89 100644 --- a/src/tools/rust-analyzer/crates/tt/Cargo.toml +++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml @@ -12,6 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] +arrayvec.workspace = true smol_str.workspace = true text-size.workspace = true diff --git a/src/tools/rust-analyzer/crates/tt/src/iter.rs b/src/tools/rust-analyzer/crates/tt/src/iter.rs new file mode 100644 index 00000000000..175259a3e47 --- /dev/null +++ b/src/tools/rust-analyzer/crates/tt/src/iter.rs @@ -0,0 +1,161 @@ +//! A "Parser" structure for token trees. We use this when parsing a declarative +//! macro definition into a list of patterns and templates. + +use arrayvec::ArrayVec; + +use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree}; + +#[derive(Debug, Clone)] +pub struct TtIter<'a, S> { + inner: std::slice::Iter<'a, TokenTree<S>>, +} + +impl<'a, S: Copy> TtIter<'a, S> { + pub fn new(subtree: &'a Subtree<S>) -> TtIter<'a, S> { + TtIter { inner: subtree.token_trees.iter() } + } + + pub fn new_iter(iter: std::slice::Iter<'a, TokenTree<S>>) -> TtIter<'a, S> { + TtIter { inner: iter } + } + + pub fn expect_char(&mut self, char: char) -> Result<(), ()> { + match self.next() { + Some(&TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if c == char => Ok(()), + _ => Err(()), + } + } + + pub fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> { + match self.next() { + Some(TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if chars.contains(c) => { + Ok(()) + } + _ => Err(()), + } + } + + pub fn expect_subtree(&mut self) -> Result<&'a Subtree<S>, ()> { + match self.next() { + Some(TokenTree::Subtree(it)) => Ok(it), + _ => Err(()), + } + } + + pub fn expect_leaf(&mut self) -> Result<&'a Leaf<S>, ()> { + match self.next() { + Some(TokenTree::Leaf(it)) => Ok(it), + _ => Err(()), + } + } + + pub fn expect_dollar(&mut self) -> Result<(), ()> { + match self.expect_leaf()? { + Leaf::Punct(Punct { char: '$', .. }) => Ok(()), + _ => Err(()), + } + } + + pub fn expect_ident(&mut self) -> Result<&'a Ident<S>, ()> { + match self.expect_leaf()? { + Leaf::Ident(it) if it.text != "_" => Ok(it), + _ => Err(()), + } + } + + pub fn expect_ident_or_underscore(&mut self) -> Result<&'a Ident<S>, ()> { + match self.expect_leaf()? { + Leaf::Ident(it) => Ok(it), + _ => Err(()), + } + } + + pub fn expect_literal(&mut self) -> Result<&'a Leaf<S>, ()> { + let it = self.expect_leaf()?; + match it { + Leaf::Literal(_) => Ok(it), + Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), + _ => Err(()), + } + } + + pub fn expect_single_punct(&mut self) -> Result<&'a Punct<S>, ()> { + match self.expect_leaf()? { + Leaf::Punct(it) => Ok(it), + _ => Err(()), + } + } + + /// Returns consecutive `Punct`s that can be glued together. + /// + /// This method currently may return a single quotation, which is part of lifetime ident and + /// conceptually not a punct in the context of mbe. Callers should handle this. 
+ pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, 3>, ()> { + let TokenTree::Leaf(Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { + return Err(()); + }; + + let mut res = ArrayVec::new(); + if first.spacing == Spacing::Alone { + res.push(first); + return Ok(res); + } + + let (second, third) = match (self.peek_n(0), self.peek_n(1)) { + (Some(TokenTree::Leaf(Leaf::Punct(p2))), Some(TokenTree::Leaf(Leaf::Punct(p3)))) + if p2.spacing == Spacing::Joint => + { + (p2, Some(p3)) + } + (Some(TokenTree::Leaf(Leaf::Punct(p2))), _) => (p2, None), + _ => { + res.push(first); + return Ok(res); + } + }; + + match (first.char, second.char, third.map(|it| it.char)) { + ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { + let _ = self.next().unwrap(); + let _ = self.next().unwrap(); + res.push(first); + res.push(*second); + res.push(*third.unwrap()); + } + ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) + | ('-' | '=' | '>', '>', _) + | ('<', '-', _) + | (':', ':', _) + | ('.', '.', _) + | ('&', '&', _) + | ('<', '<', _) + | ('|', '|', _) => { + let _ = self.next().unwrap(); + res.push(first); + res.push(*second); + } + _ => res.push(first), + } + Ok(res) + } + pub fn peek_n(&self, n: usize) -> Option<&'a TokenTree<S>> { + self.inner.as_slice().get(n) + } + + pub fn as_slice(&self) -> &'a [TokenTree<S>] { + self.inner.as_slice() + } +} + +impl<'a, S> Iterator for TtIter<'a, S> { + type Item = &'a TokenTree<S>; + fn next(&mut self) -> Option<Self::Item> { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option<usize>) { + self.inner.size_hint() + } +} + +impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {} diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs index e9de3f97b0e..369744d0e96 100644 --- a/src/tools/rust-analyzer/crates/tt/src/lib.rs +++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs @@ -2,7 +2,8 @@ //! input and output) of macros. It closely mirrors `proc_macro` crate's //! `TokenTree`. -#![warn(rust_2018_idioms, unused_lifetimes)] +pub mod buffer; +pub mod iter; use std::fmt; @@ -365,8 +366,6 @@ impl<S> Subtree<S> { } } -pub mod buffer; - pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String { fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String { match tkn { diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs index 1dbccab370c..7e0f9af7af8 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs @@ -7,8 +7,6 @@ //! Hopefully, one day a reliable file watching/walking crate appears on //! crates.io, and we can reduce this to trivial glue code. -#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{ fs, path::{Component, Path}, diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs index 18c8699dd4d..b3aa6e2fe11 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs @@ -38,8 +38,6 @@ //! [`Handle`]: loader::Handle //! 
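The new `tt::iter::TtIter` above is essentially a clonable cursor over a token-tree slice: a thin wrapper around `std::slice::Iter` plus fallible `expect_*` helpers and slice-based lookahead via `peek_n`. A simplified, self-contained sketch of that shape using made-up token types rather than the real `tt` structs:

```rust
// Simplified stand-in for `TokenTree`/`Leaf`; the point is the cursor pattern,
// not the real data model.
#[derive(Debug, Clone, PartialEq)]
enum Token {
    Punct(char),
    Ident(String),
}

struct Cursor<'a> {
    inner: std::slice::Iter<'a, Token>,
}

impl<'a> Cursor<'a> {
    fn new(tokens: &'a [Token]) -> Self {
        Cursor { inner: tokens.iter() }
    }

    /// Advance only if the next token is the expected punct, like `expect_char`.
    fn expect_char(&mut self, c: char) -> Result<(), ()> {
        match self.inner.next() {
            Some(Token::Punct(p)) if *p == c => Ok(()),
            _ => Err(()),
        }
    }

    /// Like `expect_ident`: any identifier except the reserved `_`.
    fn expect_ident(&mut self) -> Result<&'a str, ()> {
        match self.inner.next() {
            Some(Token::Ident(name)) if name != "_" => Ok(name.as_str()),
            _ => Err(()),
        }
    }

    /// Lookahead without consuming, mirroring `TtIter::peek_n`.
    fn peek_n(&self, n: usize) -> Option<&'a Token> {
        self.inner.as_slice().get(n)
    }
}

fn main() {
    // Roughly the shape of matching `$name` at the start of a macro pattern.
    let tokens = vec![Token::Punct('$'), Token::Ident("name".into())];
    let mut cursor = Cursor::new(&tokens);
    assert!(cursor.expect_char('$').is_ok());
    assert_eq!(cursor.expect_ident(), Ok("name"));
    assert_eq!(cursor.peek_n(0), None); // everything consumed
}
```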
[`Entries`]: loader::Entry -#![warn(rust_2018_idioms, unused_lifetimes)] - mod anchored_path; pub mod file_set; pub mod loader; diff --git a/src/tools/rust-analyzer/docs/dev/architecture.md b/src/tools/rust-analyzer/docs/dev/architecture.md index 4303a800a04..f4e7263868c 100644 --- a/src/tools/rust-analyzer/docs/dev/architecture.md +++ b/src/tools/rust-analyzer/docs/dev/architecture.md @@ -368,7 +368,7 @@ In particular, we generate: * Documentation tests for assists -See the `sourcegen` crate for details. +See the `xtask\src\codegen\assists_doc_tests.rs` module for details. **Architecture Invariant:** we avoid bootstrapping. For codegen we need to parse Rust code. diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index 695fec7e8e0..74acb6f9940 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ <!--- -lsp/ext.rs hash: 8e6e340f2899b5e9 +lsp/ext.rs hash: 39b47906286ad9c If you need to change the above hash to make the test pass, please check if you need to adjust this doc as well and ping this issue: @@ -376,12 +376,29 @@ rust-analyzer supports two `kind`s of runnables, `"cargo"` and `"shell"`. The `a ```typescript { + /** + * Environment variables to set before running the command. + */ + environment?: Record<string, string>; + /** + * The working directory to run the command in. + */ + cwd: string; + /** + * The workspace root directory of the cargo project. + */ workspaceRoot?: string; - cwd?: string; + /** + * The cargo command to run. + */ cargoArgs: string[]; - cargoExtraArgs: string[]; + /** + * Arguments to pass to the executable, these will be passed to the command after a `--` argument. + */ executableArgs: string[]; - expectTest?: boolean; + /** + * Command to execute instead of `cargo`. + */ overrideCargo?: string; } ``` @@ -390,10 +407,17 @@ The args for `"shell"` look like this: ```typescript { + /** + * Environment variables to set before running the command. + */ + environment?: Record<string, string>; + /** + * The working directory to run the command in. + */ + cwd: string; kind: string; program: string; args: string[]; - cwd: string; } ``` diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index 14aae91741e..edb95abdb8e 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -9,10 +9,15 @@ for enum variants. -- Placeholder expression to use for missing expressions in assists. -- -[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `400`):: +[[rust-analyzer.assist.termSearch.borrowcheck]]rust-analyzer.assist.termSearch.borrowcheck (default: `true`):: + -- -Term search fuel in "units of work" for assists (Defaults to 400). +Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check. +-- +[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `1800`):: ++ +-- +Term search fuel in "units of work" for assists (Defaults to 1800). 
-- [[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`):: + @@ -350,12 +355,6 @@ Default: "description": "Put the expression into a pinned `Box`", "scope": "expr" }, - "Ok": { - "postfix": "ok", - "body": "Ok(${receiver})", - "description": "Wrap the expression in a `Result::Ok`", - "scope": "expr" - }, "Err": { "postfix": "err", "body": "Err(${receiver})", @@ -367,6 +366,12 @@ Default: "body": "Some(${receiver})", "description": "Wrap the expression in an `Option::Some`", "scope": "expr" + }, + "Ok": { + "postfix": "ok", + "body": "Ok(${receiver})", + "description": "Wrap the expression in a `Result::Ok`", + "scope": "expr" } } ---- @@ -378,10 +383,10 @@ Custom completion snippets. -- Whether to enable term search based snippets like `Some(foo.bar().baz())`. -- -[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `200`):: +[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `1000`):: + -- -Term search fuel in "units of work" for autocompletion (Defaults to 200). +Term search fuel in "units of work" for autocompletion (Defaults to 1000). -- [[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`):: + @@ -589,6 +594,11 @@ Whether to prefer import paths containing a `prelude` module. -- The path structure for newly inserted paths to use. -- +[[rust-analyzer.imports.prefixExternPrelude]]rust-analyzer.imports.prefixExternPrelude (default: `false`):: ++ +-- +Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;". +-- [[rust-analyzer.inlayHints.bindingModeHints.enable]]rust-analyzer.inlayHints.bindingModeHints.enable (default: `false`):: + -- @@ -645,6 +655,21 @@ Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. -- Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). -- +[[rust-analyzer.inlayHints.genericParameterHints.const.enable]]rust-analyzer.inlayHints.genericParameterHints.const.enable (default: `false`):: ++ +-- +Whether to show const generic parameter name inlay hints. +-- +[[rust-analyzer.inlayHints.genericParameterHints.lifetime.enable]]rust-analyzer.inlayHints.genericParameterHints.lifetime.enable (default: `true`):: ++ +-- +Whether to show generic lifetime parameter name inlay hints. +-- +[[rust-analyzer.inlayHints.genericParameterHints.type.enable]]rust-analyzer.inlayHints.genericParameterHints.type.enable (default: `false`):: ++ +-- +Whether to show generic type parameter name inlay hints. +-- [[rust-analyzer.inlayHints.implicitDrops.enable]]rust-analyzer.inlayHints.implicitDrops.enable (default: `false`):: + -- diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index db2a989106f..7e77c7e52fa 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -305,6 +305,16 @@ "command": "rust-analyzer.toggleLSPLogs", "title": "Toggle LSP Logs", "category": "rust-analyzer" + }, + { + "command": "rust-analyzer.openWalkthrough", + "title": "Open Walkthrough", + "category": "rust-analyzer" + }, + { + "command": "rust-analyzer.openFAQ", + "title": "Open FAQ", + "category": "rust-analyzer" } ], "keybindings": [ @@ -323,14 +333,6 @@ { "title": "general", "properties": { - "rust-analyzer.cargoRunner": { - "type": [ - "null", - "string" - ], - "default": null, - "description": "Custom cargo runner extension ID." 
- }, "rust-analyzer.restartServerOnConfigChange": { "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.", "default": false, @@ -591,9 +593,19 @@ { "title": "assist", "properties": { + "rust-analyzer.assist.termSearch.borrowcheck": { + "markdownDescription": "Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.", + "default": true, + "type": "boolean" + } + } + }, + { + "title": "assist", + "properties": { "rust-analyzer.assist.termSearch.fuel": { - "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 400).", - "default": 400, + "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 1800).", + "default": 1800, "type": "integer", "minimum": 0 } @@ -1187,12 +1199,6 @@ "description": "Put the expression into a pinned `Box`", "scope": "expr" }, - "Ok": { - "postfix": "ok", - "body": "Ok(${receiver})", - "description": "Wrap the expression in a `Result::Ok`", - "scope": "expr" - }, "Err": { "postfix": "err", "body": "Err(${receiver})", @@ -1204,6 +1210,12 @@ "body": "Some(${receiver})", "description": "Wrap the expression in an `Option::Some`", "scope": "expr" + }, + "Ok": { + "postfix": "ok", + "body": "Ok(${receiver})", + "description": "Wrap the expression in a `Result::Ok`", + "scope": "expr" } }, "type": "object" @@ -1224,8 +1236,8 @@ "title": "completion", "properties": { "rust-analyzer.completion.termSearch.fuel": { - "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 200).", - "default": 200, + "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 1000).", + "default": 1000, "type": "integer", "minimum": 0 } @@ -1723,6 +1735,16 @@ } }, { + "title": "imports", + "properties": { + "rust-analyzer.imports.prefixExternPrelude": { + "markdownDescription": "Whether to prefix external (including std, core) crate imports with `::`. e.g. 
\"use ::std::io::Read;\".", + "default": false, + "type": "boolean" + } + } + }, + { "title": "inlayHints", "properties": { "rust-analyzer.inlayHints.bindingModeHints.enable": { @@ -1890,6 +1912,36 @@ { "title": "inlayHints", "properties": { + "rust-analyzer.inlayHints.genericParameterHints.const.enable": { + "markdownDescription": "Whether to show const generic parameter name inlay hints.", + "default": false, + "type": "boolean" + } + } + }, + { + "title": "inlayHints", + "properties": { + "rust-analyzer.inlayHints.genericParameterHints.lifetime.enable": { + "markdownDescription": "Whether to show generic lifetime parameter name inlay hints.", + "default": true, + "type": "boolean" + } + } + }, + { + "title": "inlayHints", + "properties": { + "rust-analyzer.inlayHints.genericParameterHints.type.enable": { + "markdownDescription": "Whether to show generic type parameter name inlay hints.", + "default": false, + "type": "boolean" + } + } + }, + { + "title": "inlayHints", + "properties": { "rust-analyzer.inlayHints.implicitDrops.enable": { "markdownDescription": "Whether to show implicit drop hints.", "default": false, @@ -3132,6 +3184,12 @@ { "command": "rust-analyzer.toggleLSPLogs", "when": "inRustProject" + }, + { + "command": "rust-analyzer.openWalkthrough" + }, + { + "command": "rust-analyzer.openFAQ" } ], "editor/context": [ @@ -3161,6 +3219,57 @@ "fileMatch": "rust-project.json", "url": "https://json.schemastore.org/rust-project.json" } + ], + "walkthroughs": [ + { + "id": "landing", + "title": "Learn about rust-analyzer", + "description": "A brief introduction to get started with rust-analyzer. Learn about key features and resources to help you get the most out of the extension.", + "steps": [ + { + "id": "setup", + "title": "Useful Setup Tips", + "description": "There are a couple of things you might want to configure upfront to your tastes. We'll name a few here but be sure to check out the docs linked below!\n\n**Marking library sources as readonly**\n\nAdding the following to your settings.json will mark all Rust library sources as readonly:\n```json\n\"files.readonlyInclude\": {\n \"**/.cargo/registry/src/**/*.rs\": true,\n \"**/lib/rustlib/src/rust/library/**/*.rs\": true,\n},\n```\n\n**Check on Save**\n\nBy default, rust-analyzer will run `cargo check` on your codebase when you save a file, rendering diagnostics emitted by `cargo check` within your code. This can potentially collide with other `cargo` commands running concurrently, blocking them from running for a certain amount of time. In these cases it is recommended to disable the `rust-analyzer.checkOnSave` configuration and running the `rust-analyzer: Run flycheck` command on-demand instead.", + "media": { + "image": "./icon.png", + "altText": "rust-analyzer logo" + } + }, + { + "id": "docs", + "title": "Visit the docs!", + "description": "Confused about configurations? Want to learn more about rust-analyzer? Visit the [User Manual](https://rust-analyzer.github.io/manual.html)!", + "media": { + "image": "./icon.png", + "altText": "rust-analyzer logo" + }, + "completionEvents": [ + "onLink:https://rust-analyzer.github.io/manual.html" + ] + }, + { + "id": "faq", + "title": "FAQ", + "description": "What are these code hints that are being inserted into my code?\n\nThese hints are called inlay hints which rust-analyzer support and are enabled by default in VSCode. 
If you wish to disable them you can do so via the `editor.inlayHints.enabled` setting.", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + } + }, + { + "id": "changelog", + "title": "Changelog", + "description": "Stay up-to-date with the latest changes in rust-analyzer. Check out the changelog [here](https://rust-analyzer.github.io/thisweek)!", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + }, + "completionEvents": [ + "onLink:https://rust-analyzer.github.io/thisweek" + ] + } + ] + } ] } } diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts index 5a92b285ae6..f2884ad0b05 100644 --- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts +++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts @@ -36,6 +36,12 @@ async function getServer( config: Config, state: PersistentState, ): Promise<string | undefined> { + const packageJson: { + version: string; + releaseTag: string | null; + enableProposedApi: boolean | undefined; + } = context.extension.packageJSON; + const explicitPath = process.env["__RA_LSP_SERVER_DEBUG"] ?? config.serverPath; if (explicitPath) { if (explicitPath.startsWith("~/")) { @@ -43,7 +49,7 @@ async function getServer( } return explicitPath; } - if (config.package.releaseTag === null) return "rust-analyzer"; + if (packageJson.releaseTag === null) return "rust-analyzer"; const ext = process.platform === "win32" ? ".exe" : ""; const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`); @@ -54,8 +60,15 @@ async function getServer( if (bundledExists) { let server = bundled; if (await isNixOs()) { - server = await getNixOsServer(config, ext, state, bundled, server); - await state.updateServerVersion(config.package.version); + server = await getNixOsServer( + context.globalStorageUri, + packageJson.version, + ext, + state, + bundled, + server, + ); + await state.updateServerVersion(packageJson.version); } return server.fsPath; } @@ -86,19 +99,20 @@ export function isValidExecutable(path: string, extraEnv: Env): boolean { } async function getNixOsServer( - config: Config, + globalStorageUri: vscode.Uri, + version: string, ext: string, state: PersistentState, bundled: vscode.Uri, server: vscode.Uri, ) { - await vscode.workspace.fs.createDirectory(config.globalStorageUri).then(); - const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`); + await vscode.workspace.fs.createDirectory(globalStorageUri).then(); + const dest = vscode.Uri.joinPath(globalStorageUri, `rust-analyzer${ext}`); let exists = await vscode.workspace.fs.stat(dest).then( () => true, () => false, ); - if (exists && config.package.version !== state.serverVersion) { + if (exists && version !== state.serverVersion) { await vscode.workspace.fs.delete(dest); exists = false; } diff --git a/src/tools/rust-analyzer/editors/code/src/client.ts b/src/tools/rust-analyzer/editors/code/src/client.ts index 1c2a34b484d..542233e7b91 100644 --- a/src/tools/rust-analyzer/editors/code/src/client.ts +++ b/src/tools/rust-analyzer/editors/code/src/client.ts @@ -76,7 +76,8 @@ export async function createClient( // value === "unlinked-file" && value === "temporary-disabled" && !unlinkedFiles.includes(uri) && - diag.message !== "file not included in module tree" + (diag.message === "file not included in crate hierarchy" || + diag.message.startsWith("This file is not included in any crates")) ) { const config = vscode.workspace.getConfiguration("rust-analyzer"); if 
(config.get("showUnlinkedFileNotification")) { diff --git a/src/tools/rust-analyzer/editors/code/src/commands.ts b/src/tools/rust-analyzer/editors/code/src/commands.ts index f0f9fab1c64..2b0b3001062 100644 --- a/src/tools/rust-analyzer/editors/code/src/commands.ts +++ b/src/tools/rust-analyzer/editors/code/src/commands.ts @@ -1502,3 +1502,23 @@ export function toggleLSPLogs(ctx: Ctx): Cmd { } }; } + +export function openWalkthrough(_: Ctx): Cmd { + return async () => { + await vscode.commands.executeCommand( + "workbench.action.openWalkthrough", + "rust-lang.rust-analyzer#landing", + false, + ); + }; +} + +export function openFAQ(_: Ctx): Cmd { + return async () => { + await vscode.commands.executeCommand( + "workbench.action.openWalkthrough", + "rust-lang.rust-analyzer#faq", + true, + ); + }; +} diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 1931cfe3813..ca77215004d 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -4,13 +4,14 @@ import * as path from "path"; import * as vscode from "vscode"; import { type Env, log, unwrapUndefinable, expectNotUndefined } from "./util"; import type { JsonProject } from "./rust_project"; +import type { Disposable } from "./ctx"; export type RunnableEnvCfgItem = { mask?: string; env: Record<string, string>; platform?: string | string[]; }; -export type RunnableEnvCfg = undefined | Record<string, string> | RunnableEnvCfgItem[]; +export type RunnableEnvCfg = Record<string, string> | RunnableEnvCfgItem[]; export class Config { readonly extensionId = "rust-lang.rust-analyzer"; @@ -29,22 +30,9 @@ export class Config { (opt) => `${this.rootSection}.${opt}`, ); - readonly package: { - version: string; - releaseTag: string | null; - enableProposedApi: boolean | undefined; - } = vscode.extensions.getExtension(this.extensionId)!.packageJSON; - - readonly globalStorageUri: vscode.Uri; - - constructor(ctx: vscode.ExtensionContext) { - this.globalStorageUri = ctx.globalStorageUri; + constructor(disposables: Disposable[]) { this.discoveredWorkspaces = []; - vscode.workspace.onDidChangeConfiguration( - this.onDidChangeConfiguration, - this, - ctx.subscriptions, - ); + vscode.workspace.onDidChangeConfiguration(this.onDidChangeConfiguration, this, disposables); this.refreshLogging(); this.configureLanguage(); } @@ -55,7 +43,10 @@ export class Config { private refreshLogging() { log.setEnabled(this.traceExtension ?? false); - log.info("Extension version:", this.package.version); + log.info( + "Extension version:", + vscode.extensions.getExtension(this.extensionId)!.packageJSON.version, + ); const cfg = Object.entries(this.cfg).filter(([_, val]) => !(val instanceof Function)); log.info("Using configuration", Object.fromEntries(cfg)); @@ -277,10 +268,6 @@ export class Config { return this.get<string[]>("runnables.problemMatcher") || []; } - get cargoRunner() { - return this.get<string | undefined>("cargoRunner"); - } - get testExplorer() { return this.get<boolean | undefined>("testExplorer"); } diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index bf0b84ec358..caa99d76194 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -118,7 +118,7 @@ export class Ctx implements RustAnalyzerExtensionApi { extCtx.subscriptions.push(this); this.version = extCtx.extension.packageJSON.version ?? 
"<unknown>"; this._serverVersion = "<not running>"; - this.config = new Config(extCtx); + this.config = new Config(extCtx.subscriptions); this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); if (this.config.testExplorer) { this.testController = vscode.tests.createTestController( diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts index 58fe1df51f4..f23e3680933 100644 --- a/src/tools/rust-analyzer/editors/code/src/debug.ts +++ b/src/tools/rust-analyzer/editors/code/src/debug.ts @@ -180,7 +180,7 @@ async function getDebugExecutable( env: Record<string, string>, ): Promise<string> { const cargo = new Cargo(runnableArgs.workspaceRoot || ".", debugOutput, env); - const executable = await cargo.executableFromArgs(runnableArgs.cargoArgs); + const executable = await cargo.executableFromArgs(runnableArgs); // if we are here, there were no compilation errors. return executable; diff --git a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts index 699052e4d44..e24893b2509 100644 --- a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts +++ b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts @@ -235,22 +235,43 @@ type RunnableShell = { args: ShellRunnableArgs; }; +export type CommonRunnableArgs = { + /** + * Environment variables to set before running the command. + */ + environment?: Record<string, string>; + /** + * The working directory to run the command in. + */ + cwd: string; +}; + export type ShellRunnableArgs = { kind: string; program: string; args: string[]; - cwd: string; -}; +} & CommonRunnableArgs; export type CargoRunnableArgs = { + /** + * The workspace root directory of the cargo project. + */ workspaceRoot?: string; - cargoArgs: string[]; - cwd: string; - cargoExtraArgs: string[]; + /** + * Arguments to pass to the executable, these will be passed to the command after a `--` argument. + */ executableArgs: string[]; - expectTest?: boolean; + /** + * Arguments to pass to cargo. + */ + cargoArgs: string[]; + /** + * Command to execute instead of `cargo`. + */ + // This is supplied by the user via config. We could pull this through the client config in the + // extension directly, but that would prevent us from honoring the rust-analyzer.toml for it. overrideCargo?: string; -}; +} & CommonRunnableArgs; export type RunnablesParams = { textDocument: lc.TextDocumentIdentifier; diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index ff67bb7bd59..c96f2ae869e 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -178,6 +178,8 @@ function createCommands(): Record<string, CommandFactory> { viewMemoryLayout: { enabled: commands.viewMemoryLayout }, toggleCheckOnSave: { enabled: commands.toggleCheckOnSave }, toggleLSPLogs: { enabled: commands.toggleLSPLogs }, + openWalkthrough: { enabled: commands.openWalkthrough }, + openFAQ: { enabled: commands.openFAQ }, // Internal commands which are invoked by the server. 
applyActionGroup: { enabled: commands.applyActionGroup }, applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand }, diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index 7a9049af0de..783bbc1607d 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -66,23 +66,21 @@ export class RunnableQuickPick implements vscode.QuickPickItem { } } -export function prepareBaseEnv(): Record<string, string> { +export function prepareBaseEnv(base?: Record<string, string>): Record<string, string> { const env: Record<string, string> = { RUST_BACKTRACE: "short" }; - Object.assign(env, process.env as { [key: string]: string }); + Object.assign(env, process.env); + if (base) { + Object.assign(env, base); + } return env; } export function prepareEnv( label: string, runnableArgs: ra.CargoRunnableArgs, - runnableEnvCfg: RunnableEnvCfg, + runnableEnvCfg?: RunnableEnvCfg, ): Record<string, string> { - const env = prepareBaseEnv(); - - if (runnableArgs.expectTest) { - env["UPDATE_EXPECT"] = "1"; - } - + const env = prepareBaseEnv(runnableArgs.environment); const platform = process.platform; const checkPlatform = (it: RunnableEnvCfgItem) => { @@ -113,26 +111,31 @@ export async function createTaskFromRunnable( runnable: ra.Runnable, config: Config, ): Promise<vscode.Task> { - let definition: tasks.RustTargetDefinition; + const target = vscode.workspace.workspaceFolders?.[0]; + + let definition: tasks.TaskDefinition; + let options; + let cargo; if (runnable.kind === "cargo") { const runnableArgs = runnable.args; let args = createCargoArgs(runnableArgs); - let program: string; if (runnableArgs.overrideCargo) { // Split on spaces to allow overrides like "wrapper cargo". const cargoParts = runnableArgs.overrideCargo.split(" "); - program = unwrapUndefinable(cargoParts[0]); + cargo = unwrapUndefinable(cargoParts[0]); args = [...cargoParts.slice(1), ...args]; } else { - program = await toolchain.cargoPath(); + cargo = await toolchain.cargoPath(); } definition = { type: tasks.CARGO_TASK_TYPE, - command: program, - args, + command: unwrapUndefinable(args[0]), + args: args.slice(1), + }; + options = { cwd: runnableArgs.workspaceRoot || ".", env: prepareEnv(runnable.label, runnableArgs, config.runnablesExtraEnv), }; @@ -142,13 +145,14 @@ export async function createTaskFromRunnable( type: tasks.SHELL_TASK_TYPE, command: runnableArgs.program, args: runnableArgs.args, + }; + options = { cwd: runnableArgs.cwd, env: prepareBaseEnv(), }; } - const target = vscode.workspace.workspaceFolders?.[0]; - const exec = await tasks.targetToExecution(definition, config.cargoRunner, true); + const exec = await tasks.targetToExecution(definition, options, cargo); const task = await tasks.buildRustTask( target, definition, @@ -167,9 +171,6 @@ export async function createTaskFromRunnable( export function createCargoArgs(runnableArgs: ra.CargoRunnableArgs): string[] { const args = [...runnableArgs.cargoArgs]; // should be a copy! - if (runnableArgs.cargoExtraArgs) { - args.push(...runnableArgs.cargoExtraArgs); // Append user-specified cargo options. 
- } if (runnableArgs.executableArgs.length > 0) { args.push("--", ...runnableArgs.executableArgs); } diff --git a/src/tools/rust-analyzer/editors/code/src/tasks.ts b/src/tools/rust-analyzer/editors/code/src/tasks.ts index 6f4fbf91889..fac1cc6394f 100644 --- a/src/tools/rust-analyzer/editors/code/src/tasks.ts +++ b/src/tools/rust-analyzer/editors/code/src/tasks.ts @@ -1,6 +1,5 @@ import * as vscode from "vscode"; import type { Config } from "./config"; -import { log, unwrapUndefinable } from "./util"; import * as toolchain from "./toolchain"; // This ends up as the `type` key in tasks.json. RLS also uses `cargo` and @@ -10,21 +9,21 @@ export const SHELL_TASK_TYPE = "shell"; export const RUST_TASK_SOURCE = "rust"; -export type RustTargetDefinition = { +export type TaskDefinition = vscode.TaskDefinition & { readonly type: typeof CARGO_TASK_TYPE | typeof SHELL_TASK_TYPE; -} & vscode.TaskDefinition & - RustTarget; -export type RustTarget = { - // The command to run, usually `cargo`. - command: string; - // Additional arguments passed to the command. args?: string[]; - // The working directory to run the command in. - cwd?: string; - // The shell environment. - env?: { [key: string]: string }; + command: string; }; +export type CargoTaskDefinition = { + env?: Record<string, string>; + type: typeof CARGO_TASK_TYPE; +} & TaskDefinition; + +function isCargoTask(definition: vscode.TaskDefinition): definition is CargoTaskDefinition { + return definition.type === CARGO_TASK_TYPE; +} + class RustTaskProvider implements vscode.TaskProvider { private readonly config: Config; @@ -58,13 +57,13 @@ class RustTaskProvider implements vscode.TaskProvider { for (const workspaceTarget of vscode.workspace.workspaceFolders) { for (const def of defs) { const definition = { - command: cargo, - args: [def.command], - }; - const exec = await targetToExecution(definition, this.config.cargoRunner); + command: def.command, + type: CARGO_TASK_TYPE, + } as const; + const exec = await targetToExecution(definition, {}, cargo); const vscodeTask = await buildRustTask( workspaceTarget, - { ...definition, type: CARGO_TASK_TYPE }, + definition, `cargo ${def.command}`, this.config.problemMatcher, exec, @@ -81,23 +80,13 @@ class RustTaskProvider implements vscode.TaskProvider { // VSCode calls this for every cargo task in the user's tasks.json, // we need to inform VSCode how to execute that command by creating // a ShellExecution for it. 
- if (task.definition.type === CARGO_TASK_TYPE) { - const taskDefinition = task.definition as RustTargetDefinition; - const cargo = await toolchain.cargoPath(); - const exec = await targetToExecution( - { - command: cargo, - args: [taskDefinition.command].concat(taskDefinition.args || []), - cwd: taskDefinition.cwd, - env: taskDefinition.env, - }, - this.config.cargoRunner, - ); - return await buildRustTask( + if (isCargoTask(task.definition)) { + const exec = await targetToExecution(task.definition, { env: task.definition.env }); + return buildRustTask( task.scope, - taskDefinition, + task.definition, task.name, - this.config.problemMatcher, + task.problemMatchers, exec, ); } @@ -108,7 +97,7 @@ class RustTaskProvider implements vscode.TaskProvider { export async function buildRustTask( scope: vscode.WorkspaceFolder | vscode.TaskScope | undefined, - definition: RustTargetDefinition, + definition: TaskDefinition, name: string, problemMatcher: string[], exec: vscode.ProcessExecution | vscode.ShellExecution, @@ -126,40 +115,23 @@ export async function buildRustTask( } export async function targetToExecution( - definition: RustTarget, - customRunner?: string, - throwOnError: boolean = false, + definition: TaskDefinition, + options?: { + env?: { [key: string]: string }; + cwd?: string; + }, + cargo?: string, ): Promise<vscode.ProcessExecution | vscode.ShellExecution> { - if (customRunner) { - const runnerCommand = `${customRunner}.buildShellExecution`; - - try { - const runnerArgs = { - kind: CARGO_TASK_TYPE, - args: definition.args, - cwd: definition.cwd, - env: definition.env, - }; - const customExec = await vscode.commands.executeCommand(runnerCommand, runnerArgs); - if (customExec) { - if (customExec instanceof vscode.ShellExecution) { - return customExec; - } else { - log.debug("Invalid cargo ShellExecution", customExec); - throw "Invalid cargo ShellExecution."; - } - } - // fallback to default processing - } catch (e) { - if (throwOnError) throw `Cargo runner '${customRunner}' failed! 
${e}`; - // fallback to default processing - } + let command, args; + if (isCargoTask(definition)) { + // FIXME: The server should provide cargo + command = cargo || (await toolchain.cargoPath()); + args = [definition.command].concat(definition.args || []); + } else { + command = definition.command; + args = definition.args || []; } - const args = unwrapUndefinable(definition.args); - return new vscode.ProcessExecution(definition.command, args, { - cwd: definition.cwd, - env: definition.env, - }); + return new vscode.ProcessExecution(command, args, options); } export function activateTaskProvider(config: Config): vscode.Disposable { diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts index a48d2d90cce..6a0b5c26d82 100644 --- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts +++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts @@ -4,6 +4,7 @@ import * as path from "path"; import * as readline from "readline"; import * as vscode from "vscode"; import { execute, log, memoizeAsync, unwrapNullable, unwrapUndefinable } from "./util"; +import type { CargoRunnableArgs } from "./lsp_ext"; interface CompilationArtifact { fileName: string; @@ -25,9 +26,8 @@ export class Cargo { ) {} // Made public for testing purposes - static artifactSpec(args: readonly string[]): ArtifactSpec { - const cargoArgs = [...args, "--message-format=json"]; - + static artifactSpec(cargoArgs: string[], executableArgs?: string[]): ArtifactSpec { + cargoArgs = [...cargoArgs, "--message-format=json"]; // arguments for a runnable from the quick pick should be updated. // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_code_lens switch (cargoArgs[0]) { @@ -48,6 +48,9 @@ export class Cargo { // produce 2 artifacts: {"kind": "bin"} and {"kind": "test"} result.filter = (artifacts) => artifacts.filter((it) => it.isTest); } + if (executableArgs) { + cargoArgs.push("--", ...executableArgs); + } return result; } @@ -84,8 +87,10 @@ export class Cargo { return spec.filter?.(artifacts) ?? 
artifacts; } - async executableFromArgs(args: readonly string[]): Promise<string> { - const artifacts = await this.getArtifacts(Cargo.artifactSpec(args)); + async executableFromArgs(runnableArgs: CargoRunnableArgs): Promise<string> { + const artifacts = await this.getArtifacts( + Cargo.artifactSpec(runnableArgs.cargoArgs, runnableArgs.executableArgs), + ); if (artifacts.length === 0) { throw new Error("No compilation artifacts"); diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts index 21bdaf5384d..81850e03f1c 100644 --- a/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts +++ b/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts @@ -12,12 +12,11 @@ function makeRunnable(label: string): ra.Runnable { cargoArgs: [], cwd: ".", executableArgs: [], - cargoExtraArgs: [], }, }; } -function fakePrepareEnv(runnableName: string, config: RunnableEnvCfg): Record<string, string> { +function fakePrepareEnv(runnableName: string, config?: RunnableEnvCfg): Record<string, string> { const runnable = makeRunnable(runnableName); const runnableArgs = runnable.args as ra.CargoRunnableArgs; return prepareEnv(runnable.label, runnableArgs, config); diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts index bafb9569a1c..84501dde6ca 100644 --- a/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts +++ b/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts @@ -13,7 +13,7 @@ export async function getTests(ctx: Context) { USING_MY_VAR: "test test test", MY_VAR: "test", }; - const actualEnv = await substituteVariablesInEnv(envJson); + const actualEnv = substituteVariablesInEnv(envJson); assert.deepStrictEqual(actualEnv, expectedEnv); }); @@ -34,7 +34,7 @@ export async function getTests(ctx: Context) { E_IS_ISOLATED: "test", F_USES_E: "test", }; - const actualEnv = await substituteVariablesInEnv(envJson); + const actualEnv = substituteVariablesInEnv(envJson); assert.deepStrictEqual(actualEnv, expectedEnv); }); @@ -47,7 +47,7 @@ export async function getTests(ctx: Context) { USING_EXTERNAL_VAR: "test test test", }; - const actualEnv = await substituteVariablesInEnv(envJson); + const actualEnv = substituteVariablesInEnv(envJson); assert.deepStrictEqual(actualEnv, expectedEnv); delete process.env["TEST_VARIABLE"]; }); @@ -56,7 +56,7 @@ export async function getTests(ctx: Context) { const envJson = { USING_VSCODE_VAR: "${workspaceFolderBasename}", }; - const actualEnv = await substituteVariablesInEnv(envJson); + const actualEnv = substituteVariablesInEnv(envJson); assert.deepStrictEqual(actualEnv["USING_VSCODE_VAR"], "code"); }); }); diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/tasks.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/tasks.test.ts new file mode 100644 index 00000000000..9bccaaf3d47 --- /dev/null +++ b/src/tools/rust-analyzer/editors/code/tests/unit/tasks.test.ts @@ -0,0 +1,139 @@ +import type { Context } from "."; +import * as vscode from "vscode"; +import * as assert from "assert"; +import { targetToExecution } from "../../src/tasks"; + +export async function getTests(ctx: Context) { + await ctx.suite("Tasks", (suite) => { + suite.addTest("cargo targetToExecution", async () => { + assert.deepStrictEqual( + await targetToExecution({ + type: "cargo", + command: "check", + args: ["foo"], + }).then(executionToSimple), + { + process: 
"cargo", + args: ["check", "foo"], + }, + ); + }); + + suite.addTest("shell targetToExecution", async () => { + assert.deepStrictEqual( + await targetToExecution({ + type: "shell", + command: "thing", + args: ["foo"], + }).then(executionToSimple), + { + process: "thing", + args: ["foo"], + }, + ); + }); + + suite.addTest("base tasks", async () => { + const tasks = await vscode.tasks.fetchTasks({ type: "cargo" }); + const expectedTasks = [ + { + definition: { type: "cargo", command: "build" }, + name: "cargo build", + execution: { + process: "cargo", + args: ["build"], + }, + }, + { + definition: { + type: "cargo", + command: "check", + }, + name: "cargo check", + execution: { + process: "cargo", + args: ["check"], + }, + }, + { + definition: { type: "cargo", command: "clippy" }, + name: "cargo clippy", + execution: { + process: "cargo", + args: ["clippy"], + }, + }, + { + definition: { type: "cargo", command: "test" }, + name: "cargo test", + execution: { + process: "cargo", + args: ["test"], + }, + }, + { + definition: { + type: "cargo", + command: "clean", + }, + name: "cargo clean", + execution: { + process: "cargo", + args: ["clean"], + }, + }, + { + definition: { type: "cargo", command: "run" }, + name: "cargo run", + execution: { + process: "cargo", + args: ["run"], + }, + }, + ]; + tasks.map(f).forEach((actual, i) => { + const expected = expectedTasks[i]; + assert.deepStrictEqual(actual, expected); + }); + }); + }); +} + +function f(task: vscode.Task): { + definition: vscode.TaskDefinition; + name: string; + execution: { + args: string[]; + } & ({ command: string } | { process: string }); +} { + const execution = executionToSimple(task.execution!); + + return { + definition: task.definition, + name: task.name, + execution, + }; +} +function executionToSimple( + taskExecution: vscode.ProcessExecution | vscode.ShellExecution | vscode.CustomExecution, +): { + args: string[]; +} & ({ command: string } | { process: string }) { + const exec = taskExecution as vscode.ProcessExecution | vscode.ShellExecution; + if (exec instanceof vscode.ShellExecution) { + return { + command: typeof exec.command === "string" ? 
exec.command : exec.command.value, + args: exec.args.map((arg) => { + if (typeof arg === "string") { + return arg; + } + return arg.value; + }), + }; + } else { + return { + process: exec.process, + args: exec.args, + }; + } +} diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index c605feb6eea..424c93a7521 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -3d5d7a24f76006b391d8a53d903ae64c1b4a52d2 +bcf1f6db4594ae6132378b179a30cdb3599a863d diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml index 95eed3ee172..d62adb9144b 100644 --- a/src/tools/rust-analyzer/triagebot.toml +++ b/src/tools/rust-analyzer/triagebot.toml @@ -5,6 +5,7 @@ [relabel] allow-unauthenticated = [ "S-*", + "A-*", ] [autolabel."S-waiting-on-review"] diff --git a/src/tools/rust-analyzer/xtask/src/codegen.rs b/src/tools/rust-analyzer/xtask/src/codegen.rs index b23d700263f..acaa65129df 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen.rs @@ -5,12 +5,17 @@ use std::{ use xshell::{cmd, Shell}; -use crate::{flags, project_root}; +use crate::{ + flags::{self, CodegenType}, + project_root, +}; pub(crate) mod assists_doc_tests; pub(crate) mod diagnostics_docs; +pub(crate) mod feature_docs; mod grammar; mod lints; +mod parser_inline_tests; impl flags::Codegen { pub(crate) fn run(self, _sh: &Shell) -> anyhow::Result<()> { @@ -18,6 +23,8 @@ impl flags::Codegen { flags::CodegenType::All => { diagnostics_docs::generate(self.check); assists_doc_tests::generate(self.check); + parser_inline_tests::generate(self.check); + // diagnostics_docs::generate(self.check) doesn't generate any tests // lints::generate(self.check) Updating clones the rust repo, so don't run it unless // explicitly asked for } @@ -25,41 +32,13 @@ impl flags::Codegen { flags::CodegenType::AssistsDocTests => assists_doc_tests::generate(self.check), flags::CodegenType::DiagnosticsDocs => diagnostics_docs::generate(self.check), flags::CodegenType::LintDefinitions => lints::generate(self.check), + flags::CodegenType::ParserTests => parser_inline_tests::generate(self.check), + flags::CodegenType::FeatureDocs => feature_docs::generate(self.check), } Ok(()) } } -fn list_rust_files(dir: &Path) -> Vec<PathBuf> { - let mut res = list_files(dir); - res.retain(|it| { - it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs") - }); - res -} - -fn list_files(dir: &Path) -> Vec<PathBuf> { - let mut res = Vec::new(); - let mut work = vec![dir.to_path_buf()]; - while let Some(dir) = work.pop() { - for entry in dir.read_dir().unwrap() { - let entry = entry.unwrap(); - let file_type = entry.file_type().unwrap(); - let path = entry.path(); - let is_hidden = - path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.'); - if !is_hidden { - if file_type.is_dir() { - work.push(path); - } else if file_type.is_file() { - res.push(path); - } - } - } - } - res -} - #[derive(Clone)] pub(crate) struct CommentBlock { pub(crate) id: String, @@ -174,8 +153,8 @@ fn reformat(text: String) -> String { stdout } -fn add_preamble(generator: &'static str, mut text: String) -> String { - let preamble = format!("//! Generated by `{generator}`, do not edit by hand.\n\n"); +fn add_preamble(cg: CodegenType, mut text: String) -> String { + let preamble = format!("//! 
Generated by `cargo codegen {cg}`, do not edit by hand.\n\n"); text.insert_str(0, &preamble); text } @@ -183,7 +162,7 @@ fn add_preamble(generator: &'static str, mut text: String) -> String { /// Checks that the `file` has the specified `contents`. If that is not the /// case, updates the file and then fails the test. #[allow(clippy::print_stderr)] -fn ensure_file_contents(file: &Path, contents: &str, check: bool) { +fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: bool) { if let Ok(old_contents) = fs::read_to_string(file) { if normalize_newlines(&old_contents) == normalize_newlines(contents) { // File is already up to date. @@ -197,9 +176,11 @@ fn ensure_file_contents(file: &Path, contents: &str, check: bool) { "{} was not up-to-date{}", file.display(), if std::env::var("CI").is_ok() { - "\n NOTE: run `cargo codegen` locally and commit the updated files\n" + format!( + "\n NOTE: run `cargo codegen {cg}` locally and commit the updated files\n" + ) } else { - "" + "".to_owned() } ); } else { diff --git a/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs b/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs index b2d89dde765..d06c9d65df3 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs @@ -5,10 +5,9 @@ use std::{fmt, fs, path::Path}; use stdx::format_to_acc; use crate::{ - codegen::{ - add_preamble, ensure_file_contents, list_rust_files, reformat, CommentBlock, Location, - }, + codegen::{add_preamble, ensure_file_contents, reformat, CommentBlock, Location}, project_root, + util::list_rust_files, }; pub(crate) fn generate(check: bool) { @@ -45,8 +44,9 @@ r#####" buf.push_str(&test) } } - let buf = add_preamble("sourcegen_assists_docs", reformat(buf)); + let buf = add_preamble(crate::flags::CodegenType::AssistsDocTests, reformat(buf)); ensure_file_contents( + crate::flags::CodegenType::AssistsDocTests, &project_root().join("crates/ide-assists/src/tests/generated.rs"), &buf, check, @@ -59,7 +59,7 @@ r#####" // a release. 
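For context, a hedged sketch (not part of the patch) of what the reworked `add_preamble` produces; the exact subcommand name comes from the `Display` impl for `CodegenType` added in `xtask/src/flags.rs` further down:

```rust
// Assuming the signatures shown above: the preamble now names the exact
// `cargo codegen <type>` invocation that regenerates the file.
let text = add_preamble(
    crate::flags::CodegenType::AssistsDocTests,
    reformat(String::from("pub struct Example;")),
);
// `text` now starts with:
// //! Generated by `cargo codegen assists-doc-tests`, do not edit by hand.
```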
let contents = add_preamble( - "sourcegen_assists_docs", + crate::flags::CodegenType::AssistsDocTests, assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"), ); let dst = project_root().join("docs/user/generated_assists.adoc"); @@ -195,3 +195,8 @@ fn reveal_hash_comments(text: &str) -> String { }) .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n")) } + +#[test] +fn test() { + generate(true); +} diff --git a/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs b/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs index cf30531e7f9..4cb8f3f259d 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs @@ -3,8 +3,9 @@ use std::{fmt, fs, io, path::PathBuf}; use crate::{ - codegen::{add_preamble, list_rust_files, CommentBlock, Location}, + codegen::{add_preamble, CommentBlock, Location}, project_root, + util::list_rust_files, }; pub(crate) fn generate(check: bool) { @@ -12,7 +13,7 @@ pub(crate) fn generate(check: bool) { if !check { let contents = diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); - let contents = add_preamble("sourcegen_diagnostic_docs", contents); + let contents = add_preamble(crate::flags::CodegenType::DiagnosticsDocs, contents); let dst = project_root().join("docs/user/generated_diagnostic.adoc"); fs::write(dst, contents).unwrap(); } @@ -63,7 +64,7 @@ fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> { if diagnostic.chars().any(|c| c.is_ascii_uppercase()) { return Err("Diagnostic names can't contain uppercase symbols".into()); } - if diagnostic.chars().any(|c| !c.is_ascii()) { + if !diagnostic.is_ascii() { return Err("Diagnostic can't contain non-ASCII symbols".into()); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs b/src/tools/rust-analyzer/xtask/src/codegen/feature_docs.rs index 2eafb0da692..c6451d888b0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/feature_docs.rs @@ -2,8 +2,13 @@ use std::{fmt, fs, io, path::PathBuf}; -#[test] -fn sourcegen_feature_docs() { +use crate::{ + codegen::{CommentBlock, Location}, + project_root, + util::list_rust_files, +}; + +pub(crate) fn generate(_check: bool) { let features = Feature::collect().unwrap(); let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); let contents = format!( @@ -13,23 +18,23 @@ fn sourcegen_feature_docs() { ", contents.trim() ); - let dst = sourcegen::project_root().join("docs/user/generated_features.adoc"); + let dst = project_root().join("docs/user/generated_features.adoc"); fs::write(dst, contents).unwrap(); } #[derive(Debug)] struct Feature { id: String, - location: sourcegen::Location, + location: Location, doc: String, } impl Feature { fn collect() -> io::Result<Vec<Feature>> { - let crates_dir = sourcegen::project_root().join("crates"); + let crates_dir = project_root().join("crates"); let mut res = Vec::new(); - for path in sourcegen::list_rust_files(&crates_dir) { + for path in list_rust_files(&crates_dir) { collect_file(&mut res, path)?; } res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id)); @@ -37,7 +42,7 @@ impl Feature { fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> io::Result<()> { let text = std::fs::read_to_string(&path)?; - let comment_blocks = sourcegen::CommentBlock::extract("Feature", &text); + let comment_blocks = 
CommentBlock::extract("Feature", &text); for block in comment_blocks { let id = block.id; @@ -45,7 +50,7 @@ impl Feature { panic!("invalid feature name: {id:?}:\n {msg}") } let doc = block.contents.join("\n"); - let location = sourcegen::Location { file: path.clone(), line: block.line }; + let location = Location { file: path.clone(), line: block.line }; acc.push(Feature { id, location, doc }) } diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs index cc2fadc9750..45fa2d37c8f 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs @@ -27,7 +27,12 @@ use self::ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc pub(crate) fn generate(check: bool) { let syntax_kinds = generate_syntax_kinds(KINDS_SRC); let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs"); - ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds, check); + ensure_file_contents( + crate::flags::CodegenType::Grammar, + syntax_kinds_file.as_path(), + &syntax_kinds, + check, + ); let grammar = fs::read_to_string(project_root().join("crates/syntax/rust.ungram")) .unwrap() @@ -37,11 +42,21 @@ pub(crate) fn generate(check: bool) { let ast_tokens = generate_tokens(&ast); let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs"); - ensure_file_contents(ast_tokens_file.as_path(), &ast_tokens, check); + ensure_file_contents( + crate::flags::CodegenType::Grammar, + ast_tokens_file.as_path(), + &ast_tokens, + check, + ); let ast_nodes = generate_nodes(KINDS_SRC, &ast); let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs"); - ensure_file_contents(ast_nodes_file.as_path(), &ast_nodes, check); + ensure_file_contents( + crate::flags::CodegenType::Grammar, + ast_nodes_file.as_path(), + &ast_nodes, + check, + ); } fn generate_tokens(grammar: &AstSrc) -> String { @@ -69,7 +84,7 @@ fn generate_tokens(grammar: &AstSrc) -> String { }); add_preamble( - "sourcegen_ast", + crate::flags::CodegenType::Grammar, reformat( quote! { use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken}; @@ -107,18 +122,21 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { if field.is_many() { quote! { + #[inline] pub fn #method_name(&self) -> AstChildren<#ty> { support::children(&self.syntax) } } } else if let Some(token_kind) = field.token_kind() { quote! { + #[inline] pub fn #method_name(&self) -> Option<#ty> { support::token(&self.syntax, #token_kind) } } } else { quote! { + #[inline] pub fn #method_name(&self) -> Option<#ty> { support::child(&self.syntax) } @@ -141,12 +159,15 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { }, quote! { impl AstNode for #name { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == #kind } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } }, @@ -175,9 +196,11 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } else { quote! 
{ impl AstNode for #name { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, #(#kinds)|*) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { #( @@ -187,6 +210,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { }; Some(res) } + #[inline] fn syntax(&self) -> &SyntaxNode { match self { #( @@ -211,6 +235,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { quote! { #( impl From<#variants> for #name { + #[inline] fn from(node: #variants) -> #name { #name::#variants(node) } @@ -255,12 +280,15 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } } impl AstNode for #name { + #[inline] fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, #(#kinds)|*) } + #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then_some(#name { syntax }) } + #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } @@ -328,7 +356,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } } - let res = add_preamble("sourcegen_ast", reformat(res)); + let res = add_preamble(crate::flags::CodegenType::Grammar, reformat(res)); res.replace("#[derive", "\n#[derive") } @@ -458,7 +486,7 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String { } }; - add_preamble("sourcegen_ast", reformat(ast.to_string())) + add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string())) } fn to_upper_snake_case(s: &str) -> String { @@ -692,6 +720,8 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r | "self_ty" | "iterable" | "condition" + | "args" + | "body" ); if manually_implemented { return; @@ -780,20 +810,21 @@ fn extract_enums(ast: &mut AstSrc) { } } -fn extract_struct_traits(ast: &mut AstSrc) { - let traits: &[(&str, &[&str])] = &[ - ("HasAttrs", &["attrs"]), - ("HasName", &["name"]), - ("HasVisibility", &["visibility"]), - ("HasGenericParams", &["generic_param_list", "where_clause"]), - ("HasTypeBounds", &["type_bound_list", "colon_token"]), - ("HasModuleItem", &["items"]), - ("HasLoopBody", &["label", "loop_body"]), - ("HasArgList", &["arg_list"]), - ]; +const TRAITS: &[(&str, &[&str])] = &[ + ("HasAttrs", &["attrs"]), + ("HasName", &["name"]), + ("HasVisibility", &["visibility"]), + ("HasGenericParams", &["generic_param_list", "where_clause"]), + ("HasGenericArgs", &["generic_arg_list"]), + ("HasTypeBounds", &["type_bound_list", "colon_token"]), + ("HasModuleItem", &["items"]), + ("HasLoopBody", &["label", "loop_body"]), + ("HasArgList", &["arg_list"]), +]; +fn extract_struct_traits(ast: &mut AstSrc) { for node in &mut ast.nodes { - for (name, methods) in traits { + for (name, methods) in TRAITS { extract_struct_trait(node, name, methods); } } @@ -873,3 +904,8 @@ impl AstNodeSrc { }); } } + +#[test] +fn test() { + generate(true); +} diff --git a/src/tools/rust-analyzer/xtask/src/codegen/lints.rs b/src/tools/rust-analyzer/xtask/src/codegen/lints.rs index 6975f9328e5..f097b5817be 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/lints.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/lints.rs @@ -6,8 +6,9 @@ use stdx::format_to; use xshell::{cmd, Shell}; use crate::{ - codegen::{add_preamble, ensure_file_contents, list_files, reformat}, + codegen::{add_preamble, ensure_file_contents, reformat}, project_root, + util::list_files, }; const DESTINATION: &str = "crates/ide-db/src/generated/lints.rs"; @@ -28,7 +29,7 @@ pub(crate) fn generate(check: bool) { cmd!( sh, "git -C {rust_repo} submodule update --init 
--recursive --depth=1 -- - compiler library src/tools" + compiler library src/tools src/doc/book" ) .run() .unwrap(); @@ -73,10 +74,15 @@ pub struct LintGroup { .unwrap(); generate_descriptor_clippy(&mut contents, &lints_json); - let contents = add_preamble("sourcegen_lints", reformat(contents)); + let contents = add_preamble(crate::flags::CodegenType::LintDefinitions, reformat(contents)); let destination = project_root().join(DESTINATION); - ensure_file_contents(destination.as_path(), &contents, check); + ensure_file_contents( + crate::flags::CodegenType::LintDefinitions, + destination.as_path(), + &contents, + check, + ); } /// Parses the output of `rustdoc -Whelp` and prints `Lint` and `LintGroup` constants into `buf`. diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs b/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs index 5a71bfd82b1..5983b06e1b9 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs @@ -8,16 +8,21 @@ use std::{ path::{Path, PathBuf}, }; -#[test] -fn sourcegen_parser_tests() { - let grammar_dir = sourcegen::project_root().join(Path::new("crates/parser/src/grammar")); +use crate::{ + codegen::{ensure_file_contents, CommentBlock}, + project_root, + util::list_rust_files, +}; + +pub(crate) fn generate(check: bool) { + let grammar_dir = project_root().join(Path::new("crates/parser/src/grammar")); let tests = tests_from_dir(&grammar_dir); - install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok"); - install_tests(&tests.err, "crates/parser/test_data/parser/inline/err"); + install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok", check); + install_tests(&tests.err, "crates/parser/test_data/parser/inline/err", check); - fn install_tests(tests: &HashMap<String, Test>, into: &str) { - let tests_dir = sourcegen::project_root().join(into); + fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) { + let tests_dir = project_root().join(into); if !tests_dir.is_dir() { fs::create_dir_all(&tests_dir).unwrap(); } @@ -37,7 +42,7 @@ fn sourcegen_parser_tests() { tests_dir.join(file_name) } }; - sourcegen::ensure_file_contents(&path, &test.text); + ensure_file_contents(crate::flags::CodegenType::ParserTests, &path, &test.text, check); } } } @@ -57,7 +62,7 @@ struct Tests { fn collect_tests(s: &str) -> Vec<Test> { let mut res = Vec::new(); - for comment_block in sourcegen::CommentBlock::extract_untagged(s) { + for comment_block in CommentBlock::extract_untagged(s) { let first_line = &comment_block.contents[0]; let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") { (name.to_owned(), true) @@ -80,7 +85,7 @@ fn collect_tests(s: &str) -> Vec<Test> { fn tests_from_dir(dir: &Path) -> Tests { let mut res = Tests::default(); - for entry in sourcegen::list_rust_files(dir) { + for entry in list_rust_files(dir) { process_file(&mut res, entry.as_path()); } let grammar_rs = dir.parent().unwrap().join("grammar.rs"); @@ -122,3 +127,8 @@ fn existing_tests(dir: &Path, ok: bool) -> HashMap<String, (PathBuf, Test)> { } res } + +#[test] +fn test() { + generate(true); +} diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs index dd7bfd0bda0..cf4a22d476f 100644 --- a/src/tools/rust-analyzer/xtask/src/flags.rs +++ b/src/tools/rust-analyzer/xtask/src/flags.rs @@ -1,6 +1,6 @@ #![allow(unreachable_pub)] -use std::str::FromStr; +use std::{fmt, 
str::FromStr}; use crate::install::{ClientOpt, ServerOpt}; @@ -73,6 +73,8 @@ xflags::xflags! { optional codegen_type: CodegenType optional --check } + + cmd tidy {} } } @@ -96,9 +98,13 @@ pub enum XtaskCmd { Metrics(Metrics), Bb(Bb), Codegen(Codegen), + Tidy(Tidy), } #[derive(Debug)] +pub struct Tidy {} + +#[derive(Debug)] pub struct Install { pub client: bool, pub code_bin: Option<String>, @@ -185,6 +191,22 @@ pub enum CodegenType { AssistsDocTests, DiagnosticsDocs, LintDefinitions, + ParserTests, + FeatureDocs, +} + +impl fmt::Display for CodegenType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::All => write!(f, "all"), + Self::Grammar => write!(f, "grammar"), + Self::AssistsDocTests => write!(f, "assists-doc-tests"), + Self::DiagnosticsDocs => write!(f, "diagnostics-docs"), + Self::LintDefinitions => write!(f, "lint-definitions"), + Self::ParserTests => write!(f, "parser-tests"), + Self::FeatureDocs => write!(f, "feature-docs"), + } + } } impl FromStr for CodegenType { @@ -195,7 +217,9 @@ impl FromStr for CodegenType { "grammar" => Ok(Self::Grammar), "assists-doc-tests" => Ok(Self::AssistsDocTests), "diagnostics-docs" => Ok(Self::DiagnosticsDocs), - "lints-definitions" => Ok(Self::LintDefinitions), + "lint-definitions" => Ok(Self::LintDefinitions), + "parser-tests" => Ok(Self::ParserTests), + "feature-docs" => Ok(Self::FeatureDocs), _ => Err("Invalid option".to_owned()), } } diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs index e0705763035..5c312da1dd7 100644 --- a/src/tools/rust-analyzer/xtask/src/main.rs +++ b/src/tools/rust-analyzer/xtask/src/main.rs @@ -19,6 +19,8 @@ mod install; mod metrics; mod publish; mod release; +mod tidy; +mod util; use anyhow::bail; use std::{env, path::PathBuf}; @@ -51,6 +53,7 @@ fn main() -> anyhow::Result<()> { )?; Ok(()) } + flags::XtaskCmd::Tidy(cmd) => cmd.run(sh), } } diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs index 5699053a23d..3eda2bc0274 100644 --- a/src/tools/rust-analyzer/xtask/src/release.rs +++ b/src/tools/rust-analyzer/xtask/src/release.rs @@ -32,6 +32,7 @@ impl flags::Release { // Generates bits of manual.adoc. codegen::diagnostics_docs::generate(false); codegen::assists_doc_tests::generate(false); + codegen::feature_docs::generate(false); let website_root = project_root().join("../rust-analyzer.github.io"); { @@ -119,12 +120,11 @@ impl flags::RustcPull { // Fetch given rustc commit. cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git") .run() - .map_err(|e| { + .inspect_err(|_| { // Try to un-do the previous `git commit`, to leave the repo in the state we found it it. 
cmd!(sh, "git reset --hard HEAD^") .run() .expect("FAILED to clean up again after failed `git fetch`, sorry for that"); - e }) .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs b/src/tools/rust-analyzer/xtask/src/tidy.rs index 7dd6382cfac..e85f5182865 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/src/tools/rust-analyzer/xtask/src/tidy.rs @@ -6,23 +6,29 @@ use std::{ use xshell::Shell; -#[cfg(not(feature = "in-rust-tree"))] use xshell::cmd; -#[test] -fn check_lsp_extensions_docs() { - let sh = &Shell::new().unwrap(); +use crate::{flags::Tidy, project_root, util::list_files}; + +impl Tidy { + pub(crate) fn run(&self, sh: &Shell) -> anyhow::Result<()> { + check_lsp_extensions_docs(sh); + files_are_tidy(sh); + check_licenses(sh); + Ok(()) + } +} +fn check_lsp_extensions_docs(sh: &Shell) { let expected_hash = { - let lsp_ext_rs = sh - .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp/ext.rs")) - .unwrap(); + let lsp_ext_rs = + sh.read_file(project_root().join("crates/rust-analyzer/src/lsp/ext.rs")).unwrap(); stable_hash(lsp_ext_rs.as_str()) }; let actual_hash = { let lsp_extensions_md = - sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap(); + sh.read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap(); let text = lsp_extensions_md .lines() .find_map(|line| line.strip_prefix("lsp/ext.rs hash:")) @@ -45,11 +51,8 @@ Please adjust docs/dev/lsp-extensions.md. } } -#[test] -fn files_are_tidy() { - let sh = &Shell::new().unwrap(); - - let files = sourcegen::list_files(&sourcegen::project_root().join("crates")); +fn files_are_tidy(sh: &Shell) { + let files = list_files(&project_root().join("crates")); let mut tidy_docs = TidyDocs::default(); let mut tidy_marks = TidyMarks::default(); @@ -121,11 +124,7 @@ fn check_cargo_toml(path: &Path, text: String) { } } -#[cfg(not(feature = "in-rust-tree"))] -#[test] -fn check_licenses() { - let sh = &Shell::new().unwrap(); - +fn check_licenses(sh: &Shell) { let expected = " (MIT OR Apache-2.0) AND Unicode-DFS-2016 0BSD OR MIT OR Apache-2.0 @@ -155,7 +154,7 @@ Zlib OR Apache-2.0 OR MIT let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap(); let mut licenses = meta - .split(|c| c == ',' || c == '{' || c == '}') + .split([',', '{', '}']) .filter(|it| it.contains(r#""license""#)) .map(|it| it.trim()) .map(|it| it[r#""license":"#.len()..].trim_matches('"')) @@ -277,7 +276,7 @@ impl TidyDocs { } fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { - p.strip_prefix(sourcegen::project_root()) + p.strip_prefix(project_root()) .unwrap() .components() .rev() @@ -339,3 +338,8 @@ fn find_marks(set: &mut HashSet<String>, text: &str, mark: &str) { } } } + +#[test] +fn test() { + Tidy {}.run(&Shell::new().unwrap()).unwrap(); +} diff --git a/src/tools/rust-analyzer/xtask/src/util.rs b/src/tools/rust-analyzer/xtask/src/util.rs new file mode 100644 index 00000000000..39f52938c8c --- /dev/null +++ b/src/tools/rust-analyzer/xtask/src/util.rs @@ -0,0 +1,31 @@ +use std::path::{Path, PathBuf}; + +pub(crate) fn list_rust_files(dir: &Path) -> Vec<PathBuf> { + let mut res = list_files(dir); + res.retain(|it| { + it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs") + }); + res +} + +pub(crate) fn list_files(dir: &Path) -> Vec<PathBuf> { + let mut res = Vec::new(); + let mut work = 
vec![dir.to_path_buf()];
+    while let Some(dir) = work.pop() {
+        for entry in dir.read_dir().unwrap() {
+            let entry = entry.unwrap();
+            let file_type = entry.file_type().unwrap();
+            let path = entry.path();
+            let is_hidden =
+                path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
+            if !is_hidden {
+                if file_type.is_dir() {
+                    work.push(path);
+                } else if file_type.is_file() {
+                    res.push(path);
+                }
+            }
+        }
+    }
+    res
+}
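
Note: the flags.rs hunk above renames the CLI value "lints-definitions" to "lint-definitions" and adds a Display impl for CodegenType, so the flag string and the enum now round-trip in both directions. The following standalone Rust sketch is illustrative only: the variant and string names are taken from the patch, but the enum name and everything around it are hypothetical, not the actual xtask code.

    use std::{fmt, str::FromStr};

    // Standalone illustration of the Display <-> FromStr round trip the patch
    // establishes for CodegenType; only the kebab-case strings come from the diff.
    #[derive(Debug, Clone, Copy, PartialEq)]
    enum CodegenKind {
        Grammar,
        LintDefinitions,
        ParserTests,
        FeatureDocs,
    }

    impl fmt::Display for CodegenKind {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.write_str(match self {
                Self::Grammar => "grammar",
                Self::LintDefinitions => "lint-definitions",
                Self::ParserTests => "parser-tests",
                Self::FeatureDocs => "feature-docs",
            })
        }
    }

    impl FromStr for CodegenKind {
        type Err = String;
        fn from_str(s: &str) -> Result<Self, Self::Err> {
            match s {
                "grammar" => Ok(Self::Grammar),
                "lint-definitions" => Ok(Self::LintDefinitions),
                "parser-tests" => Ok(Self::ParserTests),
                "feature-docs" => Ok(Self::FeatureDocs),
                _ => Err(format!("invalid codegen type: {s}")),
            }
        }
    }

    fn main() {
        // Every variant survives a to_string -> parse round trip.
        for kind in [
            CodegenKind::Grammar,
            CodegenKind::LintDefinitions,
            CodegenKind::ParserTests,
            CodegenKind::FeatureDocs,
        ] {
            assert_eq!(kind.to_string().parse::<CodegenKind>().unwrap(), kind);
        }
    }

The real definitions live in xtask/src/flags.rs, as shown in the hunk above; the sketch only demonstrates the naming convention the patch settles on.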

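Note: the release.rs hunk replaces `.map_err(|e| { ...; e })` with `.inspect_err(|_| ...)`. Result::inspect_err (available on stable Rust since 1.76) runs a side effect with a reference to the error and returns the Result unchanged, so the rollback closure no longer has to hand the error back. A minimal sketch under stated assumptions: std::process::Command stands in for the xshell::cmd! invocation used in the diff, and the exit-status handling is simplified.

    use std::process::Command;

    // Hypothetical stand-in for the rustc-pull fetch in release.rs: on failure,
    // roll back the previous commit. `inspect_err` passes the Result straight
    // through, so the closure does not need to re-return the error the way the
    // old `map_err(|e| { ...; e })` closure did.
    fn fetch_and_rollback_on_failure() -> std::io::Result<()> {
        Command::new("git")
            .args(["fetch", "origin"]) // stand-in for the josh fetch in the diff
            .status()
            .map(drop) // simplified: the real code also checks the exit status via xshell
            .inspect_err(|_| {
                let _ = Command::new("git").args(["reset", "--hard", "HEAD^"]).status();
            })
    }

    fn main() {
        if let Err(err) = fetch_and_rollback_on_failure() {
            eprintln!("fetch failed: {err}");
        }
    }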