Diffstat (limited to 'src/tools/rust-analyzer/crates'): 217 files changed, 6363 insertions, 3637 deletions
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index a0fc8c31eaf..c2cea071905 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -129,9 +129,9 @@ impl fmt::Display for CrateName { } impl ops::Deref for CrateName { - type Target = str; - fn deref(&self) -> &str { - self.0.as_str() + type Target = Symbol; + fn deref(&self) -> &Symbol { + &self.0 } } @@ -230,8 +230,8 @@ impl fmt::Display for CrateDisplayName { } impl ops::Deref for CrateDisplayName { - type Target = str; - fn deref(&self) -> &str { + type Target = Symbol; + fn deref(&self) -> &Symbol { &self.crate_name } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index 12f5f6ad79a..bec66278772 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -95,10 +95,14 @@ impl FunctionData { .map(Box::new); let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); if flags.contains(FnFlags::HAS_UNSAFE_KW) - && !crate_graph[krate].edition.at_least_2024() && attrs.by_key(&sym::rustc_deprecated_safe_2024).exists() { flags.remove(FnFlags::HAS_UNSAFE_KW); + flags.insert(FnFlags::DEPRECATED_SAFE_2024); + } + + if attrs.by_key(&sym::target_feature).exists() { + flags.insert(FnFlags::HAS_TARGET_FEATURE); } Arc::new(FunctionData { @@ -148,6 +152,10 @@ impl FunctionData { self.flags.contains(FnFlags::HAS_UNSAFE_KW) } + pub fn is_deprecated_safe_2024(&self) -> bool { + self.flags.contains(FnFlags::DEPRECATED_SAFE_2024) + } + pub fn is_safe(&self) -> bool { self.flags.contains(FnFlags::HAS_SAFE_KW) } @@ -155,6 +163,10 @@ impl FunctionData { pub fn is_varargs(&self) -> bool { self.flags.contains(FnFlags::IS_VARARGS) } + + pub fn has_target_feature(&self) -> bool { + self.flags.contains(FnFlags::HAS_TARGET_FEATURE) + } } fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> { @@ -238,6 +250,7 @@ bitflags::bitflags! 
{ const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 3; const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 4; const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 5; + const RUSTC_PAREN_SUGAR = 1 << 6; } } @@ -282,6 +295,9 @@ impl TraitData { if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } + if attrs.by_key(&sym::rustc_paren_sugar).exists() { + flags |= TraitFlags::RUSTC_PAREN_SUGAR; + } let mut skip_array_during_method_dispatch = attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs index 8fc19854033..5d1834a8642 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs @@ -9,6 +9,7 @@ use hir_expand::name::Name; use intern::sym; use la_arena::Arena; use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; +use rustc_hashes::Hash64; use triomphe::Arc; use tt::iter::TtElement; @@ -172,7 +173,7 @@ fn parse_repr_tt(tt: &TopSubtree) -> Option<ReprOptions> { } } - Some(ReprOptions { int, align: max_align, pack: min_pack, flags, field_shuffle_seed: 0 }) + Some(ReprOptions { int, align: max_align, pack: min_pack, flags, field_shuffle_seed: Hash64::ZERO }) } impl StructData { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index bf6cc1dcade..598a850898b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -10,12 +10,12 @@ use triomphe::Arc; use crate::{ attr::{Attrs, AttrsWithOwner}, - body::{scope::ExprScopes, Body, BodySourceMap}, data::{ adt::{EnumData, EnumVariantData, StructData, VariantData}, ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData, }, + expr_store::{scope::ExprScopes, Body, BodySourceMap}, generics::GenericParams, import_map::ImportMap, item_tree::{AttrOwner, ItemTree, ItemTreeSourceMaps}, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs index 0f73595347b..e9318d146dd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs @@ -31,9 +31,9 @@ pub mod keys { use crate::{ dyn_map::{DynMap, Policy}, - BlockId, ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId, - LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, - TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId, + BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, + ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, + TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId, }; pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>; @@ -44,6 +44,7 @@ pub mod keys { pub const STATIC: Key<ast::Static, StaticId> = Key::new(); pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new(); pub const IMPL: Key<ast::Impl, ImplId> = Key::new(); + pub const EXTERN_BLOCK: Key<ast::ExternBlock, ExternBlockId> = Key::new(); pub const TRAIT: Key<ast::Trait, TraitId> = Key::new(); pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new(); pub const STRUCT: Key<ast::Struct, StructId> = 
Key::new(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs index de439249306..5ff6a7ffe56 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs @@ -1,18 +1,19 @@ -//! Defines `Body`: a lowered representation of bodies of functions, statics and +//! Defines `ExpressionStore`: a lowered representation of functions, statics and //! consts. +mod body; mod lower; mod pretty; pub mod scope; + #[cfg(test)] mod tests; use std::ops::{Deref, Index}; -use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{name::Name, ExpandError, InFile}; -use la_arena::{Arena, ArenaMap, Idx, RawIdx}; +use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use span::{Edition, MacroFileId, SyntaxContextData}; @@ -22,19 +23,18 @@ use tt::TextRange; use crate::{ db::DefDatabase, - expander::Expander, hir::{ - dummy_expr_id, Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, - LabelId, Pat, PatId, RecordFieldPat, Statement, + Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat, + PatId, RecordFieldPat, Statement, }, - item_tree::AttrOwner, nameres::DefMap, path::{ModPath, Path}, - src::HasSource, type_ref::{TypeRef, TypeRefId, TypesMap, TypesSourceMap}, - BlockId, DefWithBodyId, HasModule, Lookup, SyntheticSyntax, + BlockId, DefWithBodyId, Lookup, SyntheticSyntax, }; +pub use self::body::{Body, BodySourceMap}; + /// A wrapper around [`span::SyntaxContextId`] that is intended only for comparisons. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct HygieneId(span::SyntaxContextId); @@ -58,9 +58,29 @@ impl HygieneId { } } -/// The body of an item (function, const etc.). +pub type ExprPtr = AstPtr<ast::Expr>; +pub type ExprSource = InFile<ExprPtr>; + +pub type PatPtr = AstPtr<ast::Pat>; +pub type PatSource = InFile<PatPtr>; + +pub type LabelPtr = AstPtr<ast::Label>; +pub type LabelSource = InFile<LabelPtr>; + +pub type FieldPtr = AstPtr<ast::RecordExprField>; +pub type FieldSource = InFile<FieldPtr>; + +pub type PatFieldPtr = AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>; +pub type PatFieldSource = InFile<PatFieldPtr>; + +pub type ExprOrPatPtr = AstPtr<Either<ast::Expr, ast::Pat>>; +pub type ExprOrPatSource = InFile<ExprOrPatPtr>; + +pub type SelfParamPtr = AstPtr<ast::SelfParam>; +pub type MacroCallPtr = AstPtr<ast::MacroCall>; + #[derive(Debug, Eq, PartialEq)] -pub struct Body { +pub struct ExpressionStore { pub exprs: Arena<Expr>, pub pats: Arena<Pat>, pub bindings: Arena<Binding>, @@ -68,19 +88,9 @@ pub struct Body { /// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the /// top level expression, it will not be listed in here. pub binding_owners: FxHashMap<BindingId, ExprId>, - /// The patterns for the function's parameters. While the parameter types are - /// part of the function signature, the patterns are not (they don't change - /// the external type of the function). - /// - /// If this `Body` is for the body of a constant, this will just be - /// empty. - pub params: Box<[PatId]>, - pub self_param: Option<BindingId>, - /// The `ExprId` of the actual body expression. - pub body_expr: ExprId, pub types: TypesMap, - /// Block expressions in this body that may contain inner items. - block_scopes: Vec<BlockId>, + /// Block expressions in this store that may contain inner items. 
+ block_scopes: Box<[BlockId]>, /// A map from binding to its hygiene ID. /// @@ -92,56 +102,24 @@ pub struct Body { binding_hygiene: FxHashMap<BindingId, HygieneId>, /// A map from an variable usages to their hygiene ID. /// - /// Expressions that can be recorded here are single segment path, although not all single segments path refer + /// Expressions (and destructuing patterns) that can be recorded here are single segment path, although not all single segments path refer /// to variables and have hygiene (some refer to items, we don't know at this stage). - expr_hygiene: FxHashMap<ExprId, HygieneId>, - /// A map from a destructuring assignment possible variable usages to their hygiene ID. - pat_hygiene: FxHashMap<PatId, HygieneId>, + ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>, } -pub type ExprPtr = AstPtr<ast::Expr>; -pub type ExprSource = InFile<ExprPtr>; - -pub type PatPtr = AstPtr<ast::Pat>; -pub type PatSource = InFile<PatPtr>; - -pub type LabelPtr = AstPtr<ast::Label>; -pub type LabelSource = InFile<LabelPtr>; - -pub type FieldPtr = AstPtr<ast::RecordExprField>; -pub type FieldSource = InFile<FieldPtr>; - -pub type PatFieldPtr = AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>; -pub type PatFieldSource = InFile<PatFieldPtr>; - -pub type ExprOrPatPtr = AstPtr<Either<ast::Expr, ast::Pat>>; -pub type ExprOrPatSource = InFile<ExprOrPatPtr>; - -/// An item body together with the mapping from syntax nodes to HIR expression -/// IDs. This is needed to go from e.g. a position in a file to the HIR -/// expression containing it; but for type inference etc., we want to operate on -/// a structure that is agnostic to the actual positions of expressions in the -/// file, so that we don't recompute types whenever some whitespace is typed. -/// -/// One complication here is that, due to macro expansion, a single `Body` might -/// be spread across several files. So, for each ExprId and PatId, we record -/// both the HirFileId and the position inside the file. However, we only store -/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle -/// this properly for macros. -#[derive(Default, Debug, Eq, PartialEq)] -pub struct BodySourceMap { +#[derive(Debug, Eq, PartialEq, Default)] +pub struct ExpressionStoreSourceMap { // AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map // to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected). expr_map: FxHashMap<ExprSource, ExprOrPatId>, - expr_map_back: ArenaMap<ExprId, ExprSource>, + expr_map_back: ArenaMap<ExprId, ExprOrPatSource>, - pat_map: FxHashMap<PatSource, PatId>, + pat_map: FxHashMap<PatSource, ExprOrPatId>, pat_map_back: ArenaMap<PatId, ExprOrPatSource>, label_map: FxHashMap<LabelSource, LabelId>, label_map_back: ArenaMap<LabelId, LabelSource>, - self_param: Option<InFile<AstPtr<ast::SelfParam>>>, binding_definitions: FxHashMap<BindingId, SmallVec<[PatId; 4]>>, /// We don't create explicit nodes for record fields (`S { record_field: 92 }`). @@ -153,11 +131,25 @@ pub struct BodySourceMap { template_map: Option<Box<FormatTemplate>>, - expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>, + expansions: FxHashMap<InFile<MacroCallPtr>, MacroFileId>, - /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in + /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in /// the source map (since they're just as volatile). 
- diagnostics: Vec<BodyDiagnostic>, + diagnostics: Vec<ExpressionStoreDiagnostics>, +} + +/// The body of an item (function, const etc.). +#[derive(Debug, Eq, PartialEq, Default)] +pub struct ExpressionStoreBuilder { + pub exprs: Arena<Expr>, + pub pats: Arena<Pat>, + pub bindings: Arena<Binding>, + pub labels: Arena<Label>, + pub binding_owners: FxHashMap<BindingId, ExprId>, + pub types: TypesMap, + block_scopes: Vec<BlockId>, + binding_hygiene: FxHashMap<BindingId, HygieneId>, + ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>, } #[derive(Default, Debug, Eq, PartialEq)] @@ -171,166 +163,62 @@ struct FormatTemplate { /// The value stored for each capture is its template literal and offset inside it. The template literal /// is from the `format_args[_nl]!()` macro and so needs to be mapped up once to go to the user-written /// template. - implicit_capture_to_source: FxHashMap<ExprId, InFile<(AstPtr<ast::Expr>, TextRange)>>, + implicit_capture_to_source: FxHashMap<ExprId, InFile<(ExprPtr, TextRange)>>, } #[derive(Debug, Eq, PartialEq)] -pub enum BodyDiagnostic { +pub enum ExpressionStoreDiagnostics { InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, - MacroError { node: InFile<AstPtr<ast::MacroCall>>, err: ExpandError }, - UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath }, + MacroError { node: InFile<MacroCallPtr>, err: ExpandError }, + UnresolvedMacroCall { node: InFile<MacroCallPtr>, path: ModPath }, UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String }, UndeclaredLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, } -impl Body { - pub(crate) fn body_with_source_map_query( - db: &dyn DefDatabase, - def: DefWithBodyId, - ) -> (Arc<Body>, Arc<BodySourceMap>) { - let _p = tracing::info_span!("body_with_source_map_query").entered(); - let mut params = None; - - let mut is_async_fn = false; - let InFile { file_id, value: body } = { - match def { - DefWithBodyId::FunctionId(f) => { - let data = db.function_data(f); - let f = f.lookup(db); - let src = f.source(db); - params = src.value.param_list().map(move |param_list| { - let item_tree = f.id.item_tree(db); - let func = &item_tree[f.id.value]; - let krate = f.container.module(db).krate; - let crate_graph = db.crate_graph(); - ( - param_list, - (0..func.params.len()).map(move |idx| { - item_tree - .attrs( - db, - krate, - AttrOwner::Param( - f.id.value, - Idx::from_raw(RawIdx::from(idx as u32)), - ), - ) - .is_cfg_enabled(&crate_graph[krate].cfg_options) - }), - ) - }); - is_async_fn = data.is_async(); - src.map(|it| it.body().map(ast::Expr::from)) - } - DefWithBodyId::ConstId(c) => { - let c = c.lookup(db); - let src = c.source(db); - src.map(|it| it.body()) - } - DefWithBodyId::StaticId(s) => { - let s = s.lookup(db); - let src = s.source(db); - src.map(|it| it.body()) - } - DefWithBodyId::VariantId(v) => { - let s = v.lookup(db); - let src = s.source(db); - src.map(|it| it.expr()) - } - DefWithBodyId::InTypeConstId(c) => c.lookup(db).id.map(|_| c.source(db).expr()), - } - }; - let module = def.module(db); - let expander = Expander::new(db, file_id, module); - let (mut body, mut source_map) = - Body::new(db, def, expander, params, body, module.krate, is_async_fn); - body.shrink_to_fit(); - source_map.shrink_to_fit(); - - (Arc::new(body), Arc::new(source_map)) - } - - pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<Body> { - db.body_with_source_map(def).0 - } - - 
/// Returns an iterator over all block expressions in this body that define inner items. - pub fn blocks<'a>( - &'a self, - db: &'a dyn DefDatabase, - ) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + 'a { - self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block))) - } - - pub fn pretty_print( - &self, - db: &dyn DefDatabase, - owner: DefWithBodyId, - edition: Edition, - ) -> String { - pretty::print_body_hir(db, self, owner, edition) - } - - pub fn pretty_print_expr( - &self, - db: &dyn DefDatabase, - owner: DefWithBodyId, - expr: ExprId, - edition: Edition, - ) -> String { - pretty::print_expr_hir(db, self, owner, expr, edition) - } - - pub fn pretty_print_pat( - &self, - db: &dyn DefDatabase, - owner: DefWithBodyId, - pat: PatId, - oneline: bool, - edition: Edition, - ) -> String { - pretty::print_pat_hir(db, self, owner, pat, oneline, edition) - } - - fn new( - db: &dyn DefDatabase, - owner: DefWithBodyId, - expander: Expander, - params: Option<(ast::ParamList, impl Iterator<Item = bool>)>, - body: Option<ast::Expr>, - krate: CrateId, - is_async_fn: bool, - ) -> (Body, BodySourceMap) { - lower::lower(db, owner, expander, params, body, krate, is_async_fn) - } - - fn shrink_to_fit(&mut self) { +impl ExpressionStoreBuilder { + fn finish(self) -> ExpressionStore { let Self { - body_expr: _, - params: _, - self_param: _, block_scopes, - exprs, - labels, - pats, - bindings, - binding_owners, - binding_hygiene, - expr_hygiene, - pat_hygiene, - types, + mut exprs, + mut labels, + mut pats, + mut bindings, + mut binding_owners, + mut binding_hygiene, + mut ident_hygiene, + mut types, } = self; - block_scopes.shrink_to_fit(); exprs.shrink_to_fit(); labels.shrink_to_fit(); pats.shrink_to_fit(); bindings.shrink_to_fit(); binding_owners.shrink_to_fit(); binding_hygiene.shrink_to_fit(); - expr_hygiene.shrink_to_fit(); - pat_hygiene.shrink_to_fit(); + ident_hygiene.shrink_to_fit(); types.shrink_to_fit(); + + ExpressionStore { + exprs, + pats, + bindings, + labels, + binding_owners, + types, + block_scopes: block_scopes.into_boxed_slice(), + binding_hygiene, + ident_hygiene, + } + } +} + +impl ExpressionStore { + /// Returns an iterator over all block expressions in this store that define inner items. 
+ pub fn blocks<'a>( + &'a self, + db: &'a dyn DefDatabase, + ) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + 'a { + self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block))) } pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) { @@ -658,11 +546,11 @@ impl Body { } pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId { - self.expr_hygiene.get(&expr).copied().unwrap_or(HygieneId::ROOT) + self.ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT) } pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId { - self.pat_hygiene.get(&pat).copied().unwrap_or(HygieneId::ROOT) + self.ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT) } pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId { @@ -673,27 +561,7 @@ impl Body { } } -impl Default for Body { - fn default() -> Self { - Self { - body_expr: dummy_expr_id(), - exprs: Default::default(), - pats: Default::default(), - bindings: Default::default(), - labels: Default::default(), - params: Default::default(), - block_scopes: Default::default(), - binding_owners: Default::default(), - self_param: Default::default(), - binding_hygiene: Default::default(), - expr_hygiene: Default::default(), - pat_hygiene: Default::default(), - types: Default::default(), - } - } -} - -impl Index<ExprId> for Body { +impl Index<ExprId> for ExpressionStore { type Output = Expr; fn index(&self, expr: ExprId) -> &Expr { @@ -701,7 +569,7 @@ impl Index<ExprId> for Body { } } -impl Index<PatId> for Body { +impl Index<PatId> for ExpressionStore { type Output = Pat; fn index(&self, pat: PatId) -> &Pat { @@ -709,7 +577,7 @@ impl Index<PatId> for Body { } } -impl Index<LabelId> for Body { +impl Index<LabelId> for ExpressionStore { type Output = Label; fn index(&self, label: LabelId) -> &Label { @@ -717,7 +585,7 @@ impl Index<LabelId> for Body { } } -impl Index<BindingId> for Body { +impl Index<BindingId> for ExpressionStore { type Output = Binding; fn index(&self, b: BindingId) -> &Binding { @@ -725,7 +593,7 @@ impl Index<BindingId> for Body { } } -impl Index<TypeRefId> for Body { +impl Index<TypeRefId> for ExpressionStore { type Output = TypeRef; fn index(&self, b: TypeRefId) -> &TypeRef { @@ -735,15 +603,15 @@ impl Index<TypeRefId> for Body { // FIXME: Change `node_` prefix to something more reasonable. // Perhaps `expr_syntax` and `expr_id`? 
-impl BodySourceMap { +impl ExpressionStoreSourceMap { pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> { match id { - ExprOrPatId::ExprId(id) => self.expr_syntax(id).map(|it| it.map(AstPtr::wrap_left)), + ExprOrPatId::ExprId(id) => self.expr_syntax(id), ExprOrPatId::PatId(id) => self.pat_syntax(id), } } - pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> { + pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> { self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax) } @@ -757,9 +625,7 @@ impl BodySourceMap { self.expansions.get(&src).cloned() } - pub fn macro_calls( - &self, - ) -> impl Iterator<Item = (InFile<AstPtr<ast::MacroCall>>, MacroFileId)> + '_ { + pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroFileId)> + '_ { self.expansions.iter().map(|(&a, &b)| (a, b)) } @@ -767,11 +633,7 @@ impl BodySourceMap { self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax) } - pub fn self_param_syntax(&self) -> Option<InFile<AstPtr<ast::SelfParam>>> { - self.self_param - } - - pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> { + pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> { self.pat_map.get(&node.map(AstPtr::new)).cloned() } @@ -801,9 +663,7 @@ impl BodySourceMap { self.expr_map.get(&src).copied() } - pub fn expansions( - &self, - ) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> { + pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroFileId)> { self.expansions.iter() } @@ -823,7 +683,7 @@ impl BodySourceMap { pub fn format_args_implicit_capture( &self, capture_expr: ExprId, - ) -> Option<InFile<(AstPtr<ast::Expr>, TextRange)>> { + ) -> Option<InFile<(ExprPtr, TextRange)>> { self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied() } @@ -837,14 +697,13 @@ impl BodySourceMap { .zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref)) } - /// Get a reference to the body source map's diagnostics. - pub fn diagnostics(&self) -> &[BodyDiagnostic] { + /// Get a reference to the source map's diagnostics. + pub fn diagnostics(&self) -> &[ExpressionStoreDiagnostics] { &self.diagnostics } fn shrink_to_fit(&mut self) { let Self { - self_param: _, expr_map, expr_map_back, pat_map, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs new file mode 100644 index 00000000000..a55fec4f8b1 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs @@ -0,0 +1,175 @@ +//! Defines `Body`: a lowered representation of functions, statics and +//! consts. +use std::ops; + +use hir_expand::{InFile, Lookup}; +use la_arena::{Idx, RawIdx}; +use span::Edition; +use syntax::ast; +use triomphe::Arc; + +use crate::{ + db::DefDatabase, + expander::Expander, + expr_store::{lower, pretty, ExpressionStore, ExpressionStoreSourceMap, SelfParamPtr}, + hir::{BindingId, ExprId, PatId}, + item_tree::AttrOwner, + src::HasSource, + DefWithBodyId, HasModule, +}; + +/// The body of an item (function, const etc.). +#[derive(Debug, Eq, PartialEq)] +pub struct Body { + pub store: ExpressionStore, + /// The patterns for the function's parameters. While the parameter types are + /// part of the function signature, the patterns are not (they don't change + /// the external type of the function). 
+ /// + /// If this `Body` is for the body of a constant, this will just be + /// empty. + pub params: Box<[PatId]>, + pub self_param: Option<BindingId>, + /// The `ExprId` of the actual body expression. + pub body_expr: ExprId, +} + +impl ops::Deref for Body { + type Target = ExpressionStore; + + fn deref(&self) -> &Self::Target { + &self.store + } +} + +/// An item body together with the mapping from syntax nodes to HIR expression +/// IDs. This is needed to go from e.g. a position in a file to the HIR +/// expression containing it; but for type inference etc., we want to operate on +/// a structure that is agnostic to the actual positions of expressions in the +/// file, so that we don't recompute types whenever some whitespace is typed. +/// +/// One complication here is that, due to macro expansion, a single `Body` might +/// be spread across several files. So, for each ExprId and PatId, we record +/// both the HirFileId and the position inside the file. However, we only store +/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle +/// this properly for macros. +#[derive(Default, Debug, Eq, PartialEq)] +pub struct BodySourceMap { + pub self_param: Option<InFile<SelfParamPtr>>, + pub store: ExpressionStoreSourceMap, +} + +impl ops::Deref for BodySourceMap { + type Target = ExpressionStoreSourceMap; + + fn deref(&self) -> &Self::Target { + &self.store + } +} + +impl Body { + pub(crate) fn body_with_source_map_query( + db: &dyn DefDatabase, + def: DefWithBodyId, + ) -> (Arc<Body>, Arc<BodySourceMap>) { + let _p = tracing::info_span!("body_with_source_map_query").entered(); + let mut params = None; + + let mut is_async_fn = false; + let InFile { file_id, value: body } = { + match def { + DefWithBodyId::FunctionId(f) => { + let data = db.function_data(f); + let f = f.lookup(db); + let src = f.source(db); + params = src.value.param_list().map(move |param_list| { + let item_tree = f.id.item_tree(db); + let func = &item_tree[f.id.value]; + let krate = f.container.module(db).krate; + let crate_graph = db.crate_graph(); + ( + param_list, + (0..func.params.len()).map(move |idx| { + item_tree + .attrs( + db, + krate, + AttrOwner::Param( + f.id.value, + Idx::from_raw(RawIdx::from(idx as u32)), + ), + ) + .is_cfg_enabled(&crate_graph[krate].cfg_options) + }), + ) + }); + is_async_fn = data.is_async(); + src.map(|it| it.body().map(ast::Expr::from)) + } + DefWithBodyId::ConstId(c) => { + let c = c.lookup(db); + let src = c.source(db); + src.map(|it| it.body()) + } + DefWithBodyId::StaticId(s) => { + let s = s.lookup(db); + let src = s.source(db); + src.map(|it| it.body()) + } + DefWithBodyId::VariantId(v) => { + let s = v.lookup(db); + let src = s.source(db); + src.map(|it| it.expr()) + } + DefWithBodyId::InTypeConstId(c) => c.lookup(db).id.map(|_| c.source(db).expr()), + } + }; + let module = def.module(db); + let expander = Expander::new(db, file_id, module); + let (body, mut source_map) = + lower::lower_body(db, def, expander, params, body, module.krate, is_async_fn); + source_map.store.shrink_to_fit(); + + (Arc::new(body), Arc::new(source_map)) + } + + pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<Body> { + db.body_with_source_map(def).0 + } + + pub fn pretty_print( + &self, + db: &dyn DefDatabase, + owner: DefWithBodyId, + edition: Edition, + ) -> String { + pretty::print_body_hir(db, self, owner, edition) + } + + pub fn pretty_print_expr( + &self, + db: &dyn DefDatabase, + owner: DefWithBodyId, + expr: ExprId, + edition: Edition, + ) -> 
String { + pretty::print_expr_hir(db, self, owner, expr, edition) + } + + pub fn pretty_print_pat( + &self, + db: &dyn DefDatabase, + owner: DefWithBodyId, + pat: PatId, + oneline: bool, + edition: Edition, + ) -> String { + pretty::print_pat_hir(db, self, owner, pat, oneline, edition) + } +} + +impl BodySourceMap { + pub fn self_param_syntax(&self) -> Option<InFile<SelfParamPtr>> { + self.self_param + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index 16c7b5ca00a..6e505a6b112 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -29,11 +29,14 @@ use triomphe::Arc; use crate::{ attr::Attrs, - body::{Body, BodyDiagnostic, BodySourceMap, ExprPtr, HygieneId, LabelPtr, PatPtr}, builtin_type::BuiltinUint, data::adt::StructKind, db::DefDatabase, expander::Expander, + expr_store::{ + Body, BodySourceMap, ExprPtr, ExpressionStore, ExpressionStoreBuilder, + ExpressionStoreDiagnostics, ExpressionStoreSourceMap, HygieneId, LabelPtr, PatPtr, + }, hir::{ format_args::{ self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind, @@ -41,8 +44,8 @@ use crate::{ FormatPlaceholder, FormatSign, FormatTrait, }, Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind, - Expr, ExprId, Item, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability, - OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Statement, + Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId, + RecordFieldPat, RecordLitField, Statement, }, item_scope::BuiltinShadowMode, lang_item::LangItem, @@ -55,11 +58,11 @@ use crate::{ type FxIndexSet<K> = indexmap::IndexSet<K, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>; -pub(super) fn lower( +pub(super) fn lower_body( db: &dyn DefDatabase, owner: DefWithBodyId, expander: Expander, - params: Option<(ast::ParamList, impl Iterator<Item = bool>)>, + parameters: Option<(ast::ParamList, impl Iterator<Item = bool>)>, body: Option<ast::Expr>, krate: CrateId, is_async_fn: bool, @@ -75,35 +78,146 @@ pub(super) fn lower( }; Arc::clone(span_map) }); - ExprCollector { - db, - owner, - krate, - def_map: expander.module.def_map(db), - source_map: BodySourceMap::default(), - ast_id_map: db.ast_id_map(expander.current_file_id()), - body: Body::default(), - expander, - current_try_block_label: None, - is_lowering_coroutine: false, - label_ribs: Vec::new(), - current_binding_owner: None, - awaitable_context: None, - current_span_map: span_map, - current_block_legacy_macro_defs_count: FxHashMap::default(), - } - .collect(params, body, is_async_fn) + + let mut self_param = None; + let mut source_map_self_param = None; + let mut params = vec![]; + let mut collector = ExprCollector::new(db, owner, expander, krate, span_map); + + let skip_body = match owner { + DefWithBodyId::FunctionId(it) => db.attrs(it.into()), + DefWithBodyId::StaticId(it) => db.attrs(it.into()), + DefWithBodyId::ConstId(it) => db.attrs(it.into()), + DefWithBodyId::InTypeConstId(_) => Attrs::EMPTY, + DefWithBodyId::VariantId(it) => db.attrs(it.into()), + } + .rust_analyzer_tool() + .any(|attr| *attr.path() == tool_path![skip]); + // If #[rust_analyzer::skip] annotated, only construct enough information for the signature + // and skip the body. 
+ if skip_body { + if let Some((param_list, mut attr_enabled)) = parameters { + if let Some(self_param_syn) = + param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false)) + { + let is_mutable = + self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none(); + let binding_id: la_arena::Idx<Binding> = collector.alloc_binding( + Name::new_symbol_root(sym::self_.clone()), + BindingAnnotation::new(is_mutable, false), + ); + self_param = Some(binding_id); + source_map_self_param = + Some(collector.expander.in_file(AstPtr::new(&self_param_syn))); + } + params = param_list + .params() + .zip(attr_enabled) + .filter(|(_, enabled)| *enabled) + .map(|_| collector.missing_pat()) + .collect(); + }; + let body_expr = collector.missing_expr(); + return ( + Body { + store: collector.store.finish(), + params: params.into_boxed_slice(), + self_param, + body_expr, + }, + BodySourceMap { self_param: source_map_self_param, store: collector.source_map }, + ); + } + + if let Some((param_list, mut attr_enabled)) = parameters { + if let Some(self_param_syn) = + param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false)) + { + let is_mutable = + self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none(); + let binding_id: la_arena::Idx<Binding> = collector.alloc_binding( + Name::new_symbol_root(sym::self_.clone()), + BindingAnnotation::new(is_mutable, false), + ); + let hygiene = self_param_syn + .name() + .map(|name| collector.hygiene_id_for(name.syntax().text_range().start())) + .unwrap_or(HygieneId::ROOT); + if !hygiene.is_root() { + collector.store.binding_hygiene.insert(binding_id, hygiene); + } + self_param = Some(binding_id); + source_map_self_param = Some(collector.expander.in_file(AstPtr::new(&self_param_syn))); + } + + for (param, _) in param_list.params().zip(attr_enabled).filter(|(_, enabled)| *enabled) { + let param_pat = collector.collect_pat_top(param.pat()); + params.push(param_pat); + } + }; + + let body_expr = collector.collect( + body, + if is_async_fn { + Awaitable::Yes + } else { + match owner { + DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"), + DefWithBodyId::StaticId(..) => Awaitable::No("static"), + DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => { + Awaitable::No("constant") + } + DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"), + } + }, + ); + + ( + Body { + store: collector.store.finish(), + params: params.into_boxed_slice(), + self_param, + body_expr, + }, + BodySourceMap { self_param: source_map_self_param, store: collector.source_map }, + ) +} + +#[allow(dead_code)] +pub(super) fn lower( + db: &dyn DefDatabase, + owner: ExprStoreOwnerId, + expander: Expander, + body: Option<ast::Expr>, + krate: CrateId, +) -> (ExpressionStore, ExpressionStoreSourceMap) { + // We cannot leave the root span map empty and let any identifier from it be treated as root, + // because when inside nested macros `SyntaxContextId`s from the outer macro will be interleaved + // with the inner macro, and that will cause confusion because they won't be the same as `ROOT` + // even though they should be the same. Also, when the body comes from multiple expansions, their + // hygiene is different. 
+ let span_map = expander.current_file_id().macro_file().map(|_| { + let SpanMap::ExpansionSpanMap(span_map) = expander.span_map(db) else { + panic!("in a macro file there should be `ExpansionSpanMap`"); + }; + Arc::clone(span_map) + }); + let mut expr_collector = ExprCollector::new(db, owner, expander, krate, span_map); + expr_collector.collect(body, Awaitable::No("?")); + (expr_collector.store.finish(), expr_collector.source_map) } +type ExprStoreOwnerId = DefWithBodyId; + struct ExprCollector<'a> { db: &'a dyn DefDatabase, expander: Expander, - owner: DefWithBodyId, + owner: ExprStoreOwnerId, def_map: Arc<DefMap>, ast_id_map: Arc<AstIdMap>, krate: CrateId, - body: Body, - source_map: BodySourceMap, + store: ExpressionStoreBuilder, + source_map: ExpressionStoreSourceMap, is_lowering_coroutine: bool, @@ -157,6 +271,7 @@ impl RibKind { } } +#[derive(PartialEq, Eq, Debug, Copy, Clone)] enum Awaitable { Yes, No(&'static str), @@ -180,12 +295,12 @@ impl BindingList { let id = *self.map.entry((name, hygiene)).or_insert_with_key(|(name, _)| { let id = ec.alloc_binding(name.clone(), mode); if !hygiene.is_root() { - ec.body.binding_hygiene.insert(id, hygiene); + ec.store.binding_hygiene.insert(id, hygiene); } id }); - if ec.body.bindings[id].mode != mode { - ec.body.bindings[id].problems = Some(BindingProblems::BoundInconsistently); + if ec.store.bindings[id].mode != mode { + ec.store.bindings[id].problems = Some(BindingProblems::BoundInconsistently); } self.check_is_used(ec, id); id @@ -195,11 +310,11 @@ impl BindingList { match self.is_used.get(&id) { None => { if self.reject_new { - ec.body.bindings[id].problems = Some(BindingProblems::NotBoundAcrossAll); + ec.store.bindings[id].problems = Some(BindingProblems::NotBoundAcrossAll); } } Some(true) => { - ec.body.bindings[id].problems = Some(BindingProblems::BoundMoreThanOnce); + ec.store.bindings[id].problems = Some(BindingProblems::BoundMoreThanOnce); } Some(false) => {} } @@ -208,93 +323,37 @@ impl BindingList { } impl ExprCollector<'_> { - fn collect( - mut self, - param_list: Option<(ast::ParamList, impl Iterator<Item = bool>)>, - body: Option<ast::Expr>, - is_async_fn: bool, - ) -> (Body, BodySourceMap) { - let skip_body = match self.owner { - DefWithBodyId::FunctionId(it) => self.db.attrs(it.into()), - DefWithBodyId::StaticId(it) => self.db.attrs(it.into()), - DefWithBodyId::ConstId(it) => self.db.attrs(it.into()), - DefWithBodyId::InTypeConstId(_) => Attrs::EMPTY, - DefWithBodyId::VariantId(it) => self.db.attrs(it.into()), + fn new( + db: &dyn DefDatabase, + owner: ExprStoreOwnerId, + expander: Expander, + krate: CrateId, + span_map: Option<Arc<ExpansionSpanMap>>, + ) -> ExprCollector<'_> { + ExprCollector { + db, + owner, + krate, + def_map: expander.module.def_map(db), + source_map: ExpressionStoreSourceMap::default(), + ast_id_map: db.ast_id_map(expander.current_file_id()), + store: ExpressionStoreBuilder::default(), + expander, + current_try_block_label: None, + is_lowering_coroutine: false, + label_ribs: Vec::new(), + current_binding_owner: None, + awaitable_context: None, + current_span_map: span_map, + current_block_legacy_macro_defs_count: FxHashMap::default(), } - .rust_analyzer_tool() - .any(|attr| *attr.path() == tool_path![skip]); - // If #[rust_analyzer::skip] annotated, only construct enough information for the signature - // and skip the body. 
- if skip_body { - self.body.body_expr = self.missing_expr(); - if let Some((param_list, mut attr_enabled)) = param_list { - if let Some(self_param) = - param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false)) - { - let is_mutable = - self_param.mut_token().is_some() && self_param.amp_token().is_none(); - let binding_id: la_arena::Idx<Binding> = self.alloc_binding( - Name::new_symbol_root(sym::self_.clone()), - BindingAnnotation::new(is_mutable, false), - ); - self.body.self_param = Some(binding_id); - self.source_map.self_param = - Some(self.expander.in_file(AstPtr::new(&self_param))); - } - self.body.params = param_list - .params() - .zip(attr_enabled) - .filter(|(_, enabled)| *enabled) - .map(|_| self.missing_pat()) - .collect(); - }; - return (self.body, self.source_map); - } - - self.awaitable_context.replace(if is_async_fn { - Awaitable::Yes - } else { - match self.owner { - DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"), - DefWithBodyId::StaticId(..) => Awaitable::No("static"), - DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => { - Awaitable::No("constant") - } - DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"), - } - }); - if let Some((param_list, mut attr_enabled)) = param_list { - let mut params = vec![]; - if let Some(self_param) = - param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false)) - { - let is_mutable = - self_param.mut_token().is_some() && self_param.amp_token().is_none(); - let binding_id: la_arena::Idx<Binding> = self.alloc_binding( - Name::new_symbol_root(sym::self_.clone()), - BindingAnnotation::new(is_mutable, false), - ); - let hygiene = self_param - .name() - .map(|name| self.hygiene_id_for(name.syntax().text_range().start())) - .unwrap_or(HygieneId::ROOT); - if !hygiene.is_root() { - self.body.binding_hygiene.insert(binding_id, hygiene); - } - self.body.self_param = Some(binding_id); - self.source_map.self_param = Some(self.expander.in_file(AstPtr::new(&self_param))); - } + } - for (param, _) in param_list.params().zip(attr_enabled).filter(|(_, enabled)| *enabled) - { - let param_pat = self.collect_pat_top(param.pat()); - params.push(param_pat); - } - self.body.params = params.into_boxed_slice(); - }; - self.body.body_expr = self.with_label_rib(RibKind::Closure, |this| { - if is_async_fn { - match body { + fn collect(&mut self, expr: Option<ast::Expr>, awaitable: Awaitable) -> ExprId { + self.awaitable_context.replace(awaitable); + self.with_label_rib(RibKind::Closure, |this| { + if awaitable == Awaitable::Yes { + match expr { Some(e) => { let syntax_ptr = AstPtr::new(&e); let expr = this.collect_expr(e); @@ -306,15 +365,13 @@ impl ExprCollector<'_> { None => this.missing_expr(), } } else { - this.collect_expr_opt(body) + this.collect_expr_opt(expr) } - }); - - (self.body, self.source_map) + }) } fn ctx(&mut self) -> LowerCtx<'_> { - self.expander.ctx(self.db, &mut self.body.types, &mut self.source_map.types) + self.expander.ctx(self.db, &mut self.store.types, &mut self.source_map.types) } fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { @@ -390,7 +447,7 @@ impl ExprCollector<'_> { parent: this.owner, root: inner_expr, }); - this.body.exprs[result_expr_id] = Expr::Const(it); + this.store.exprs[result_expr_id] = Expr::Const(it); this.current_binding_owner = prev_binding_owner; result_expr_id }) @@ -480,7 +537,7 @@ impl ExprCollector<'_> { .unwrap_or((Expr::Missing, HygieneId::ROOT)); let expr_id = self.alloc_expr(path, syntax_ptr); if !hygiene.is_root() { - 
self.body.expr_hygiene.insert(expr_id, hygiene); + self.store.ident_hygiene.insert(expr_id.into(), hygiene); } expr_id } @@ -562,10 +619,12 @@ impl ExprCollector<'_> { ast::Expr::AwaitExpr(e) => { let expr = self.collect_expr_opt(e.expr()); if let Awaitable::No(location) = self.is_lowering_awaitable_block() { - self.source_map.diagnostics.push(BodyDiagnostic::AwaitOutsideOfAsync { - node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)), - location: location.to_string(), - }); + self.source_map.diagnostics.push( + ExpressionStoreDiagnostics::AwaitOutsideOfAsync { + node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)), + location: location.to_string(), + }, + ); } self.alloc_expr(Expr::Await { expr }, syntax_ptr) } @@ -646,7 +705,7 @@ impl ExprCollector<'_> { this.is_lowering_coroutine = prev_is_lowering_coroutine; this.current_binding_owner = prev_binding_owner; this.current_try_block_label = prev_try_block_label; - this.body.exprs[result_expr_id] = Expr::Closure { + this.store.exprs[result_expr_id] = Expr::Closure { args: args.into(), arg_types: arg_types.into(), ret_type, @@ -752,7 +811,7 @@ impl ExprCollector<'_> { } fn parse_path(&mut self, path: ast::Path) -> Option<Path> { - self.expander.parse_path(self.db, path, &mut self.body.types, &mut self.source_map.types) + self.expander.parse_path(self.db, path, &mut self.store.types, &mut self.source_map.types) } fn collect_expr_path(&mut self, e: ast::PathExpr) -> Option<(Path, HygieneId)> { @@ -781,7 +840,7 @@ impl ExprCollector<'_> { let src = self.expander.in_file(AstPtr::new(&expr).wrap_left()); let expr = self.collect_expr(expr); // Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`. - let id = self.body.pats.alloc(Pat::Expr(expr)); + let id = self.store.pats.alloc(Pat::Expr(expr)); self.source_map.pat_map_back.insert(id, src); id }) @@ -835,7 +894,7 @@ impl ExprCollector<'_> { .unwrap_or((Pat::Missing, HygieneId::ROOT)); let pat_id = self.alloc_pat_from_expr(path, syntax_ptr); if !hygiene.is_root() { - self.body.pat_hygiene.insert(pat_id, hygiene); + self.store.ident_hygiene.insert(pat_id.into(), hygiene); } pat_id } @@ -967,7 +1026,7 @@ impl ExprCollector<'_> { ) -> ExprId { let (id, prev_owner) = self.initialize_binding_owner(syntax_ptr); let tmp = job(self); - self.body.exprs[id] = mem::replace(&mut self.body.exprs[tmp], Expr::Missing); + self.store.exprs[id] = mem::replace(&mut self.store.exprs[tmp], Expr::Missing); self.current_binding_owner = prev_owner; id } @@ -979,8 +1038,9 @@ impl ExprCollector<'_> { let Some(try_from_output) = self.lang_path(LangItem::TryTraitFromOutput) else { return self.collect_block(e); }; - let label = self - .alloc_label_desugared(Label { name: Name::generate_new_name(self.body.labels.len()) }); + let label = self.alloc_label_desugared(Label { + name: Name::generate_new_name(self.store.labels.len()), + }); let old_label = self.current_try_block_label.replace(label); let ptr = AstPtr::new(&e).upcast(); @@ -1006,7 +1066,7 @@ impl ExprCollector<'_> { ) } }; - let Expr::Block { tail, .. } = &mut self.body.exprs[expr_id] else { + let Expr::Block { tail, .. 
} = &mut self.store.exprs[expr_id] else { unreachable!("block was lowered to non-block"); }; *tail = Some(next_tail); @@ -1112,7 +1172,7 @@ impl ExprCollector<'_> { this.collect_expr_opt(e.loop_body().map(|it| it.into())) }), }; - let iter_name = Name::generate_new_name(self.body.exprs.len()); + let iter_name = Name::generate_new_name(self.store.exprs.len()); let iter_expr = self.alloc_expr(Expr::Path(Path::from(iter_name.clone())), syntax_ptr); let iter_expr_mut = self.alloc_expr( Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut }, @@ -1177,7 +1237,7 @@ impl ExprCollector<'_> { let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr); let expr = self .alloc_expr(Expr::Call { callee: try_branch, args: Box::new([operand]) }, syntax_ptr); - let continue_name = Name::generate_new_name(self.body.bindings.len()); + let continue_name = Name::generate_new_name(self.store.bindings.len()); let continue_binding = self.alloc_binding(continue_name.clone(), BindingAnnotation::Unannotated); let continue_bpat = @@ -1192,7 +1252,7 @@ impl ExprCollector<'_> { guard: None, expr: self.alloc_expr(Expr::Path(Path::from(continue_name)), syntax_ptr), }; - let break_name = Name::generate_new_name(self.body.bindings.len()); + let break_name = Name::generate_new_name(self.store.bindings.len()); let break_binding = self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated); let break_bpat = self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None }); self.add_definition_to_binding(break_binding, break_bpat); @@ -1261,17 +1321,19 @@ impl ExprCollector<'_> { Ok(res) => res, Err(UnresolvedMacro { path }) => { if record_diagnostics { - self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall { - node: InFile::new(outer_file, syntax_ptr), - path, - }); + self.source_map.diagnostics.push( + ExpressionStoreDiagnostics::UnresolvedMacroCall { + node: InFile::new(outer_file, syntax_ptr), + path, + }, + ); } return collector(self, None); } }; if record_diagnostics { if let Some(err) = res.err { - self.source_map.diagnostics.push(BodyDiagnostic::MacroError { + self.source_map.diagnostics.push(ExpressionStoreDiagnostics::MacroError { node: InFile::new(outer_file, syntax_ptr), err, }); @@ -1464,7 +1526,7 @@ impl ExprCollector<'_> { let (module, def_map) = match block_id.map(|block_id| (self.db.block_def_map(block_id), block_id)) { Some((def_map, block_id)) => { - self.body.block_scopes.push(block_id); + self.store.block_scopes.push(block_id); (def_map.module_id(DefMap::ROOT), def_map) } None => (self.expander.module, self.def_map.clone()), @@ -1621,7 +1683,7 @@ impl ExprCollector<'_> { pats.push(self.collect_pat(rest, binding_list)); for (&id, &is_used) in binding_list.is_used.iter() { if !is_used { - self.body.bindings[id].problems = + self.store.bindings[id].problems = Some(BindingProblems::NotBoundAcrossAll); } } @@ -1722,23 +1784,33 @@ impl ExprCollector<'_> { self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| { this.collect_pat_opt(expanded_pat, binding_list) }); - self.source_map.pat_map.insert(src, pat); + self.source_map.pat_map.insert(src, pat.into()); return pat; } None => Pat::Missing, }, - // FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference. 
ast::Pat::RangePat(p) => { - let mut range_part_lower = |p: Option<ast::Pat>| { - p.and_then(|it| match &it { - ast::Pat::LiteralPat(it) => { - Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0))) - } - pat @ (ast::Pat::IdentPat(_) | ast::Pat::PathPat(_)) => { - let subpat = self.collect_pat(pat.clone(), binding_list); - Some(Box::new(LiteralOrConst::Const(subpat))) + let mut range_part_lower = |p: Option<ast::Pat>| -> Option<ExprId> { + p.and_then(|it| { + let ptr = PatPtr::new(&it); + match &it { + ast::Pat::LiteralPat(it) => Some(self.alloc_expr_from_pat( + Expr::Literal(pat_literal_to_hir(it)?.0), + ptr, + )), + ast::Pat::IdentPat(ident) if ident.is_simple_ident() => ident + .name() + .map(|name| name.as_name()) + .map(Path::from) + .map(|path| self.alloc_expr_from_pat(Expr::Path(path), ptr)), + ast::Pat::PathPat(p) => p + .path() + .and_then(|path| self.parse_path(path)) + .map(|parsed| self.alloc_expr_from_pat(Expr::Path(parsed), ptr)), + // We only need to handle literal, ident (if bare) and path patterns here, + // as any other pattern as a range pattern operand is semantically invalid. + _ => None, } - _ => None, }) }; let start = range_part_lower(p.start()); @@ -1801,7 +1873,7 @@ impl ExprCollector<'_> { } }); if let Some(pat) = pat.left() { - self.source_map.pat_map.insert(src, pat); + self.source_map.pat_map.insert(src, pat.into()); } pat } @@ -1825,7 +1897,7 @@ impl ExprCollector<'_> { return Some(()); } - self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode { + self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode { node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())), cfg, opts: self.expander.cfg_options().clone(), @@ -1853,7 +1925,7 @@ impl ExprCollector<'_> { fn resolve_label( &self, lifetime: Option<ast::Lifetime>, - ) -> Result<Option<LabelId>, BodyDiagnostic> { + ) -> Result<Option<LabelId>, ExpressionStoreDiagnostics> { let Some(lifetime) = lifetime else { return Ok(None) }; let (mut hygiene_id, mut hygiene_info) = match &self.current_span_map { None => (HygieneId::ROOT, None), @@ -1877,7 +1949,7 @@ impl ExprCollector<'_> { return if self.is_label_valid_from_rib(rib_idx) { Ok(Some(*id)) } else { - Err(BodyDiagnostic::UnreachableLabel { + Err(ExpressionStoreDiagnostics::UnreachableLabel { name, node: self.expander.in_file(AstPtr::new(&lifetime)), }) @@ -1903,7 +1975,7 @@ impl ExprCollector<'_> { } } - Err(BodyDiagnostic::UndeclaredLabel { + Err(ExpressionStoreDiagnostics::UndeclaredLabel { name, node: self.expander.in_file(AstPtr::new(&lifetime)), }) @@ -1934,7 +2006,7 @@ impl ExprCollector<'_> { f: impl FnOnce(&mut Self) -> T, ) -> T { self.label_ribs.push(LabelRib::new(RibKind::Normal( - self.body[label].name.clone(), + self.store.labels[label].name.clone(), label, hygiene, ))); @@ -2023,7 +2095,7 @@ impl ExprCollector<'_> { ); } if !hygiene.is_root() { - self.body.expr_hygiene.insert(expr_id, hygiene); + self.store.ident_hygiene.insert(expr_id.into(), hygiene); } expr_id }, @@ -2171,17 +2243,27 @@ impl ExprCollector<'_> { let unsafe_arg_new = self.alloc_expr_desugared(Expr::Path(unsafe_arg_new)); let unsafe_arg_new = self.alloc_expr_desugared(Expr::Call { callee: unsafe_arg_new, args: Box::default() }); - let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe { + let mut unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe { id: None, - // We collect the unused expressions here so that we still infer them instead of - // dropping them out of the expression tree - statements: fmt - .orphans - .into_iter() - 
.map(|expr| Statement::Expr { expr, has_semi: true }) - .collect(), + statements: Box::new([]), tail: Some(unsafe_arg_new), }); + if !fmt.orphans.is_empty() { + unsafe_arg_new = self.alloc_expr_desugared(Expr::Block { + id: None, + // We collect the unused expressions here so that we still infer them instead of + // dropping them out of the expression tree. We cannot store them in the `Unsafe` + // block because then unsafe blocks within them will get a false "unused unsafe" + // diagnostic (rustc has a notion of builtin unsafe blocks, but we don't). + statements: fmt + .orphans + .into_iter() + .map(|expr| Statement::Expr { expr, has_semi: true }) + .collect(), + tail: Some(unsafe_arg_new), + label: None, + }); + } let idx = self.alloc_expr( Expr::Call { @@ -2417,20 +2499,20 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> impl ExprCollector<'_> { fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { let src = self.expander.in_file(ptr); - let id = self.body.exprs.alloc(expr); - self.source_map.expr_map_back.insert(id, src); + let id = self.store.exprs.alloc(expr); + self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left)); self.source_map.expr_map.insert(src, id.into()); id } // FIXME: desugared exprs don't have ptr, that's wrong and should be fixed. // Migrate to alloc_expr_desugared_with_ptr and then rename back fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { - self.body.exprs.alloc(expr) + self.store.exprs.alloc(expr) } fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { let src = self.expander.in_file(ptr); - let id = self.body.exprs.alloc(expr); - self.source_map.expr_map_back.insert(id, src); + let id = self.store.exprs.alloc(expr); + self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left)); // We intentionally don't fill this as it could overwrite a non-desugared entry // self.source_map.expr_map.insert(src, id); id @@ -2440,45 +2522,54 @@ impl ExprCollector<'_> { } fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId { - let binding = self.body.bindings.alloc(Binding { name, mode, problems: None }); + let binding = self.store.bindings.alloc(Binding { name, mode, problems: None }); if let Some(owner) = self.current_binding_owner { - self.body.binding_owners.insert(binding, owner); + self.store.binding_owners.insert(binding, owner); } binding } fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId { let src = self.expander.in_file(ptr); - let id = self.body.pats.alloc(pat); + let id = self.store.pats.alloc(pat); self.source_map.expr_map.insert(src, id.into()); self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left)); id } + + fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId { + let src = self.expander.in_file(ptr); + let id = self.store.exprs.alloc(expr); + self.source_map.pat_map.insert(src, id.into()); + self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right)); + id + } + fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { let src = self.expander.in_file(ptr); - let id = self.body.pats.alloc(pat); + let id = self.store.pats.alloc(pat); self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right)); - self.source_map.pat_map.insert(src, id); + self.source_map.pat_map.insert(src, id.into()); id } // FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow. 
fn alloc_pat_desugared(&mut self, pat: Pat) -> PatId { - self.body.pats.alloc(pat) + self.store.pats.alloc(pat) } fn missing_pat(&mut self) -> PatId { - self.body.pats.alloc(Pat::Missing) + self.store.pats.alloc(Pat::Missing) } fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId { let src = self.expander.in_file(ptr); - let id = self.body.labels.alloc(label); + let id = self.store.labels.alloc(label); self.source_map.label_map_back.insert(id, src); self.source_map.label_map.insert(src, id); id } // FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow. fn alloc_label_desugared(&mut self, label: Label) -> LabelId { - self.body.labels.alloc(label) + self.store.labels.alloc(label) } fn is_lowering_awaitable_block(&self) -> &Awaitable { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs index 994ba2aa069..032c18688ea 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs @@ -9,7 +9,7 @@ use syntax::{ use tt::TextRange; use crate::{ - body::lower::{ExprCollector, FxIndexSet}, + expr_store::lower::{ExprCollector, FxIndexSet}, hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmRegOrRegClass}, }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs index 52b91b522a4..82ad756dc2c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs @@ -6,10 +6,7 @@ use itertools::Itertools; use span::Edition; use crate::{ - hir::{ - Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability, - Statement, - }, + hir::{Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement}, pretty::{print_generic_args, print_path, print_type_ref}, }; @@ -60,7 +57,7 @@ pub(super) fn print_body_hir( let mut p = Printer { db, - body, + store: body, buf: header, indent_level: 0, line_format: LineFormat::Newline, @@ -103,14 +100,14 @@ pub(super) fn print_body_hir( pub(super) fn print_expr_hir( db: &dyn DefDatabase, - body: &Body, + store: &ExpressionStore, _owner: DefWithBodyId, expr: ExprId, edition: Edition, ) -> String { let mut p = Printer { db, - body, + store, buf: String::new(), indent_level: 0, line_format: LineFormat::Newline, @@ -122,7 +119,7 @@ pub(super) fn print_expr_hir( pub(super) fn print_pat_hir( db: &dyn DefDatabase, - body: &Body, + store: &ExpressionStore, _owner: DefWithBodyId, pat: PatId, oneline: bool, @@ -130,7 +127,7 @@ pub(super) fn print_pat_hir( ) -> String { let mut p = Printer { db, - body, + store, buf: String::new(), indent_level: 0, line_format: if oneline { LineFormat::Oneline } else { LineFormat::Newline }, @@ -157,7 +154,7 @@ macro_rules! 
wln { struct Printer<'a> { db: &'a dyn DefDatabase, - body: &'a Body, + store: &'a ExpressionStore, buf: String, indent_level: usize, line_format: LineFormat, @@ -233,7 +230,7 @@ impl Printer<'_> { } fn print_expr(&mut self, expr: ExprId) { - let expr = &self.body[expr]; + let expr = &self.store[expr]; match expr { Expr::Missing => w!(self, "�"), @@ -241,7 +238,7 @@ impl Printer<'_> { Expr::InlineAsm(_) => w!(self, "builtin#asm(_)"), Expr::OffsetOf(offset_of) => { w!(self, "builtin#offset_of("); - self.print_type_ref(offset_of.container, &self.body.types); + self.print_type_ref(offset_of.container, &self.store.types); let edition = self.edition; w!( self, @@ -271,7 +268,7 @@ impl Printer<'_> { } Expr::Loop { body, label } => { if let Some(lbl) = label { - w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast(), self.edition)); + w!(self, "{}: ", self.store[*lbl].name.display(self.db.upcast(), self.edition)); } w!(self, "loop "); self.print_expr(*body); @@ -295,7 +292,7 @@ impl Printer<'_> { if let Some(args) = generic_args { w!(self, "::<"); let edition = self.edition; - print_generic_args(self.db, args, &self.body.types, self, edition).unwrap(); + print_generic_args(self.db, args, &self.store.types, self, edition).unwrap(); w!(self, ">"); } w!(self, "("); @@ -330,13 +327,13 @@ impl Printer<'_> { Expr::Continue { label } => { w!(self, "continue"); if let Some(lbl) = label { - w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition)); + w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition)); } } Expr::Break { expr, label } => { w!(self, "break"); if let Some(lbl) = label { - w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition)); + w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition)); } if let Some(expr) = expr { self.whitespace(); @@ -404,7 +401,7 @@ impl Printer<'_> { Expr::Cast { expr, type_ref } => { self.print_expr(*expr); w!(self, " as "); - self.print_type_ref(*type_ref, &self.body.types); + self.print_type_ref(*type_ref, &self.store.types); } Expr::Ref { expr, rawness, mutability } => { w!(self, "&"); @@ -492,13 +489,13 @@ impl Printer<'_> { self.print_pat(*pat); if let Some(ty) = ty { w!(self, ": "); - self.print_type_ref(*ty, &self.body.types); + self.print_type_ref(*ty, &self.store.types); } } w!(self, "|"); if let Some(ret_ty) = ret_type { w!(self, " -> "); - self.print_type_ref(*ret_ty, &self.body.types); + self.print_type_ref(*ret_ty, &self.store.types); } self.whitespace(); self.print_expr(*body); @@ -534,7 +531,7 @@ impl Printer<'_> { Expr::Literal(lit) => self.print_literal(lit), Expr::Block { id: _, statements, tail, label } => { let label = label.map(|lbl| { - format!("{}: ", self.body[lbl].name.display(self.db.upcast(), self.edition)) + format!("{}: ", self.store[lbl].name.display(self.db.upcast(), self.edition)) }); self.print_block(label.as_deref(), statements, tail); } @@ -581,7 +578,7 @@ impl Printer<'_> { } fn print_pat(&mut self, pat: PatId) { - let pat = &self.body[pat]; + let pat = &self.store[pat]; match pat { Pat::Missing => w!(self, "�"), @@ -623,9 +620,9 @@ impl Printer<'_> { let field_name = arg.name.display(self.db.upcast(), edition).to_string(); let mut same_name = false; - if let Pat::Bind { id, subpat: None } = &self.body[arg.pat] { + if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] { if let Binding { name, mode: BindingAnnotation::Unannotated, .. 
} = - &self.body.bindings[*id] + &self.store.bindings[*id] { if name.as_str() == field_name { same_name = true; @@ -656,11 +653,11 @@ impl Printer<'_> { } Pat::Range { start, end } => { if let Some(start) = start { - self.print_literal_or_const(start); + self.print_expr(*start); } w!(self, "..="); if let Some(end) = end { - self.print_literal_or_const(end); + self.print_expr(*end); } } Pat::Slice { prefix, slice, suffix } => { @@ -734,7 +731,7 @@ impl Printer<'_> { self.print_pat(*pat); if let Some(ty) = type_ref { w!(self, ": "); - self.print_type_ref(*ty, &self.body.types); + self.print_type_ref(*ty, &self.store.types); } if let Some(init) = initializer { w!(self, " = "); @@ -757,13 +754,6 @@ impl Printer<'_> { } } - fn print_literal_or_const(&mut self, literal_or_const: &LiteralOrConst) { - match literal_or_const { - LiteralOrConst::Literal(l) => self.print_literal(l), - LiteralOrConst::Const(c) => self.print_pat(*c), - } - } - fn print_literal(&mut self, literal: &Literal) { match literal { Literal::String(it) => w!(self, "{:?}", it), @@ -799,11 +789,11 @@ impl Printer<'_> { fn print_path(&mut self, path: &Path) { let edition = self.edition; - print_path(self.db, path, &self.body.types, self, edition).unwrap(); + print_path(self.db, path, &self.store.types, self, edition).unwrap(); } fn print_binding(&mut self, id: BindingId) { - let Binding { name, mode, .. } = &self.body.bindings[id]; + let Binding { name, mode, .. } = &self.store.bindings[id]; let mode = match mode { BindingAnnotation::Unannotated => "", BindingAnnotation::Mutable => "mut ", diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs index 08af470b965..859a706177a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs @@ -4,8 +4,8 @@ use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx}; use triomphe::Arc; use crate::{ - body::{Body, HygieneId}, db::DefDatabase, + expr_store::{Body, ExpressionStore, HygieneId}, hir::{Binding, BindingId, Expr, ExprId, Item, LabelId, Pat, PatId, Statement}, BlockId, ConstBlockId, DefWithBodyId, }; @@ -53,7 +53,7 @@ pub struct ScopeData { impl ExprScopes { pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> { let body = db.body(def); - let mut scopes = ExprScopes::new(&body, |const_block| { + let mut scopes = ExprScopes::new_body(&body, |const_block| { db.lookup_intern_anonymous_const(const_block).root }); scopes.shrink_to_fit(); @@ -104,7 +104,7 @@ fn empty_entries(idx: usize) -> IdxRange<ScopeEntry> { } impl ExprScopes { - fn new( + fn new_body( body: &Body, resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy, ) -> ExprScopes { @@ -179,28 +179,28 @@ impl ExprScopes { fn add_bindings( &mut self, - body: &Body, + store: &ExpressionStore, scope: ScopeId, binding: BindingId, hygiene: HygieneId, ) { - let Binding { name, .. } = &body.bindings[binding]; + let Binding { name, .. } = &store.bindings[binding]; let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene }); self.scopes[scope].entries = IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry); } - fn add_pat_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) { - let pattern = &body[pat]; + fn add_pat_bindings(&mut self, store: &ExpressionStore, scope: ScopeId, pat: PatId) { + let pattern = &store[pat]; if let Pat::Bind { id, .. 
} = *pattern { - self.add_bindings(body, scope, id, body.binding_hygiene(id)); + self.add_bindings(store, scope, id, store.binding_hygiene(id)); } - pattern.walk_child_pats(|pat| self.add_pat_bindings(body, scope, pat)); + pattern.walk_child_pats(|pat| self.add_pat_bindings(store, scope, pat)); } - fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) { - params.iter().for_each(|pat| self.add_pat_bindings(body, scope, *pat)); + fn add_params_bindings(&mut self, store: &ExpressionStore, scope: ScopeId, params: &[PatId]) { + params.iter().for_each(|pat| self.add_pat_bindings(store, scope, *pat)); } fn set_scope(&mut self, node: ExprId, scope: ScopeId) { @@ -218,7 +218,7 @@ impl ExprScopes { fn compute_block_scopes( statements: &[Statement], tail: Option<ExprId>, - body: &Body, + store: &ExpressionStore, scopes: &mut ExprScopes, scope: &mut ScopeId, resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy, @@ -227,17 +227,17 @@ fn compute_block_scopes( match stmt { Statement::Let { pat, initializer, else_branch, .. } => { if let Some(expr) = initializer { - compute_expr_scopes(*expr, body, scopes, scope, resolve_const_block); + compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block); } if let Some(expr) = else_branch { - compute_expr_scopes(*expr, body, scopes, scope, resolve_const_block); + compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block); } *scope = scopes.new_scope(*scope); - scopes.add_pat_bindings(body, *scope, *pat); + scopes.add_pat_bindings(store, *scope, *pat); } Statement::Expr { expr, .. } => { - compute_expr_scopes(*expr, body, scopes, scope, resolve_const_block); + compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block); } Statement::Item(Item::MacroDef(macro_id)) => { *scope = scopes.new_macro_def_scope(*scope, macro_id.clone()); @@ -246,32 +246,32 @@ fn compute_block_scopes( } } if let Some(expr) = tail { - compute_expr_scopes(expr, body, scopes, scope, resolve_const_block); + compute_expr_scopes(expr, store, scopes, scope, resolve_const_block); } } fn compute_expr_scopes( expr: ExprId, - body: &Body, + store: &ExpressionStore, scopes: &mut ExprScopes, scope: &mut ScopeId, resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy, ) { let make_label = - |label: &Option<LabelId>| label.map(|label| (label, body.labels[label].name.clone())); + |label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone())); let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| { - compute_expr_scopes(expr, body, scopes, scope, resolve_const_block) + compute_expr_scopes(expr, store, scopes, scope, resolve_const_block) }; scopes.set_scope(expr, *scope); - match &body[expr] { + match &store[expr] { Expr::Block { statements, tail, id, label } => { let mut scope = scopes.new_block_scope(*scope, *id, make_label(label)); // Overwrite the old scope for the block expr, so that every block scope can be found // via the block itself (important for blocks that only contain items, no expressions). 
scopes.set_scope(expr, scope); - compute_block_scopes(statements, *tail, body, scopes, &mut scope, resolve_const_block); + compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block); } Expr::Const(id) => { let mut scope = scopes.root_scope(); @@ -282,7 +282,7 @@ fn compute_expr_scopes( // Overwrite the old scope for the block expr, so that every block scope can be found // via the block itself (important for blocks that only contain items, no expressions). scopes.set_scope(expr, scope); - compute_block_scopes(statements, *tail, body, scopes, &mut scope, resolve_const_block); + compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block); } Expr::Loop { body: body_expr, label } => { let mut scope = scopes.new_labeled_scope(*scope, make_label(label)); @@ -290,14 +290,14 @@ fn compute_expr_scopes( } Expr::Closure { args, body: body_expr, .. } => { let mut scope = scopes.new_scope(*scope); - scopes.add_params_bindings(body, scope, args); + scopes.add_params_bindings(store, scope, args); compute_expr_scopes(scopes, *body_expr, &mut scope); } Expr::Match { expr, arms } => { compute_expr_scopes(scopes, *expr, scope); for arm in arms.iter() { let mut scope = scopes.new_scope(*scope); - scopes.add_pat_bindings(body, scope, arm.pat); + scopes.add_pat_bindings(store, scope, arm.pat); if let Some(guard) = arm.guard { scope = scopes.new_scope(scope); compute_expr_scopes(scopes, guard, &mut scope); @@ -316,9 +316,9 @@ fn compute_expr_scopes( &Expr::Let { pat, expr } => { compute_expr_scopes(scopes, expr, scope); *scope = scopes.new_scope(*scope); - scopes.add_pat_bindings(body, *scope, pat); + scopes.add_pat_bindings(store, *scope, pat); } - _ => body.walk_child_exprs(expr, |e| compute_expr_scopes(scopes, e, scope)), + _ => store.walk_child_exprs(expr, |e| compute_expr_scopes(scopes, e, scope)), }; } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs index edc7c4c1f21..16bf46d3e3f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs @@ -1,10 +1,10 @@ mod block; +use crate::{hir::MatchArm, test_db::TestDB, ModuleDefId}; use expect_test::{expect, Expect}; +use la_arena::RawIdx; use test_fixture::WithFixture; -use crate::{test_db::TestDB, ModuleDefId}; - use super::*; fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) { @@ -459,3 +459,45 @@ async fn foo(a: (), b: i32) -> u32 { expect!["fn foo(�: (), �: i32) -> impl ::core::future::Future::<Output = u32> �"] .assert_eq(&printed); } + +#[test] +fn range_bounds_are_hir_exprs() { + let (_, body, _) = lower( + r#" +pub const L: i32 = 6; +mod x { + pub const R: i32 = 100; +} +const fn f(x: i32) -> i32 { + match x { + -1..=5 => x * 10, + L..=x::R => x * 100, + _ => x, + } +}"#, + ); + + let mtch_arms = body + .exprs + .iter() + .find_map(|(_, expr)| { + if let Expr::Match { arms, .. } = expr { + return Some(arms); + } + + None + }) + .unwrap(); + + let MatchArm { pat, .. } = mtch_arms[1]; + match body.pats[pat] { + Pat::Range { start, end } => { + let hir_start = &body.exprs[start.unwrap()]; + let hir_end = &body.exprs[end.unwrap()]; + + assert!(matches!(hir_start, Expr::Path { .. })); + assert!(matches!(hir_end, Expr::Path { .. 
})); + } + _ => {} + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs index e136dd18a55..e136dd18a55 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs index 7b3f1d06d21..e2b36da79b2 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs @@ -433,7 +433,7 @@ impl GenericParams { GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ConstId(_) => ( + GenericDefId::ConstId(_) | GenericDefId::StaticId(_) => ( Arc::new(GenericParams { type_or_consts: Default::default(), lifetimes: Default::default(), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs index 85963469430..494644d8eff 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs @@ -19,7 +19,7 @@ use std::fmt; use hir_expand::{name::Name, MacroDefId}; use intern::Symbol; -use la_arena::{Idx, RawIdx}; +use la_arena::Idx; use rustc_apfloat::ieee::{Half as f16, Quad as f128}; use syntax::ast; use type_ref::TypeRefId; @@ -37,13 +37,10 @@ pub type BindingId = Idx<Binding>; pub type ExprId = Idx<Expr>; -/// FIXME: this is a hacky function which should be removed -pub(crate) fn dummy_expr_id() -> ExprId { - ExprId::from_raw(RawIdx::from(u32::MAX)) -} - pub type PatId = Idx<Pat>; +// FIXME: Encode this as a single u32, we won't ever reach all 32 bits especially given these counts +// are local to the body. 
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub enum ExprOrPatId { ExprId(ExprId), @@ -58,12 +55,20 @@ impl ExprOrPatId { } } + pub fn is_expr(&self) -> bool { + matches!(self, Self::ExprId(_)) + } + pub fn as_pat(self) -> Option<PatId> { match self { Self::PatId(v) => Some(v), _ => None, } } + + pub fn is_pat(&self) -> bool { + matches!(self, Self::PatId(_)) + } } stdx::impl_from!(ExprId, PatId for ExprOrPatId); @@ -574,8 +579,8 @@ pub enum Pat { ellipsis: bool, }, Range { - start: Option<Box<LiteralOrConst>>, - end: Option<Box<LiteralOrConst>>, + start: Option<ExprId>, + end: Option<ExprId>, }, Slice { prefix: Box<[PatId]>, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index 34635997bdf..d43776b8a66 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -320,7 +320,7 @@ impl SearchMode { }; match m { Some((index, _)) => { - name = &name[index + 1..]; + name = name[index..].strip_prefix(|_: char| true).unwrap_or_default(); true } None => false, @@ -519,7 +519,7 @@ mod tests { crate_graph[krate] .display_name .as_ref() - .is_some_and(|it| &**it.crate_name() == crate_name) + .is_some_and(|it| it.crate_name().as_str() == crate_name) }) .expect("could not find crate"); @@ -1039,4 +1039,22 @@ pub mod fmt { "#]], ); } + + #[test] + fn unicode_fn_name() { + let ra_fixture = r#" + //- /main.rs crate:main deps:dep + //- /dep.rs crate:dep + pub fn あい() {} + "#; + + check_search( + ra_fixture, + "main", + Query::new("あ".to_owned()).fuzzy(), + expect![[r#" + dep::あい (f) + "#]], + ); + } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index 65a39c56561..0ca1eb9bcfe 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -18,8 +18,8 @@ use crate::{ db::DefDatabase, per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem}, visibility::{Visibility, VisibilityExplicitness}, - AdtId, BuiltinType, ConstId, ExternCrateId, FxIndexMap, HasModule, ImplId, LocalModuleId, - Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, + AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId, + LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, }; #[derive(Debug, Default)] @@ -158,6 +158,8 @@ pub struct ItemScope { declarations: Vec<ModuleDefId>, impls: Vec<ImplId>, + #[allow(clippy::box_collection)] + extern_blocks: Option<Box<Vec<ExternBlockId>>>, unnamed_consts: Vec<ConstId>, /// Traits imported via `use Trait as _;`. 
unnamed_trait_imports: FxHashMap<TraitId, Item<()>>, @@ -319,6 +321,10 @@ impl ItemScope { self.extern_crate_decls.iter().copied() } + pub fn extern_blocks(&self) -> impl Iterator<Item = ExternBlockId> + '_ { + self.extern_blocks.iter().flat_map(|it| it.iter()).copied() + } + pub fn use_decls(&self) -> impl ExactSizeIterator<Item = UseId> + '_ { self.use_decls.iter().copied() } @@ -469,6 +475,10 @@ impl ItemScope { self.impls.push(imp); } + pub(crate) fn define_extern_block(&mut self, extern_block: ExternBlockId) { + self.extern_blocks.get_or_insert_default().push(extern_block); + } + pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) { self.extern_crate_decls.push(extern_crate); } @@ -806,7 +816,11 @@ impl ItemScope { use_imports_types, use_imports_macros, macro_invocations, + extern_blocks, } = self; + if let Some(it) = extern_blocks { + it.shrink_to_fit(); + } types.shrink_to_fit(); values.shrink_to_fit(); macros.shrink_to_fit(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index b5bf2feb82a..8d5b3eeb28e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -937,7 +937,7 @@ pub struct Param { bitflags::bitflags! { #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] - pub(crate) struct FnFlags: u8 { + pub(crate) struct FnFlags: u16 { const HAS_SELF_PARAM = 1 << 0; const HAS_BODY = 1 << 1; const HAS_DEFAULT_KW = 1 << 2; @@ -946,6 +946,12 @@ bitflags::bitflags! { const HAS_UNSAFE_KW = 1 << 5; const IS_VARARGS = 1 << 6; const HAS_SAFE_KW = 1 << 7; + /// The `#[target_feature]` attribute is necessary to check safety (with RFC 2396), + /// but keeping it for all functions will consume a lot of memory when there are + /// only very few functions with it. So we only encode its existence here, and lookup + /// it if needed. + const HAS_TARGET_FEATURE = 1 << 8; + const DEPRECATED_SAFE_2024 = 1 << 9; } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 38733577d1c..59f51db9f74 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -502,4 +502,5 @@ language_item_table! 
{ String, sym::String, string, Target::Struct, GenericRequirement::None; CStr, sym::CStr, c_str, Target::Struct, GenericRequirement::None; + Ordering, sym::Ordering, ordering, Target::Enum, GenericRequirement::None; } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index c78818c642c..9c947df35e9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -18,9 +18,15 @@ extern crate ra_ap_rustc_parse_format as rustc_parse_format; #[cfg(feature = "in-rust-tree")] extern crate rustc_abi; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_hashes; + #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_abi as rustc_abi; +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_hashes as rustc_hashes; + pub mod db; pub mod attr; @@ -42,7 +48,7 @@ pub mod lang_item; pub mod hir; pub use self::hir::type_ref; -pub mod body; +pub mod expr_store; pub mod resolver; pub mod nameres; @@ -693,6 +699,7 @@ impl TypeOwnerId { Some(match self { TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it), TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it), + TypeOwnerId::StaticId(it) => GenericDefId::StaticId(it), TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it), TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it), TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it), @@ -701,7 +708,7 @@ impl TypeOwnerId { TypeOwnerId::EnumVariantId(it) => { GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent)) } - TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None, + TypeOwnerId::InTypeConstId(_) => return None, }) } } @@ -743,6 +750,7 @@ impl From<GenericDefId> for TypeOwnerId { GenericDefId::TypeAliasId(it) => it.into(), GenericDefId::ImplId(it) => it.into(), GenericDefId::ConstId(it) => it.into(), + GenericDefId::StaticId(it) => it.into(), } } } @@ -851,7 +859,7 @@ impl GeneralConstId { pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> { match self { GeneralConstId::ConstId(it) => Some(it.into()), - GeneralConstId::StaticId(_) => None, + GeneralConstId::StaticId(it) => Some(it.into()), GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db), GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db), } @@ -897,7 +905,7 @@ impl DefWithBodyId { pub fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option<GenericDefId> { match self { DefWithBodyId::FunctionId(f) => Some(f.into()), - DefWithBodyId::StaticId(_) => None, + DefWithBodyId::StaticId(s) => Some(s.into()), DefWithBodyId::ConstId(c) => Some(c.into()), DefWithBodyId::VariantId(c) => Some(c.lookup(db).parent.into()), // FIXME: stable rust doesn't allow generics in constants, but we should @@ -922,23 +930,28 @@ impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId); #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] pub enum GenericDefId { - FunctionId(FunctionId), AdtId(AdtId), - TraitId(TraitId), - TraitAliasId(TraitAliasId), - TypeAliasId(TypeAliasId), - ImplId(ImplId), // consts can have type parameters from their parents (i.e. associated consts of traits) ConstId(ConstId), + FunctionId(FunctionId), + ImplId(ImplId), + // can't actually have generics currently, but they might in the future + // More importantly, this completes the set of items that contain type references + // which is to be used by the signature expression store in the future. 
+ StaticId(StaticId), + TraitAliasId(TraitAliasId), + TraitId(TraitId), + TypeAliasId(TypeAliasId), } impl_from!( - FunctionId, AdtId(StructId, EnumId, UnionId), - TraitId, - TraitAliasId, - TypeAliasId, + ConstId, + FunctionId, ImplId, - ConstId + StaticId, + TraitAliasId, + TraitId, + TypeAliasId for GenericDefId ); @@ -969,6 +982,7 @@ impl GenericDefId { GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None), + GenericDefId::StaticId(it) => (it.lookup(db).id.file_id(), None), } } @@ -1350,6 +1364,7 @@ impl HasModule for GenericDefId { GenericDefId::TypeAliasId(it) => it.module(db), GenericDefId::ImplId(it) => it.module(db), GenericDefId::ConstId(it) => it.module(db), + GenericDefId::StaticId(it) => it.module(db), } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 70e3e1ed4e9..a43c0eb9d70 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -5,7 +5,7 @@ //! in-memory macros. use expect_test::expect; -use crate::macro_expansion_tests::check; +use crate::macro_expansion_tests::{check, check_errors}; #[test] fn attribute_macro_attr_censoring() { @@ -216,3 +216,21 @@ struct S; #[doc = "doc attr"] struct S;"##]], ); } + +#[test] +fn cfg_evaluated_before_attr_macros() { + check_errors( + r#" +//- proc_macros: disallow_cfg + +use proc_macros::disallow_cfg; + +#[disallow_cfg] #[cfg(false)] fn foo() {} +// True cfg are kept. +// #[disallow_cfg] #[cfg(true)] fn bar() {} +#[disallow_cfg] #[cfg_attr(false, inline)] fn baz() {} +#[disallow_cfg] #[cfg_attr(true, inline)] fn qux() {} + "#, + expect![[r#""#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 39d383f0159..3b6e3c5916e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -337,7 +337,7 @@ impl DefMap { pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc<DefMap> { let crate_graph = db.crate_graph(); let krate = &crate_graph[crate_id]; - let name = krate.display_name.as_deref().unwrap_or_default(); + let name = krate.display_name.as_deref().map(Symbol::as_str).unwrap_or_default(); let _p = tracing::info_span!("crate_def_map_query", ?name).entered(); let module_data = ModuleData::new( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 06276335b71..254c1379917 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -1759,16 +1759,20 @@ impl ModCollector<'_, '_> { ); } } - ModItem::ExternBlock(block) => self.collect( - &self.item_tree[block].children, - ItemContainerId::ExternBlockId( - ExternBlockLoc { - container: module, - id: ItemTreeId::new(self.tree_id, block), - } - .intern(db), - ), - ), + ModItem::ExternBlock(block) => { + let extern_block_id = ExternBlockLoc { + container: module, + id: ItemTreeId::new(self.tree_id, block), + } + .intern(db); + self.def_collector.def_map.modules[self.module_id] + .scope + .define_extern_block(extern_block_id); + 
self.collect( + &self.item_tree[block].children, + ItemContainerId::ExternBlockId(extern_block_id), + ) + } ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container), ModItem::MacroRules(id) => self.collect_macro_rules(id, module), ModItem::Macro2(id) => self.collect_macro_def(id, module), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs index e59c37104dd..e6c2504d07a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs @@ -173,10 +173,7 @@ impl Path { segments: path.mod_path().segments(), generic_args: Some(path.generic_args()), }, - Path::LangItem(_, seg) => PathSegments { - segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)), - generic_args: None, - }, + Path::LangItem(_, seg) => PathSegments { segments: seg.as_slice(), generic_args: None }, } } @@ -240,6 +237,11 @@ pub struct PathSegment<'a> { pub args_and_bindings: Option<&'a GenericArgs>, } +impl PathSegment<'_> { + pub const MISSING: PathSegment<'static> = + PathSegment { name: &Name::missing(), args_and_bindings: None }; +} + #[derive(Debug, Clone, Copy)] pub struct PathSegments<'a> { segments: &'a [Name], diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 8c556d8a8c3..9dfb6e3cc4b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -10,13 +10,13 @@ use smallvec::{smallvec, SmallVec}; use triomphe::Arc; use crate::{ - body::{ - scope::{ExprScopes, ScopeId}, - HygieneId, - }, builtin_type::BuiltinType, data::ExternCrateDeclData, db::DefDatabase, + expr_store::{ + scope::{ExprScopes, ScopeId}, + HygieneId, + }, generics::{GenericParams, TypeOrConstParamData}, hir::{BindingId, ExprId, LabelId}, item_scope::{BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, BUILTIN_SCOPE}, @@ -327,8 +327,9 @@ impl Resolver { | LangItemTarget::ImplDef(_) | LangItemTarget::Static(_) => return None, }; + // Remaining segments start from 0 because lang paths have no segments other than the remaining. return Some(( - ResolveValueResult::Partial(type_ns, 1, None), + ResolveValueResult::Partial(type_ns, 0, None), ResolvePathResultPrefixInfo::default(), )); } @@ -1264,6 +1265,7 @@ impl HasResolver for GenericDefId { GenericDefId::TypeAliasId(inner) => inner.resolver(db), GenericDefId::ImplId(inner) => inner.resolver(db), GenericDefId::ConstId(inner) => inner.resolver(db), + GenericDefId::StaticId(inner) => inner.resolver(db), } } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs index 01a3103af82..626a82ae08e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs @@ -201,9 +201,6 @@ pub(crate) fn process_cfg_attrs( MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(), _ => false, }; - if !is_derive { - return None; - } let mut remove = FxHashSet::default(); let item = ast::Item::cast(node.clone())?; @@ -220,28 +217,43 @@ pub(crate) fn process_cfg_attrs( } } } - match item { - ast::Item::Struct(it) => match it.field_list()? 
{ - ast::FieldList::RecordFieldList(fields) => { - process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?; + + if is_derive { + // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level + // (cfg_attr is handled above, cfg is handled in the def map). + match item { + ast::Item::Struct(it) => match it.field_list()? { + ast::FieldList::RecordFieldList(fields) => { + process_has_attrs_with_possible_comma( + db, + fields.fields(), + loc.krate, + &mut remove, + )?; + } + ast::FieldList::TupleFieldList(fields) => { + process_has_attrs_with_possible_comma( + db, + fields.fields(), + loc.krate, + &mut remove, + )?; + } + }, + ast::Item::Enum(it) => { + process_enum(db, it.variant_list()?, loc.krate, &mut remove)?; } - ast::FieldList::TupleFieldList(fields) => { - process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?; + ast::Item::Union(it) => { + process_has_attrs_with_possible_comma( + db, + it.record_field_list()?.fields(), + loc.krate, + &mut remove, + )?; } - }, - ast::Item::Enum(it) => { - process_enum(db, it.variant_list()?, loc.krate, &mut remove)?; - } - ast::Item::Union(it) => { - process_has_attrs_with_possible_comma( - db, - it.record_field_list()?.fields(), - loc.krate, - &mut remove, - )?; + // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now + _ => {} } - // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now - _ => {} } Some(remove) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 848870c3a38..0758bd4515e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -142,8 +142,8 @@ impl Name { /// Ideally, we want a `gensym` semantics for missing names -- each missing /// name is equal only to itself. It's not clear how to implement this in /// salsa though, so we punt on that bit for a moment. - pub fn missing() -> Name { - Name { symbol: sym::MISSING_NAME.clone(), ctx: () } + pub const fn missing() -> Name { + Name { symbol: sym::consts::MISSING_NAME, ctx: () } } /// Returns true if this is a fake name for things missing in the source code. 
See @@ -262,6 +262,6 @@ impl AsName for ast::FieldKind { impl AsName for base_db::Dependency { fn as_name(&self) -> Name { - Name::new_root(&self.name) + Name::new_symbol_root((*self.name).clone()) } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs index 6ff7831fd81..c744fbce77b 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs @@ -41,9 +41,9 @@ pub fn prettify_macro_expansion( } else if let Some(dep) = target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate) { - make::tokens::ident(&dep.name) + make::tokens::ident(dep.name.as_str()) } else if let Some(crate_name) = &crate_graph[macro_def_crate].display_name { - make::tokens::ident(crate_name.crate_name()) + make::tokens::ident(crate_name.crate_name().as_str()) } else { return dollar_crate.clone(); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index 62feca5f8cb..e0e366f4501 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -194,7 +194,11 @@ pub(crate) fn deref_by_trait( } let trait_id = || { - if use_receiver_trait { + // FIXME: Remove the `false` once `Receiver` needs to be stabilized, doing so will + // effectively bump the MSRV of rust-analyzer to 1.84 due to 1.83 and below lacking the + // blanked impl on `Deref`. + #[expect(clippy::overly_complex_bool_expr)] + if use_receiver_trait && false { if let Some(receiver) = db.lang_item(table.trait_env.krate, LangItem::Receiver).and_then(|l| l.as_trait()) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index c8ff6cba3dd..6d4753ea389 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -768,23 +768,21 @@ pub(crate) fn adt_datum_query( phantom_data, }; - // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it - let _variant_id_to_fields = |id: VariantId| { + let variant_id_to_fields = |id: VariantId| { let variant_data = &id.variant_data(db.upcast()); - let fields = if variant_data.fields().is_empty() { + let fields = if variant_data.fields().is_empty() || bound_vars_subst.is_empty(Interner) { vec![] } else { - let field_types = db.field_types(id); - variant_data - .fields() - .iter() - .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) - .filter(|it| !it.contains_unknown()) - .collect() + // HACK: provide full struct type info slows down rust-analyzer by quite a bit unfortunately, + // so we trick chalk into thinking that our struct impl Unsize + if let Some(ty) = bound_vars_subst.at(Interner, 0).ty(Interner) { + vec![ty.clone()] + } else { + vec![] + } }; rust_ir::AdtVariantDatum { fields } }; - let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] }; let (kind, variants) = match adt_id { hir_def::AdtId::StructId(id) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 142766c039b..7839589994b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -3,7 +3,7 @@ use 
base_db::{ra_salsa::Cycle, CrateId}; use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex}; use hir_def::{ - body::{Body, HygieneId}, + expr_store::{Body, HygieneId}, hir::{Expr, ExprId}, path::Path, resolver::{Resolver, ValueNs}, @@ -124,6 +124,7 @@ pub(crate) fn path_to_const<'g>( ConstScalar::UnevaluatedConst(c.into(), Substitution::empty(Interner)), expected_ty, )), + // FIXME: With feature(adt_const_params), we also need to consider other things here, e.g. struct constructors. _ => None, } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 7f9f0c0de19..59aaf85164a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -36,7 +36,7 @@ use crate::{ }; pub(crate) use hir_def::{ - body::Body, + expr_store::Body, hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement}, LocalFieldId, VariantId, }; @@ -440,7 +440,9 @@ impl ExprValidator { return; }; let root = source_ptr.file_syntax(db.upcast()); - let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else { + let either::Left(ast::Expr::IfExpr(if_expr)) = + source_ptr.value.to_node(&root) + else { return; }; let mut top_if_expr = if_expr; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index c5d8c956615..b0f9fc53e29 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -11,7 +11,8 @@ pub(crate) mod pat_analysis; use chalk_ir::Mutability; use hir_def::{ - body::Body, data::adt::VariantData, hir::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId, + data::adt::VariantData, expr_store::Body, hir::PatId, AdtId, EnumVariantId, LocalFieldId, + VariantId, }; use hir_expand::name::Name; use span::Edition; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 2b854310a15..91eb59fb314 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -95,7 +95,7 @@ impl<'db> MatchCheckCtx<'db> { let place_validity = PlaceValidity::from_bool(known_valid_scrutinee.unwrap_or(true)); // Measured to take ~100ms on modern hardware. 
- let complexity_limit = Some(500000); + let complexity_limit = 500000; compute_match_usefulness(self, arms, scrut_ty, place_validity, complexity_limit) } @@ -361,11 +361,11 @@ impl PatCx for MatchCheckCtx<'_> { } } - fn ctor_sub_tys<'a>( - &'a self, - ctor: &'a rustc_pattern_analysis::constructor::Constructor<Self>, - ty: &'a Self::Ty, - ) -> impl ExactSizeIterator<Item = (Self::Ty, PrivateUninhabitedField)> + Captures<'a> { + fn ctor_sub_tys( + &self, + ctor: &rustc_pattern_analysis::constructor::Constructor<Self>, + ty: &Self::Ty, + ) -> impl ExactSizeIterator<Item = (Self::Ty, PrivateUninhabitedField)> { let single = |ty| smallvec![(ty, PrivateUninhabitedField(false))]; let tys: SmallVec<[_; 2]> = match ctor { Struct | Variant(_) | UnionField => match ty.kind(Interner) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 6bba83fac98..ac849b0762d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -5,28 +5,31 @@ use std::mem; use either::Either; use hir_def::{ - body::Body, + expr_store::Body, hir::{Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp}, path::Path, resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs}, type_ref::Rawness, - AdtId, DefWithBodyId, FieldId, VariantId, + AdtId, DefWithBodyId, FieldId, FunctionId, VariantId, }; +use span::Edition; use crate::{ - db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TyExt, TyKind, + db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TargetFeatures, TyExt, + TyKind, }; -/// Returns `(unsafe_exprs, fn_is_unsafe)`. -/// -/// If `fn_is_unsafe` is false, `unsafe_exprs` are hard errors. If true, they're `unsafe_op_in_unsafe_fn`. -pub fn missing_unsafe( - db: &dyn HirDatabase, - def: DefWithBodyId, -) -> (Vec<(ExprOrPatId, UnsafetyReason)>, bool) { +#[derive(Debug, Default)] +pub struct MissingUnsafeResult { + pub unsafe_exprs: Vec<(ExprOrPatId, UnsafetyReason)>, + /// If `fn_is_unsafe` is false, `unsafe_exprs` are hard errors. If true, they're `unsafe_op_in_unsafe_fn`. 
+ pub fn_is_unsafe: bool, + pub deprecated_safe_calls: Vec<ExprId>, +} + +pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafeResult { let _p = tracing::info_span!("missing_unsafe").entered(); - let mut res = Vec::new(); let is_unsafe = match def { DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe(), DefWithBodyId::StaticId(_) @@ -35,11 +38,19 @@ pub fn missing_unsafe( | DefWithBodyId::InTypeConstId(_) => false, }; + let mut res = MissingUnsafeResult { fn_is_unsafe: is_unsafe, ..MissingUnsafeResult::default() }; let body = db.body(def); let infer = db.infer(def); - let mut callback = |node, inside_unsafe_block, reason| { - if inside_unsafe_block == InsideUnsafeBlock::No { - res.push((node, reason)); + let mut callback = |diag| match diag { + UnsafeDiagnostic::UnsafeOperation { node, inside_unsafe_block, reason } => { + if inside_unsafe_block == InsideUnsafeBlock::No { + res.unsafe_exprs.push((node, reason)); + } + } + UnsafeDiagnostic::DeprecatedSafe2024 { node, inside_unsafe_block } => { + if inside_unsafe_block == InsideUnsafeBlock::No { + res.deprecated_safe_calls.push(node) + } } }; let mut visitor = UnsafeVisitor::new(db, &infer, &body, def, &mut callback); @@ -54,7 +65,7 @@ pub fn missing_unsafe( } } - (res, is_unsafe) + res } #[derive(Debug, Clone, Copy)] @@ -73,15 +84,31 @@ pub enum InsideUnsafeBlock { Yes, } +#[derive(Debug)] +enum UnsafeDiagnostic { + UnsafeOperation { + node: ExprOrPatId, + inside_unsafe_block: InsideUnsafeBlock, + reason: UnsafetyReason, + }, + /// A lint. + DeprecatedSafe2024 { node: ExprId, inside_unsafe_block: InsideUnsafeBlock }, +} + pub fn unsafe_expressions( db: &dyn HirDatabase, infer: &InferenceResult, def: DefWithBodyId, body: &Body, current: ExprId, - unsafe_expr_cb: &mut dyn FnMut(ExprOrPatId, InsideUnsafeBlock, UnsafetyReason), + callback: &mut dyn FnMut(InsideUnsafeBlock), ) { - let mut visitor = UnsafeVisitor::new(db, infer, body, def, unsafe_expr_cb); + let mut visitor_callback = |diag| { + if let UnsafeDiagnostic::UnsafeOperation { inside_unsafe_block, .. } = diag { + callback(inside_unsafe_block); + } + }; + let mut visitor = UnsafeVisitor::new(db, infer, body, def, &mut visitor_callback); _ = visitor.resolver.update_to_inner_scope(db.upcast(), def, current); visitor.walk_expr(current); } @@ -95,7 +122,10 @@ struct UnsafeVisitor<'a> { inside_unsafe_block: InsideUnsafeBlock, inside_assignment: bool, inside_union_destructure: bool, - unsafe_expr_cb: &'a mut dyn FnMut(ExprOrPatId, InsideUnsafeBlock, UnsafetyReason), + callback: &'a mut dyn FnMut(UnsafeDiagnostic), + def_target_features: TargetFeatures, + // FIXME: This needs to be the edition of the span of each call. 
+ edition: Edition, } impl<'a> UnsafeVisitor<'a> { @@ -104,9 +134,14 @@ impl<'a> UnsafeVisitor<'a> { infer: &'a InferenceResult, body: &'a Body, def: DefWithBodyId, - unsafe_expr_cb: &'a mut dyn FnMut(ExprOrPatId, InsideUnsafeBlock, UnsafetyReason), + unsafe_expr_cb: &'a mut dyn FnMut(UnsafeDiagnostic), ) -> Self { let resolver = def.resolver(db.upcast()); + let def_target_features = match def { + DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())), + _ => TargetFeatures::default(), + }; + let edition = db.crate_graph()[resolver.module().krate()].edition; Self { db, infer, @@ -116,12 +151,34 @@ impl<'a> UnsafeVisitor<'a> { inside_unsafe_block: InsideUnsafeBlock::No, inside_assignment: false, inside_union_destructure: false, - unsafe_expr_cb, + callback: unsafe_expr_cb, + def_target_features, + edition, } } - fn call_cb(&mut self, node: ExprOrPatId, reason: UnsafetyReason) { - (self.unsafe_expr_cb)(node, self.inside_unsafe_block, reason); + fn on_unsafe_op(&mut self, node: ExprOrPatId, reason: UnsafetyReason) { + (self.callback)(UnsafeDiagnostic::UnsafeOperation { + node, + inside_unsafe_block: self.inside_unsafe_block, + reason, + }); + } + + fn check_call(&mut self, node: ExprId, func: FunctionId) { + let unsafety = is_fn_unsafe_to_call(self.db, func, &self.def_target_features, self.edition); + match unsafety { + crate::utils::Unsafety::Safe => {} + crate::utils::Unsafety::Unsafe => { + self.on_unsafe_op(node.into(), UnsafetyReason::UnsafeFnCall) + } + crate::utils::Unsafety::DeprecatedSafe2024 => { + (self.callback)(UnsafeDiagnostic::DeprecatedSafe2024 { + node, + inside_unsafe_block: self.inside_unsafe_block, + }) + } + } } fn walk_pats_top(&mut self, pats: impl Iterator<Item = PatId>, parent_expr: ExprId) { @@ -146,7 +203,9 @@ impl<'a> UnsafeVisitor<'a> { | Pat::Ref { .. } | Pat::Box { .. } | Pat::Expr(..) - | Pat::ConstBlock(..) => self.call_cb(current.into(), UnsafetyReason::UnionField), + | Pat::ConstBlock(..) => { + self.on_unsafe_op(current.into(), UnsafetyReason::UnionField) + } // `Or` only wraps other patterns, and `Missing`/`Wild` do not constitute a read. Pat::Missing | Pat::Wild | Pat::Or(_) => {} } @@ -180,9 +239,13 @@ impl<'a> UnsafeVisitor<'a> { let inside_assignment = mem::replace(&mut self.inside_assignment, false); match expr { &Expr::Call { callee, .. } => { - if let Some(func) = self.infer[callee].as_fn_def(self.db) { - if is_fn_unsafe_to_call(self.db, func) { - self.call_cb(current.into(), UnsafetyReason::UnsafeFnCall); + let callee = &self.infer[callee]; + if let Some(func) = callee.as_fn_def(self.db) { + self.check_call(current, func); + } + if let TyKind::Function(fn_ptr) = callee.kind(Interner) { + if fn_ptr.sig.safety == chalk_ir::Safety::Unsafe { + self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall); } } } @@ -209,18 +272,13 @@ impl<'a> UnsafeVisitor<'a> { } } Expr::MethodCall { .. } => { - if self - .infer - .method_resolution(current) - .map(|(func, _)| is_fn_unsafe_to_call(self.db, func)) - .unwrap_or(false) - { - self.call_cb(current.into(), UnsafetyReason::UnsafeFnCall); + if let Some((func, _)) = self.infer.method_resolution(current) { + self.check_call(current, func); } } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { if let TyKind::Raw(..) = &self.infer[*expr].kind(Interner) { - self.call_cb(current.into(), UnsafetyReason::RawPtrDeref); + self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref); } } Expr::Unsafe { .. 
} => { @@ -235,7 +293,7 @@ impl<'a> UnsafeVisitor<'a> { self.walk_pats_top(std::iter::once(target), current); self.inside_assignment = old_inside_assignment; } - Expr::InlineAsm(_) => self.call_cb(current.into(), UnsafetyReason::InlineAsm), + Expr::InlineAsm(_) => self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm), // rustc allows union assignment to propagate through field accesses and casts. Expr::Cast { .. } => self.inside_assignment = inside_assignment, Expr::Field { .. } => { @@ -244,7 +302,7 @@ impl<'a> UnsafeVisitor<'a> { if let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) = self.infer.field_resolution(current) { - self.call_cb(current.into(), UnsafetyReason::UnionField); + self.on_unsafe_op(current.into(), UnsafetyReason::UnionField); } } } @@ -279,9 +337,9 @@ impl<'a> UnsafeVisitor<'a> { if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial { let static_data = self.db.static_data(id); if static_data.mutable { - self.call_cb(node, UnsafetyReason::MutableStatic); + self.on_unsafe_op(node, UnsafetyReason::MutableStatic); } else if static_data.is_extern && !static_data.has_safe_kw { - self.call_cb(node, UnsafetyReason::ExternStatic); + self.on_unsafe_op(node, UnsafetyReason::ExternStatic); } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index abbf2a4f2ef..18cf6e5ce36 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -262,7 +262,8 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<Generic GenericDefId::FunctionId(it) => it.lookup(db).container, GenericDefId::TypeAliasId(it) => it.lookup(db).container, GenericDefId::ConstId(it) => it.lookup(db).container, - GenericDefId::AdtId(_) + GenericDefId::StaticId(_) + | GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) | GenericDefId::TraitAliasId(_) => return None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 25bb3a76de2..0cb7002f446 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -16,7 +16,7 @@ pub(crate) mod cast; pub(crate) mod closure; mod coerce; -mod diagnostics; +pub(crate) mod diagnostics; mod expr; mod mutability; mod pat; @@ -34,9 +34,9 @@ use chalk_ir::{ }; use either::Either; use hir_def::{ - body::{Body, HygieneId}, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, data::{ConstData, StaticData}, + expr_store::{Body, HygieneId}, hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId}, lang_item::{LangItem, LangItemTarget}, layout::Integer, @@ -236,7 +236,7 @@ pub enum InferenceDiagnostic { name: Name, /// Contains the type the field resolves to field_with_same_name: Option<Ty>, - assoc_func_with_same_name: Option<AssocItemId>, + assoc_func_with_same_name: Option<FunctionId>, }, UnresolvedAssocItem { id: ExprOrPatId, @@ -466,6 +466,9 @@ pub struct InferenceResult { pub type_of_for_iterator: FxHashMap<ExprId, Ty>, type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>, /// Whether there are any type-mismatching errors in the result. + // FIXME: This isn't as useful as initially thought due to us falling back placeholders to + // `TyKind::Error`. + // Which will then mark this field. pub(crate) has_errors: bool, /// Interned common types to return references to. 
// FIXME: Move this into `InferenceContext` @@ -943,7 +946,7 @@ impl<'a> InferenceContext<'a> { let ty = self.insert_type_vars(ty); let ty = self.normalize_associated_types_in(ty); - self.infer_top_pat(*pat, &ty); + self.infer_top_pat(*pat, &ty, None); if ty .data(Interner) .flags @@ -1236,7 +1239,29 @@ impl<'a> InferenceContext<'a> { } fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) { - self.result.expr_adjustments.insert(expr, adjustments); + if adjustments.is_empty() { + return; + } + match self.result.expr_adjustments.entry(expr) { + std::collections::hash_map::Entry::Occupied(mut entry) => { + match (&mut entry.get_mut()[..], &adjustments[..]) { + ( + [Adjustment { kind: Adjust::NeverToAny, target }], + [.., Adjustment { target: new_target, .. }], + ) => { + // NeverToAny coercion can target any type, so instead of adding a new + // adjustment on top we can change the target. + *target = new_target.clone(); + } + _ => { + *entry.get_mut() = adjustments; + } + } + } + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(adjustments); + } + } } fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) { @@ -1477,21 +1502,22 @@ impl<'a> InferenceContext<'a> { &self.diagnostics, InferenceTyDiagnosticSource::Body, ); + let mut path_ctx = ctx.at_path(path, node); let (resolution, unresolved) = if value_ns { - let Some(res) = ctx.resolve_path_in_value_ns(path, node, HygieneId::ROOT) else { + let Some(res) = path_ctx.resolve_path_in_value_ns(HygieneId::ROOT) else { return (self.err_ty(), None); }; match res { ResolveValueResult::ValueNs(value, _) => match value { ValueNs::EnumVariantId(var) => { - let substs = ctx.substs_from_path(path, var.into(), true); + let substs = path_ctx.substs_from_path(var.into(), true); drop(ctx); let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); return (ty, Some(var.into())); } ValueNs::StructId(strukt) => { - let substs = ctx.substs_from_path(path, strukt.into(), true); + let substs = path_ctx.substs_from_path(strukt.into(), true); drop(ctx); let ty = self.db.ty(strukt.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); @@ -1506,7 +1532,7 @@ impl<'a> InferenceContext<'a> { ResolveValueResult::Partial(typens, unresolved, _) => (typens, Some(unresolved)), } } else { - match ctx.resolve_path_in_type_ns(path, node) { + match path_ctx.resolve_path_in_type_ns() { Some((it, idx)) => (it, idx), None => return (self.err_ty(), None), } @@ -1517,21 +1543,21 @@ impl<'a> InferenceContext<'a> { }; return match resolution { TypeNs::AdtId(AdtId::StructId(strukt)) => { - let substs = ctx.substs_from_path(path, strukt.into(), true); + let substs = path_ctx.substs_from_path(strukt.into(), true); drop(ctx); let ty = self.db.ty(strukt.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); forbid_unresolved_segments((ty, Some(strukt.into())), unresolved) } TypeNs::AdtId(AdtId::UnionId(u)) => { - let substs = ctx.substs_from_path(path, u.into(), true); + let substs = path_ctx.substs_from_path(u.into(), true); drop(ctx); let ty = self.db.ty(u.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); forbid_unresolved_segments((ty, Some(u.into())), unresolved) } TypeNs::EnumVariantId(var) => { - let substs = ctx.substs_from_path(path, var.into(), true); + let substs = path_ctx.substs_from_path(var.into(), true); drop(ctx); let ty = 
self.db.ty(var.lookup(self.db.upcast()).parent.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); @@ -1542,31 +1568,32 @@ impl<'a> InferenceContext<'a> { let substs = generics.placeholder_subst(self.db); let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); - let Some(mut remaining_idx) = unresolved else { + let Some(remaining_idx) = unresolved else { drop(ctx); return self.resolve_variant_on_alias(ty, None, mod_path); }; let mut remaining_segments = path.segments().skip(remaining_idx); + if remaining_segments.len() >= 2 { + path_ctx.ignore_last_segment(); + } + // We need to try resolving unresolved segments one by one because each may resolve // to a projection, which `TyLoweringContext` cannot handle on its own. let mut tried_resolving_once = false; - while !remaining_segments.is_empty() { - let resolved_segment = path.segments().get(remaining_idx - 1).unwrap(); - let current_segment = remaining_segments.take(1); - + while let Some(current_segment) = remaining_segments.first() { // If we can resolve to an enum variant, it takes priority over associated type // of the same name. if let Some((AdtId::EnumId(id), _)) = ty.as_adt() { let enum_data = self.db.enum_data(id); - let name = current_segment.first().unwrap().name; - if let Some(variant) = enum_data.variant(name) { + if let Some(variant) = enum_data.variant(current_segment.name) { return if remaining_segments.len() == 1 { (ty, Some(variant.into())) } else { // We still have unresolved paths, but enum variants never have // associated types! + // FIXME: Report an error. (self.err_ty(), None) }; } @@ -1575,23 +1602,13 @@ impl<'a> InferenceContext<'a> { if tried_resolving_once { // FIXME: with `inherent_associated_types` this is allowed, but our `lower_partly_resolved_path()` // will need to be updated to err at the correct segment. - // - // We need to stop here because otherwise the segment index passed to `lower_partly_resolved_path()` - // will be incorrect, and that can mess up error reporting. break; } // `lower_partly_resolved_path()` returns `None` as type namespace unless // `remaining_segments` is empty, which is never the case here. We don't know // which namespace the new `ty` is in until normalized anyway. - (ty, _) = ctx.lower_partly_resolved_path( - node, - resolution, - resolved_segment, - current_segment, - (remaining_idx - 1) as u32, - false, - ); + (ty, _) = path_ctx.lower_partly_resolved_path(resolution, false); tried_resolving_once = true; ty = self.table.insert_type_vars(ty); @@ -1601,8 +1618,6 @@ impl<'a> InferenceContext<'a> { return (self.err_ty(), None); } - // FIXME(inherent_associated_types): update `resolution` based on `ty` here. 
- remaining_idx += 1; remaining_segments = remaining_segments.skip(1); } drop(ctx); @@ -1618,12 +1633,7 @@ impl<'a> InferenceContext<'a> { (ty, variant) } TypeNs::TypeAliasId(it) => { - let resolved_seg = match unresolved { - None => path.segments().last().unwrap(), - Some(n) => path.segments().get(path.segments().len() - n - 1).unwrap(), - }; - let substs = - ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None); + let substs = path_ctx.substs_from_path_segment(it.into(), true, None); drop(ctx); let ty = self.db.ty(it.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs index 21d0be6ed5f..eb193686e96 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs @@ -374,6 +374,7 @@ enum PointerKind { fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<PointerKind>, ()> { let ty = table.resolve_ty_shallow(ty); + let ty = table.normalize_associated_types_in(ty); if table.is_sized(&ty) { return Ok(Some(PointerKind::Thin)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index d40816ba8ce..acd86b1f3ed 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -163,10 +163,27 @@ impl CoerceMany { // type is a type variable and the new one is `!`, trying it the other // way around first would mean we make the type variable `!`, instead of // just marking it as possibly diverging. - if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) { - self.final_ty = Some(res); - } else if let Ok(res) = ctx.coerce(expr, &self.merged_ty(), &expr_ty, CoerceNever::Yes) { + // + // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335) + // First try to coerce the new expression to the type of the previous ones, + // but only if the new expression has no coercion already applied to it. + if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) { + if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) { + self.final_ty = Some(res); + if let Some(expr) = expr { + self.expressions.push(expr); + } + return; + } + } + + if let Ok((adjustments, res)) = + ctx.coerce_inner(&self.merged_ty(), &expr_ty, CoerceNever::Yes) + { self.final_ty = Some(res); + for &e in &self.expressions { + ctx.write_expr_adj(e, adjustments.clone()); + } } else { match cause { CoercionCause::Expr(id) => { @@ -244,14 +261,23 @@ impl InferenceContext<'_> { // between places and values. 
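As background for the `CoerceMany` ordering spelled out in the comment above, the scenario it protects is a branching expression whose first-seen arm diverges while the merged type is still an inference variable; a small standalone example:

```rust
fn pick(flag: bool) -> Vec<i32> {
    // The merged type of the arms starts as an inference variable. The
    // diverging `panic!()` arm (type `!`) must not pin that variable to `!`,
    // or the later `vec![1]` arm could no longer unify with it.
    match flag {
        false => panic!("no value"),
        true => vec![1],
    }
}
```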
coerce_never: CoerceNever, ) -> Result<Ty, TypeError> { - let from_ty = self.resolve_ty_shallow(from_ty); - let to_ty = self.resolve_ty_shallow(to_ty); - let (adjustments, ty) = self.table.coerce(&from_ty, &to_ty, coerce_never)?; + let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?; if let Some(expr) = expr { self.write_expr_adj(expr, adjustments); } Ok(ty) } + + fn coerce_inner( + &mut self, + from_ty: &Ty, + to_ty: &Ty, + coerce_never: CoerceNever, + ) -> Result<(Vec<Adjustment>, Ty), TypeError> { + let from_ty = self.resolve_ty_shallow(from_ty); + let to_ty = self.resolve_ty_shallow(to_ty); + self.table.coerce(&from_ty, &to_ty, coerce_never) + } } impl InferenceTable<'_> { @@ -373,7 +399,7 @@ impl InferenceTable<'_> { // Check that the types which they point at are compatible. let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner); - // Although references and unsafe ptrs have the same + // Although references and raw ptrs have the same // representation, we still register an Adjust::DerefRef so that // regionck knows that the region for `a` must be valid here. if is_ref { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs index 032dc37899d..e4f5b5ed378 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs @@ -5,16 +5,14 @@ use std::cell::RefCell; use std::ops::{Deref, DerefMut}; -use hir_def::body::HygieneId; -use hir_def::hir::ExprOrPatId; -use hir_def::path::{Path, PathSegment, PathSegments}; -use hir_def::resolver::{ResolveValueResult, Resolver, TypeNs}; -use hir_def::type_ref::TypesMap; -use hir_def::TypeOwnerId; - -use crate::db::HirDatabase; +use either::Either; +use hir_def::{hir::ExprOrPatId, path::Path, resolver::Resolver, type_ref::TypesMap, TypeOwnerId}; +use la_arena::{Idx, RawIdx}; + use crate::{ - InferenceDiagnostic, InferenceTyDiagnosticSource, Ty, TyLoweringContext, TyLoweringDiagnostic, + db::HirDatabase, + lower::path::{PathDiagnosticCallback, PathLoweringContext}, + InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic, }; // Unfortunately, this struct needs to use interior mutability (but we encapsulate it) @@ -44,6 +42,11 @@ impl Diagnostics { } } +pub(crate) struct PathDiagnosticCallbackData<'a> { + node: ExprOrPatId, + diagnostics: &'a Diagnostics, +} + pub(super) struct InferenceTyLoweringContext<'a> { ctx: TyLoweringContext<'a>, diagnostics: &'a Diagnostics, @@ -51,6 +54,7 @@ pub(super) struct InferenceTyLoweringContext<'a> { } impl<'a> InferenceTyLoweringContext<'a> { + #[inline] pub(super) fn new( db: &'a dyn HirDatabase, resolver: &'a Resolver, @@ -62,65 +66,62 @@ impl<'a> InferenceTyLoweringContext<'a> { Self { ctx: TyLoweringContext::new(db, resolver, types_map, owner), diagnostics, source } } - pub(super) fn resolve_path_in_type_ns( - &mut self, - path: &Path, + #[inline] + pub(super) fn at_path<'b>( + &'b mut self, + path: &'b Path, node: ExprOrPatId, - ) -> Option<(TypeNs, Option<usize>)> { - let diagnostics = self.diagnostics; - self.ctx.resolve_path_in_type_ns(path, &mut |_, diag| { - diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag }) - }) + ) -> PathLoweringContext<'b, 'a> { + let on_diagnostic = PathDiagnosticCallback { + data: Either::Right(PathDiagnosticCallbackData { diagnostics: self.diagnostics, node }), + callback: |data, _, diag| { + let data = 
data.as_ref().right().unwrap(); + data.diagnostics + .push(InferenceDiagnostic::PathDiagnostic { node: data.node, diag }); + }, + }; + PathLoweringContext::new(&mut self.ctx, on_diagnostic, path) } - pub(super) fn resolve_path_in_value_ns( - &mut self, - path: &Path, - node: ExprOrPatId, - hygiene_id: HygieneId, - ) -> Option<ResolveValueResult> { - let diagnostics = self.diagnostics; - self.ctx.resolve_path_in_value_ns(path, hygiene_id, &mut |_, diag| { - diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag }) - }) + #[inline] + pub(super) fn at_path_forget_diagnostics<'b>( + &'b mut self, + path: &'b Path, + ) -> PathLoweringContext<'b, 'a> { + let on_diagnostic = PathDiagnosticCallback { + data: Either::Right(PathDiagnosticCallbackData { + diagnostics: self.diagnostics, + node: ExprOrPatId::ExprId(Idx::from_raw(RawIdx::from_u32(0))), + }), + callback: |_data, _, _diag| {}, + }; + PathLoweringContext::new(&mut self.ctx, on_diagnostic, path) } - pub(super) fn lower_partly_resolved_path( - &mut self, - node: ExprOrPatId, - resolution: TypeNs, - resolved_segment: PathSegment<'_>, - remaining_segments: PathSegments<'_>, - resolved_segment_idx: u32, - infer_args: bool, - ) -> (Ty, Option<TypeNs>) { - let diagnostics = self.diagnostics; - self.ctx.lower_partly_resolved_path( - resolution, - resolved_segment, - remaining_segments, - resolved_segment_idx, - infer_args, - &mut |_, diag| diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag }), - ) + #[inline] + pub(super) fn forget_diagnostics(&mut self) { + self.ctx.diagnostics.clear(); } } impl<'a> Deref for InferenceTyLoweringContext<'a> { type Target = TyLoweringContext<'a>; + #[inline] fn deref(&self) -> &Self::Target { &self.ctx } } impl DerefMut for InferenceTyLoweringContext<'_> { + #[inline] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.ctx } } impl Drop for InferenceTyLoweringContext<'_> { + #[inline] fn drop(&mut self) { self.diagnostics .push_ty_diagnostics(self.source, std::mem::take(&mut self.ctx.diagnostics)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index b951443897c..80e3ca1fa28 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -43,9 +43,9 @@ use crate::{ primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, traits::FnTrait, - Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, FnAbi, FnPointer, - FnSig, FnSubst, Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, - TyBuilder, TyExt, TyKind, + Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext, + DeclOrigin, FnAbi, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, Substitution, + TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; use super::{ @@ -334,7 +334,11 @@ impl InferenceContext<'_> { ExprIsRead::No }; let input_ty = self.infer_expr(expr, &Expectation::none(), child_is_read); - self.infer_top_pat(pat, &input_ty); + self.infer_top_pat( + pat, + &input_ty, + Some(DeclContext { origin: DeclOrigin::LetExpr }), + ); self.result.standard_types.bool_.clone() } Expr::Block { statements, tail, label, id } => { @@ -461,7 +465,7 @@ impl InferenceContext<'_> { // Now go through the argument patterns for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) { - self.infer_top_pat(*arg_pat, arg_ty); + self.infer_top_pat(*arg_pat, arg_ty, None); } // FIXME: lift these out into a struct @@ 
-485,78 +489,7 @@ impl InferenceContext<'_> { ty } - Expr::Call { callee, args, .. } => { - let callee_ty = self.infer_expr(*callee, &Expectation::none(), ExprIsRead::Yes); - let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true); - let (res, derefed_callee) = loop { - let Some((callee_deref_ty, _)) = derefs.next() else { - break (None, callee_ty.clone()); - }; - if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) { - break (Some(res), callee_deref_ty); - } - }; - // if the function is unresolved, we use is_varargs=true to - // suppress the arg count diagnostic here - let is_varargs = - derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs) - || res.is_none(); - let (param_tys, ret_ty) = match res { - Some((func, params, ret_ty)) => { - let mut adjustments = auto_deref_adjust_steps(&derefs); - if let TyKind::Closure(c, _) = - self.table.resolve_completely(callee_ty.clone()).kind(Interner) - { - if let Some(par) = self.current_closure { - self.closure_dependencies.entry(par).or_default().push(*c); - } - self.deferred_closures.entry(*c).or_default().push(( - derefed_callee.clone(), - callee_ty.clone(), - params.clone(), - tgt_expr, - )); - } - if let Some(fn_x) = func { - self.write_fn_trait_method_resolution( - fn_x, - &derefed_callee, - &mut adjustments, - &callee_ty, - ¶ms, - tgt_expr, - ); - } - self.write_expr_adj(*callee, adjustments); - (params, ret_ty) - } - None => { - self.push_diagnostic(InferenceDiagnostic::ExpectedFunction { - call_expr: tgt_expr, - found: callee_ty.clone(), - }); - (Vec::new(), self.err_ty()) - } - }; - let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args); - self.register_obligations_for_call(&callee_ty); - - let expected_inputs = self.expected_inputs_for_expected_output( - expected, - ret_ty.clone(), - param_tys.clone(), - ); - - self.check_call_arguments( - tgt_expr, - args, - &expected_inputs, - ¶m_tys, - &indices_to_skip, - is_varargs, - ); - self.normalize_associated_types_in(ret_ty) - } + Expr::Call { callee, args, .. 
} => self.infer_call(tgt_expr, *callee, args, expected), Expr::MethodCall { receiver, args, method_name, generic_args } => self .infer_method_call( tgt_expr, @@ -582,7 +515,7 @@ impl InferenceContext<'_> { let mut all_arms_diverge = Diverges::Always; for arm in arms.iter() { let input_ty = self.resolve_ty_shallow(&input_ty); - self.infer_top_pat(arm.pat, &input_ty); + self.infer_top_pat(arm.pat, &input_ty, None); } let expected = expected.adjust_for_branches(&mut self.table); @@ -927,7 +860,7 @@ impl InferenceContext<'_> { let resolver_guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); self.inside_assignment = true; - self.infer_top_pat(target, &rhs_ty); + self.infer_top_pat(target, &rhs_ty, None); self.inside_assignment = false; self.resolver.reset_to_guard(resolver_guard); } @@ -1632,8 +1565,11 @@ impl InferenceContext<'_> { decl_ty }; - this.infer_top_pat(*pat, &ty); + let decl = DeclContext { + origin: DeclOrigin::LocalDecl { has_else: else_branch.is_some() }, + }; + this.infer_top_pat(*pat, &ty, Some(decl)); if let Some(expr) = else_branch { let previous_diverges = mem::replace(&mut this.diverges, Diverges::Maybe); @@ -1865,6 +1801,107 @@ impl InferenceContext<'_> { } } + fn infer_call( + &mut self, + tgt_expr: ExprId, + callee: ExprId, + args: &[ExprId], + expected: &Expectation, + ) -> Ty { + let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes); + let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true); + let (res, derefed_callee) = loop { + let Some((callee_deref_ty, _)) = derefs.next() else { + break (None, callee_ty.clone()); + }; + if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) { + break (Some(res), callee_deref_ty); + } + }; + // if the function is unresolved, we use is_varargs=true to + // suppress the arg count diagnostic here + let is_varargs = + derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs) || res.is_none(); + let (param_tys, ret_ty) = match res { + Some((func, params, ret_ty)) => { + let mut adjustments = auto_deref_adjust_steps(&derefs); + if let TyKind::Closure(c, _) = + self.table.resolve_completely(callee_ty.clone()).kind(Interner) + { + if let Some(par) = self.current_closure { + self.closure_dependencies.entry(par).or_default().push(*c); + } + self.deferred_closures.entry(*c).or_default().push(( + derefed_callee.clone(), + callee_ty.clone(), + params.clone(), + tgt_expr, + )); + } + if let Some(fn_x) = func { + self.write_fn_trait_method_resolution( + fn_x, + &derefed_callee, + &mut adjustments, + &callee_ty, + ¶ms, + tgt_expr, + ); + } + self.write_expr_adj(callee, adjustments); + (params, ret_ty) + } + None => { + self.push_diagnostic(InferenceDiagnostic::ExpectedFunction { + call_expr: tgt_expr, + found: callee_ty.clone(), + }); + (Vec::new(), self.err_ty()) + } + }; + let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args); + self.check_call( + tgt_expr, + args, + callee_ty, + ¶m_tys, + ret_ty, + &indices_to_skip, + is_varargs, + expected, + ) + } + + fn check_call( + &mut self, + tgt_expr: ExprId, + args: &[ExprId], + callee_ty: Ty, + param_tys: &[Ty], + ret_ty: Ty, + indices_to_skip: &[u32], + is_varargs: bool, + expected: &Expectation, + ) -> Ty { + self.register_obligations_for_call(&callee_ty); + + let expected_inputs = self.expected_inputs_for_expected_output( + expected, + ret_ty.clone(), + param_tys.to_owned(), + ); + + self.check_call_arguments( + tgt_expr, + args, + &expected_inputs, + param_tys, 
+ indices_to_skip, + is_varargs, + ); + self.normalize_associated_types_in(ret_ty) + } + fn infer_method_call( &mut self, tgt_expr: ExprId, @@ -1885,21 +1922,32 @@ impl InferenceContext<'_> { VisibleFromModule::Filter(self.resolver.module()), method_name, ); - let (receiver_ty, method_ty, substs) = match resolved { + match resolved { Some((adjust, func, visible)) => { - let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); - let generics = generics(self.db.upcast(), func.into()); - let substs = self.substs_for_method_call(generics, generic_args); - self.write_expr_adj(receiver, adjustments); - self.write_method_resolution(tgt_expr, func, substs.clone()); if !visible { self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id: tgt_expr.into(), item: func.into(), }) } - (ty, self.db.value_ty(func.into()).unwrap(), substs) + + let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); + self.write_expr_adj(receiver, adjustments); + + let generics = generics(self.db.upcast(), func.into()); + let substs = self.substs_for_method_call(generics, generic_args); + self.write_method_resolution(tgt_expr, func, substs.clone()); + self.check_method_call( + tgt_expr, + args, + self.db.value_ty(func.into()).expect("we have a function def"), + substs, + ty, + expected, + ) } + // Failed to resolve, report diagnostic and try to resolve as call to field access or + // assoc function None => { let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name) { @@ -1919,12 +1967,11 @@ impl InferenceContext<'_> { VisibleFromModule::Filter(self.resolver.module()), Some(method_name), method_resolution::LookupMode::Path, - |_ty, item, visible| { - if visible { - Some(item) - } else { - None + |_ty, item, visible| match item { + hir_def::AssocItemId::FunctionId(function_id) if visible => { + Some(function_id) } + _ => None, }, ); @@ -1932,17 +1979,45 @@ impl InferenceContext<'_> { expr: tgt_expr, receiver: receiver_ty.clone(), name: method_name.clone(), - field_with_same_name: field_with_same_name_exists, + field_with_same_name: field_with_same_name_exists.clone(), assoc_func_with_same_name, }); - ( - receiver_ty, - Binders::empty(Interner, self.err_ty()), - Substitution::empty(Interner), - ) + + let recovered = match assoc_func_with_same_name { + Some(f) => { + let generics = generics(self.db.upcast(), f.into()); + let substs = self.substs_for_method_call(generics, generic_args); + let f = self + .db + .value_ty(f.into()) + .expect("we have a function def") + .substitute(Interner, &substs); + let sig = f.callable_sig(self.db).expect("we have a function def"); + Some((f, sig, true)) + } + None => field_with_same_name_exists.and_then(|field_ty| { + let callable_sig = field_ty.callable_sig(self.db)?; + Some((field_ty, callable_sig, false)) + }), + }; + match recovered { + Some((callee_ty, sig, strip_first)) => self.check_call( + tgt_expr, + args, + callee_ty, + sig.params().get(strip_first as usize..).unwrap_or(&[]), + sig.ret().clone(), + &[], + true, + expected, + ), + None => { + self.check_call_arguments(tgt_expr, args, &[], &[], &[], true); + self.err_ty() + } + } } - }; - self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected) + } } fn check_method_call( @@ -2012,9 +2087,10 @@ impl InferenceContext<'_> { expected_inputs: &[Ty], param_tys: &[Ty], skip_indices: &[u32], - is_varargs: bool, + ignore_arg_param_mismatch: bool, ) { - let arg_count_mismatch = args.len() != param_tys.len() + skip_indices.len() && !is_varargs; + let 
arg_count_mismatch = + !ignore_arg_param_mismatch && args.len() != param_tys.len() + skip_indices.len(); if arg_count_mismatch { self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount { call_expr: expr, @@ -2043,7 +2119,7 @@ impl InferenceContext<'_> { continue; } - while skip_indices.peek().is_some_and(|i| *i < idx as u32) { + while skip_indices.peek().is_some_and(|&i| i < idx as u32) { skip_indices.next(); } if skip_indices.peek().copied() == Some(idx as u32) { @@ -2125,8 +2201,8 @@ impl InferenceContext<'_> { for kind_id in def_generics.iter_self_id().take(self_params) { let arg = args.peek(); let arg = match (kind_id, arg) { - // Lifetimes can be elided. - // Once we have implemented lifetime elision correctly, + // Lifetimes can be inferred. + // Once we have implemented lifetime inference correctly, // this should be handled in a proper way. ( GenericParamId::LifetimeParamId(_), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 00398f019da..db93116f107 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -3,23 +3,24 @@ use std::iter::repeat_with; use hir_def::{ - body::Body, + expr_store::Body, hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId}, path::Path, + HasModule, }; use hir_expand::name::Name; use stdx::TupleExt; use crate::{ - consteval::{try_const_usize, usize_const}, + consteval::{self, try_const_usize, usize_const}, infer::{ coerce::CoerceNever, expr::ExprIsRead, BindingMode, Expectation, InferenceContext, TypeMismatch, }, lower::lower_to_chalk_mutability, primitive::UintTy, - static_lifetime, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty, - TyBuilder, TyExt, TyKind, + static_lifetime, DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar, + Substitution, Ty, TyBuilder, TyExt, TyKind, }; impl InferenceContext<'_> { @@ -34,6 +35,7 @@ impl InferenceContext<'_> { id: PatId, ellipsis: Option<u32>, subs: &[PatId], + decl: Option<DeclContext>, ) -> Ty { let (ty, def) = self.resolve_variant(id.into(), path, true); let var_data = def.map(|it| it.variant_data(self.db.upcast())); @@ -92,13 +94,13 @@ impl InferenceContext<'_> { } }; - self.infer_pat(subpat, &expected_ty, default_bm); + self.infer_pat(subpat, &expected_ty, default_bm, decl); } } None => { let err_ty = self.err_ty(); for &inner in subs { - self.infer_pat(inner, &err_ty, default_bm); + self.infer_pat(inner, &err_ty, default_bm, decl); } } } @@ -114,6 +116,7 @@ impl InferenceContext<'_> { default_bm: BindingMode, id: PatId, subs: impl ExactSizeIterator<Item = (Name, PatId)>, + decl: Option<DeclContext>, ) -> Ty { let (ty, def) = self.resolve_variant(id.into(), path, false); if let Some(variant) = def { @@ -162,13 +165,13 @@ impl InferenceContext<'_> { } }; - self.infer_pat(inner, &expected_ty, default_bm); + self.infer_pat(inner, &expected_ty, default_bm, decl); } } None => { let err_ty = self.err_ty(); for (_, inner) in subs { - self.infer_pat(inner, &err_ty, default_bm); + self.infer_pat(inner, &err_ty, default_bm, decl); } } } @@ -185,6 +188,7 @@ impl InferenceContext<'_> { default_bm: BindingMode, ellipsis: Option<u32>, subs: &[PatId], + decl: Option<DeclContext>, ) -> Ty { let expected = self.resolve_ty_shallow(expected); let expectations = match expected.as_tuple() { @@ -209,12 +213,12 @@ impl InferenceContext<'_> { // Process pre for (ty, pat) in inner_tys.iter_mut().zip(pre) { - 
*ty = self.infer_pat(*pat, ty, default_bm); + *ty = self.infer_pat(*pat, ty, default_bm, decl); } // Process post for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) { - *ty = self.infer_pat(*pat, ty, default_bm); + *ty = self.infer_pat(*pat, ty, default_bm, decl); } TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys)) @@ -223,11 +227,17 @@ impl InferenceContext<'_> { /// The resolver needs to be updated to the surrounding expression when inside assignment /// (because there, `Pat::Path` can refer to a variable). - pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty) { - self.infer_pat(pat, expected, BindingMode::default()); + pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty, decl: Option<DeclContext>) { + self.infer_pat(pat, expected, BindingMode::default(), decl); } - fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty { + fn infer_pat( + &mut self, + pat: PatId, + expected: &Ty, + mut default_bm: BindingMode, + decl: Option<DeclContext>, + ) -> Ty { let mut expected = self.resolve_ty_shallow(expected); if matches!(&self.body[pat], Pat::Ref { .. }) || self.inside_assignment { @@ -261,11 +271,11 @@ impl InferenceContext<'_> { let ty = match &self.body[pat] { Pat::Tuple { args, ellipsis } => { - self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args) + self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args, decl) } Pat::Or(pats) => { for pat in pats.iter() { - self.infer_pat(*pat, &expected, default_bm); + self.infer_pat(*pat, &expected, default_bm, decl); } expected.clone() } @@ -274,6 +284,7 @@ impl InferenceContext<'_> { lower_to_chalk_mutability(mutability), &expected, default_bm, + decl, ), Pat::TupleStruct { path: p, args: subpats, ellipsis } => self .infer_tuple_struct_pat_like( @@ -283,10 +294,11 @@ impl InferenceContext<'_> { pat, *ellipsis, subpats, + decl, ), Pat::Record { path: p, args: fields, ellipsis: _ } => { let subs = fields.iter().map(|f| (f.name.clone(), f.pat)); - self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs) + self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs, decl) } Pat::Path(path) => { let ty = self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty()); @@ -319,10 +331,10 @@ impl InferenceContext<'_> { } } Pat::Bind { id, subpat } => { - return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected); + return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected, decl); } Pat::Slice { prefix, slice, suffix } => { - self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm) + self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm, decl) } Pat::Wild => expected.clone(), Pat::Range { .. 
} => { @@ -345,7 +357,7 @@ impl InferenceContext<'_> { _ => (self.result.standard_types.unknown.clone(), None), }; - let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm); + let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm, decl); let mut b = TyBuilder::adt(self.db, box_adt).push(inner_ty); if let Some(alloc_ty) = alloc_ty { @@ -420,6 +432,7 @@ impl InferenceContext<'_> { mutability: Mutability, expected: &Ty, default_bm: BindingMode, + decl: Option<DeclContext>, ) -> Ty { let (expectation_type, expectation_lt) = match expected.as_reference() { Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()), @@ -433,7 +446,7 @@ impl InferenceContext<'_> { (inner_ty, inner_lt) } }; - let subty = self.infer_pat(inner_pat, &expectation_type, default_bm); + let subty = self.infer_pat(inner_pat, &expectation_type, default_bm, decl); TyKind::Ref(mutability, expectation_lt, subty).intern(Interner) } @@ -444,6 +457,7 @@ impl InferenceContext<'_> { default_bm: BindingMode, subpat: Option<PatId>, expected: &Ty, + decl: Option<DeclContext>, ) -> Ty { let Binding { mode, .. } = self.body.bindings[binding]; let mode = if mode == BindingAnnotation::Unannotated { @@ -454,7 +468,7 @@ impl InferenceContext<'_> { self.result.binding_modes.insert(pat, mode); let inner_ty = match subpat { - Some(subpat) => self.infer_pat(subpat, expected, default_bm), + Some(subpat) => self.infer_pat(subpat, expected, default_bm, decl), None => expected.clone(), }; let inner_ty = self.insert_type_vars_shallow(inner_ty); @@ -478,14 +492,28 @@ impl InferenceContext<'_> { slice: &Option<PatId>, suffix: &[PatId], default_bm: BindingMode, + decl: Option<DeclContext>, ) -> Ty { + let expected = self.resolve_ty_shallow(expected); + + // If `expected` is an infer ty, we try to equate it to an array if the given pattern + // allows it. See issue #16609 + if self.pat_is_irrefutable(decl) && expected.is_ty_var() { + if let Some(resolved_array_ty) = + self.try_resolve_slice_ty_to_array_ty(prefix, suffix, slice) + { + self.unify(&expected, &resolved_array_ty); + } + } + + let expected = self.resolve_ty_shallow(&expected); let elem_ty = match expected.kind(Interner) { TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(), _ => self.err_ty(), }; for &pat_id in prefix.iter().chain(suffix.iter()) { - self.infer_pat(pat_id, &elem_ty, default_bm); + self.infer_pat(pat_id, &elem_ty, default_bm, decl); } if let &Some(slice_pat_id) = slice { @@ -499,7 +527,7 @@ impl InferenceContext<'_> { _ => TyKind::Slice(elem_ty.clone()), } .intern(Interner); - self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm); + self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm, decl); } match expected.kind(Interner) { @@ -528,7 +556,7 @@ impl InferenceContext<'_> { self.infer_expr(expr, &Expectation::has_type(expected.clone()), ExprIsRead::Yes) } - fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool { + fn is_non_ref_pat(&mut self, body: &hir_def::expr_store::Body, pat: PatId) -> bool { match &body[pat] { Pat::Tuple { .. } | Pat::TupleStruct { .. } @@ -536,9 +564,10 @@ impl InferenceContext<'_> { | Pat::Range { .. } | Pat::Slice { .. } => true, Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)), - Pat::Path(p) => { - let v = self.resolve_value_path_inner(p, pat.into()); - v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_))) + Pat::Path(path) => { + // A const is a reference pattern, but other value ns things aren't (see #16131). 
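The slice-pattern handling above can now complete an unconstrained expected type to a fixed-length array when the pattern sits in an irrefutable position (issue 16609). A hypothetical sketch of code that relies on this, with `Pair` invented for the example:

```rust
struct Pair;

impl From<Pair> for [u32; 2] {
    fn from(_: Pair) -> [u32; 2] {
        [1, 2]
    }
}

fn demo() -> u32 {
    // The irrefutable `[a, b]` pattern pins the expected type of the
    // initializer to a two-element array, which is enough to pick the
    // `[u32; 2]` conversion without an explicit annotation.
    let [a, b] = Pair.into();
    a + b
}
```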
+ let resolved = self.resolve_value_path_inner(path, pat.into(), true); + resolved.is_some_and(|it| !matches!(it.0, hir_def::resolver::ValueNs::ConstId(_))) } Pat::ConstBlock(..) => false, Pat::Lit(expr) => !matches!( @@ -553,6 +582,59 @@ impl InferenceContext<'_> { | Pat::Expr(_) => false, } } + + fn try_resolve_slice_ty_to_array_ty( + &mut self, + before: &[PatId], + suffix: &[PatId], + slice: &Option<PatId>, + ) -> Option<Ty> { + if !slice.is_none() { + return None; + } + + let len = before.len() + suffix.len(); + let size = + consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db.upcast())); + + let elem_ty = self.table.new_type_var(); + let array_ty = TyKind::Array(elem_ty.clone(), size).intern(Interner); + Some(array_ty) + } + + /// Used to determine whether we can infer the expected type in the slice pattern to be of type array. + /// This is only possible if we're in an irrefutable pattern. If we were to allow this in refutable + /// patterns we wouldn't e.g. report ambiguity in the following situation: + /// + /// ```ignore(rust) + /// struct Zeroes; + /// const ARR: [usize; 2] = [0; 2]; + /// const ARR2: [usize; 2] = [2; 2]; + /// + /// impl Into<&'static [usize; 2]> for Zeroes { + /// fn into(self) -> &'static [usize; 2] { + /// &ARR + /// } + /// } + /// + /// impl Into<&'static [usize]> for Zeroes { + /// fn into(self) -> &'static [usize] { + /// &ARR2 + /// } + /// } + /// + /// fn main() { + /// let &[a, b]: &[usize] = Zeroes.into() else { + /// .. + /// }; + /// } + /// ``` + /// + /// If we're in an irrefutable pattern we prefer the array impl candidate given that + /// the slice impl candidate would be rejected anyway (if no ambiguity existed). + fn pat_is_irrefutable(&self, decl_ctxt: Option<DeclContext>) -> bool { + matches!(decl_ctxt, Some(DeclContext { origin: DeclOrigin::LocalDecl { has_else: false } })) + } } pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index 73bcefaf2a9..6254bc12392 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -7,7 +7,6 @@ use hir_def::{ AdtId, AssocItemId, GenericDefId, ItemContainerId, Lookup, }; use hir_expand::name::Name; -use intern::sym; use stdx::never; use crate::{ @@ -41,7 +40,7 @@ impl InferenceContext<'_> { } fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> { - let (value, self_subst) = self.resolve_value_path_inner(path, id)?; + let (value, self_subst) = self.resolve_value_path_inner(path, id, false)?; let value_def: ValueTyDefId = match value { ValueNs::FunctionId(it) => it.into(), @@ -86,16 +85,22 @@ impl InferenceContext<'_> { } }; - let generic_def_id = value_def.to_generic_def_id(self.db); - let Some(generic_def) = generic_def_id else { - // `value_def` is the kind of item that can never be generic (i.e. statics, at least - // currently). We can just skip the binders to get its type. + let generic_def = value_def.to_generic_def_id(self.db); + if let GenericDefId::StaticId(_) = generic_def { + // `Static` is the kind of item that can never be generic currently. We can just skip the binders to get its type. 
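The shortcut above covers value paths that name a static, currently the one kind of value item that can never be generic; a trivial invented example of that shape:

```rust
static GREETING: &str = "hello";

fn greeting() -> &'static str {
    // A static used as a value path has no substitutions to build; its type
    // can be read off directly.
    GREETING
}
```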
let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders(); stdx::always!(binders.is_empty(Interner), "non-empty binders for non-generic def",); return Some(ValuePathResolution::NonGeneric(ty)); }; - let substs = self.with_body_ty_lowering(|ctx| ctx.substs_from_path(path, value_def, true)); + let substs = self.with_body_ty_lowering(|ctx| { + let mut path_ctx = ctx.at_path(path, id); + let last_segment = path.segments().len().checked_sub(1); + if let Some(last_segment) = last_segment { + path_ctx.set_current_segment(last_segment) + } + path_ctx.substs_from_path(value_def, true) + }); let substs = substs.as_slice(Interner); if let ValueNs::EnumVariantId(_) = value { @@ -122,7 +127,7 @@ impl InferenceContext<'_> { } let parent_substs = self_subst.or_else(|| { - let generics = generics(self.db.upcast(), generic_def_id?); + let generics = generics(self.db.upcast(), generic_def); let parent_params_len = generics.parent_generics()?.len(); let parent_args = &substs[substs.len() - parent_params_len..]; Some(Substitution::from_iter(Interner, parent_args)) @@ -147,6 +152,7 @@ impl InferenceContext<'_> { &mut self, path: &Path, id: ExprOrPatId, + no_diagnostics: bool, ) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> { // Don't use `self.make_ty()` here as we need `orig_ns`. let mut ctx = TyLoweringContext::new( @@ -157,33 +163,83 @@ impl InferenceContext<'_> { &self.diagnostics, InferenceTyDiagnosticSource::Body, ); + let mut path_ctx = if no_diagnostics { + ctx.at_path_forget_diagnostics(path) + } else { + ctx.at_path(path, id) + }; let (value, self_subst) = if let Some(type_ref) = path.type_anchor() { let last = path.segments().last()?; - let (ty, orig_ns) = ctx.lower_ty_ext(type_ref); + let (ty, orig_ns) = path_ctx.ty_ctx().lower_ty_ext(type_ref); let ty = self.table.insert_type_vars(ty); let ty = self.table.normalize_associated_types_in(ty); - let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1); - let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty); - drop(ctx); + path_ctx.ignore_last_segment(); + let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns); + drop_ctx(ctx, no_diagnostics); let ty = self.table.insert_type_vars(ty); let ty = self.table.normalize_associated_types_in(ty); self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))? } else { let hygiene = self.body.expr_or_pat_path_hygiene(id); // FIXME: report error, unresolved first path segment - let value_or_partial = ctx.resolve_path_in_value_ns(path, id, hygiene)?; - drop(ctx); + let value_or_partial = path_ctx.resolve_path_in_value_ns(hygiene)?; match value_or_partial { - ResolveValueResult::ValueNs(it, _) => (it, None), - ResolveValueResult::Partial(def, remaining_index, _) => self - .resolve_assoc_item(id, def, path, remaining_index, id) - .map(|(it, substs)| (it, Some(substs)))?, + ResolveValueResult::ValueNs(it, _) => { + drop_ctx(ctx, no_diagnostics); + (it, None) + } + ResolveValueResult::Partial(def, remaining_index, _) => { + // there may be more intermediate segments between the resolved one and + // the end. Only the last segment needs to be resolved to a value; from + // the segments before that, we need to get either a type or a trait ref. 
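The two cases sketched in the comment above correspond to familiar path shapes; an invented illustration:

```rust
trait Make {
    fn make() -> Self;
}

struct Widget;

impl Make for Widget {
    fn make() -> Widget {
        Widget
    }
}

fn demo() -> (Vec<u8>, Widget) {
    // `Vec` resolves to a type, so only the trailing `new` is looked up as
    // an associated value on it.
    let v: Vec<u8> = Vec::new();
    // `Make` resolves to a trait; `make` is found as a trait associated
    // function, with the Self type left to inference.
    let w: Widget = Make::make();
    (v, w)
}
```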
+ + let remaining_segments = path.segments().skip(remaining_index); + let is_before_last = remaining_segments.len() == 1; + let last_segment = remaining_segments + .last() + .expect("there should be at least one segment here"); + + let (resolution, substs) = match (def, is_before_last) { + (TypeNs::TraitId(trait_), true) => { + let self_ty = self.table.new_type_var(); + let trait_ref = + path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty); + drop_ctx(ctx, no_diagnostics); + self.resolve_trait_assoc_item(trait_ref, last_segment, id) + } + (def, _) => { + // Either we already have a type (e.g. `Vec::new`), or we have a + // trait but it's not the last segment, so the next segment + // should resolve to an associated type of that trait (e.g. `<T + // as Iterator>::Item::default`) + path_ctx.ignore_last_segment(); + let (ty, _) = path_ctx.lower_partly_resolved_path(def, true); + drop_ctx(ctx, no_diagnostics); + if ty.is_unknown() { + return None; + } + + let ty = self.insert_type_vars(ty); + let ty = self.normalize_associated_types_in(ty); + + self.resolve_ty_assoc_item(ty, last_segment.name, id) + } + }?; + (resolution, Some(substs)) + } } }; - Some((value, self_subst)) + return Some((value, self_subst)); + + #[inline] + fn drop_ctx(mut ctx: TyLoweringContext<'_>, no_diagnostics: bool) { + if no_diagnostics { + ctx.forget_diagnostics(); + } + } } fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) { @@ -213,89 +269,6 @@ impl InferenceContext<'_> { } } - fn resolve_assoc_item( - &mut self, - node: ExprOrPatId, - def: TypeNs, - path: &Path, - remaining_index: usize, - id: ExprOrPatId, - ) -> Option<(ValueNs, Substitution)> { - // there may be more intermediate segments between the resolved one and - // the end. Only the last segment needs to be resolved to a value; from - // the segments before that, we need to get either a type or a trait ref. - - let _d; - let (resolved_segment, remaining_segments) = match path { - Path::Normal { .. } | Path::BarePath(_) => { - assert!(remaining_index < path.segments().len()); - ( - path.segments().get(remaining_index - 1).unwrap(), - path.segments().skip(remaining_index), - ) - } - Path::LangItem(..) => ( - PathSegment { - name: { - _d = Name::new_symbol_root(sym::Unknown.clone()); - &_d - }, - args_and_bindings: None, - }, - path.segments(), - ), - }; - let is_before_last = remaining_segments.len() == 1; - - match (def, is_before_last) { - (TypeNs::TraitId(trait_), true) => { - let segment = - remaining_segments.last().expect("there should be at least one segment here"); - let self_ty = self.table.new_type_var(); - let trait_ref = self.with_body_ty_lowering(|ctx| { - ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty) - }); - self.resolve_trait_assoc_item(trait_ref, segment, id) - } - (def, _) => { - // Either we already have a type (e.g. `Vec::new`), or we have a - // trait but it's not the last segment, so the next segment - // should resolve to an associated type of that trait (e.g. 
`<T - // as Iterator>::Item::default`) - let remaining_segments_for_ty = - remaining_segments.take(remaining_segments.len() - 1); - let mut ctx = TyLoweringContext::new( - self.db, - &self.resolver, - &self.body.types, - self.owner.into(), - &self.diagnostics, - InferenceTyDiagnosticSource::Body, - ); - let (ty, _) = ctx.lower_partly_resolved_path( - node, - def, - resolved_segment, - remaining_segments_for_ty, - (remaining_index - 1) as u32, - true, - ); - drop(ctx); - if ty.is_unknown() { - return None; - } - - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); - - let segment = - remaining_segments.last().expect("there should be at least one segment here"); - - self.resolve_ty_assoc_item(ty, segment.name, id) - } - } - } - fn resolve_trait_assoc_item( &mut self, trait_ref: TraitRef, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index 108171586ea..a72bcad50a0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -15,6 +15,7 @@ use hir_def::{ use la_arena::{Idx, RawIdx}; use rustc_abi::AddressSpace; use rustc_index::{IndexSlice, IndexVec}; +use rustc_hashes::Hash64; use triomphe::Arc; @@ -178,7 +179,7 @@ fn layout_of_simd_ty( .size .checked_mul(e_len, dl) .ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?; - let align = dl.vector_align(size); + let align = dl.llvmlike_vector_align(size); let size = size.align_to(align.abi); // Compute the placement of the vector fields: @@ -193,11 +194,12 @@ fn layout_of_simd_ty( fields, backend_repr: BackendRepr::Vector { element: e_abi, count: e_len }, largest_niche: e_ly.largest_niche, + uninhabited: false, size, align, max_repr_align: None, unadjusted_abi_align: align.abi, - randomization_seed: 0, + randomization_seed: Hash64::ZERO, })) } @@ -296,25 +298,22 @@ pub fn layout_of_ty_query( .checked_mul(count, dl) .ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?; - let backend_repr = - if count != 0 && matches!(element.backend_repr, BackendRepr::Uninhabited) { - BackendRepr::Uninhabited - } else { - BackendRepr::Memory { sized: true } - }; + let backend_repr = BackendRepr::Memory { sized: true }; let largest_niche = if count != 0 { element.largest_niche } else { None }; + let uninhabited = if count != 0 { element.uninhabited } else { false }; Layout { variants: Variants::Single { index: struct_variant_idx() }, fields: FieldsShape::Array { stride: element.size, count }, backend_repr, largest_niche, + uninhabited, align: element.align, size, max_repr_align: None, unadjusted_abi_align: element.align.abi, - randomization_seed: 0, + randomization_seed: Hash64::ZERO, } } TyKind::Slice(element) => { @@ -324,11 +323,12 @@ pub fn layout_of_ty_query( fields: FieldsShape::Array { stride: element.size, count: 0 }, backend_repr: BackendRepr::Memory { sized: false }, largest_niche: None, + uninhabited: false, align: element.align, size: Size::ZERO, max_repr_align: None, unadjusted_abi_align: element.align.abi, - randomization_seed: 0, + randomization_seed: Hash64::ZERO, } } TyKind::Str => Layout { @@ -336,11 +336,12 @@ pub fn layout_of_ty_query( fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 }, backend_repr: BackendRepr::Memory { sized: false }, largest_niche: None, + uninhabited: false, align: dl.i8_align, size: Size::ZERO, max_repr_align: None, unadjusted_abi_align: dl.i8_align.abi, - randomization_seed: 0, + randomization_seed: 
Hash64::ZERO, }, // Potentially-wide pointers. TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 3c18ea92816..daddcf0b242 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -12,6 +12,9 @@ extern crate ra_ap_rustc_index as rustc_index; #[cfg(feature = "in-rust-tree")] extern crate rustc_abi; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_hashes; + #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_abi as rustc_abi; @@ -21,6 +24,9 @@ extern crate rustc_pattern_analysis; #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis; +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_hashes as rustc_hashes; + mod builder; mod chalk_db; mod chalk_ext; @@ -100,7 +106,9 @@ pub use mapping::{ }; pub use method_resolution::check_orphan_rules; pub use traits::TraitEnvironment; -pub use utils::{all_super_traits, direct_super_traits, is_fn_unsafe_to_call}; +pub use utils::{ + all_super_traits, direct_super_traits, is_fn_unsafe_to_call, TargetFeatures, Unsafety, +}; pub use variance::Variance; pub use chalk_ir::{ @@ -1047,3 +1055,20 @@ pub fn known_const_to_ast( } Some(make::expr_const_value(konst.display(db, edition).to_string().as_str())) } + +#[derive(Debug, Copy, Clone)] +pub(crate) enum DeclOrigin { + LetExpr, + /// from `let x = ..` + LocalDecl { + has_else: bool, + }, +} + +/// Provides context for checking patterns in declarations. More specifically this +/// allows us to infer array types if the pattern is irrefutable and allows us to infer +/// the size of the array. See issue rust-lang/rust#76342. +#[derive(Debug, Copy, Clone)] +pub(crate) struct DeclContext { + pub(crate) origin: DeclOrigin, +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 432b8f4d94e..af73b5ed9a7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -6,6 +6,7 @@ //! //! This usually involves resolving names, collecting generic arguments etc. 
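To make the new `DeclOrigin` variants concrete, here is a rough, invented mapping from surface syntax to each origin:

```rust
fn origins(opt: Option<[u8; 2]>) {
    // `LocalDecl { has_else: false }`: a plain, irrefutable `let`, where an
    // unconstrained slice pattern may be completed to an array type.
    let [a, b] = [1u8, 2u8];

    // `LocalDecl { has_else: true }`: `let ... else` is refutable, so the
    // array completion does not apply.
    let Some([c, d]) = opt else { return };

    // `LetExpr`: a `let` used as a condition.
    if let Some([e, f]) = opt {
        let _ = (a, b, c, d, e, f);
    }
}
```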
pub(crate) mod diagnostics; +pub(crate) mod path; use std::{ cell::OnceCell, @@ -23,7 +24,6 @@ use chalk_ir::{ use either::Either; use hir_def::{ - body::HygieneId, builtin_type::BuiltinType, data::{adt::StructKind, TraitFlags}, expander::Expander, @@ -33,22 +33,20 @@ use hir_def::{ }, lang_item::LangItem, nameres::MacroSubNs, - path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments}, - resolver::{HasResolver, LifetimeNs, ResolveValueResult, Resolver, TypeNs, ValueNs}, + path::{GenericArg, ModPath, Path, PathKind}, + resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, type_ref::{ ConstRef, LifetimeRef, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId, TypesMap, TypesSourceMap, }, AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, - FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, - LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, - TypeOwnerId, UnionId, VariantId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, LocalFieldId, + Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashSet; use rustc_pattern_analysis::Captures; -use smallvec::SmallVec; use stdx::{impl_from, never}; use syntax::ast; use triomphe::{Arc, ThinArc}; @@ -62,18 +60,19 @@ use crate::{ db::HirDatabase, error_lifetime, generics::{generics, trait_self_param_idx, Generics}, - lower::diagnostics::*, + lower::{ + diagnostics::*, + path::{PathDiagnosticCallback, PathLoweringContext}, + }, make_binders, mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk}, - static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, - utils::{ - all_super_trait_refs, associated_type_by_name_including_super_traits, InTypeConstIdMetadata, - }, - AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, - FnAbi, FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, - LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, ProjectionTy, - QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, - TraitRefExt, Ty, TyBuilder, TyKind, WhereClause, + static_lifetime, to_chalk_trait_id, to_placeholder_idx, + utils::{all_super_trait_refs, InTypeConstIdMetadata}, + AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, FnAbi, + FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, + LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, QuantifiedWhereClause, + QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, + TyKind, WhereClause, }; #[derive(Debug, Default)] @@ -106,6 +105,8 @@ impl ImplTraitLoweringState { } } +pub(crate) struct PathDiagnosticCallbackData(TypeRefId); + #[derive(Debug)] pub struct TyLoweringContext<'a> { pub db: &'a dyn HirDatabase, @@ -527,9 +528,8 @@ impl<'a> TyLoweringContext<'a> { if path.segments().len() > 1 { return None; } - let resolution = match self - .resolve_path_in_type_ns(path, &mut Self::on_path_diagnostic_callback(type_ref_id)) - { + let mut ctx = self.at_path(PathId::from_type_ref_unchecked(type_ref_id)); + let resolution = match ctx.resolve_path_in_type_ns() { Some((it, None)) => it, _ => return None, }; @@ -539,409 
+539,36 @@ impl<'a> TyLoweringContext<'a> { } } - pub(crate) fn lower_ty_relative_path( - &mut self, - ty: Ty, - // We need the original resolution to lower `Self::AssocTy` correctly - res: Option<TypeNs>, - remaining_segments: PathSegments<'_>, - ) -> (Ty, Option<TypeNs>) { - match remaining_segments.len() { - 0 => (ty, res), - 1 => { - // resolve unselected assoc types - let segment = remaining_segments.first().unwrap(); - (self.select_associated_type(res, segment), None) - } - _ => { - // FIXME report error (ambiguous associated type) - (TyKind::Error.intern(Interner), None) - } - } - } - - pub(crate) fn lower_partly_resolved_path( - &mut self, - resolution: TypeNs, - resolved_segment: PathSegment<'_>, - remaining_segments: PathSegments<'_>, - _resolved_segment_idx: u32, - infer_args: bool, - _on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic), - ) -> (Ty, Option<TypeNs>) { - let ty = match resolution { - TypeNs::TraitId(trait_) => { - let ty = match remaining_segments.len() { - 1 => { - let trait_ref = self.lower_trait_ref_from_resolved_path( - trait_, - resolved_segment, - TyKind::Error.intern(Interner), - ); - let segment = remaining_segments.first().unwrap(); - let found = self - .db - .trait_data(trait_ref.hir_trait_id()) - .associated_type_by_name(segment.name); - - match found { - Some(associated_ty) => { - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`trait_ref.substitution`). - let substitution = self.substs_from_path_segment( - segment, - Some(associated_ty.into()), - false, - None, - ); - let len_self = - generics(self.db.upcast(), associated_ty.into()).len_self(); - let substitution = Substitution::from_iter( - Interner, - substitution - .iter(Interner) - .take(len_self) - .chain(trait_ref.substitution.iter(Interner)), - ); - TyKind::Alias(AliasTy::Projection(ProjectionTy { - associated_ty_id: to_assoc_type_id(associated_ty), - substitution, - })) - .intern(Interner) - } - None => { - // FIXME: report error (associated type not found) - TyKind::Error.intern(Interner) - } - } - } - 0 => { - // Trait object type without dyn; this should be handled in upstream. See - // `lower_path()`. - stdx::never!("unexpected fully resolved trait path"); - TyKind::Error.intern(Interner) - } - _ => { - // FIXME report error (ambiguous associated type) - TyKind::Error.intern(Interner) - } - }; - return (ty, None); - } - TypeNs::TraitAliasId(_) => { - // FIXME(trait_alias): Implement trait alias. - return (TyKind::Error.intern(Interner), None); - } - TypeNs::GenericParam(param_id) => match self.type_param_mode { - ParamLoweringMode::Placeholder => { - TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into())) - } - ParamLoweringMode::Variable => { - let idx = match self - .generics() - .expect("generics in scope") - .type_or_const_param_idx(param_id.into()) - { - None => { - never!("no matching generics"); - return (TyKind::Error.intern(Interner), None); - } - Some(idx) => idx, - }; - - TyKind::BoundVar(BoundVar::new(self.in_binders, idx)) - } - } - .intern(Interner), - TypeNs::SelfType(impl_id) => { - let generics = self.generics().expect("impl should have generic param scope"); - - match self.type_param_mode { - ParamLoweringMode::Placeholder => { - // `def` can be either impl itself or item within, and we need impl itself - // now. 
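For reference, the `TypeNs::SelfType` arm removed here (path lowering now goes through `PathLoweringContext`) handles `Self` written in type position inside an impl; a minimal invented example:

```rust
struct Counter {
    value: u32,
}

impl Counter {
    fn new(value: u32) -> Self {
        Self { value }
    }

    // Each `Self` in these signatures is lowered through the self-type arm:
    // inside the impl it stands for `Counter`, substituted with the impl's
    // own parameters (none in this example).
    fn boxed(self) -> Box<Self> {
        Box::new(self)
    }
}
```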
- let generics = generics.parent_or_self(); - let subst = generics.placeholder_subst(self.db); - self.db.impl_self_ty(impl_id).substitute(Interner, &subst) - } - ParamLoweringMode::Variable => { - let starting_from = match generics.def() { - GenericDefId::ImplId(_) => 0, - // `def` is an item within impl. We need to substitute `BoundVar`s but - // remember that they are for parent (i.e. impl) generic params so they - // come after our own params. - _ => generics.len_self(), - }; - TyBuilder::impl_self_ty(self.db, impl_id) - .fill_with_bound_vars(self.in_binders, starting_from) - .build() - } - } - } - TypeNs::AdtSelfType(adt) => { - let generics = generics(self.db.upcast(), adt.into()); - let substs = match self.type_param_mode { - ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db), - ParamLoweringMode::Variable => { - generics.bound_vars_subst(self.db, self.in_binders) - } - }; - self.db.ty(adt.into()).substitute(Interner, &substs) - } - - TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args), - TypeNs::BuiltinType(it) => { - self.lower_path_inner(resolved_segment, it.into(), infer_args) - } - TypeNs::TypeAliasId(it) => { - self.lower_path_inner(resolved_segment, it.into(), infer_args) - } - // FIXME: report error - TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None), - }; - self.lower_ty_relative_path(ty, Some(resolution), remaining_segments) - } - - fn handle_type_ns_resolution( - &mut self, - resolution: &TypeNs, - resolved_segment: PathSegment<'_>, - resolved_segment_idx: usize, - on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic), - ) { - let mut prohibit_generics_on_resolved = |reason| { - if resolved_segment.args_and_bindings.is_some() { - on_diagnostic( - self, - PathLoweringDiagnostic::GenericArgsProhibited { - segment: resolved_segment_idx as u32, - reason, - }, - ); - } - }; - - match resolution { - TypeNs::SelfType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) - } - TypeNs::GenericParam(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam) - } - TypeNs::AdtSelfType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) - } - TypeNs::BuiltinType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy) - } - TypeNs::AdtId(_) - | TypeNs::EnumVariantId(_) - | TypeNs::TypeAliasId(_) - | TypeNs::TraitId(_) - | TypeNs::TraitAliasId(_) => {} - } - } - - pub(crate) fn resolve_path_in_type_ns_fully( - &mut self, - path: &Path, - on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic), - ) -> Option<TypeNs> { - let (res, unresolved) = self.resolve_path_in_type_ns(path, on_diagnostic)?; - if unresolved.is_some() { - return None; - } - Some(res) - } - - pub(crate) fn resolve_path_in_type_ns( - &mut self, - path: &Path, - on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic), - ) -> Option<(TypeNs, Option<usize>)> { - let (resolution, remaining_index, _, prefix_info) = - self.resolver.resolve_path_in_type_ns_with_prefix_info(self.db.upcast(), path)?; - let segments = path.segments(); - - match path { - // `segments.is_empty()` can occur with `self`. - Path::Normal(..) 
if !segments.is_empty() => (), - _ => return Some((resolution, remaining_index)), - }; - - let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index { - None if prefix_info.enum_variant => { - (segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2)) - } - None => (segments.strip_last(), segments.len() - 1, None), - Some(i) => (segments.take(i - 1), i - 1, None), - }; - - for (i, mod_segment) in module_segments.iter().enumerate() { - if mod_segment.args_and_bindings.is_some() { - on_diagnostic( - self, - PathLoweringDiagnostic::GenericArgsProhibited { - segment: i as u32, - reason: GenericArgsProhibitedReason::Module, - }, - ); - } - } - - if let Some(enum_segment) = enum_segment { - if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - on_diagnostic( - self, - PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }, - ); - } - } - - self.handle_type_ns_resolution( - &resolution, - segments.get(resolved_segment_idx).expect("should have resolved segment"), - resolved_segment_idx, - on_diagnostic, - ); - - Some((resolution, remaining_index)) - } - - pub(crate) fn resolve_path_in_value_ns( - &mut self, - path: &Path, - hygiene_id: HygieneId, - on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic), - ) -> Option<ResolveValueResult> { - let (res, prefix_info) = self.resolver.resolve_path_in_value_ns_with_prefix_info( - self.db.upcast(), - path, - hygiene_id, - )?; - - let segments = path.segments(); - match path { - // `segments.is_empty()` can occur with `self`. - Path::Normal(..) if !segments.is_empty() => (), - _ => return Some(res), - }; - - let (mod_segments, enum_segment) = match res { - ResolveValueResult::Partial(_, unresolved_segment, _) => { - (segments.take(unresolved_segment - 1), None) - } - ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _) - if prefix_info.enum_variant => - { - (segments.strip_last_two(), segments.len().checked_sub(2)) - } - ResolveValueResult::ValueNs(..) 
=> (segments.strip_last(), None), - }; - for (i, mod_segment) in mod_segments.iter().enumerate() { - if mod_segment.args_and_bindings.is_some() { - on_diagnostic( - self, - PathLoweringDiagnostic::GenericArgsProhibited { - segment: i as u32, - reason: GenericArgsProhibitedReason::Module, - }, - ); - } - } - - if let Some(enum_segment) = enum_segment { - if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - on_diagnostic( - self, - PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }, - ); - } + #[inline] + fn on_path_diagnostic_callback(type_ref: TypeRefId) -> PathDiagnosticCallback<'static> { + PathDiagnosticCallback { + data: Either::Left(PathDiagnosticCallbackData(type_ref)), + callback: |data, this, diag| { + let type_ref = data.as_ref().left().unwrap().0; + this.push_diagnostic(type_ref, TyLoweringDiagnosticKind::PathDiagnostic(diag)) + }, } - - match &res { - ResolveValueResult::ValueNs(resolution, _) => { - let resolved_segment_idx = - segments.len().checked_sub(1).unwrap_or_else(|| panic!("{path:?}")); - let resolved_segment = segments.last().unwrap(); - - let mut prohibit_generics_on_resolved = |reason| { - if resolved_segment.args_and_bindings.is_some() { - on_diagnostic( - self, - PathLoweringDiagnostic::GenericArgsProhibited { - segment: resolved_segment_idx as u32, - reason, - }, - ); - } - }; - - match resolution { - ValueNs::ImplSelf(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) - } - // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not - // E0109 (generic arguments provided for a type that doesn't accept them) for - // consts and statics, presumably as a defense against future in which consts - // and statics can be generic, or just because it was easier for rustc implementors. - // That means we'll show the wrong error code. 
Because of us it's easier to do it - // this way :) - ValueNs::GenericParam(_) | ValueNs::ConstId(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const) - } - ValueNs::StaticId(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static) - } - ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {} - ValueNs::LocalBinding(_) => {} - } - } - ResolveValueResult::Partial(resolution, unresolved_idx, _) => { - let resolved_segment_idx = unresolved_idx - 1; - let resolved_segment = segments.get(resolved_segment_idx).unwrap(); - self.handle_type_ns_resolution( - resolution, - resolved_segment, - resolved_segment_idx, - on_diagnostic, - ); - } - }; - Some(res) } - fn on_path_diagnostic_callback( - type_ref: TypeRefId, - ) -> impl FnMut(&mut Self, PathLoweringDiagnostic) { - move |this, diag| { - this.push_diagnostic(type_ref, TyLoweringDiagnosticKind::PathDiagnostic(diag)) - } + #[inline] + fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'a> { + PathLoweringContext::new( + self, + Self::on_path_diagnostic_callback(path_id.type_ref()), + &self.types_map[path_id], + ) } pub(crate) fn lower_path(&mut self, path: &Path, path_id: PathId) -> (Ty, Option<TypeNs>) { // Resolve the path (in type namespace) if let Some(type_ref) = path.type_anchor() { let (ty, res) = self.lower_ty_ext(type_ref); - return self.lower_ty_relative_path(ty, res, path.segments()); + let mut ctx = self.at_path(path_id); + return ctx.lower_ty_relative_path(ty, res); } - let (resolution, remaining_index) = match self.resolve_path_in_type_ns( - path, - &mut Self::on_path_diagnostic_callback(path_id.type_ref()), - ) { + let mut ctx = self.at_path(path_id); + let (resolution, remaining_index) = match ctx.resolve_path_in_type_ns() { Some(it) => it, None => return (TyKind::Error.intern(Interner), None), }; @@ -953,354 +580,21 @@ impl<'a> TyLoweringContext<'a> { return (ty, None); } - let (resolved_segment_idx, resolved_segment, remaining_segments) = match remaining_index { - None => ( - path.segments().len() - 1, - path.segments().last().expect("resolved path has at least one element"), - PathSegments::EMPTY, - ), - Some(i) => (i - 1, path.segments().get(i - 1).unwrap(), path.segments().skip(i)), - }; - - self.lower_partly_resolved_path( - resolution, - resolved_segment, - remaining_segments, - resolved_segment_idx as u32, - false, - &mut Self::on_path_diagnostic_callback(path_id.type_ref()), - ) - } - - fn select_associated_type(&mut self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty { - let Some((generics, res)) = self.generics().zip(res) else { - return TyKind::Error.intern(Interner); - }; - let ty = named_associated_type_shorthand_candidates( - self.db, - generics.def(), - res, - Some(segment.name.clone()), - move |name, t, associated_ty| { - let generics = self.generics().unwrap(); - - if name != segment.name { - return None; - } - - let parent_subst = t.substitution.clone(); - let parent_subst = match self.type_param_mode { - ParamLoweringMode::Placeholder => { - // if we're lowering to placeholders, we have to put them in now. - let s = generics.placeholder_subst(self.db); - s.apply(parent_subst, Interner) - } - ParamLoweringMode::Variable => { - // We need to shift in the bound vars, since - // `named_associated_type_shorthand_candidates` does not do that. - parent_subst.shifted_in_from(Interner, self.in_binders) - } - }; - - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. 
It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`t.substitution`). - let substs = - self.substs_from_path_segment(segment, Some(associated_ty.into()), false, None); - - let len_self = - crate::generics::generics(self.db.upcast(), associated_ty.into()).len_self(); - - let substs = Substitution::from_iter( - Interner, - substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)), - ); - - Some( - TyKind::Alias(AliasTy::Projection(ProjectionTy { - associated_ty_id: to_assoc_type_id(associated_ty), - substitution: substs, - })) - .intern(Interner), - ) - }, - ); - - ty.unwrap_or_else(|| TyKind::Error.intern(Interner)) - } - - fn lower_path_inner( - &mut self, - segment: PathSegment<'_>, - typeable: TyDefId, - infer_args: bool, - ) -> Ty { - let generic_def = match typeable { - TyDefId::BuiltinType(_) => None, - TyDefId::AdtId(it) => Some(it.into()), - TyDefId::TypeAliasId(it) => Some(it.into()), - }; - let substs = self.substs_from_path_segment(segment, generic_def, infer_args, None); - self.db.ty(typeable).substitute(Interner, &substs) - } - - /// Collect generic arguments from a path into a `Substs`. See also - /// `create_substs_for_ast_path` and `def_to_ty` in rustc. - pub(super) fn substs_from_path( - &mut self, - path: &Path, - // Note that we don't call `db.value_type(resolved)` here, - // `ValueTyDefId` is just a convenient way to pass generics and - // special-case enum variants - resolved: ValueTyDefId, - infer_args: bool, - ) -> Substitution { - let last = path.segments().last(); - let (segment, generic_def) = match resolved { - ValueTyDefId::FunctionId(it) => (last, Some(it.into())), - ValueTyDefId::StructId(it) => (last, Some(it.into())), - ValueTyDefId::UnionId(it) => (last, Some(it.into())), - ValueTyDefId::ConstId(it) => (last, Some(it.into())), - ValueTyDefId::StaticId(_) => (last, None), - ValueTyDefId::EnumVariantId(var) => { - // the generic args for an enum variant may be either specified - // on the segment referring to the enum, or on the segment - // referring to the variant. So `Option::<T>::None` and - // `Option::None::<T>` are both allowed (though the former is - // preferred). See also `def_ids_for_path_segments` in rustc. 
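A minimal standalone sketch of the two accepted spellings described in the comment above (the `demo` function is a hypothetical illustration, not code from rust-analyzer):

fn demo() {
    // Generic args on the segment naming the enum...
    let a: Option<u8> = Option::<u8>::None;
    // ...or on the segment naming the variant; both resolve to the same type,
    // though the first form is the preferred one.
    let b = Option::None::<u8>;
    assert_eq!(a, b);
}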
- let len = path.segments().len(); - let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx)); - let segment = match penultimate { - Some(segment) if segment.args_and_bindings.is_some() => Some(segment), - _ => last, - }; - (segment, Some(var.lookup(self.db.upcast()).parent.into())) - } - }; - if let Some(segment) = segment { - self.substs_from_path_segment(segment, generic_def, infer_args, None) - } else if let Some(generic_def) = generic_def { - // lang item - self.substs_from_args_and_bindings(None, Some(generic_def), infer_args, None) - } else { - Substitution::empty(Interner) - } - } - - pub(super) fn substs_from_path_segment( - &mut self, - segment: PathSegment<'_>, - def: Option<GenericDefId>, - infer_args: bool, - explicit_self_ty: Option<Ty>, - ) -> Substitution { - self.substs_from_args_and_bindings( - segment.args_and_bindings, - def, - infer_args, - explicit_self_ty, - ) - } - - fn substs_from_args_and_bindings( - &mut self, - args_and_bindings: Option<&GenericArgs>, - def: Option<GenericDefId>, - infer_args: bool, - explicit_self_ty: Option<Ty>, - ) -> Substitution { - let Some(def) = def else { return Substitution::empty(Interner) }; - - // Order is - // - Optional Self parameter - // - Lifetime parameters - // - Type or Const parameters - // - Parent parameters - let def_generics = generics(self.db.upcast(), def); - let ( - parent_params, - self_param, - type_params, - const_params, - impl_trait_params, - lifetime_params, - ) = def_generics.provenance_split(); - let item_len = - self_param as usize + type_params + const_params + impl_trait_params + lifetime_params; - let total_len = parent_params + item_len; - - let mut substs = Vec::new(); - - // we need to iterate the lifetime and type/const params separately as our order of them - // differs from the supplied syntax - - let ty_error = || TyKind::Error.intern(Interner).cast(Interner); - let mut def_toc_iter = def_generics.iter_self_type_or_consts_id(); - let fill_self_param = || { - if self_param { - let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error); - - if let Some(id) = def_toc_iter.next() { - assert!(matches!(id, GenericParamId::TypeParamId(_))); - substs.push(self_ty); - } - } - }; - let mut had_explicit_args = false; - - if let Some(&GenericArgs { ref args, has_self_type, .. 
}) = args_and_bindings { - // Fill in the self param first - if has_self_type && self_param { - had_explicit_args = true; - if let Some(id) = def_toc_iter.next() { - assert!(matches!(id, GenericParamId::TypeParamId(_))); - had_explicit_args = true; - if let GenericArg::Type(ty) = &args[0] { - substs.push(self.lower_ty(*ty).cast(Interner)); - } - } - } else { - fill_self_param() - }; - - // Then fill in the supplied lifetime args, or error lifetimes if there are too few - // (default lifetimes aren't a thing) - for arg in args - .iter() - .filter_map(|arg| match arg { - GenericArg::Lifetime(arg) => Some(self.lower_lifetime(arg)), - _ => None, - }) - .chain(iter::repeat(error_lifetime())) - .take(lifetime_params) - { - substs.push(arg.cast(Interner)); - } - - let skip = if has_self_type { 1 } else { 0 }; - // Fill in supplied type and const args - // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that - for (arg, id) in args - .iter() - .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) - .skip(skip) - .take(type_params + const_params) - .zip(def_toc_iter) - { - had_explicit_args = true; - let arg = generic_arg_to_chalk( - self.db, - id, - arg, - self, - self.types_map, - |this, type_ref| this.lower_ty(type_ref), - |this, const_ref, ty| this.lower_const(const_ref, ty), - |this, lifetime_ref| this.lower_lifetime(lifetime_ref), - ); - substs.push(arg); - } - } else { - fill_self_param(); - } - - let param_to_err = |id| match id { - GenericParamId::ConstParamId(x) => unknown_const_as_generic(self.db.const_param_ty(x)), - GenericParamId::TypeParamId(_) => ty_error(), - GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), - }; - // handle defaults. In expression or pattern path segments without - // explicitly specified type arguments, missing type arguments are inferred - // (i.e. defaults aren't used). - // Generic parameters for associated types are not supposed to have defaults, so we just - // ignore them. 
- let is_assoc_ty = || match def { - GenericDefId::TypeAliasId(id) => { - matches!(id.lookup(self.db.upcast()).container, ItemContainerId::TraitId(_)) - } - _ => false, - }; - let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty(); - if fill_defaults { - let defaults = &*self.db.generic_defaults(def); - let (item, _parent) = defaults.split_at(item_len); - let parent_from = item_len - substs.len(); - - let mut rem = - def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>(); - // Fill in defaults for type/const params - for (idx, default_ty) in item[substs.len()..].iter().enumerate() { - // each default can depend on the previous parameters - let substs_so_far = Substitution::from_iter( - Interner, - substs.iter().cloned().chain(rem[idx..].iter().cloned()), - ); - substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); - } - // Fill in remaining parent params - substs.extend(rem.drain(parent_from..)); - } else { - // Fill in remaining def params and parent params - substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err)); - } - - assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len()); - Substitution::from_iter(Interner, substs) - } - - pub(crate) fn lower_trait_ref_from_resolved_path( - &mut self, - resolved: TraitId, - segment: PathSegment<'_>, - explicit_self_ty: Ty, - ) -> TraitRef { - let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty); - TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs } - } - - fn prohibit_generics( - &mut self, - path_id: PathId, - idx: u32, - segments: PathSegments<'_>, - reason: GenericArgsProhibitedReason, - ) { - segments.iter().zip(idx..).for_each(|(segment, idx)| { - if segment.args_and_bindings.is_some() { - self.push_diagnostic( - path_id.type_ref(), - TyLoweringDiagnosticKind::PathDiagnostic( - PathLoweringDiagnostic::GenericArgsProhibited { segment: idx, reason }, - ), - ); - } - }); + ctx.lower_partly_resolved_path(resolution, false) } fn lower_trait_ref_from_path( &mut self, path_id: PathId, explicit_self_ty: Ty, - ) -> Option<TraitRef> { - let path = &self.types_map[path_id]; - let resolved = match self.resolve_path_in_type_ns_fully( - path, - &mut Self::on_path_diagnostic_callback(path_id.type_ref()), - )? { + ) -> Option<(TraitRef, PathLoweringContext<'_, 'a>)> { + let mut ctx = self.at_path(path_id); + let resolved = match ctx.resolve_path_in_type_ns_fully()? { // FIXME(trait_alias): We need to handle trait alias here. TypeNs::TraitId(tr) => tr, _ => return None, }; - // Do this after we verify it's indeed a trait to not confuse the user if they're not modules. 
- self.prohibit_generics( - path_id, - 0, - path.segments().strip_last(), - GenericArgsProhibitedReason::Module, - ); - let segment = path.segments().last().expect("path should have at least one segment"); - Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty)) + Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty), ctx)) } fn lower_trait_ref( @@ -1308,16 +602,7 @@ impl<'a> TyLoweringContext<'a> { trait_ref: &HirTraitRef, explicit_self_ty: Ty, ) -> Option<TraitRef> { - self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty) - } - - fn trait_ref_substs_from_path( - &mut self, - segment: PathSegment<'_>, - resolved: TraitId, - explicit_self_ty: Ty, - ) -> Substitution { - self.substs_from_path_segment(segment, Some(resolved.into()), false, Some(explicit_self_ty)) + self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0) } pub(crate) fn lower_where_predicate<'b>( @@ -1365,11 +650,18 @@ impl<'a> TyLoweringContext<'a> { self_ty: Ty, ignore_bindings: bool, ) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'a> { - let mut trait_ref = None; - let clause = match bound { - &TypeBound::Path(path, TraitBoundModifier::None) => { - trait_ref = self.lower_trait_ref_from_path(path, self_ty); - trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders) + let mut assoc_bounds = None; + let mut clause = None; + match bound { + &TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => { + // FIXME Don't silently drop the hrtb lifetimes here + if let Some((trait_ref, ctx)) = self.lower_trait_ref_from_path(path, self_ty) { + if !ignore_bindings { + assoc_bounds = + ctx.assoc_type_bindings_from_type_bound(bound, trait_ref.clone()); + } + clause = Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref))); + } } &TypeBound::Path(path, TraitBoundModifier::Maybe) => { let sized_trait = self @@ -1381,170 +673,21 @@ impl<'a> TyLoweringContext<'a> { // If we got another trait here ignore the bound completely. 
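A short sketch of the only maybe-bound this lowering honours; `takes_unsized` and `demo` are hypothetical names used purely for illustration:

// `?Sized` relaxes the implicit `Sized` bound; a `?` modifier on any other
// trait has no meaning, and the lowering above simply drops such bounds.
fn takes_unsized<T: ?Sized>(value: &T) -> &T {
    value
}

fn demo() {
    let s: &str = takes_unsized("str is a dynamically sized type");
    assert!(!s.is_empty());
}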
let trait_id = self .lower_trait_ref_from_path(path, self_ty.clone()) - .map(|trait_ref| trait_ref.hir_trait_id()); + .map(|(trait_ref, _)| trait_ref.hir_trait_id()); if trait_id == sized_trait { self.unsized_types.insert(self_ty); } - None - } - &TypeBound::ForLifetime(_, path) => { - // FIXME Don't silently drop the hrtb lifetimes here - trait_ref = self.lower_trait_ref_from_path(path, self_ty); - trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders) } TypeBound::Lifetime(l) => { let lifetime = self.lower_lifetime(l); - Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives { + clause = Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives { ty: self_ty, lifetime, - }))) + }))); } - TypeBound::Use(_) | TypeBound::Error => None, - }; - clause.into_iter().chain( - trait_ref - .filter(move |_| !ignore_bindings) - .map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)) - .into_iter() - .flatten(), - ) - } - - fn assoc_type_bindings_from_type_bound<'b>( - &'b mut self, - bound: &'b TypeBound, - trait_ref: TraitRef, - ) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'a> { - let last_segment = match bound { - &TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => { - self.types_map[path].segments().last() - } - TypeBound::Path(_, TraitBoundModifier::Maybe) - | TypeBound::Use(_) - | TypeBound::Error - | TypeBound::Lifetime(_) => None, - }; - last_segment - .into_iter() - .filter_map(|segment| segment.args_and_bindings) - .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) - .flat_map(move |binding| { - let found = associated_type_by_name_including_super_traits( - self.db, - trait_ref.clone(), - &binding.name, - ); - let (super_trait_ref, associated_ty) = match found { - None => return SmallVec::new(), - Some(t) => t, - }; - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`super_trait_ref.substitution`). - let substitution = self.substs_from_path_segment( - // FIXME: This is hack. We shouldn't really build `PathSegment` directly. 
- PathSegment { name: &binding.name, args_and_bindings: binding.args.as_ref() }, - Some(associated_ty.into()), - false, // this is not relevant - Some(super_trait_ref.self_type_parameter(Interner)), - ); - let self_params = generics(self.db.upcast(), associated_ty.into()).len_self(); - let substitution = Substitution::from_iter( - Interner, - substitution - .iter(Interner) - .take(self_params) - .chain(super_trait_ref.substitution.iter(Interner)), - ); - let projection_ty = ProjectionTy { - associated_ty_id: to_assoc_type_id(associated_ty), - substitution, - }; - let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity( - binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), - ); - if let Some(type_ref) = binding.type_ref { - match (&self.types_map[type_ref], self.impl_trait_mode.mode) { - (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (), - (_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => { - let ty = self.lower_ty(type_ref); - let alias_eq = - AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty }; - predicates - .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq))); - } - (_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => { - // Find the generic index for the target of our `bound` - let target_param_idx = self - .resolver - .where_predicates_in_scope() - .find_map(|(p, _)| match p { - WherePredicate::TypeBound { - target: WherePredicateTypeTarget::TypeOrConstParam(idx), - bound: b, - } if b == bound => Some(idx), - _ => None, - }); - let ty = if let Some(target_param_idx) = target_param_idx { - let mut counter = 0; - let generics = self.generics().expect("generics in scope"); - for (idx, data) in generics.iter_self_type_or_consts() { - // Count the number of `impl Trait` things that appear before - // the target of our `bound`. 
- // Our counter within `impl_trait_mode` should be that number - // to properly lower each types within `type_ref` - if data.type_param().is_some_and(|p| { - p.provenance == TypeParamProvenance::ArgumentImplTrait - }) { - counter += 1; - } - if idx == *target_param_idx { - break; - } - } - let mut ext = TyLoweringContext::new_maybe_unowned( - self.db, - self.resolver, - self.types_map, - self.types_source_map, - self.owner, - ) - .with_type_param_mode(self.type_param_mode); - match self.impl_trait_mode.mode { - ImplTraitLoweringMode::Param => { - ext.impl_trait_mode = - ImplTraitLoweringState::param(counter); - } - ImplTraitLoweringMode::Variable => { - ext.impl_trait_mode = - ImplTraitLoweringState::variable(counter); - } - _ => unreachable!(), - } - let ty = ext.lower_ty(type_ref); - self.diagnostics.extend(ext.diagnostics); - ty - } else { - self.lower_ty(type_ref) - }; - - let alias_eq = - AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty }; - predicates - .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq))); - } - } - } - for bound in binding.bounds.iter() { - predicates.extend(self.lower_type_bound( - bound, - TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner), - false, - )); - } - predicates - }) + TypeBound::Use(_) | TypeBound::Error => {} + } + clause.into_iter().chain(assoc_bounds.into_iter().flatten()) } fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty { @@ -2471,14 +1614,14 @@ pub enum ValueTyDefId { impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); impl ValueTyDefId { - pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> Option<GenericDefId> { + pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> GenericDefId { match self { - Self::FunctionId(id) => Some(id.into()), - Self::StructId(id) => Some(id.into()), - Self::UnionId(id) => Some(id.into()), - Self::EnumVariantId(var) => Some(var.lookup(db.upcast()).parent.into()), - Self::ConstId(id) => Some(id.into()), - Self::StaticId(_) => None, + Self::FunctionId(id) => id.into(), + Self::StructId(id) => id.into(), + Self::UnionId(id) => id.into(), + Self::EnumVariantId(var) => var.lookup(db.upcast()).parent.into(), + Self::ConstId(id) => id.into(), + Self::StaticId(id) => id.into(), } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs index 7fe196cdbb5..5c77bcd0736 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs @@ -26,11 +26,11 @@ pub enum GenericArgsProhibitedReason { Static, /// When there is a generic enum, within the expression `Enum::Variant`, /// either `Enum` or `Variant` are allowed to have generic arguments, but not both. - // FIXME: This is not used now but it should be. EnumVariant, } #[derive(Debug, PartialEq, Eq, Clone)] pub enum PathLoweringDiagnostic { GenericArgsProhibited { segment: u32, reason: GenericArgsProhibitedReason }, + ParenthesizedGenericArgsWithoutFnTrait { segment: u32 }, } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs new file mode 100644 index 00000000000..22c5bb9923f --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -0,0 +1,911 @@ +//! A wrapper around [`TyLoweringContext`] specifically for lowering paths. 
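The new module introduced below replaces closure callbacks with a plain fn pointer plus a data payload (see the comment on `PathDiagnosticCallback` further down). A self-contained sketch of that pattern, with made-up names (`DiagnosticSink`, `demo`) and a `Vec<String>` standing in for the real diagnostic sink, might look like:

struct DiagnosticSink<D> {
    data: D,
    callback: fn(&D, &mut Vec<String>, u32),
}

fn demo() {
    // A non-capturing closure coerces to a plain fn pointer, so there is no
    // Box allocation and no `&mut dyn FnMut` borrow to fight with.
    let sink: DiagnosticSink<String> = DiagnosticSink {
        data: "lowering `Foo::<T>::Bar`".to_owned(),
        callback: |data, out, segment| {
            out.push(format!("{data}: generic args prohibited on segment {segment}"))
        },
    };
    let mut diagnostics = Vec::new();
    (sink.callback)(&sink.data, &mut diagnostics, 1);
    assert_eq!(diagnostics.len(), 1);
}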
+ +use std::iter; + +use chalk_ir::{cast::Cast, fold::Shift, BoundVar}; +use either::Either; +use hir_def::{ + data::TraitFlags, + expr_store::HygieneId, + generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget}, + path::{GenericArg, GenericArgs, Path, PathSegment, PathSegments}, + resolver::{ResolveValueResult, TypeNs, ValueNs}, + type_ref::{TypeBound, TypeRef}, + GenericDefId, GenericParamId, ItemContainerId, Lookup, TraitId, +}; +use smallvec::SmallVec; +use stdx::never; + +use crate::{ + consteval::unknown_const_as_generic, + error_lifetime, + generics::generics, + lower::{ + generic_arg_to_chalk, named_associated_type_shorthand_candidates, ImplTraitLoweringState, + }, + to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, + utils::associated_type_by_name_including_super_traits, + AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, Interner, + ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, QuantifiedWhereClause, Substitution, + TraitRef, Ty, TyBuilder, TyDefId, TyKind, TyLoweringContext, ValueTyDefId, WhereClause, +}; + +type CallbackData<'a> = Either< + super::PathDiagnosticCallbackData, + crate::infer::diagnostics::PathDiagnosticCallbackData<'a>, +>; + +// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box<dyn FnMut()>` +// because of the allocation, so we create a lifetime-less callback, tailored for our needs. +pub(crate) struct PathDiagnosticCallback<'a> { + pub(crate) data: CallbackData<'a>, + pub(crate) callback: fn(&CallbackData<'_>, &mut TyLoweringContext<'_>, PathLoweringDiagnostic), +} + +pub(crate) struct PathLoweringContext<'a, 'b> { + ctx: &'a mut TyLoweringContext<'b>, + on_diagnostic: PathDiagnosticCallback<'a>, + path: &'a Path, + segments: PathSegments<'a>, + current_segment_idx: usize, + /// Contains the previous segment if `current_segment_idx == segments.len()` + current_or_prev_segment: PathSegment<'a>, +} + +impl<'a, 'b> PathLoweringContext<'a, 'b> { + #[inline] + pub(crate) fn new( + ctx: &'a mut TyLoweringContext<'b>, + on_diagnostic: PathDiagnosticCallback<'a>, + path: &'a Path, + ) -> Self { + let segments = path.segments(); + let first_segment = segments.first().unwrap_or(PathSegment::MISSING); + Self { + ctx, + on_diagnostic, + path, + segments, + current_segment_idx: 0, + current_or_prev_segment: first_segment, + } + } + + #[inline] + #[cold] + fn on_diagnostic(&mut self, diag: PathLoweringDiagnostic) { + (self.on_diagnostic.callback)(&self.on_diagnostic.data, self.ctx, diag); + } + + #[inline] + pub(crate) fn ty_ctx(&mut self) -> &mut TyLoweringContext<'b> { + self.ctx + } + + #[inline] + fn current_segment_u32(&self) -> u32 { + self.current_segment_idx as u32 + } + + #[inline] + fn skip_resolved_segment(&mut self) { + if !matches!(self.path, Path::LangItem(..)) { + // In lang items, the resolved "segment" is not one of the segments. Perhaps we should've put it + // point at -1, but I don't feel this is clearer. 
+ self.current_segment_idx += 1; + } + self.update_current_segment(); + } + + #[inline] + fn update_current_segment(&mut self) { + self.current_or_prev_segment = + self.segments.get(self.current_segment_idx).unwrap_or(self.current_or_prev_segment); + } + + #[inline] + pub(crate) fn ignore_last_segment(&mut self) { + self.segments = self.segments.strip_last(); + } + + #[inline] + pub(crate) fn set_current_segment(&mut self, segment: usize) { + self.current_segment_idx = segment; + self.current_or_prev_segment = self + .segments + .get(segment) + .expect("invalid segment passed to PathLoweringContext::set_current_segment()"); + } + + pub(crate) fn lower_ty_relative_path( + &mut self, + ty: Ty, + // We need the original resolution to lower `Self::AssocTy` correctly + res: Option<TypeNs>, + ) -> (Ty, Option<TypeNs>) { + match self.segments.len() - self.current_segment_idx { + 0 => (ty, res), + 1 => { + // resolve unselected assoc types + (self.select_associated_type(res), None) + } + _ => { + // FIXME report error (ambiguous associated type) + (TyKind::Error.intern(Interner), None) + } + } + } + + fn prohibit_parenthesized_generic_args(&mut self) -> bool { + if let Some(generic_args) = self.current_or_prev_segment.args_and_bindings { + if generic_args.desugared_from_fn { + let segment = self.current_segment_u32(); + self.on_diagnostic( + PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, + ); + return true; + } + } + false + } + + // When calling this, the current segment is the resolved segment (we don't advance it yet). + pub(crate) fn lower_partly_resolved_path( + &mut self, + resolution: TypeNs, + infer_args: bool, + ) -> (Ty, Option<TypeNs>) { + let remaining_segments = self.segments.skip(self.current_segment_idx + 1); + + let ty = match resolution { + TypeNs::TraitId(trait_) => { + let ty = match remaining_segments.len() { + 1 => { + let trait_ref = self.lower_trait_ref_from_resolved_path( + trait_, + TyKind::Error.intern(Interner), + ); + + self.skip_resolved_segment(); + let segment = self.current_or_prev_segment; + let found = + self.ctx.db.trait_data(trait_).associated_type_by_name(segment.name); + + match found { + Some(associated_ty) => { + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`trait_ref.substitution`). + let substitution = self.substs_from_path_segment( + associated_ty.into(), + false, + None, + ); + let len_self = + generics(self.ctx.db.upcast(), associated_ty.into()).len_self(); + let substitution = Substitution::from_iter( + Interner, + substitution + .iter(Interner) + .take(len_self) + .chain(trait_ref.substitution.iter(Interner)), + ); + TyKind::Alias(AliasTy::Projection(ProjectionTy { + associated_ty_id: to_assoc_type_id(associated_ty), + substitution, + })) + .intern(Interner) + } + None => { + // FIXME: report error (associated type not found) + TyKind::Error.intern(Interner) + } + } + } + 0 => { + // Trait object type without dyn; this should be handled in upstream. See + // `lower_path()`. + stdx::never!("unexpected fully resolved trait path"); + TyKind::Error.intern(Interner) + } + _ => { + // FIXME report error (ambiguous associated type) + TyKind::Error.intern(Interner) + } + }; + return (ty, None); + } + TypeNs::TraitAliasId(_) => { + // FIXME(trait_alias): Implement trait alias. 
+ return (TyKind::Error.intern(Interner), None); + } + TypeNs::GenericParam(param_id) => match self.ctx.type_param_mode { + ParamLoweringMode::Placeholder => { + TyKind::Placeholder(to_placeholder_idx(self.ctx.db, param_id.into())) + } + ParamLoweringMode::Variable => { + let idx = match self + .ctx + .generics() + .expect("generics in scope") + .type_or_const_param_idx(param_id.into()) + { + None => { + never!("no matching generics"); + return (TyKind::Error.intern(Interner), None); + } + Some(idx) => idx, + }; + + TyKind::BoundVar(BoundVar::new(self.ctx.in_binders, idx)) + } + } + .intern(Interner), + TypeNs::SelfType(impl_id) => { + let generics = self.ctx.generics().expect("impl should have generic param scope"); + + match self.ctx.type_param_mode { + ParamLoweringMode::Placeholder => { + // `def` can be either impl itself or item within, and we need impl itself + // now. + let generics = generics.parent_or_self(); + let subst = generics.placeholder_subst(self.ctx.db); + self.ctx.db.impl_self_ty(impl_id).substitute(Interner, &subst) + } + ParamLoweringMode::Variable => { + let starting_from = match generics.def() { + GenericDefId::ImplId(_) => 0, + // `def` is an item within impl. We need to substitute `BoundVar`s but + // remember that they are for parent (i.e. impl) generic params so they + // come after our own params. + _ => generics.len_self(), + }; + TyBuilder::impl_self_ty(self.ctx.db, impl_id) + .fill_with_bound_vars(self.ctx.in_binders, starting_from) + .build() + } + } + } + TypeNs::AdtSelfType(adt) => { + let generics = generics(self.ctx.db.upcast(), adt.into()); + let substs = match self.ctx.type_param_mode { + ParamLoweringMode::Placeholder => generics.placeholder_subst(self.ctx.db), + ParamLoweringMode::Variable => { + generics.bound_vars_subst(self.ctx.db, self.ctx.in_binders) + } + }; + self.ctx.db.ty(adt.into()).substitute(Interner, &substs) + } + + TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args), + TypeNs::BuiltinType(it) => self.lower_path_inner(it.into(), infer_args), + TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args), + // FIXME: report error + TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None), + }; + + self.skip_resolved_segment(); + self.lower_ty_relative_path(ty, Some(resolution)) + } + + fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) { + let mut prohibit_generics_on_resolved = |reason| { + if self.current_or_prev_segment.args_and_bindings.is_some() { + let segment = self.current_segment_u32(); + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment, + reason, + }); + } + }; + + match resolution { + TypeNs::SelfType(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + } + TypeNs::GenericParam(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam) + } + TypeNs::AdtSelfType(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + } + TypeNs::BuiltinType(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy) + } + TypeNs::AdtId(_) + | TypeNs::EnumVariantId(_) + | TypeNs::TypeAliasId(_) + | TypeNs::TraitId(_) + | TypeNs::TraitAliasId(_) => {} + } + } + + pub(crate) fn resolve_path_in_type_ns_fully(&mut self) -> Option<TypeNs> { + let (res, unresolved) = self.resolve_path_in_type_ns()?; + if unresolved.is_some() { + return None; + } + Some(res) + } + + pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option<usize>)> { + let (resolution, 
remaining_index, _, prefix_info) = self + .ctx + .resolver + .resolve_path_in_type_ns_with_prefix_info(self.ctx.db.upcast(), self.path)?; + + let segments = self.segments; + if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { + // `segments.is_empty()` can occur with `self`. + return Some((resolution, remaining_index)); + } + + let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index { + None if prefix_info.enum_variant => { + (segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2)) + } + None => (segments.strip_last(), segments.len() - 1, None), + Some(i) => (segments.take(i - 1), i - 1, None), + }; + + self.current_segment_idx = resolved_segment_idx; + self.current_or_prev_segment = + segments.get(resolved_segment_idx).expect("should have resolved segment"); + + if matches!(self.path, Path::BarePath(..)) { + // Bare paths cannot have generics, so skip them as an optimization. + return Some((resolution, remaining_index)); + } + + for (i, mod_segment) in module_segments.iter().enumerate() { + if mod_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: i as u32, + reason: GenericArgsProhibitedReason::Module, + }); + } + } + + if let Some(enum_segment) = enum_segment { + if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); + } + } + + self.handle_type_ns_resolution(&resolution); + + Some((resolution, remaining_index)) + } + + pub(crate) fn resolve_path_in_value_ns( + &mut self, + hygiene_id: HygieneId, + ) -> Option<ResolveValueResult> { + let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info( + self.ctx.db.upcast(), + self.path, + hygiene_id, + )?; + + let segments = self.segments; + if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { + // `segments.is_empty()` can occur with `self`. + return Some(res); + } + + let (mod_segments, enum_segment, resolved_segment_idx) = match res { + ResolveValueResult::Partial(_, unresolved_segment, _) => { + (segments.take(unresolved_segment - 1), None, unresolved_segment - 1) + } + ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _) + if prefix_info.enum_variant => + { + (segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1) + } + ResolveValueResult::ValueNs(..) 
=> (segments.strip_last(), None, segments.len() - 1), + }; + + self.current_segment_idx = resolved_segment_idx; + self.current_or_prev_segment = + segments.get(resolved_segment_idx).expect("should have resolved segment"); + + for (i, mod_segment) in mod_segments.iter().enumerate() { + if mod_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: i as u32, + reason: GenericArgsProhibitedReason::Module, + }); + } + } + + if let Some(enum_segment) = enum_segment { + if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); + } + } + + match &res { + ResolveValueResult::ValueNs(resolution, _) => { + let resolved_segment_idx = self.current_segment_u32(); + let resolved_segment = self.current_or_prev_segment; + + let mut prohibit_generics_on_resolved = |reason| { + if resolved_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: resolved_segment_idx, + reason, + }); + } + }; + + match resolution { + ValueNs::ImplSelf(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + } + // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not + // E0109 (generic arguments provided for a type that doesn't accept them) for + // consts and statics, presumably as a defense against future in which consts + // and statics can be generic, or just because it was easier for rustc implementors. + // That means we'll show the wrong error code. Because of us it's easier to do it + // this way :) + ValueNs::GenericParam(_) | ValueNs::ConstId(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const) + } + ValueNs::StaticId(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static) + } + ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {} + ValueNs::LocalBinding(_) => {} + } + } + ResolveValueResult::Partial(resolution, _, _) => { + self.handle_type_ns_resolution(resolution); + } + }; + Some(res) + } + + fn select_associated_type(&mut self, res: Option<TypeNs>) -> Ty { + let Some((generics, res)) = self.ctx.generics().zip(res) else { + return TyKind::Error.intern(Interner); + }; + let segment = self.current_or_prev_segment; + let ty = named_associated_type_shorthand_candidates( + self.ctx.db, + generics.def(), + res, + Some(segment.name.clone()), + move |name, t, associated_ty| { + let generics = self.ctx.generics().unwrap(); + + if name != segment.name { + return None; + } + + let parent_subst = t.substitution.clone(); + let parent_subst = match self.ctx.type_param_mode { + ParamLoweringMode::Placeholder => { + // if we're lowering to placeholders, we have to put them in now. + let s = generics.placeholder_subst(self.ctx.db); + s.apply(parent_subst, Interner) + } + ParamLoweringMode::Variable => { + // We need to shift in the bound vars, since + // `named_associated_type_shorthand_candidates` does not do that. + parent_subst.shifted_in_from(Interner, self.ctx.in_binders) + } + }; + + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. 
It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`t.substitution`). + let substs = self.substs_from_path_segment(associated_ty.into(), false, None); + + let len_self = + crate::generics::generics(self.ctx.db.upcast(), associated_ty.into()) + .len_self(); + + let substs = Substitution::from_iter( + Interner, + substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)), + ); + + Some( + TyKind::Alias(AliasTy::Projection(ProjectionTy { + associated_ty_id: to_assoc_type_id(associated_ty), + substitution: substs, + })) + .intern(Interner), + ) + }, + ); + + ty.unwrap_or_else(|| TyKind::Error.intern(Interner)) + } + + fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty { + let generic_def = match typeable { + TyDefId::BuiltinType(builtin) => return TyBuilder::builtin(builtin), + TyDefId::AdtId(it) => it.into(), + TyDefId::TypeAliasId(it) => it.into(), + }; + let substs = self.substs_from_path_segment(generic_def, infer_args, None); + self.ctx.db.ty(typeable).substitute(Interner, &substs) + } + + /// Collect generic arguments from a path into a `Substs`. See also + /// `create_substs_for_ast_path` and `def_to_ty` in rustc. + pub(crate) fn substs_from_path( + &mut self, + // Note that we don't call `db.value_type(resolved)` here, + // `ValueTyDefId` is just a convenient way to pass generics and + // special-case enum variants + resolved: ValueTyDefId, + infer_args: bool, + ) -> Substitution { + let prev_current_segment_idx = self.current_segment_idx; + let prev_current_segment = self.current_or_prev_segment; + + let generic_def = match resolved { + ValueTyDefId::FunctionId(it) => it.into(), + ValueTyDefId::StructId(it) => it.into(), + ValueTyDefId::UnionId(it) => it.into(), + ValueTyDefId::ConstId(it) => it.into(), + ValueTyDefId::StaticId(_) => return Substitution::empty(Interner), + ValueTyDefId::EnumVariantId(var) => { + // the generic args for an enum variant may be either specified + // on the segment referring to the enum, or on the segment + // referring to the variant. So `Option::<T>::None` and + // `Option::None::<T>` are both allowed (though the former is + // FIXME: This isn't strictly correct, enum variants may be used not through the enum + // (via `use Enum::Variant`). The resolver returns whether they were, but we don't have its result + // available here. The worst that can happen is that we will show some confusing diagnostics to the user, + // if generics exist on the module and they don't match with the variant. + // preferred). See also `def_ids_for_path_segments` in rustc. + // + // `wrapping_sub(1)` will return a number which `get` will return None for if current_segment_idx<2. + // This simplifies the code a bit. 
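A tiny standalone check of the `wrapping_sub(1)` index trick mentioned just above, using a plain slice in place of `PathSegments` (hypothetical `demo`, for illustration only):

fn demo() {
    let segments = ["Option", "None"];
    // When the current segment is the first one, wrapping_sub(1) yields
    // usize::MAX, so `get` simply returns None without a checked_sub branch.
    assert!(segments.get(0usize.wrapping_sub(1)).is_none());
}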
+ let penultimate_idx = self.current_segment_idx.wrapping_sub(1); + let penultimate = self.segments.get(penultimate_idx); + if let Some(penultimate) = penultimate { + if self.current_or_prev_segment.args_and_bindings.is_none() + && penultimate.args_and_bindings.is_some() + { + self.current_segment_idx = penultimate_idx; + self.current_or_prev_segment = penultimate; + } + } + var.lookup(self.ctx.db.upcast()).parent.into() + } + }; + let result = self.substs_from_path_segment(generic_def, infer_args, None); + self.current_segment_idx = prev_current_segment_idx; + self.current_or_prev_segment = prev_current_segment; + result + } + + pub(crate) fn substs_from_path_segment( + &mut self, + def: GenericDefId, + infer_args: bool, + explicit_self_ty: Option<Ty>, + ) -> Substitution { + let prohibit_parens = match def { + GenericDefId::TraitId(trait_) => { + let trait_data = self.ctx.db.trait_data(trait_); + !trait_data.flags.contains(TraitFlags::RUSTC_PAREN_SUGAR) + } + _ => true, + }; + if prohibit_parens && self.prohibit_parenthesized_generic_args() { + return TyBuilder::unknown_subst(self.ctx.db, def); + } + + self.substs_from_args_and_bindings( + self.current_or_prev_segment.args_and_bindings, + def, + infer_args, + explicit_self_ty, + ) + } + + pub(super) fn substs_from_args_and_bindings( + &mut self, + args_and_bindings: Option<&GenericArgs>, + def: GenericDefId, + infer_args: bool, + explicit_self_ty: Option<Ty>, + ) -> Substitution { + // Order is + // - Optional Self parameter + // - Lifetime parameters + // - Type or Const parameters + // - Parent parameters + let def_generics = generics(self.ctx.db.upcast(), def); + let ( + parent_params, + self_param, + type_params, + const_params, + impl_trait_params, + lifetime_params, + ) = def_generics.provenance_split(); + let item_len = + self_param as usize + type_params + const_params + impl_trait_params + lifetime_params; + let total_len = parent_params + item_len; + + let mut substs = Vec::new(); + + // we need to iterate the lifetime and type/const params separately as our order of them + // differs from the supplied syntax + + let ty_error = || TyKind::Error.intern(Interner).cast(Interner); + let mut def_toc_iter = def_generics.iter_self_type_or_consts_id(); + let fill_self_param = || { + if self_param { + let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error); + + if let Some(id) = def_toc_iter.next() { + assert!(matches!(id, GenericParamId::TypeParamId(_))); + substs.push(self_ty); + } + } + }; + let mut had_explicit_args = false; + + if let Some(&GenericArgs { ref args, has_self_type, .. 
}) = args_and_bindings { + // Fill in the self param first + if has_self_type && self_param { + had_explicit_args = true; + if let Some(id) = def_toc_iter.next() { + assert!(matches!(id, GenericParamId::TypeParamId(_))); + had_explicit_args = true; + if let GenericArg::Type(ty) = &args[0] { + substs.push(self.ctx.lower_ty(*ty).cast(Interner)); + } + } + } else { + fill_self_param() + }; + + // Then fill in the supplied lifetime args, or error lifetimes if there are too few + // (default lifetimes aren't a thing) + for arg in args + .iter() + .filter_map(|arg| match arg { + GenericArg::Lifetime(arg) => Some(self.ctx.lower_lifetime(arg)), + _ => None, + }) + .chain(iter::repeat(error_lifetime())) + .take(lifetime_params) + { + substs.push(arg.cast(Interner)); + } + + let skip = if has_self_type { 1 } else { 0 }; + // Fill in supplied type and const args + // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that + for (arg, id) in args + .iter() + .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) + .skip(skip) + .take(type_params + const_params) + .zip(def_toc_iter) + { + had_explicit_args = true; + let arg = generic_arg_to_chalk( + self.ctx.db, + id, + arg, + self.ctx, + self.ctx.types_map, + |ctx, type_ref| ctx.lower_ty(type_ref), + |ctx, const_ref, ty| ctx.lower_const(const_ref, ty), + |ctx, lifetime_ref| ctx.lower_lifetime(lifetime_ref), + ); + substs.push(arg); + } + } else { + fill_self_param(); + } + + let param_to_err = |id| match id { + GenericParamId::ConstParamId(x) => { + unknown_const_as_generic(self.ctx.db.const_param_ty(x)) + } + GenericParamId::TypeParamId(_) => ty_error(), + GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), + }; + // handle defaults. In expression or pattern path segments without + // explicitly specified type arguments, missing type arguments are inferred + // (i.e. defaults aren't used). + // Generic parameters for associated types are not supposed to have defaults, so we just + // ignore them. 
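A small illustration of the rule in the comment above, using a hypothetical `Wrapper` type: defaults fill omitted arguments, except in expression or pattern paths where the omitted arguments are inferred instead.

struct Wrapper<T = i32>(T);

fn demo() {
    // In type position the missing argument falls back to the default (`i32`)...
    let with_default: Wrapper = Wrapper(5);
    // ...but in an expression path the argument is inferred, not defaulted.
    let inferred = Wrapper("no default used here");
    let _ = (with_default.0 + 1, inferred.0.len());
}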
+ let is_assoc_ty = || match def { + GenericDefId::TypeAliasId(id) => { + matches!(id.lookup(self.ctx.db.upcast()).container, ItemContainerId::TraitId(_)) + } + _ => false, + }; + let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty(); + if fill_defaults { + let defaults = &*self.ctx.db.generic_defaults(def); + let (item, _parent) = defaults.split_at(item_len); + let parent_from = item_len - substs.len(); + + let mut rem = + def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>(); + // Fill in defaults for type/const params + for (idx, default_ty) in item[substs.len()..].iter().enumerate() { + // each default can depend on the previous parameters + let substs_so_far = Substitution::from_iter( + Interner, + substs.iter().cloned().chain(rem[idx..].iter().cloned()), + ); + substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); + } + // Fill in remaining parent params + substs.extend(rem.drain(parent_from..)); + } else { + // Fill in remaining def params and parent params + substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err)); + } + + assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len()); + Substitution::from_iter(Interner, substs) + } + + pub(crate) fn lower_trait_ref_from_resolved_path( + &mut self, + resolved: TraitId, + explicit_self_ty: Ty, + ) -> TraitRef { + let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty); + TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs } + } + + fn trait_ref_substs_from_path( + &mut self, + resolved: TraitId, + explicit_self_ty: Ty, + ) -> Substitution { + self.substs_from_path_segment(resolved.into(), false, Some(explicit_self_ty)) + } + + pub(super) fn assoc_type_bindings_from_type_bound<'c>( + mut self, + bound: &'c TypeBound, + trait_ref: TraitRef, + ) -> Option<impl Iterator<Item = QuantifiedWhereClause> + use<'a, 'b, 'c>> { + self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| { + args_and_bindings.bindings.iter().flat_map(move |binding| { + let found = associated_type_by_name_including_super_traits( + self.ctx.db, + trait_ref.clone(), + &binding.name, + ); + let (super_trait_ref, associated_ty) = match found { + None => return SmallVec::new(), + Some(t) => t, + }; + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`super_trait_ref.substitution`). 
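For context, the bindings iterated here come from bounds written like the hypothetical example below, where `Item = u32` becomes an equality constraint on the corresponding projection type:

// Each `Assoc = Ty` binding in a bound constrains the projection `<_ as Trait>::Assoc`.
fn first_square(values: impl Iterator<Item = u32>) -> Option<u32> {
    values.map(|v| v * v).next()
}

fn demo() {
    assert_eq!(first_square([3u32, 4].into_iter()), Some(9));
}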
+ let substitution = self.substs_from_args_and_bindings( + binding.args.as_ref(), + associated_ty.into(), + false, // this is not relevant + Some(super_trait_ref.self_type_parameter(Interner)), + ); + let self_params = generics(self.ctx.db.upcast(), associated_ty.into()).len_self(); + let substitution = Substitution::from_iter( + Interner, + substitution + .iter(Interner) + .take(self_params) + .chain(super_trait_ref.substitution.iter(Interner)), + ); + let projection_ty = ProjectionTy { + associated_ty_id: to_assoc_type_id(associated_ty), + substitution, + }; + let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity( + binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), + ); + if let Some(type_ref) = binding.type_ref { + match (&self.ctx.types_map[type_ref], self.ctx.impl_trait_mode.mode) { + (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (), + (_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => { + let ty = self.ctx.lower_ty(type_ref); + let alias_eq = + AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty }; + predicates + .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq))); + } + (_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => { + // Find the generic index for the target of our `bound` + let target_param_idx = + self.ctx.resolver.where_predicates_in_scope().find_map(|(p, _)| { + match p { + WherePredicate::TypeBound { + target: WherePredicateTypeTarget::TypeOrConstParam(idx), + bound: b, + } if b == bound => Some(idx), + _ => None, + } + }); + let ty = if let Some(target_param_idx) = target_param_idx { + let mut counter = 0; + let generics = self.ctx.generics().expect("generics in scope"); + for (idx, data) in generics.iter_self_type_or_consts() { + // Count the number of `impl Trait` things that appear before + // the target of our `bound`. + // Our counter within `impl_trait_mode` should be that number + // to properly lower each types within `type_ref` + if data.type_param().is_some_and(|p| { + p.provenance == TypeParamProvenance::ArgumentImplTrait + }) { + counter += 1; + } + if idx == *target_param_idx { + break; + } + } + let mut ext = TyLoweringContext::new_maybe_unowned( + self.ctx.db, + self.ctx.resolver, + self.ctx.types_map, + self.ctx.types_source_map, + self.ctx.owner, + ) + .with_type_param_mode(self.ctx.type_param_mode); + match self.ctx.impl_trait_mode.mode { + ImplTraitLoweringMode::Param => { + ext.impl_trait_mode = + ImplTraitLoweringState::param(counter); + } + ImplTraitLoweringMode::Variable => { + ext.impl_trait_mode = + ImplTraitLoweringState::variable(counter); + } + _ => unreachable!(), + } + let ty = ext.lower_ty(type_ref); + self.ctx.diagnostics.extend(ext.diagnostics); + ty + } else { + self.ctx.lower_ty(type_ref) + }; + + let alias_eq = + AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty }; + predicates + .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq))); + } + } + } + for bound in binding.bounds.iter() { + predicates.extend(self.ctx.lower_type_bound( + bound, + TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner), + false, + )); + } + predicates + }) + }) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index 1cea67ee964..db94351dcc9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -4,6 +4,7 @@ //! 
and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs. use std::ops::ControlFlow; +use arrayvec::ArrayVec; use base_db::CrateId; use chalk_ir::{cast::Cast, UniverseIndex, WithKind}; use hir_def::{ @@ -732,15 +733,27 @@ fn lookup_impl_assoc_item_for_trait_ref( let self_ty = trait_ref.self_type_parameter(Interner); let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?; let impls = db.trait_impls_in_deps(env.krate); - let self_impls = match self_ty.kind(Interner) { - TyKind::Adt(id, _) => { - id.0.module(db.upcast()).containing_block().and_then(|it| db.trait_impls_in_block(it)) + + let trait_module = hir_trait_id.module(db.upcast()); + let type_module = match self_ty_fp { + TyFingerprint::Adt(adt_id) => Some(adt_id.module(db.upcast())), + TyFingerprint::ForeignType(type_id) => { + Some(from_foreign_def_id(type_id).module(db.upcast())) } + TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db.upcast())), _ => None, }; + + let def_blocks: ArrayVec<_, 2> = + [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())] + .into_iter() + .flatten() + .filter_map(|block_id| db.trait_impls_in_block(block_id)) + .collect(); + let impls = impls .iter() - .chain(self_impls.as_ref()) + .chain(&def_blocks) .flat_map(|impls| impls.for_trait_and_self_ty(hir_trait_id, self_ty_fp)); let table = InferenceTable::new(db, env); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 59c583afb2a..41304bbd8a9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -10,13 +10,13 @@ use crate::{ lang_items::is_box, mapping::ToChalk, CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap, - Substitution, TraitEnvironment, Ty, TyKind, + Substitution, TraitEnvironment, Ty, TyExt, TyKind, }; use base_db::CrateId; use chalk_ir::Mutability; use either::Either; use hir_def::{ - body::Body, + expr_store::Body, hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId}, DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId, }; @@ -144,6 +144,13 @@ impl<V, T> ProjectionElem<V, T> { closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty, krate: CrateId, ) -> Ty { + // we only bail on mir building when there are type mismatches + // but error types may pop up resulting in us still attempting to build the mir + // so just propagate the error type + if base.is_unknown() { + return TyKind::Error.intern(Interner); + } + if matches!(base.kind(Interner), TyKind::Alias(_) | TyKind::AssociatedType(..)) { base = normalize( db, @@ -166,7 +173,7 @@ impl<V, T> ProjectionElem<V, T> { TyKind::Error.intern(Interner) } }, - ProjectionElem::Field(Either::Left(f)) => match &base.kind(Interner) { + ProjectionElem::Field(Either::Left(f)) => match base.kind(Interner) { TyKind::Adt(_, subst) => { db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index dcae6877ba8..6b20522cf34 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -6,9 +6,9 @@ use base_db::CrateId; use chalk_ir::{cast::Cast, Mutability}; use either::Either; use hir_def::{ - body::HygieneId, builtin_type::BuiltinType, data::adt::{StructFlags, VariantData}, + expr_store::HygieneId, lang_item::LangItem, layout::{TagEncoding, 
Variants}, resolver::{HasResolver, TypeNs, ValueNs}, @@ -1644,14 +1644,15 @@ impl Evaluator<'_> { Variants::Multiple { tag, tag_encoding, variants, .. } => { let size = tag.size(&*self.target_data_layout).bytes_usize(); let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field + let is_signed = tag.is_signed(); match tag_encoding { TagEncoding::Direct => { let tag = &bytes[offset..offset + size]; - Ok(i128::from_le_bytes(pad16(tag, false))) + Ok(i128::from_le_bytes(pad16(tag, is_signed))) } TagEncoding::Niche { untagged_variant, niche_start, .. } => { let tag = &bytes[offset..offset + size]; - let candidate_tag = i128::from_le_bytes(pad16(tag, false)) + let candidate_tag = i128::from_le_bytes(pad16(tag, is_signed)) .wrapping_sub(*niche_start as i128) as usize; let idx = variants @@ -2943,10 +2944,10 @@ pub fn render_const_using_debug_impl( // a3 = ::core::fmt::Arguments::new_v1(a1, a2) // FIXME: similarly, we should call the function here instead of working with memory directly. let a3 = evaluator.heap_allocate(evaluator.ptr_size() * 6, evaluator.ptr_size())?; - evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a1.to_bytes())?; + evaluator.write_memory(a3, &a1.to_bytes())?; + evaluator.write_memory(a3.offset(evaluator.ptr_size()), &[1])?; + evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a2.to_bytes())?; evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?; - evaluator.write_memory(a3.offset(4 * evaluator.ptr_size()), &a2.to_bytes())?; - evaluator.write_memory(a3.offset(5 * evaluator.ptr_size()), &[1])?; let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully( db.upcast(), &hir_def::path::Path::from_known_path_with_no_generic(path![std::fmt::format]), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index 0a78f4a5b24..38b189a517f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -1,11 +1,12 @@ //! Interpret intrinsics, lang items and `extern "C"` well-known functions whose implementation //! is not available. //! 
-use std::cmp; +use std::cmp::{self, Ordering}; use chalk_ir::TyKind; use hir_def::{ builtin_type::{BuiltinInt, BuiltinUint}, + lang_item::LangItemTarget, resolver::HasResolver, }; use hir_expand::name::Name; @@ -1317,6 +1318,82 @@ impl Evaluator<'_> { self.write_memory_using_ref(dst, size)?.fill(val); Ok(()) } + "ptr_metadata" => { + let [ptr] = args else { + return Err(MirEvalError::InternalError( + "ptr_metadata args are not provided".into(), + )); + }; + let arg = ptr.interval.get(self)?.to_owned(); + let metadata = &arg[self.ptr_size()..]; + destination.write_from_bytes(self, metadata)?; + Ok(()) + } + "three_way_compare" => { + let [lhs, rhs] = args else { + return Err(MirEvalError::InternalError( + "three_way_compare args are not provided".into(), + )); + }; + let Some(ty) = + generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) + else { + return Err(MirEvalError::InternalError( + "three_way_compare generic arg is not provided".into(), + )); + }; + let signed = match ty.as_builtin().unwrap() { + BuiltinType::Int(_) => true, + BuiltinType::Uint(_) => false, + _ => { + return Err(MirEvalError::InternalError( + "three_way_compare expects an integral type".into(), + )) + } + }; + let rhs = rhs.get(self)?; + let lhs = lhs.get(self)?; + let mut result = Ordering::Equal; + for (l, r) in lhs.iter().zip(rhs).rev() { + let it = l.cmp(r); + if it != Ordering::Equal { + result = it; + break; + } + } + if signed { + if let Some((&l, &r)) = lhs.iter().zip(rhs).next_back() { + if l != r { + result = (l as i8).cmp(&(r as i8)); + } + } + } + if let Some(LangItemTarget::EnumId(e)) = + self.db.lang_item(self.crate_id, LangItem::Ordering) + { + let ty = self.db.ty(e.into()); + let r = self + .compute_discriminant(ty.skip_binders().clone(), &[result as i8 as u8])?; + destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])?; + Ok(()) + } else { + Err(MirEvalError::InternalError("Ordering enum not found".into())) + } + } + "aggregate_raw_ptr" => { + let [data, meta] = args else { + return Err(MirEvalError::InternalError( + "aggregate_raw_ptr args are not provided".into(), + )); + }; + destination.write_from_interval(self, data.interval)?; + Interval { + addr: destination.addr.offset(data.interval.size), + size: destination.size - data.interval.size, + } + .write_from_interval(self, meta.interval)?; + Ok(()) + } _ if needs_override => not_supported!("intrinsic {name} is not implemented"), _ => return Ok(false), } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index f1e86daea23..9625ae5f88e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -912,3 +912,36 @@ fn main() { "", ); } + +#[test] +fn regression_19021() { + check_pass( + r#" +//- minicore: deref +use core::ops::Deref; + +#[lang = "owned_box"] +struct Box<T>(T); + +impl<T> Deref for Box<T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +struct Foo; + +fn main() { + let x = Box(Foo); + let y = &Foo; + + || match x { + ref x => x, + _ => y, + }; +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 1d1044df6e9..f88696692e6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -5,11 +5,11 @@ use std::{fmt::Write, iter, mem}; use 
base_db::ra_salsa::Cycle; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ - body::{Body, HygieneId}, data::adt::{StructKind, VariantData}, + expr_store::{Body, HygieneId}, hir::{ - ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, - LiteralOrConst, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, + ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, + Pat, PatId, RecordFieldPat, RecordLitField, }, lang_item::{LangItem, LangItemTarget}, path::Path, @@ -1358,20 +1358,10 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(()) } - fn lower_literal_or_const_to_operand( - &mut self, - ty: Ty, - loc: &LiteralOrConst, - ) -> Result<Operand> { - match loc { - LiteralOrConst::Literal(l) => self.lower_literal_to_operand(ty, l), - LiteralOrConst::Const(c) => { - let c = match &self.body.pats[*c] { - Pat::Path(p) => p, - _ => not_supported!( - "only `char` and numeric types are allowed in range patterns" - ), - }; + fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> { + match &self.body.exprs[*loc] { + Expr::Literal(l) => self.lower_literal_to_operand(ty, l), + Expr::Path(c) => { let edition = self.edition(); let unresolved_name = || MirLowerError::unresolved_path(self.db, c, edition, &self.body.types); @@ -1392,6 +1382,9 @@ impl<'ctx> MirLowerCtx<'ctx> { } } } + _ => { + not_supported!("only `char` and numeric types are allowed in range patterns"); + } } } @@ -2156,7 +2149,7 @@ pub fn lower_to_mir( // need to take this input explicitly. root_expr: ExprId, ) -> Result<MirBody> { - if infer.has_errors { + if infer.type_mismatches().next().is_some() { return Err(MirLowerError::HasErrors); } let mut ctx = MirLowerCtx::new(db, owner, body, infer); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 2ffea34c85a..289175feefb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,6 +1,6 @@ //! MIR lowering for patterns -use hir_def::{hir::LiteralOrConst, AssocItemId}; +use hir_def::{hir::ExprId, AssocItemId}; use crate::{ mir::{ @@ -207,7 +207,7 @@ impl MirLowerCtx<'_> { )? 
} Pat::Range { start, end } => { - let mut add_check = |l: &LiteralOrConst, binop| -> Result<()> { + let mut add_check = |l: &ExprId, binop| -> Result<()> { let lv = self.lower_literal_or_const_to_operand(self.infer[pattern].clone(), l)?; let else_target = *current_else.get_or_insert_with(|| self.new_basic_block()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index 06765a104cb..2a26101ac43 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -6,7 +6,7 @@ use std::{ }; use either::Either; -use hir_def::{body::Body, hir::BindingId}; +use hir_def::{expr_store::Body, hir::BindingId}; use hir_expand::{name::Name, Lookup}; use la_arena::ArenaMap; use span::Edition; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 00da9b25176..f5a4d4ff35c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -18,8 +18,8 @@ use std::sync::LazyLock; use base_db::SourceDatabaseFileInputExt as _; use expect_test::Expect; use hir_def::{ - body::{Body, BodySourceMap}, db::DefDatabase, + expr_store::{Body, BodySourceMap}, hir::{ExprId, Pat, PatId}, item_scope::ItemScope, nameres::DefMap, @@ -117,7 +117,7 @@ fn check_impl( expected.trim_start_matches("adjustments:").trim().to_owned(), ); } else { - panic!("unexpected annotation: {expected}"); + panic!("unexpected annotation: {expected} @ {range:?}"); } had_annotations = true; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs index 7992f1feeeb..ef94814d587 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs @@ -185,11 +185,10 @@ fn test() { let t = &mut 1; let x = match 1 { 1 => t as *mut i32, + //^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer) 2 => t as &i32, //^^^^^^^^^ expected *mut i32, got &'? i32 _ => t as *const i32, - // ^^^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer) - }; x; //^ type: *const i32 @@ -536,7 +535,7 @@ fn test() { #[test] fn coerce_unsize_generic() { - check( + check_no_mismatches( r#" //- minicore: coerce_unsized struct Foo<T> { t: T }; @@ -544,9 +543,7 @@ struct Bar<T>(Foo<T>); fn test() { let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] }; - //^^^^^^^^^^^^^^^^^^^^^ expected &'? Foo<[usize]>, got &'? Foo<[i32; 3]> let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); - //^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &'? Bar<[usize]>, got &'? 
Bar<[i32; 3]> } "#, ); @@ -958,3 +955,24 @@ fn f() { "#, ); } + +#[test] +fn coerce_nested_unsized_struct() { + check_types( + r#" +//- minicore: fn, coerce_unsized, dispatch_from_dyn, sized +use core::marker::Unsize; + +struct Foo<T: ?Sized>(T); + +fn need(_: &Foo<dyn Fn(i32) -> i32>) { +} + +fn test() { + let callback = |x| x; + //^ i32 + need(&Foo(callback)); +} +"#, + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs index def06f2d59d..855034117c0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs @@ -153,3 +153,53 @@ fn consume() -> Option<()> { "#, ); } + +#[test] +fn method_call_on_field() { + check( + r#" +struct S { + field: fn(f32) -> u32, + field2: u32 +} + +fn main() { + let s = S { field: |_| 0, field2: 0 }; + s.field(0); + // ^ expected f32, got i32 + // ^^^^^^^^^^ type: u32 + s.field2(0); + // ^ type: i32 + // ^^^^^^^^^^^ type: {unknown} + s.not_a_field(0); + // ^ type: i32 + // ^^^^^^^^^^^^^^^^ type: {unknown} +} +"#, + ); +} + +#[test] +fn method_call_on_assoc() { + check( + r#" +struct S; + +impl S { + fn not_a_method() -> f32 { 0.0 } + fn not_a_method2(this: Self, param: f32) -> Self { this } + fn not_a_method3(param: f32) -> Self { S } +} + +fn main() { + S.not_a_method(0); + // ^^^^^^^^^^^^^^^^^ type: f32 + S.not_a_method2(0); + // ^ expected f32, got i32 + // ^^^^^^^^^^^^^^^^^^ type: S + S.not_a_method3(0); + // ^^^^^^^^^^^^^^^^^^ type: S +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs index 8866de22dfb..3a258ecad10 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs @@ -1210,7 +1210,7 @@ impl<T> Slice<T> { fn main() { let foo: Slice<u32>; foo.into_vec(); // we shouldn't crash on this at least -} //^^^^^^^^^^^^^^ {unknown} +} //^^^^^^^^^^^^^^ () "#, ); } @@ -2163,9 +2163,9 @@ impl Receiver for Bar { fn main() { let bar = Bar; let _v1 = bar.foo1(); - //^^^ type: i32 + //^^^ type: {unknown} let _v2 = bar.foo2(); - //^^^ type: bool + //^^^ type: {unknown} } "#, ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index 15636604570..50a1ecd006d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -3814,3 +3814,50 @@ async fn foo(a: (), b: i32) -> u32 { "#, ); } + +#[test] +fn irrefutable_slices() { + check_infer( + r#" +//- minicore: from +struct A; + +impl From<A> for [u8; 2] { + fn from(a: A) -> Self { + [0; 2] + } +} +impl From<A> for [u8; 3] { + fn from(a: A) -> Self { + [0; 3] + } +} + + +fn main() { + let a = A; + let [b, c] = a.into(); +} +"#, + expect![[r#" + 50..51 'a': A + 64..86 '{ ... }': [u8; 2] + 74..80 '[0; 2]': [u8; 2] + 75..76 '0': u8 + 78..79 '2': usize + 128..129 'a': A + 142..164 '{ ... 
}': [u8; 3] + 152..158 '[0; 3]': [u8; 3] + 153..154 '0': u8 + 156..157 '3': usize + 179..224 '{ ...o(); }': () + 189..190 'a': A + 193..194 'A': A + 204..210 '[b, c]': [u8; 2] + 205..206 'b': u8 + 208..209 'c': u8 + 213..214 'a': A + 213..221 'a.into()': [u8; 2] + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index dda7bfb2baf..f0eb41b1ce7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -4694,21 +4694,21 @@ fn f<T: Send, U>() { Struct::<T>::IS_SEND; //^^^^^^^^^^^^^^^^^^^^Yes Struct::<U>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^Yes + //^^^^^^^^^^^^^^^^^^^^{unknown} Struct::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^^^Yes + //^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} Enum::<T>::IS_SEND; //^^^^^^^^^^^^^^^^^^Yes Enum::<U>::IS_SEND; - //^^^^^^^^^^^^^^^^^^Yes + //^^^^^^^^^^^^^^^^^^{unknown} Enum::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^Yes + //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} Union::<T>::IS_SEND; //^^^^^^^^^^^^^^^^^^^Yes Union::<U>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^Yes + //^^^^^^^^^^^^^^^^^^^{unknown} Union::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^^Yes + //^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} PhantomData::<T>::IS_SEND; //^^^^^^^^^^^^^^^^^^^^^^^^^Yes PhantomData::<U>::IS_SEND; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index bf7892f69bd..c131e97bc4c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -9,19 +9,22 @@ use chalk_ir::{ DebruijnIndex, }; use hir_def::{ + attr::Attrs, db::DefDatabase, generics::{WherePredicate, WherePredicateTypeTarget}, lang_item::LangItem, resolver::{HasResolver, TypeNs}, + tt, type_ref::{TraitBoundModifier, TypeRef}, EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId, TypeOrConstParamId, }; use hir_expand::name::Name; -use intern::sym; +use intern::{sym, Symbol}; use rustc_abi::TargetDataLayout; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; +use span::Edition; use stdx::never; use crate::{ @@ -264,10 +267,65 @@ impl<'a> ClosureSubst<'a> { } } -pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { +#[derive(Debug, Default)] +pub struct TargetFeatures { + enabled: FxHashSet<Symbol>, +} + +impl TargetFeatures { + pub fn from_attrs(attrs: &Attrs) -> Self { + let enabled = attrs + .by_key(&sym::target_feature) + .tt_values() + .filter_map(|tt| { + match tt.token_trees().flat_tokens() { + [ + tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)), + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })), + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })), + ] if enable_ident.sym == sym::enable => Some(features), + _ => None, + } + }) + .flat_map(|features| features.as_str().split(',').map(Symbol::intern)) + .collect(); + Self { enabled } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Unsafety { + Safe, + Unsafe, + /// A lint. 
+ DeprecatedSafe2024, +} + +pub fn is_fn_unsafe_to_call( + db: &dyn HirDatabase, + func: FunctionId, + caller_target_features: &TargetFeatures, + call_edition: Edition, +) -> Unsafety { let data = db.function_data(func); if data.is_unsafe() { - return true; + return Unsafety::Unsafe; + } + + if data.has_target_feature() { + // RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>. + let callee_target_features = TargetFeatures::from_attrs(&db.attrs(func.into())); + if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) { + return Unsafety::Unsafe; + } + } + + if data.is_deprecated_safe_2024() { + if call_edition.at_least_2024() { + return Unsafety::Unsafe; + } else { + return Unsafety::DeprecatedSafe2024; + } } let loc = func.lookup(db.upcast()); @@ -279,14 +337,22 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { if is_intrinsic_block { // legacy intrinsics // extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute - !db.attrs(func.into()).by_key(&sym::rustc_safe_intrinsic).exists() + if db.attrs(func.into()).by_key(&sym::rustc_safe_intrinsic).exists() { + Unsafety::Safe + } else { + Unsafety::Unsafe + } } else { // A function in an `extern` block is always unsafe to call, except when // it is marked as `safe`. - !data.is_safe() + if data.is_safe() { + Unsafety::Safe + } else { + Unsafety::Unsafe + } } } - _ => false, + _ => Unsafety::Safe, } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs index afd163fbd96..3a22158ce6f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs @@ -1028,6 +1028,7 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V); } GenericDefId::ImplId(_) => return None, GenericDefId::ConstId(_) => return None, + GenericDefId::StaticId(_) => return None, }, )) }) diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index fc77d1889c8..1ed0daa3756 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -6,10 +6,11 @@ use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_def::{ + expr_store::ExprOrPatPtr, hir::ExprOrPatId, path::{hir_segment_to_ast_segment, ModPath}, type_ref::TypesSourceMap, - AssocItemId, DefWithBodyId, SyntheticSyntax, + DefWithBodyId, SyntheticSyntax, }; use hir_expand::{name::Name, HirFileId, InFile}; use hir_ty::{ @@ -24,7 +25,7 @@ use syntax::{ }; use triomphe::Arc; -use crate::{AssocItem, Field, Local, Trait, Type}; +use crate::{AssocItem, Field, Function, Local, Trait, Type}; pub use hir_def::VariantId; pub use hir_ty::{ @@ -111,18 +112,19 @@ diagnostics![ UnusedMut, UnusedVariable, GenericArgsProhibited, + ParenthesizedGenericArgsWithoutFnTrait, ]; #[derive(Debug)] pub struct BreakOutsideOfLoop { - pub expr: InFile<AstPtr<ast::Expr>>, + pub expr: InFile<ExprOrPatPtr>, pub is_break: bool, pub bad_value_break: bool, } #[derive(Debug)] pub struct TypedHole { - pub expr: InFile<AstPtr<ast::Expr>>, + pub expr: InFile<ExprOrPatPtr>, pub expected: Type, } @@ -221,26 +223,26 @@ pub struct NoSuchField { #[derive(Debug)] pub struct PrivateAssocItem { - pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>, + pub expr_or_pat: InFile<ExprOrPatPtr>, pub item: AssocItem, } #[derive(Debug)] pub struct MismatchedTupleStructPatArgCount { - 
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>, + pub expr_or_pat: InFile<ExprOrPatPtr>, pub expected: usize, pub found: usize, } #[derive(Debug)] pub struct ExpectedFunction { - pub call: InFile<AstPtr<ast::Expr>>, + pub call: InFile<ExprOrPatPtr>, pub found: Type, } #[derive(Debug)] pub struct UnresolvedField { - pub expr: InFile<AstPtr<ast::Expr>>, + pub expr: InFile<ExprOrPatPtr>, pub receiver: Type, pub name: Name, pub method_with_same_name_exists: bool, @@ -248,34 +250,40 @@ pub struct UnresolvedField { #[derive(Debug)] pub struct UnresolvedMethodCall { - pub expr: InFile<AstPtr<ast::Expr>>, + pub expr: InFile<ExprOrPatPtr>, pub receiver: Type, pub name: Name, pub field_with_same_name: Option<Type>, - pub assoc_func_with_same_name: Option<AssocItemId>, + pub assoc_func_with_same_name: Option<Function>, } #[derive(Debug)] pub struct UnresolvedAssocItem { - pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>, + pub expr_or_pat: InFile<ExprOrPatPtr>, } #[derive(Debug)] pub struct UnresolvedIdent { - pub node: InFile<(AstPtr<Either<ast::Expr, ast::Pat>>, Option<TextRange>)>, + pub node: InFile<(ExprOrPatPtr, Option<TextRange>)>, } #[derive(Debug)] pub struct PrivateField { - pub expr: InFile<AstPtr<ast::Expr>>, + pub expr: InFile<ExprOrPatPtr>, pub field: Field, } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum UnsafeLint { + HardError, + UnsafeOpInUnsafeFn, + DeprecatedSafe2024, +} + #[derive(Debug)] pub struct MissingUnsafe { - pub node: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>, - /// If true, the diagnostics is an `unsafe_op_in_unsafe_fn` lint instead of a hard error. - pub only_lint: bool, + pub node: InFile<ExprOrPatPtr>, + pub lint: UnsafeLint, pub reason: UnsafetyReason, } @@ -296,7 +304,7 @@ pub struct ReplaceFilterMapNextWithFindMap { #[derive(Debug)] pub struct MismatchedArgCount { - pub call_expr: InFile<AstPtr<ast::Expr>>, + pub call_expr: InFile<ExprOrPatPtr>, pub expected: usize, pub found: usize, } @@ -315,7 +323,7 @@ pub struct NonExhaustiveLet { #[derive(Debug)] pub struct TypeMismatch { - pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>, + pub expr_or_pat: InFile<ExprOrPatPtr>, pub expected: Type, pub actual: Type, } @@ -389,13 +397,13 @@ pub struct RemoveUnnecessaryElse { #[derive(Debug)] pub struct CastToUnsized { - pub expr: InFile<AstPtr<ast::Expr>>, + pub expr: InFile<ExprOrPatPtr>, pub cast_ty: Type, } #[derive(Debug)] pub struct InvalidCast { - pub expr: InFile<AstPtr<ast::Expr>>, + pub expr: InFile<ExprOrPatPtr>, pub error: CastError, pub expr_ty: Type, pub cast_ty: Type, @@ -407,11 +415,16 @@ pub struct GenericArgsProhibited { pub reason: GenericArgsProhibitedReason, } +#[derive(Debug)] +pub struct ParenthesizedGenericArgsWithoutFnTrait { + pub args: InFile<AstPtr<ast::ParenthesizedArgList>>, +} + impl AnyDiagnostic { pub(crate) fn body_validation_diagnostic( db: &dyn HirDatabase, diagnostic: BodyValidationDiagnostic, - source_map: &hir_def::body::BodySourceMap, + source_map: &hir_def::expr_store::BodySourceMap, ) -> Option<AnyDiagnostic> { match diagnostic { BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => { @@ -422,9 +435,7 @@ impl AnyDiagnostic { .collect(); let record = match record { - Either::Left(record_expr) => { - source_map.expr_syntax(record_expr).ok()?.map(AstPtr::wrap_left) - } + Either::Left(record_expr) => source_map.expr_syntax(record_expr).ok()?, Either::Right(record_pat) => source_map.pat_syntax(record_pat).ok()?, }; let file = record.file_id; @@ -468,7 +479,7 @@ 
impl AnyDiagnostic { return Some( ReplaceFilterMapNextWithFindMap { file: next_source_ptr.file_id, - next_expr: next_source_ptr.value, + next_expr: next_source_ptr.value.cast()?, } .into(), ); @@ -478,7 +489,9 @@ impl AnyDiagnostic { match source_map.expr_syntax(match_expr) { Ok(source_ptr) => { let root = source_ptr.file_syntax(db.upcast()); - if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) { + if let Either::Left(ast::Expr::MatchExpr(match_expr)) = + &source_ptr.value.to_node(&root) + { match match_expr.expr() { Some(scrut_expr) if match_expr.match_arm_list().is_some() => { return Some( @@ -547,7 +560,7 @@ impl AnyDiagnostic { def: DefWithBodyId, d: &InferenceDiagnostic, outer_types_source_map: &TypesSourceMap, - source_map: &hir_def::body::BodySourceMap, + source_map: &hir_def::expr_store::BodySourceMap, ) -> Option<AnyDiagnostic> { let expr_syntax = |expr| { source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok() @@ -555,7 +568,7 @@ impl AnyDiagnostic { let pat_syntax = |pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok(); let expr_or_pat_syntax = |id| match id { - ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(|it| it.map(AstPtr::wrap_left)), + ExprOrPatId::ExprId(expr) => expr_syntax(expr), ExprOrPatId::PatId(pat) => pat_syntax(pat), }; Some(match d { @@ -616,7 +629,7 @@ impl AnyDiagnostic { field_with_same_name: field_with_same_name .clone() .map(|ty| Type::new(db, def, ty)), - assoc_func_with_same_name: *assoc_func_with_same_name, + assoc_func_with_same_name: assoc_func_with_same_name.map(Into::into), } .into() } @@ -627,7 +640,7 @@ impl AnyDiagnostic { &InferenceDiagnostic::UnresolvedIdent { id } => { let node = match id { ExprOrPatId::ExprId(id) => match source_map.expr_syntax(id) { - Ok(syntax) => syntax.map(|it| (it.wrap_left(), None)), + Ok(syntax) => syntax.map(|it| (it, None)), Err(SyntheticSyntax) => source_map .format_args_implicit_capture(id)? 
.map(|(node, range)| (node.wrap_left(), Some(range))), @@ -646,7 +659,7 @@ impl AnyDiagnostic { } &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => { let expr_or_pat = match pat { - ExprOrPatId::ExprId(expr) => expr_syntax(expr)?.map(AstPtr::wrap_left), + ExprOrPatId::ExprId(expr) => expr_syntax(expr)?, ExprOrPatId::PatId(pat) => { let InFile { file_id, value } = pat_syntax(pat)?; @@ -696,8 +709,8 @@ impl AnyDiagnostic { diag: &PathLoweringDiagnostic, path: InFile<ast::Path>, ) -> Option<AnyDiagnostic> { - Some(match diag { - &PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => { + Some(match *diag { + PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => { let segment = hir_segment_to_ast_segment(&path.value, segment)?; let args = if let Some(generics) = segment.generic_arg_list() { AstPtr::new(&generics).wrap_left() @@ -707,6 +720,12 @@ impl AnyDiagnostic { let args = path.with_value(args); GenericArgsProhibited { args, reason }.into() } + PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment } => { + let segment = hir_segment_to_ast_segment(&path.value, segment)?; + let args = AstPtr::new(&segment.parenthesized_arg_list()?); + let args = path.with_value(args); + ParenthesizedGenericArgsWithoutFnTrait { args }.into() + } }) } diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index b29c91694d3..6f4168ab086 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -80,7 +80,9 @@ impl HirDisplay for Function { if data.is_async() { f.write_str("async ")?; } - if self.is_unsafe_to_call(db) { + // FIXME: This will show `unsafe` for functions that are `#[target_feature]` but not unsafe + // (they are conditionally unsafe to call). We probably should show something else. 
+ if self.is_unsafe_to_call(db, None, f.edition()) { f.write_str("unsafe ")?; } if let Some(abi) = &data.abi { diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs index 2ad39817b2f..72df07ef8c0 100644 --- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs +++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs @@ -49,6 +49,7 @@ from_id![ (hir_def::LifetimeParamId, crate::LifetimeParam), (hir_def::MacroId, crate::Macro), (hir_def::ExternCrateId, crate::ExternCrateDecl), + (hir_def::ExternBlockId, crate::ExternBlock), ]; impl From<AdtId> for Adt { @@ -183,6 +184,7 @@ impl From<GenericDef> for GenericDefId { GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), GenericDef::Impl(it) => GenericDefId::ImplId(it.id), GenericDef::Const(it) => GenericDefId::ConstId(it.id), + GenericDef::Static(it) => GenericDefId::StaticId(it.id), } } } @@ -197,6 +199,7 @@ impl From<GenericDefId> for GenericDef { GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()), GenericDefId::ImplId(it) => GenericDef::Impl(it.into()), GenericDefId::ConstId(it) => GenericDef::Const(it.into()), + GenericDefId::StaticId(it) => GenericDef::Static(it.into()), } } } diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs index 82c90ac3010..a34b4980832 100644 --- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs @@ -248,7 +248,7 @@ impl HasSource for Param { let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?; let root = db.parse_or_expand(file_id); match value.to_node(&root) { - ast::Expr::ClosureExpr(it) => it + Either::Left(ast::Expr::ClosureExpr(it)) => it .param_list()? 
.params() .nth(self.idx) @@ -301,7 +301,7 @@ impl HasSource for InlineAsmOperand { let root = src.file_syntax(db.upcast()); return src .map(|ast| match ast.to_node(&root) { - ast::Expr::AsmExpr(asm) => asm + Either::Left(ast::Expr::AsmExpr(asm)) => asm .asm_pieces() .filter_map(|it| match it { ast::AsmPiece::AsmOperandNamed(it) => Some(it), diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 0cbc75726bf..5923a1bc30e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -42,8 +42,8 @@ use arrayvec::ArrayVec; use base_db::{CrateDisplayName, CrateId, CrateOrigin}; use either::Either; use hir_def::{ - body::BodyDiagnostic, data::{adt::VariantData, TraitFlags}, + expr_store::ExpressionStoreDiagnostics, generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance}, hir::{BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat}, item_tree::{AttrOwner, FieldParent, ItemTreeFieldId, ItemTreeNode}, @@ -55,8 +55,8 @@ use hir_def::{ resolver::{HasResolver, Resolver}, type_ref::TypesSourceMap, AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, - CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, - GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId, + CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax, TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, }; @@ -1892,10 +1892,10 @@ impl DefWithBody { for diag in source_map.diagnostics() { acc.push(match diag { - BodyDiagnostic::InactiveCode { node, cfg, opts } => { + ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => { InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() } - BodyDiagnostic::MacroError { node, err } => { + ExpressionStoreDiagnostics::MacroError { node, err } => { let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast()); @@ -1919,20 +1919,22 @@ impl DefWithBody { } .into() } - BodyDiagnostic::UnresolvedMacroCall { node, path } => UnresolvedMacroCall { - macro_call: (*node).map(|ast_ptr| ast_ptr.into()), - precise_location: None, - path: path.clone(), - is_bang: true, + ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => { + UnresolvedMacroCall { + macro_call: (*node).map(|ast_ptr| ast_ptr.into()), + precise_location: None, + path: path.clone(), + is_bang: true, + } + .into() } - .into(), - BodyDiagnostic::AwaitOutsideOfAsync { node, location } => { + ExpressionStoreDiagnostics::AwaitOutsideOfAsync { node, location } => { AwaitOutsideOfAsync { node: *node, location: location.clone() }.into() } - BodyDiagnostic::UnreachableLabel { node, name } => { + ExpressionStoreDiagnostics::UnreachableLabel { node, name } => { UnreachableLabel { node: *node, name: name.clone() }.into() } - BodyDiagnostic::UndeclaredLabel { node, name } => { + ExpressionStoreDiagnostics::UndeclaredLabel { node, name } => { UndeclaredLabel { node: *node, name: name.clone() }.into() } }); @@ -1955,7 +1957,7 @@ impl DefWithBody { ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right), }; let expr_or_pat = match expr_or_pat { - Ok(Either::Left(expr)) => expr.map(AstPtr::wrap_left), + 
Ok(Either::Left(expr)) => expr, Ok(Either::Right(InFile { file_id, value: pat })) => { // cast from Either<Pat, SelfParam> -> Either<_, Pat> let Some(ptr) = AstPtr::try_from_raw(pat.syntax_node_ptr()) else { @@ -1976,16 +1978,40 @@ impl DefWithBody { ); } - let (unsafe_exprs, only_lint) = hir_ty::diagnostics::missing_unsafe(db, self.into()); - for (node, reason) in unsafe_exprs { + let missing_unsafe = hir_ty::diagnostics::missing_unsafe(db, self.into()); + for (node, reason) in missing_unsafe.unsafe_exprs { match source_map.expr_or_pat_syntax(node) { - Ok(node) => acc.push(MissingUnsafe { node, only_lint, reason }.into()), + Ok(node) => acc.push( + MissingUnsafe { + node, + lint: if missing_unsafe.fn_is_unsafe { + UnsafeLint::UnsafeOpInUnsafeFn + } else { + UnsafeLint::HardError + }, + reason, + } + .into(), + ), Err(SyntheticSyntax) => { // FIXME: Here and elsewhere in this file, the `expr` was // desugared, report or assert that this doesn't happen. } } } + for node in missing_unsafe.deprecated_safe_calls { + match source_map.expr_syntax(node) { + Ok(node) => acc.push( + MissingUnsafe { + node, + lint: UnsafeLint::DeprecatedSafe2024, + reason: UnsafetyReason::UnsafeFnCall, + } + .into(), + ), + Err(SyntheticSyntax) => never!("synthetic DeprecatedSafe2024"), + } + } if let Ok(borrowck_results) = db.borrowck(self.into()) { for borrowck_result in borrowck_results.iter() { @@ -2301,6 +2327,13 @@ impl Function { db.function_data(self.id).is_async() } + pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> { + match self.id.lookup(db.upcast()).container { + ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }), + _ => None, + } + } + pub fn returns_impl_future(self, db: &dyn HirDatabase) -> bool { if self.is_async(db) { return true; @@ -2361,8 +2394,19 @@ impl Function { db.attrs(self.id.into()).is_unstable() } - pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool { - hir_ty::is_fn_unsafe_to_call(db, self.id) + pub fn is_unsafe_to_call( + self, + db: &dyn HirDatabase, + caller: Option<Function>, + call_edition: Edition, + ) -> bool { + let target_features = caller + .map(|caller| hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into()))) + .unwrap_or_default(); + matches!( + hir_ty::is_fn_unsafe_to_call(db, self.id, &target_features, call_edition), + hir_ty::Unsafety::Unsafe + ) } /// Whether this function declaration has a definition. @@ -2724,6 +2768,13 @@ impl Static { Type::from_value_def(db, self.id) } + pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> { + match self.id.lookup(db.upcast()).container { + ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }), + _ => None, + } + } + /// Evaluate the static initializer. pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst, ConstEvalError> { db.const_eval(self.id.into(), Substitution::empty(Interner), None) @@ -2892,6 +2943,17 @@ impl HasVisibility for TypeAlias { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ExternBlock { + pub(crate) id: ExternBlockId, +} + +impl ExternBlock { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.module(db.upcast()) } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct StaticLifetime; impl StaticLifetime { @@ -3453,6 +3515,7 @@ pub enum GenericDef { Impl(Impl), // consts can have type parameters from their parents (i.e. 
associated consts of traits) Const(Const), + Static(Static), } impl_from!( Function, @@ -3461,7 +3524,8 @@ impl_from!( TraitAlias, TypeAlias, Impl, - Const + Const, + Static for GenericDef ); @@ -3511,6 +3575,7 @@ impl GenericDef { GenericDef::TypeAlias(it) => it.id.into(), GenericDef::Impl(it) => it.id.into(), GenericDef::Const(it) => it.id.into(), + GenericDef::Static(it) => it.id.into(), } } @@ -3568,6 +3633,7 @@ impl GenericDef { item_tree_source_maps.impl_(id.value).generics() } GenericDefId::ConstId(_) => return, + GenericDefId::StaticId(_) => return, }, }; @@ -4551,10 +4617,7 @@ impl CaptureUsages { match span { mir::MirSpan::ExprId(expr) => { if let Ok(expr) = source_map.expr_syntax(expr) { - result.push(CaptureUsageSource { - is_ref, - source: expr.map(AstPtr::wrap_left), - }) + result.push(CaptureUsageSource { is_ref, source: expr }) } } mir::MirSpan::PatId(pat) => { @@ -4624,17 +4687,6 @@ impl Type { Type { env: TraitEnvironment::empty(krate), ty } } - pub fn reference(inner: &Type, m: Mutability) -> Type { - inner.derived( - TyKind::Ref( - if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not }, - hir_ty::error_lifetime(), - inner.ty.clone(), - ) - .intern(Interner), - ) - } - fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type { let resolver = lexical_env.resolver(db.upcast()); let environment = resolver @@ -4866,6 +4918,17 @@ impl Type { self.normalize_trait_assoc_type(db, &[], iterator_item.into()) } + pub fn impls_iterator(self, db: &dyn HirDatabase) -> bool { + let Some(iterator_trait) = + db.lang_item(self.env.krate, LangItem::Iterator).and_then(|it| it.as_trait()) + else { + return false; + }; + let canonical_ty = + Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) }; + method_resolution::implements_trait_unique(&canonical_ty, db, &self.env, iterator_trait) + } + /// Resolves the projection `<Self as IntoIterator>::IntoIter` and returns the resulting type pub fn into_iterator_iter(self, db: &dyn HirDatabase) -> Option<Type> { let trait_ = db.lang_item(self.env.krate, LangItem::IntoIterIntoIter).and_then(|it| { @@ -6139,9 +6202,15 @@ impl HasContainer for TraitAlias { } } +impl HasContainer for ExternBlock { + fn container(&self, db: &dyn HirDatabase) -> ItemContainer { + ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container }) + } +} + fn container_id_to_hir(c: ItemContainerId) -> ItemContainer { match c { - ItemContainerId::ExternBlockId(_id) => ItemContainer::ExternBlock(), + ItemContainerId::ExternBlockId(id) => ItemContainer::ExternBlock(ExternBlock { id }), ItemContainerId::ModuleId(id) => ItemContainer::Module(Module { id }), ItemContainerId::ImplId(id) => ItemContainer::Impl(Impl { id }), ItemContainerId::TraitId(id) => ItemContainer::Trait(Trait { id }), @@ -6153,7 +6222,7 @@ pub enum ItemContainer { Trait(Trait), Impl(Impl), Module(Module), - ExternBlock(), + ExternBlock(ExternBlock), Crate(CrateId), } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 09470bed9cf..c9145f7d212 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -1998,6 +1998,7 @@ to_def_impls![ (crate::Adt, ast::Adt, adt_to_def), (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def), (crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def), + (crate::ExternBlock, ast::ExternBlock, extern_block_to_def), (MacroCallId, ast::MacroCall, 
macro_call_to_macro_call), ]; @@ -2040,6 +2041,13 @@ impl SemanticsScope<'_> { Crate { id: self.resolver.krate() } } + pub fn containing_function(&self) -> Option<Function> { + self.resolver.body_owner().and_then(|owner| match owner { + DefWithBodyId::FunctionId(id) => Some(id.into()), + _ => None, + }) + } + pub(crate) fn resolver(&self) -> &Resolver { &self.resolver } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs index d5dfb985718..d0fdf5cbdf7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs @@ -74,6 +74,9 @@ impl ChildBySource for ItemScope { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { self.declarations().for_each(|item| add_module_def(db, res, file_id, item)); self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL)); + self.extern_blocks().for_each(|extern_block| { + insert_item_loc(db, res, file_id, extern_block, keys::EXTERN_BLOCK) + }); self.extern_crate_decls() .for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::EXTERN_CRATE)); self.use_decls().for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::USE)); diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 3c9e7065c41..4481b8855fd 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -92,10 +92,10 @@ use hir_def::{ DynMap, }, hir::{BindingId, Expr, LabelId}, - AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, - FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId, - ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, - VariantId, + AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, + ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, + Lookup, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, + UnionId, UseId, VariantId, }; use hir_expand::{ attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId, @@ -308,6 +308,12 @@ impl SourceToDefCtx<'_, '_> { ) -> Option<ExternCrateId> { self.to_def(src, keys::EXTERN_CRATE) } + pub(super) fn extern_block_to_def( + &mut self, + src: InFile<&ast::ExternBlock>, + ) -> Option<ExternBlockId> { + self.to_def(src, keys::EXTERN_BLOCK) + } #[allow(dead_code)] pub(super) fn use_to_def(&mut self, src: InFile<&ast::Use>) -> Option<UseId> { self.to_def(src, keys::USE) @@ -352,7 +358,7 @@ impl SourceToDefCtx<'_, '_> { let src = src.cloned().map(ast::Pat::from); let pat_id = source_map.node_pat(src.as_ref())?; // the pattern could resolve to a constant, verify that this is not the case - if let crate::Pat::Bind { id, .. } = body[pat_id] { + if let crate::Pat::Bind { id, .. } = body[pat_id.as_pat()?] 
{ Some((container, id)) } else { None diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index b699ccde412..9019863f7fd 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -14,11 +14,11 @@ use crate::{ }; use either::Either; use hir_def::{ - body::{ + expr_store::{ scope::{ExprScopes, ScopeId}, Body, BodySourceMap, HygieneId, }, - hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat, PatId}, + hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat}, lang_item::LangItem, lower::LowerCtx, nameres::MacroSubNs, @@ -139,7 +139,7 @@ impl SourceAnalyzer { sm.node_expr(src.as_ref()) } - fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> { + fn pat_id(&self, pat: &ast::Pat) -> Option<ExprOrPatId> { // FIXME: macros, see `expr_id` let src = InFile { file_id: self.file_id, value: pat }; self.body_source_map()?.node_pat(src) @@ -147,7 +147,7 @@ impl SourceAnalyzer { fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> { let pat_id = self.pat_id(&pat.clone().into())?; - if let Pat::Bind { id, .. } = self.body()?.pats[pat_id] { + if let Pat::Bind { id, .. } = self.body()?.pats[pat_id.as_pat()?] { Some(id) } else { None @@ -210,11 +210,20 @@ impl SourceAnalyzer { db: &dyn HirDatabase, pat: &ast::Pat, ) -> Option<(Type, Option<Type>)> { - let pat_id = self.pat_id(pat)?; + let expr_or_pat_id = self.pat_id(pat)?; let infer = self.infer.as_ref()?; - let coerced = - infer.pat_adjustments.get(&pat_id).and_then(|adjusts| adjusts.last().cloned()); - let ty = infer[pat_id].clone(); + let coerced = match expr_or_pat_id { + ExprOrPatId::ExprId(idx) => infer + .expr_adjustments + .get(&idx) + .and_then(|adjusts| adjusts.last().cloned()) + .map(|adjust| adjust.target), + ExprOrPatId::PatId(idx) => { + infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned()) + } + }; + + let ty = infer[expr_or_pat_id].clone(); let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty); Some((mk_ty(ty), coerced.map(mk_ty))) } @@ -248,7 +257,7 @@ impl SourceAnalyzer { ) -> Option<BindingMode> { let id = self.pat_id(&pat.clone().into())?; let infer = self.infer.as_ref()?; - infer.binding_modes.get(id).map(|bm| match bm { + infer.binding_modes.get(id.as_pat()?).map(|bm| match bm { hir_ty::BindingMode::Move => BindingMode::Move, hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut), hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => { @@ -266,7 +275,7 @@ impl SourceAnalyzer { Some( infer .pat_adjustments - .get(&pat_id)? + .get(&pat_id.as_pat()?)? .iter() .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone())) .collect(), @@ -649,10 +658,10 @@ impl SourceAnalyzer { let field_name = field.field_name()?.as_name(); let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?; let pat_id = self.pat_id(&record_pat.into())?; - let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?; + let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id.as_pat()?)?; let variant_data = variant.variant_data(db.upcast()); let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? 
}; - let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id)?.as_adt()?; + let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?; let field_ty = db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst); Some(( @@ -682,12 +691,20 @@ impl SourceAnalyzer { db: &dyn HirDatabase, pat: &ast::IdentPat, ) -> Option<ModuleDef> { - let pat_id = self.pat_id(&pat.clone().into())?; + let expr_or_pat_id = self.pat_id(&pat.clone().into())?; let body = self.body()?; - let path = match &body[pat_id] { - Pat::Path(path) => path, - _ => return None, + + let path = match expr_or_pat_id { + ExprOrPatId::ExprId(idx) => match &body[idx] { + Expr::Path(path) => path, + _ => return None, + }, + ExprOrPatId::PatId(idx) => match &body[idx] { + Pat::Path(path) => path, + _ => return None, + }, }; + let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, TypesMap::EMPTY)?; match res { PathResolution::Def(def) => Some(def), @@ -782,8 +799,9 @@ impl SourceAnalyzer { } prefer_value_ns = true; } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) { - let pat_id = self.pat_id(&path_pat.into())?; - if let Some((assoc, subs)) = infer.assoc_resolutions_for_pat(pat_id) { + let expr_or_pat_id = self.pat_id(&path_pat.into())?; + if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_or_pat_id) + { let (assoc, subst) = match assoc { AssocItemId::ConstId(const_id) => { let (konst, subst) = @@ -807,7 +825,7 @@ impl SourceAnalyzer { return Some((PathResolution::Def(AssocItem::from(assoc).into()), Some(subst))); } if let Some(VariantId::EnumVariantId(variant)) = - infer.variant_resolution_for_pat(pat_id) + infer.variant_resolution_for_expr_or_pat(expr_or_pat_id) { return Some((PathResolution::Def(ModuleDef::Variant(variant.into())), None)); } @@ -824,7 +842,7 @@ impl SourceAnalyzer { || parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from); if let Some(pat) = record_pat.or_else(tuple_struct_pat) { let pat_id = self.pat_id(&pat)?; - let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id); + let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id.as_pat()?); if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat { return Some(( PathResolution::Def(ModuleDef::Variant(variant.into())), @@ -866,7 +884,8 @@ impl SourceAnalyzer { // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are // trying to resolve foo::bar. - if path.parent_path().is_some() { + if let Some(parent_path) = path.parent_path() { + let parent_hir_path = Path::from_src(&mut ctx, parent_path); return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) { None if meta_path.is_some() => path .first_segment() .and_then(|it| it.name_ref()) .and_then(|name_ref| { .map(PathResolution::ToolModule) }) .map(|it| (it, None)), + // Case where the type name conflicts with a module brought in by a `use`, + // e.g. 
+ // ``` + // use std::str; + // fn main() { + // str::from_utf8(); // as module std::str + // str::len(); // as primitive type str + // str::no_exist_item(); // as primitive type str + // } + // ``` + Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => { + if let (Some(mod_path), Some(parent_hir_path)) = + (hir_path.mod_path(), parent_hir_path) + { + if let Some(ModuleDefId::ModuleId(id)) = self + .resolver + .resolve_module_path_in_items(db.upcast(), mod_path) + .take_types() + { + let parent_hir_name = + parent_hir_path.segments().get(1).map(|it| it.name); + let module = crate::Module { id }; + if module + .scope(db, None) + .into_iter() + .any(|(name, _)| Some(&name) == parent_hir_name) + { + return Some(( + PathResolution::Def(ModuleDef::Module(module)), + None, + )); + }; + } + } + Some((it, None)) + } // FIXME: We do not show substitutions for parts of path, because this is really complex // due to the interactions with associated items of `impl`s and associated items of associated // types. @@ -1043,7 +1098,7 @@ impl SourceAnalyzer { let body = self.body()?; let infer = self.infer.as_ref()?; - let pat_id = self.pat_id(&pattern.clone().into())?; + let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?; let substs = infer.type_of_pat[pat_id].as_adt()?.1; let (variant, missing_fields, _exhaustive) = @@ -1105,16 +1160,9 @@ impl SourceAnalyzer { if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) { let mut is_unsafe = false; let mut walk_expr = |expr_id| { - unsafe_expressions( - db, - infer, - *def, - body, - expr_id, - &mut |_, inside_unsafe_block, _| { - is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No - }, - ) + unsafe_expressions(db, infer, *def, body, expr_id, &mut |inside_unsafe_block| { + is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No + }) }; match expanded_expr { ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr), @@ -1259,7 +1307,11 @@ fn scope_for( node: InFile<&SyntaxNode>, ) -> Option<ScopeId> { node.ancestors_with_macros(db.upcast()) - .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind())) + .take_while(|it| { + !ast::Item::can_cast(it.kind()) + || ast::MacroCall::can_cast(it.kind()) + || ast::Use::can_cast(it.kind()) + }) .filter_map(|it| it.map(ast::Expr::cast).transpose()) .filter_map(|it| source_map.node_expr(it.as_ref())?.as_expr()) .find_map(|it| scopes.scope_for(it)) diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search.rs b/src/tools/rust-analyzer/crates/hir/src/term_search.rs index 6f845137084..af72179305c 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search.rs @@ -145,7 +145,7 @@ impl LookupTable { self.data .iter() .find(|(t, _)| { - Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, ty) + t.add_reference(Mutability::Shared).could_unify_with_deeply(db, ty) }) .map(|(t, it)| { it.exprs(t) diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs index 1b0e6f8bd5b..847304d503a 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs @@ -15,6 +15,7 @@ use hir_ty::mir::BorrowKind; use hir_ty::TyBuilder; use itertools::Itertools; use rustc_hash::FxHashSet; +use span::Edition; use crate::{ Adt, AssocItem, GenericDef, GenericParam, HasAttrs, HasVisibility, Impl, ModuleDef, ScopeDef, @@ -365,7 +366,7 @@ 
pub(super) fn free_function<'a, DB: HirDatabase>( let ret_ty = it.ret_type_with_args(db, generics.iter().cloned()); // Filter out private and unsafe functions if !it.is_visible_from(db, module) - || it.is_unsafe_to_call(db) + || it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME) || it.is_unstable(db) || ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr() @@ -470,7 +471,10 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( } // Filter out private and unsafe functions - if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) { + if !it.is_visible_from(db, module) + || it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME) + || it.is_unstable(db) + { return None; } @@ -658,7 +662,10 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( } // Filter out private and unsafe functions - if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) { + if !it.is_visible_from(db, module) + || it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME) + || it.is_unstable(db) + { return None; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 5899ec5a005..4a9e2256e9b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -6,7 +6,9 @@ use ide_db::syntax_helpers::suggest_name; use ide_db::RootDatabase; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast}; use itertools::Itertools; -use syntax::ast::edit_in_place::Removable; +use syntax::ast::edit::IndentLevel; +use syntax::ast::edit_in_place::Indent; +use syntax::ast::syntax_factory::SyntaxFactory; use syntax::ast::{self, make, AstNode, MatchArmList, MatchExpr, Pat}; use crate::{utils, AssistContext, AssistId, AssistKind, Assists}; @@ -200,8 +202,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) AssistId("add_missing_match_arms", AssistKind::QuickFix), "Fill match arms", ctx.sema.original_range(match_expr.syntax()).range, - |edit| { - let new_match_arm_list = match_arm_list.clone_for_update(); + |builder| { + let make = SyntaxFactory::new(); // having any hidden variants means that we need a catch-all arm needs_catch_all_arm |= has_hidden_variants; @@ -211,89 +213,85 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) // filter out hidden patterns because they're handled by the catch-all arm !hidden }) - .map(|(pat, _)| { - make::match_arm(pat, None, make::ext::expr_todo()).clone_for_update() - }); + .map(|(pat, _)| make.match_arm(pat, None, make::ext::expr_todo())); - let catch_all_arm = new_match_arm_list + let mut arms: Vec<_> = match_arm_list .arms() - .find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_)))); - if let Some(arm) = catch_all_arm { - let is_empty_expr = arm.expr().is_none_or(|e| match e { - ast::Expr::BlockExpr(b) => { - b.statements().next().is_none() && b.tail_expr().is_none() + .filter(|arm| { + if matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))) { + let is_empty_expr = arm.expr().is_none_or(|e| match e { + ast::Expr::BlockExpr(b) => { + b.statements().next().is_none() && b.tail_expr().is_none() + } + ast::Expr::TupleExpr(t) => t.fields().next().is_none(), + _ => false, + }); + if is_empty_expr { + false + } else { + cov_mark::hit!(add_missing_match_arms_empty_expr); + true + } + } else { + 
true } - ast::Expr::TupleExpr(t) => t.fields().next().is_none(), - _ => false, - }); - if is_empty_expr { - arm.remove(); - } else { - cov_mark::hit!(add_missing_match_arms_empty_expr); - } - } + }) + .collect(); - let mut added_arms = Vec::new(); - let mut todo_placeholders = Vec::new(); - for arm in missing_arms { - todo_placeholders.push(arm.expr().unwrap()); - added_arms.push(arm); - } + let first_new_arm_idx = arms.len(); + arms.extend(missing_arms); if needs_catch_all_arm && !has_catch_all_arm { cov_mark::hit!(added_wildcard_pattern); - let arm = - make::match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo()) - .clone_for_update(); - todo_placeholders.push(arm.expr().unwrap()); - added_arms.push(arm); - } - - let first_new_arm = added_arms.first().cloned(); - let last_new_arm = added_arms.last().cloned(); - - for arm in added_arms { - new_match_arm_list.add_arm(arm); + let arm = make.match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo()); + arms.push(arm); } - if let Some(cap) = ctx.config.snippet_cap { - if let Some(it) = first_new_arm - .and_then(|arm| arm.syntax().descendants().find_map(ast::WildcardPat::cast)) - { - edit.add_placeholder_snippet(cap, it); - } - - for placeholder in todo_placeholders { - edit.add_placeholder_snippet(cap, placeholder); - } - - if let Some(arm) = last_new_arm { - edit.add_tabstop_after(cap, arm); - } - } + let new_match_arm_list = make.match_arm_list(arms); - // FIXME: Hack for mutable syntax trees not having great support for macros + // FIXME: Hack for syntax trees not having great support for macros // Just replace the element that the original range came from let old_place = { // Find the original element let file = ctx.sema.parse(arm_list_range.file_id); let old_place = file.syntax().covering_element(arm_list_range.range); - // Make `old_place` mut match old_place { - syntax::SyntaxElement::Node(it) => { - syntax::SyntaxElement::from(edit.make_syntax_mut(it)) - } + syntax::SyntaxElement::Node(it) => it, syntax::SyntaxElement::Token(it) => { // If a token is found, it is '{' or '}' // The parent is `{ ... }` - let parent = it.parent().expect("Token must have a parent."); - syntax::SyntaxElement::from(edit.make_syntax_mut(parent)) + it.parent().expect("Token must have a parent.") } } }; - syntax::ted::replace(old_place, new_match_arm_list.syntax()); + let mut editor = builder.make_editor(&old_place); + new_match_arm_list.indent(IndentLevel::from_node(&old_place)); + editor.replace(old_place, new_match_arm_list.syntax()); + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(it) = new_match_arm_list + .arms() + .nth(first_new_arm_idx) + .and_then(|arm| arm.syntax().descendants().find_map(ast::WildcardPat::cast)) + { + editor.add_annotation(it.syntax(), builder.make_placeholder_snippet(cap)); + } + + for arm in new_match_arm_list.arms().skip(first_new_arm_idx) { + if let Some(expr) = arm.expr() { + editor.add_annotation(expr.syntax(), builder.make_placeholder_snippet(cap)); + } + } + + if let Some(arm) = new_match_arm_list.arms().skip(first_new_arm_idx).last() { + editor.add_annotation(arm.syntax(), builder.make_tabstop_after(cap)); + } + } + + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.file_id(), editor); }, ) } @@ -1377,6 +1375,9 @@ fn main() { ); } + // FIXME: Preserving comments is quite hard in the current transitional syntax editing model. + // Once we migrate to new trivia model addressed in #6854, remove the ignore attribute. 
+ #[ignore] #[test] fn add_missing_match_arms_preserves_comments() { check_assist( @@ -1405,6 +1406,9 @@ fn foo(a: A) { ); } + // FIXME: Preserving comments is quite hard in the current transitional syntax editing model. + // Once we migrate to new trivia model addressed in #6854, remove the ignore attribute. + #[ignore] #[test] fn add_missing_match_arms_preserves_comments_empty() { check_assist( @@ -1502,10 +1506,10 @@ enum Test { fn foo(t: Test) { m!(match t { - Test::A => ${1:todo!()}, - Test::B => ${2:todo!()}, - Test::C => ${3:todo!()},$0 -}); + Test::A => ${1:todo!()}, + Test::B => ${2:todo!()}, + Test::C => ${3:todo!()},$0 + }); }"#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs index 70fb5680052..491727a30a8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs @@ -15,7 +15,7 @@ use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKin // Assist: apply_demorgan // -// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law]. +// Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws). // This transforms expressions of the form `!l || !r` into `!(l && r)`. // This also works with `&&`. This assist can only be applied with the cursor // on either `||` or `&&`. @@ -131,7 +131,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti // Assist: apply_demorgan_iterator // -// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law] to +// Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws) to // `Iterator::all` and `Iterator::any`. // // This transforms expressions of the form `!iter.any(|x| predicate(x))` into diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs index d86948818b1..a92a000c3fb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs @@ -38,7 +38,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; // use super::AssistContext; // ``` // -// .Import Granularity +// #### Import Granularity // // It is possible to configure how use-trees are merged with the `imports.granularity.group` setting. // It has the following configurations: @@ -54,7 +54,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; // // In `VS Code` the configuration for this is `rust-analyzer.imports.granularity.group`. // -// .Import Prefix +// #### Import Prefix // // The style of imports in the same crate is configurable through the `imports.prefix` setting. // It has the following configurations: @@ -68,7 +68,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; // // In `VS Code` the configuration for this is `rust-analyzer.imports.prefix`. 
// -// image::https://user-images.githubusercontent.com/48062697/113020673-b85be580-917a-11eb-9022-59585f35d4f8.gif[] +//  // Assist: auto_import // diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 3c84f83906a..f6e516db888 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -140,8 +140,10 @@ fn edit_struct_references( match_ast! { match node { ast::TupleStructPat(tuple_struct_pat) => { + let file_range = ctx.sema.original_range_opt(&node)?; + edit.edit_file(file_range.file_id); edit.replace( - tuple_struct_pat.syntax().text_range(), + file_range.range, ast::make::record_pat_with_fields( tuple_struct_pat.path()?, ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map( @@ -924,4 +926,102 @@ pub struct Foo { #[my_custom_attr] field1: u32 } "#, ); } + + #[test] + fn convert_in_macro_pattern_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +macro_rules! foo { + ($expression:expr, $pattern:pat) => { + match $expression { + $pattern => true, + _ => false + } + }; +} +enum Expr { + A$0(usize), +} +fn main() { + let e = Expr::A(0); + foo!(e, Expr::A(0)); +} +"#, + r#" +macro_rules! foo { + ($expression:expr, $pattern:pat) => { + match $expression { + $pattern => true, + _ => false + } + }; +} +enum Expr { + A { field1: usize }, +} +fn main() { + let e = Expr::A { field1: 0 }; + foo!(e, Expr::A { field1: 0 }); +} +"#, + ); + } + + #[test] + fn convert_in_multi_file_macro_pattern_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +//- /main.rs +mod foo; + +enum Test { + A$0(i32) +} + +//- /foo.rs +use crate::Test; + +macro_rules! foo { + ($expression:expr, $pattern:pat) => { + match $expression { + $pattern => true, + _ => false + } + }; +} + +fn foo() { + let a = Test::A(0); + foo!(a, Test::A(0)); +} +"#, + r#" +//- /main.rs +mod foo; + +enum Test { + A { field1: i32 } +} + +//- /foo.rs +use crate::Test; + +macro_rules! 
foo { + ($expression:expr, $pattern:pat) => { + match $expression { + $pattern => true, + _ => false + } + }; +} + +fn foo() { + let a = Test::A { field1: 0 }; + foo!(a, Test::A { field1: 0 }); +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs index 094fdc46eb7..0b95d6177f9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs @@ -3,10 +3,11 @@ use hir::{AssocItem, Enum, HasVisibility, Module, ModuleDef, Name, PathResolutio use ide_db::{ defs::{Definition, NameRefClass}, search::SearchScope, + source_change::SourceChangeBuilder, }; use stdx::never; use syntax::{ - ast::{self, make}, + ast::{self, make, Use, UseTree, VisibilityKind}, ted, AstNode, Direction, SyntaxNode, SyntaxToken, T, }; @@ -43,6 +44,7 @@ use crate::{ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let star = ctx.find_token_syntax_at_offset(T![*])?; let use_tree = star.parent().and_then(ast::UseTree::cast)?; + let use_item = star.parent_ancestors().find_map(ast::Use::cast)?; let (parent, mod_path) = find_parent_and_path(&star)?; let target_module = match ctx.sema.resolve_path(&mod_path)? { PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it), @@ -53,8 +55,9 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> let current_scope = ctx.sema.scope(&star.parent()?)?; let current_module = current_scope.module(); - let refs_in_target = find_refs_in_mod(ctx, target_module, current_module)?; - let imported_defs = find_imported_defs(ctx, star)?; + if !is_visible_from(ctx, &target_module, current_module) { + return None; + } let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()); acc.add( @@ -62,37 +65,149 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> "Expand glob import", target.text_range(), |builder| { - let use_tree = builder.make_mut(use_tree); - - let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs); - let expanded = make::use_tree_list(names_to_import.iter().map(|n| { - let path = make::ext::ident_path( - &n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(), - ); - make::use_tree(path, None, None, false) - })) - .clone_for_update(); - - match use_tree.star_token() { - Some(star) => { - let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1; - if needs_braces { - ted::replace(star, expanded.syntax()) - } else { - let without_braces = expanded - .syntax() - .children_with_tokens() - .filter(|child| !matches!(child.kind(), T!['{'] | T!['}'])) - .collect(); - ted::replace_with_many(star, without_braces) - } - } - None => never!(), - } + build_expanded_import( + ctx, + builder, + use_tree, + use_item, + target_module, + current_module, + false, + ) + }, + ) +} + +// Assist: expand_glob_reexport +// +// Expands non-private glob imports. 
+// +// ``` +// mod foo { +// pub struct Bar; +// pub struct Baz; +// } +// +// pub use foo::*$0; +// ``` +// -> +// ``` +// mod foo { +// pub struct Bar; +// pub struct Baz; +// } +// +// pub use foo::{Bar, Baz}; +// ``` +pub(crate) fn expand_glob_reexport(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let star = ctx.find_token_syntax_at_offset(T![*])?; + let use_tree = star.parent().and_then(ast::UseTree::cast)?; + let use_item = star.parent_ancestors().find_map(ast::Use::cast)?; + let (parent, mod_path) = find_parent_and_path(&star)?; + let target_module = match ctx.sema.resolve_path(&mod_path)? { + PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it), + PathResolution::Def(ModuleDef::Adt(hir::Adt::Enum(e))) => Expandable::Enum(e), + _ => return None, + }; + + let current_scope = ctx.sema.scope(&star.parent()?)?; + let current_module = current_scope.module(); + + if let VisibilityKind::PubSelf = get_export_visibility_kind(&use_item) { + return None; + } + if !is_visible_from(ctx, &target_module, current_module) { + return None; + } + + let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()); + acc.add( + AssistId("expand_glob_reexport", AssistKind::RefactorRewrite), + "Expand glob reexport", + target.text_range(), + |builder| { + build_expanded_import( + ctx, + builder, + use_tree, + use_item, + target_module, + current_module, + true, + ) }, ) } +fn build_expanded_import( + ctx: &AssistContext<'_>, + builder: &mut SourceChangeBuilder, + use_tree: UseTree, + use_item: Use, + target_module: Expandable, + current_module: Module, + reexport_public_items: bool, +) { + let (must_be_pub, visible_from) = if !reexport_public_items { + (false, current_module) + } else { + match get_export_visibility_kind(&use_item) { + VisibilityKind::Pub => (true, current_module.krate().root_module()), + VisibilityKind::PubCrate => (false, current_module.krate().root_module()), + _ => (false, current_module), + } + }; + + let refs_in_target = find_refs_in_mod(ctx, target_module, visible_from, must_be_pub); + let imported_defs = find_imported_defs(ctx, use_item); + + let filtered_defs = + if reexport_public_items { refs_in_target } else { refs_in_target.used_refs(ctx) }; + + let use_tree = builder.make_mut(use_tree); + + let names_to_import = find_names_to_import(filtered_defs, imported_defs); + let expanded = make::use_tree_list(names_to_import.iter().map(|n| { + let path = make::ext::ident_path( + &n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(), + ); + make::use_tree(path, None, None, false) + })) + .clone_for_update(); + + match use_tree.star_token() { + Some(star) => { + let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1; + if needs_braces { + ted::replace(star, expanded.syntax()) + } else { + let without_braces = expanded + .syntax() + .children_with_tokens() + .filter(|child| !matches!(child.kind(), T!['{'] | T!['}'])) + .collect(); + ted::replace_with_many(star, without_braces) + } + } + None => never!(), + } +} + +fn get_export_visibility_kind(use_item: &Use) -> VisibilityKind { + use syntax::ast::HasVisibility as _; + match use_item.visibility() { + Some(vis) => match vis.kind() { + VisibilityKind::PubCrate => VisibilityKind::PubCrate, + VisibilityKind::Pub => VisibilityKind::Pub, + VisibilityKind::PubSelf => VisibilityKind::PubSelf, + // We don't handle pub(in ...) 
and pub(super) yet + VisibilityKind::In(_) => VisibilityKind::PubSelf, + VisibilityKind::PubSuper => VisibilityKind::PubSelf, + }, + None => VisibilityKind::PubSelf, + } +} + enum Expandable { Module(Module), Enum(Enum), @@ -130,14 +245,17 @@ struct Ref { // could be alias visible_name: Name, def: Definition, + is_pub: bool, } impl Ref { - fn from_scope_def(name: Name, scope_def: ScopeDef) -> Option<Self> { + fn from_scope_def(ctx: &AssistContext<'_>, name: Name, scope_def: ScopeDef) -> Option<Self> { match scope_def { - ScopeDef::ModuleDef(def) => { - Some(Ref { visible_name: name, def: Definition::from(def) }) - } + ScopeDef::ModuleDef(def) => Some(Ref { + visible_name: name, + def: Definition::from(def), + is_pub: matches!(def.visibility(ctx.db()), hir::Visibility::Public), + }), _ => None, } } @@ -180,32 +298,32 @@ fn find_refs_in_mod( ctx: &AssistContext<'_>, expandable: Expandable, visible_from: Module, -) -> Option<Refs> { - if !is_expandable_visible_from(ctx, &expandable, visible_from) { - return None; - } - + must_be_pub: bool, +) -> Refs { match expandable { Expandable::Module(module) => { let module_scope = module.scope(ctx.db(), Some(visible_from)); - let refs = - module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect(); - Some(Refs(refs)) + let refs = module_scope + .into_iter() + .filter_map(|(n, d)| Ref::from_scope_def(ctx, n, d)) + .filter(|r| !must_be_pub || r.is_pub) + .collect(); + Refs(refs) } - Expandable::Enum(enm) => Some(Refs( + Expandable::Enum(enm) => Refs( enm.variants(ctx.db()) .into_iter() - .map(|v| Ref { visible_name: v.name(ctx.db()), def: Definition::Variant(v) }) + .map(|v| Ref { + visible_name: v.name(ctx.db()), + def: Definition::Variant(v), + is_pub: true, + }) .collect(), - )), + ), } } -fn is_expandable_visible_from( - ctx: &AssistContext<'_>, - expandable: &Expandable, - from: Module, -) -> bool { +fn is_visible_from(ctx: &AssistContext<'_>, expandable: &Expandable, from: Module) -> bool { fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool { match module.parent(ctx.db()) { Some(parent) => { @@ -246,50 +364,34 @@ fn is_expandable_visible_from( // use foo::*$0; // use baz::Baz; // ↑ --------------- -fn find_imported_defs(ctx: &AssistContext<'_>, star: SyntaxToken) -> Option<Vec<Definition>> { - let parent_use_item_syntax = star.parent_ancestors().find_map(|n| { - if ast::Use::can_cast(n.kind()) { - Some(n) - } else { - None - } - })?; - - Some( - [Direction::Prev, Direction::Next] - .into_iter() - .flat_map(|dir| { - parent_use_item_syntax - .siblings(dir.to_owned()) - .filter(|n| ast::Use::can_cast(n.kind())) - }) - .flat_map(|n| n.descendants().filter_map(ast::NameRef::cast)) - .filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? { - NameRefClass::Definition( - def @ (Definition::Macro(_) - | Definition::Module(_) - | Definition::Function(_) - | Definition::Adt(_) - | Definition::Variant(_) - | Definition::Const(_) - | Definition::Static(_) - | Definition::Trait(_) - | Definition::TypeAlias(_)), - _, - ) => Some(def), - _ => None, - }) - .collect(), - ) +fn find_imported_defs(ctx: &AssistContext<'_>, use_item: Use) -> Vec<Definition> { + [Direction::Prev, Direction::Next] + .into_iter() + .flat_map(|dir| { + use_item.syntax().siblings(dir.to_owned()).filter(|n| ast::Use::can_cast(n.kind())) + }) + .flat_map(|n| n.descendants().filter_map(ast::NameRef::cast)) + .filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? 
{ + NameRefClass::Definition( + def @ (Definition::Macro(_) + | Definition::Module(_) + | Definition::Function(_) + | Definition::Adt(_) + | Definition::Variant(_) + | Definition::Const(_) + | Definition::Static(_) + | Definition::Trait(_) + | Definition::TypeAlias(_)), + _, + ) => Some(def), + _ => None, + }) + .collect() } -fn find_names_to_import( - ctx: &AssistContext<'_>, - refs_in_target: Refs, - imported_defs: Vec<Definition>, -) -> Vec<Name> { - let used_refs = refs_in_target.used_refs(ctx).filter_out_by_defs(imported_defs); - used_refs.0.iter().map(|r| r.visible_name.clone()).collect() +fn find_names_to_import(refs_in_target: Refs, imported_defs: Vec<Definition>) -> Vec<Name> { + let final_refs = refs_in_target.filter_out_by_defs(imported_defs); + final_refs.0.iter().map(|r| r.visible_name.clone()).collect() } #[cfg(test)] @@ -1036,4 +1138,83 @@ mod abc { }"#, ) } + + #[test] + fn expanding_glob_reexport() { + check_assist( + expand_glob_reexport, + r" +mod foo { + pub struct Bar; + pub struct Baz; + struct Qux; + + pub fn f() {} + + pub(crate) fn g() {} + pub(self) fn h() {} +} + +pub use foo::*$0; +", + r" +mod foo { + pub struct Bar; + pub struct Baz; + struct Qux; + + pub fn f() {} + + pub(crate) fn g() {} + pub(self) fn h() {} +} + +pub use foo::{Bar, Baz, f}; +", + ) + } + + #[test] + fn expanding_recursive_glob_reexport() { + check_assist( + expand_glob_reexport, + r" +mod foo { + pub use bar::*; + mod bar { + pub struct Bar; + pub struct Baz; + } +} + +pub use foo::*$0; +", + r" +mod foo { + pub use bar::*; + mod bar { + pub struct Bar; + pub struct Baz; + } +} + +pub use foo::{Bar, Baz}; +", + ) + } + + #[test] + fn expanding_reexport_is_not_applicable_for_private_import() { + check_assist_not_applicable( + expand_glob_reexport, + r" +mod foo { + pub struct Bar; + pub struct Baz; +} + +use foo::*$0; +", + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index 5c95b25f28d..179742f91b4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -270,6 +270,7 @@ mod handlers { destructure_tuple_binding::destructure_tuple_binding, destructure_struct_binding::destructure_struct_binding, expand_glob_import::expand_glob_import, + expand_glob_import::expand_glob_reexport, explicit_enum_discriminant::explicit_enum_discriminant, extract_expressions_from_format_string::extract_expressions_from_format_string, extract_struct_from_enum_variant::extract_struct_from_enum_variant, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 54e42f126bc..0662527a387 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1,4 +1,4 @@ -//! Generated by `cargo codegen assists-doc-tests`, do not edit by hand. +//! Generated by `cargo xtask codegen assists-doc-tests`, do not edit by hand. 
use super::check_doc_test; @@ -910,6 +910,29 @@ fn qux(bar: Bar, baz: Baz) {} } #[test] +fn doctest_expand_glob_reexport() { + check_doc_test( + "expand_glob_reexport", + r#####" +mod foo { + pub struct Bar; + pub struct Baz; +} + +pub use foo::*$0; +"#####, + r#####" +mod foo { + pub struct Bar; + pub struct Baz; +} + +pub use foo::{Bar, Baz}; +"#####, + ) +} + +#[test] fn doctest_explicit_enum_discriminant() { check_doc_test( "explicit_enum_discriminant", diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 78ff4417913..c1332d99bff 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -793,8 +793,8 @@ pub(crate) fn convert_reference_type( } fn could_deref_to_target(ty: &hir::Type, target: &hir::Type, db: &dyn HirDatabase) -> bool { - let ty_ref = hir::Type::reference(ty, hir::Mutability::Shared); - let target_ref = hir::Type::reference(target, hir::Mutability::Shared); + let ty_ref = ty.add_reference(hir::Mutability::Shared); + let target_ref = target.add_reference(hir::Mutability::Shared); ty_ref.could_coerce_to(db, &target_ref) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index d12654665ce..b38b9ac1f53 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -89,7 +89,7 @@ pub(crate) fn complete_dot( acc.add_method(ctx, dot_access, func, None, None) }); - if ctx.config.enable_auto_iter { + if ctx.config.enable_auto_iter && !receiver_ty.strip_references().impls_iterator(ctx.db) { // FIXME: // Checking for the existence of `iter()` is complicated in our setup, because we need to substitute // its return type, so we instead check for `<&Self as IntoIterator>::IntoIter`. @@ -1500,9 +1500,31 @@ fn main() { bar.$0 } "#, + expect![[r#""#]], + ); + } + + #[test] + fn no_iter_suggestion_on_iterator() { + check_no_kw( + r#" +//- minicore: iterator +struct MyIter; +impl Iterator for MyIter { + type Item = (); + fn next(&mut self) -> Option<Self::Item> { None } +} + +fn main() { + MyIter.$0 +} +"#, expect![[r#" - me foo() fn(self: Bar) -"#]], + me by_ref() (as Iterator) fn(&mut self) -> &mut Self + me into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter + me next() (as Iterator) fn(&mut self) -> Option<<Self as Iterator>::Item> + me nth(…) (as Iterator) fn(&mut self, usize) -> Option<<Self as Iterator>::Item> + "#]], ); } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index 24243f57b46..b5555e66102 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -83,19 +83,19 @@ use crate::{ // NOTE: currently, if an assoc item comes from a trait that's not currently imported, and it also has an unresolved and/or partially-qualified path, // no imports will be proposed. // -// .Fuzzy search details +// #### Fuzzy search details // // To avoid an excessive amount of the results returned, completion input is checked for inclusion in the names only // (i.e. in `HashMap` in the `std::collections::HashMap` path). 
// For the same reasons, avoids searching for any path imports for inputs with their length less than 2 symbols // (but shows all associated items for any input length). // -// .Import configuration +// #### Import configuration // // It is possible to configure how use-trees are merged with the `imports.granularity.group` setting. // Mimics the corresponding behavior of the `Auto Import` feature. // -// .LSP and performance implications +// #### LSP and performance implications // // The feature is enabled only if the LSP client supports LSP protocol version 3.16+ and reports the `additionalTextEdits` // (case-sensitive) resolve client capability in its client capabilities. @@ -103,7 +103,7 @@ use crate::{ // For clients with no such support, all edits have to be calculated on the completion request, including the fuzzy search completion ones, // which might be slow ergo the feature is automatically disabled. // -// .Feature toggle +// #### Feature toggle // // The feature can be forcefully turned off in the settings with the `rust-analyzer.completion.autoimport.enable` flag. // Note that having this flag set to `true` does not guarantee that the feature is enabled: your client needs to have the corresponding diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 2c39a8fdfed..28e2853096e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -2,17 +2,18 @@ mod format_like; -use hir::ItemInNs; -use ide_db::text_edit::TextEdit; +use base_db::SourceDatabase; +use hir::{ItemInNs, Semantics}; use ide_db::{ documentation::{Documentation, HasDocs}, imports::insert_use::ImportScope, + text_edit::TextEdit, ty_filter::TryEnum, - SnippetCap, + RootDatabase, SnippetCap, }; use stdx::never; use syntax::{ - ast::{self, make, AstNode, AstToken}, + ast::{self, AstNode, AstToken}, SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR}, TextRange, TextSize, }; @@ -48,7 +49,8 @@ pub(crate) fn complete_postfix( }; let expr_ctx = &dot_access.ctx; - let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal); + let receiver_text = + get_receiver_text(&ctx.sema, dot_receiver, receiver_is_ambiguous_float_literal); let cap = match ctx.config.snippet_cap { Some(it) => it, @@ -172,13 +174,15 @@ pub(crate) fn complete_postfix( // The rest of the postfix completions create an expression that moves an argument, // so it's better to consider references now to avoid breaking the compilation - let (dot_receiver, node_to_replace_with) = include_references(dot_receiver); - let receiver_text = - get_receiver_text(&node_to_replace_with, receiver_is_ambiguous_float_literal); - let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) { - Some(it) => it, - None => return, - }; + let (dot_receiver_including_refs, prefix) = include_references(dot_receiver); + let mut receiver_text = + get_receiver_text(&ctx.sema, dot_receiver, receiver_is_ambiguous_float_literal); + receiver_text.insert_str(0, &prefix); + let postfix_snippet = + match build_postfix_snippet_builder(ctx, cap, &dot_receiver_including_refs) { + Some(it) => it, + None => return, + }; if !ctx.config.snippets.is_empty() { add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text); @@ -222,7 +226,7 @@ pub(crate) fn complete_postfix( 
postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})")) .add_to(acc, ctx.db); - if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) { + if let Some(parent) = dot_receiver_including_refs.syntax().parent().and_then(|p| p.parent()) { if matches!(parent.kind(), STMT_LIST | EXPR_STMT) { postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")) .add_to(acc, ctx.db); @@ -231,9 +235,9 @@ pub(crate) fn complete_postfix( } } - if let ast::Expr::Literal(literal) = dot_receiver.clone() { + if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() { if let Some(literal_text) = ast::String::cast(literal.token()) { - add_format_like_completions(acc, ctx, &dot_receiver, cap, &literal_text); + add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text); } } @@ -260,14 +264,20 @@ pub(crate) fn complete_postfix( } } -fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String { - let mut text = if receiver_is_ambiguous_float_literal { - let text = receiver.syntax().text(); - let without_dot = ..text.len() - TextSize::of('.'); - text.slice(without_dot).to_string() - } else { - receiver.to_string() +fn get_receiver_text( + sema: &Semantics<'_, RootDatabase>, + receiver: &ast::Expr, + receiver_is_ambiguous_float_literal: bool, +) -> String { + // Do not just call `receiver.to_string()`, as that will mess up whitespaces inside macros. + let Some(mut range) = sema.original_range_opt(receiver.syntax()) else { + return receiver.to_string(); }; + if receiver_is_ambiguous_float_literal { + range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.')) + } + let file_text = sema.db.file_text(range.file_id.file_id()); + let mut text = file_text[range.range].to_owned(); // The receiver texts should be interpreted as-is, as they are expected to be // normal Rust expressions. 
@@ -284,7 +294,7 @@ fn escape_snippet_bits(text: &mut String) { stdx::replace(text, '$', "\\$"); } -fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) { +fn include_references(initial_element: &ast::Expr) -> (ast::Expr, String) { let mut resulting_element = initial_element.clone(); while let Some(field_expr) = resulting_element.syntax().parent().and_then(ast::FieldExpr::cast) @@ -292,7 +302,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) { resulting_element = ast::Expr::from(field_expr); } - let mut new_element_opt = initial_element.clone(); + let mut prefix = String::new(); while let Some(parent_deref_element) = resulting_element.syntax().parent().and_then(ast::PrefixExpr::cast) @@ -303,7 +313,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) { resulting_element = ast::Expr::from(parent_deref_element); - new_element_opt = make::expr_prefix(syntax::T![*], new_element_opt).into(); + prefix.insert(0, '*'); } if let Some(first_ref_expr) = resulting_element.syntax().parent().and_then(ast::RefExpr::cast) { @@ -317,7 +327,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) { let exclusive = parent_ref_element.mut_token().is_some(); resulting_element = ast::Expr::from(parent_ref_element); - new_element_opt = make::expr_ref(new_element_opt, exclusive); + prefix.insert_str(0, if exclusive { "&mut " } else { "&" }); } } else { // If we do not find any ref expressions, restore @@ -325,7 +335,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) { resulting_element = initial_element.clone(); } - (resulting_element, new_element_opt) + (resulting_element, prefix) } fn build_postfix_snippet_builder<'ctx>( @@ -901,4 +911,31 @@ fn main() { "#, ); } + + #[test] + fn inside_macro() { + check_edit( + "box", + r#" +macro_rules! assert { + ( $it:expr $(,)? ) => { $it }; +} + +fn foo() { + let a = true; + assert!(if a == false { true } else { false }.$0); +} + "#, + r#" +macro_rules! assert { + ( $it:expr $(,)? ) => { $it }; +} + +fn foo() { + let a = true; + assert!(Box::new(if a == false { true } else { false })); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs index 2755329bb31..c612170eb54 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs @@ -14,7 +14,7 @@ // ** `logw` -> `log::warn!(...)` // ** `loge` -> `log::error!(...)` // -// image::https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif[] +//  use ide_db::{ syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders, Arg}, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 2f1860cbb59..7862b258789 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -442,6 +442,8 @@ pub(crate) struct CompletionContext<'a> { pub(crate) krate: hir::Crate, /// The module of the `scope`. pub(crate) module: hir::Module, + /// The function where we're completing, if inside a function. + pub(crate) containing_function: Option<hir::Function>, /// Whether nightly toolchain is used. 
Cached since this is looked up a lot. pub(crate) is_nightly: bool, /// The edition of the current crate @@ -760,6 +762,7 @@ impl<'a> CompletionContext<'a> { let krate = scope.krate(); let module = scope.module(); + let containing_function = scope.containing_function(); let edition = krate.edition(db); let toolchain = db.toolchain_channel(krate.into()); @@ -874,6 +877,7 @@ impl<'a> CompletionContext<'a> { token, krate, module, + containing_function, is_nightly, edition, expected_name, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index f5a50ae8190..eecd412bc43 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -59,7 +59,7 @@ pub(super) fn expand_and_analyze( // make the offset point to the start of the original token, as that is what the // intermediate offsets calculated in expansion always points to let offset = offset - relative_offset; - let expansion = expand( + let expansion = expand_maybe_stop( sema, original_file.clone(), speculative_file.clone(), @@ -118,7 +118,7 @@ fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Opt /// that we check, we subtract `COMPLETION_MARKER.len()`. This may not be accurate because proc macros /// can insert the text of the completion marker in other places while removing the span, but this is /// the best we can do. -fn expand( +fn expand_maybe_stop( sema: &Semantics<'_, RootDatabase>, original_file: SyntaxNode, speculative_file: SyntaxNode, @@ -126,23 +126,48 @@ fn expand( fake_ident_token: SyntaxToken, relative_offset: TextSize, ) -> Option<ExpansionResult> { - let _p = tracing::info_span!("CompletionContext::expand").entered(); + if let result @ Some(_) = expand( + sema, + original_file.clone(), + speculative_file.clone(), + original_offset, + fake_ident_token.clone(), + relative_offset, + ) { + return result; + } + // This needs to come after the recursive call, because our "inside macro" detection is subtly wrong + // with regard to attribute macros named `test` that are not std's test. So hopefully we will expand + // them successfully above and be able to analyze. // Left biased since there may already be an identifier token there, and we appended to it. if !sema.might_be_inside_macro_call(&fake_ident_token) && token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset) .is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token)) { // Recursion base case. 
- return Some(ExpansionResult { + Some(ExpansionResult { original_file, speculative_file, original_offset, speculative_offset: fake_ident_token.text_range().start(), fake_ident_token, derive_ctx: None, - }); + }) + } else { + None } +} + +fn expand( + sema: &Semantics<'_, RootDatabase>, + original_file: SyntaxNode, + speculative_file: SyntaxNode, + original_offset: TextSize, + fake_ident_token: SyntaxToken, + relative_offset: TextSize, +) -> Option<ExpansionResult> { + let _p = tracing::info_span!("CompletionContext::expand").entered(); let parent_item = |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); @@ -197,7 +222,7 @@ fn expand( // stop here to prevent problems from happening return None; } - let result = expand( + let result = expand_maybe_stop( sema, actual_expansion.clone(), fake_expansion.clone(), @@ -317,7 +342,7 @@ fn expand( // stop here to prevent problems from happening return None; } - let result = expand( + let result = expand_maybe_stop( sema, actual_expansion.clone(), fake_expansion.clone(), @@ -386,7 +411,7 @@ fn expand( // stop here to prevent problems from happening return None; } - let result = expand( + let result = expand_maybe_stop( sema, actual_expansion.clone(), fake_expansion.clone(), diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 8051d48ca5f..a1f2eaeb1b6 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -106,11 +106,13 @@ impl CompletionFieldsToResolve { // // There also snippet completions: // -// .Expressions +// #### Expressions +// // - `pd` -> `eprintln!(" = {:?}", );` // - `ppd` -> `eprintln!(" = {:#?}", );` // -// .Items +// #### Items +// // - `tfn` -> `#[test] fn feature(){}` // - `tmod` -> // ```rust @@ -127,7 +129,7 @@ impl CompletionFieldsToResolve { // Those are the additional completion options with automatic `use` import and options from all project importable items, // fuzzy matched against the completion input. // -// image::https://user-images.githubusercontent.com/48062697/113020667-b72ab880-917a-11eb-8778-716cf26a0eb3.gif[] +//  /// Main entry point for completion. We run completion as a two-phase process. 
/// diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index c3354902c3b..fd90613964a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -144,7 +144,7 @@ fn render( let detail = if ctx.completion.config.full_function_signatures { detail_full(db, func, ctx.completion.edition) } else { - detail(db, func, ctx.completion.edition) + detail(ctx.completion, func, ctx.completion.edition) }; item.set_documentation(ctx.docs(func)) .set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func)) @@ -307,26 +307,26 @@ fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'sta "" } -fn detail(db: &dyn HirDatabase, func: hir::Function, edition: Edition) -> String { - let mut ret_ty = func.ret_type(db); +fn detail(ctx: &CompletionContext<'_>, func: hir::Function, edition: Edition) -> String { + let mut ret_ty = func.ret_type(ctx.db); let mut detail = String::new(); - if func.is_const(db) { + if func.is_const(ctx.db) { format_to!(detail, "const "); } - if func.is_async(db) { + if func.is_async(ctx.db) { format_to!(detail, "async "); - if let Some(async_ret) = func.async_ret_type(db) { + if let Some(async_ret) = func.async_ret_type(ctx.db) { ret_ty = async_ret; } } - if func.is_unsafe_to_call(db) { + if func.is_unsafe_to_call(ctx.db, ctx.containing_function, ctx.edition) { format_to!(detail, "unsafe "); } - format_to!(detail, "fn({})", params_display(db, func, edition)); + format_to!(detail, "fn({})", params_display(ctx.db, func, edition)); if !ret_ty.is_unit() { - format_to!(detail, " -> {}", ret_ty.display(db, edition)); + format_to!(detail, " -> {}", ret_ty.display(ctx.db, edition)); } detail } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs index 866b83a6146..07f33a826e4 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs @@ -8,8 +8,7 @@ // // A custom snippet can be defined by adding it to the `rust-analyzer.completion.snippets.custom` object respectively. // -// [source,json] -// ---- +// ```json // { // "rust-analyzer.completion.snippets.custom": { // "thread spawn": { @@ -25,7 +24,7 @@ // } // } // } -// ---- +// ``` // // In the example above: // @@ -39,6 +38,7 @@ // * `description` is an optional description of the snippet, if unset the snippet name will be used. // // * `requires` is an optional list of item paths that have to be resolvable in the current crate where the completion is rendered. + // On failure of resolution the snippet won't be applicable, otherwise the snippet will insert an import for the items on insertion if // the items aren't yet in scope. 
// @@ -55,8 +55,8 @@ // // For the VSCode editor, rust-analyzer also ships with a small set of defaults which can be removed // by overwriting the settings object mentioned above, the defaults are: -// [source,json] -// ---- +// +// ```json // { // "Arc::new": { // "postfix": "arc", @@ -98,7 +98,7 @@ // "scope": "expr" // } // } -// ---- +// ```` use hir::{ModPath, Name, Symbol}; use ide_db::imports::import_assets::LocatedImport; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs index 663a038580d..37557512837 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs @@ -1986,3 +1986,53 @@ fn foo() { "#]], ); } + +#[test] +fn non_std_test_attr_macro() { + check( + r#" +//- proc_macros: identity +use proc_macros::identity as test; + +#[test] +fn foo() { + $0 +} + "#, + expect![[r#" + fn foo() fn() + md proc_macros + bt u32 u32 + kw async + kw const + kw crate:: + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs index 35e3a8d9bf7..46ff4fbf9e9 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs @@ -44,12 +44,11 @@ impl RootDatabase { // // Clears rust-analyzer's internal database and prints memory usage statistics. 
// - // |=== - // | Editor | Action Name - // + // | Editor | Action Name | + // |---------|-------------| // | VS Code | **rust-analyzer: Memory Usage (Clears Database)** - // |=== - // image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[] + + //  pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes, usize)> { let mut acc: Vec<(String, Bytes, usize)> = vec![]; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index d12bda0816f..6f71c3d9bd7 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -108,7 +108,7 @@ impl Definition { ItemContainer::Trait(it) => Some(it.into()), ItemContainer::Impl(it) => Some(it.into()), ItemContainer::Module(it) => Some(it.into()), - ItemContainer::ExternBlock() | ItemContainer::Crate(_) => None, + ItemContainer::ExternBlock(_) | ItemContainer::Crate(_) => None, } } match self { @@ -986,6 +986,7 @@ impl From<GenericDef> for Definition { GenericDef::TypeAlias(it) => it.into(), GenericDef::Impl(it) => it.into(), GenericDef::Const(it) => it.into(), + GenericDef::Static(it) => it.into(), } } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs index 14af22c3193..ed9d6c67501 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs @@ -9107,8 +9107,8 @@ The tracking issue for this feature is: [#27721] deny_since: None, }, Lint { - label: "pattern_complexity", - description: r##"# `pattern_complexity` + label: "pattern_complexity_limit", + description: r##"# `pattern_complexity_limit` This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs index 0002fda0ba7..22dc3d9e29d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs @@ -6,12 +6,13 @@ mod topologic_sort; use std::time::Duration; -use hir::db::DefDatabase; +use hir::{db::DefDatabase, Symbol}; +use itertools::Itertools; use crate::{ base_db::{ ra_salsa::{Database, ParallelDatabase, Snapshot}, - Cancelled, CrateId, SourceDatabase, SourceRootDatabase, + Cancelled, CrateId, SourceDatabase, }, symbol_index::SymbolsDatabase, FxIndexMap, RootDatabase, @@ -21,11 +22,12 @@ use crate::{ #[derive(Debug)] pub struct ParallelPrimeCachesProgress { /// the crates that we are currently priming. - pub crates_currently_indexing: Vec<String>, + pub crates_currently_indexing: Vec<Symbol>, /// the total number of crates we want to prime. pub crates_total: usize, /// the total number of crates that have finished priming pub crates_done: usize, + pub work_type: &'static str, } pub fn parallel_prime_caches( @@ -47,41 +49,32 @@ pub fn parallel_prime_caches( }; enum ParallelPrimeCacheWorkerProgress { - BeginCrate { crate_id: CrateId, crate_name: String }, + BeginCrate { crate_id: CrateId, crate_name: Symbol }, EndCrate { crate_id: CrateId }, } + // We split off def map computation from other work, + // as the def map is the relevant one. Once the defmaps are computed + // the project is ready to go, the other indices are just nice to have for some IDE features. 
+ #[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone)] + enum PrimingPhase { + DefMap, + ImportMap, + CrateSymbols, + } + let (work_sender, progress_receiver) = { let (progress_sender, progress_receiver) = crossbeam_channel::unbounded(); let (work_sender, work_receiver) = crossbeam_channel::unbounded(); - let graph = graph.clone(); - let local_roots = db.local_roots(); let prime_caches_worker = move |db: Snapshot<RootDatabase>| { - while let Ok((crate_id, crate_name)) = work_receiver.recv() { + while let Ok((crate_id, crate_name, kind)) = work_receiver.recv() { progress_sender .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?; - // Compute the DefMap and possibly ImportMap - let file_id = graph[crate_id].root_file_id; - let root_id = db.file_source_root(file_id); - if db.source_root(root_id).is_library { - db.crate_def_map(crate_id); - } else { - // This also computes the DefMap - db.import_map(crate_id); - } - - // Compute the symbol search index. - // This primes the cache for `ide_db::symbol_index::world_symbols()`. - // - // We do this for workspace crates only (members of local_roots), because doing it - // for all dependencies could be *very* unnecessarily slow in a large project. - // - // FIXME: We should do it unconditionally if the configuration is set to default to - // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we - // would need to pipe that configuration information down here. - if local_roots.contains(&root_id) { - db.crate_symbols(crate_id.into()); + match kind { + PrimingPhase::DefMap => _ = db.crate_def_map(crate_id), + PrimingPhase::ImportMap => _ = db.import_map(crate_id), + PrimingPhase::CrateSymbols => _ = db.crate_symbols(crate_id.into()), } progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?; @@ -112,16 +105,34 @@ pub fn parallel_prime_caches( let mut crates_currently_indexing = FxIndexMap::with_capacity_and_hasher(num_worker_threads, Default::default()); + let mut additional_phases = vec![]; + while crates_done < crates_total { db.unwind_if_cancelled(); for crate_id in &mut crates_to_prime { - work_sender - .send(( - crate_id, - graph[crate_id].display_name.as_deref().unwrap_or_default().to_owned(), - )) - .ok(); + let krate = &graph[crate_id]; + let name = krate + .display_name + .as_deref() + .cloned() + .unwrap_or_else(|| Symbol::integer(crate_id.into_raw().into_u32() as usize)); + if krate.origin.is_lang() { + additional_phases.push((crate_id, name.clone(), PrimingPhase::ImportMap)); + } else if krate.origin.is_local() { + // Compute the symbol search index. + // This primes the cache for `ide_db::symbol_index::world_symbols()`. + // + // We do this for workspace crates only (members of local_roots), because doing it + // for all dependencies could be *very* unnecessarily slow in a large project. + // + // FIXME: We should do it unconditionally if the configuration is set to default to + // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we + // would need to pipe that configuration information down here. 
+ additional_phases.push((crate_id, name.clone(), PrimingPhase::CrateSymbols)); + } + + work_sender.send((crate_id, name, PrimingPhase::DefMap)).ok(); } // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision @@ -153,6 +164,50 @@ pub fn parallel_prime_caches( crates_currently_indexing: crates_currently_indexing.values().cloned().collect(), crates_done, crates_total, + work_type: "Indexing", + }; + + cb(progress); + } + + let mut crates_done = 0; + let crates_total = additional_phases.len(); + for w in additional_phases.into_iter().sorted_by_key(|&(_, _, phase)| phase) { + work_sender.send(w).ok(); + } + + while crates_done < crates_total { + db.unwind_if_cancelled(); + + // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision + // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or + // if this thread exits, and closes the work channel. + let worker_progress = match progress_receiver.recv_timeout(Duration::from_millis(10)) { + Ok(p) => p, + Err(crossbeam_channel::RecvTimeoutError::Timeout) => { + continue; + } + Err(crossbeam_channel::RecvTimeoutError::Disconnected) => { + // our workers may have died from a cancelled task, so we'll check and re-raise here. + db.unwind_if_cancelled(); + break; + } + }; + match worker_progress { + ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name } => { + crates_currently_indexing.insert(crate_id, crate_name); + } + ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => { + crates_currently_indexing.swap_remove(&crate_id); + crates_done += 1; + } + }; + + let progress = ParallelPrimeCachesProgress { + crates_currently_indexing: crates_currently_indexing.values().cloned().collect(), + crates_done, + crates_total, + work_type: "Populating symbols", }; cb(progress); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 7fc563a4241..7963e8ae4f7 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -354,6 +354,7 @@ impl Definition { hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()), + hir::GenericDef::Static(it) => it.source(db).map(|src| src.syntax().cloned()), }; return match def { Some(def) => SearchScope::file_range( diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index e5ce10a771e..bb4c289c908 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -193,11 +193,9 @@ impl<DB> std::ops::Deref for Snap<DB> { // `rust-analyzer.workspace.symbol.search.kind` settings. Symbols prefixed // with `__` are hidden from the search results unless configured otherwise. 
// -// |=== -// | Editor | Shortcut -// -// | VS Code | kbd:[Ctrl+T] -// |=== +// | Editor | Shortcut | +// |---------|-----------| +// | VS Code | <kbd>Ctrl+T</kbd> pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> { let _p = tracing::info_span!("world_symbols", query = ?query.query).entered(); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs index bbdeb7cf085..246330e6efa 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -12,7 +12,7 @@ use crate::{ // Diagnostic: incorrect-ident-case // -// This diagnostic is triggered if an item name doesn't follow https://doc.rust-lang.org/1.0.0/style/style/naming/README.html[Rust naming convention]. +// This diagnostic is triggered if an item name doesn't follow [Rust naming convention](https://doc.rust-lang.org/1.0.0/style/style/naming/README.html). pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Diagnostic { let code = match d.expected_case { CaseType::LowerSnakeCase => DiagnosticCode::RustcLint("non_snake_case"), @@ -936,6 +936,7 @@ fn func() { fn override_lint_level() { check_diagnostics( r#" +#![allow(unused_variables)] #[warn(nonstandard_style)] fn foo() { let BAR; @@ -992,6 +993,7 @@ struct QUX; const foo: i32 = 0; fn BAR() { let BAZ; + _ = BAZ; } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs index c7cdcf49820..5730508436d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs @@ -1129,4 +1129,39 @@ fn main() { "#, ); } + + #[test] + fn regression_18682() { + check_diagnostics( + r#" +//- minicore: coerce_unsized +struct Flexible { + body: [u8], +} + +trait Field { + type Type: ?Sized; +} + +impl Field for Flexible { + type Type = [u8]; +} + +trait KnownLayout { + type MaybeUninit: ?Sized; +} + + +impl<T> KnownLayout for [T] { + type MaybeUninit = [T]; +} + +struct ZerocopyKnownLayoutMaybeUninit(<<Flexible as Field>::Type as KnownLayout>::MaybeUninit); + +fn test(ptr: *mut [u8]) -> *mut ZerocopyKnownLayoutMaybeUninit { + ptr as *mut _ +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 7126617cdee..0520bb3fe9b 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -40,7 +40,7 @@ pub(crate) fn mismatched_arg_count( Diagnostic::new( DiagnosticCode::RustcHardError("E0107"), message, - invalid_args_range(ctx, d.call_expr.map(AstPtr::wrap_left), d.expected, d.found), + invalid_args_range(ctx, d.call_expr, d.expected, d.found), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 8117401a534..323a5723d4a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -1,5 +1,5 @@ use 
hir::db::ExpandDatabase; -use hir::{HirFileIdExt, UnsafetyReason}; +use hir::{HirFileIdExt, UnsafeLint, UnsafetyReason}; use ide_db::text_edit::TextEdit; use ide_db::{assists::Assist, source_change::SourceChange}; use syntax::{ast, SyntaxNode}; @@ -11,10 +11,10 @@ use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; // // This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block. pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Diagnostic { - let code = if d.only_lint { - DiagnosticCode::RustcLint("unsafe_op_in_unsafe_fn") - } else { - DiagnosticCode::RustcHardError("E0133") + let code = match d.lint { + UnsafeLint::HardError => DiagnosticCode::RustcHardError("E0133"), + UnsafeLint::UnsafeOpInUnsafeFn => DiagnosticCode::RustcLint("unsafe_op_in_unsafe_fn"), + UnsafeLint::DeprecatedSafe2024 => DiagnosticCode::RustcLint("deprecated_safe_2024"), }; let operation = display_unsafety_reason(d.reason); Diagnostic::new_with_syntax_node_ptr( @@ -585,25 +585,59 @@ fn main() { r#" //- /ed2021.rs crate:ed2021 edition:2021 #[rustc_deprecated_safe_2024] -unsafe fn safe() -> u8 { +unsafe fn deprecated_safe() -> u8 { 0 } + //- /ed2024.rs crate:ed2024 edition:2024 #[rustc_deprecated_safe_2024] -unsafe fn not_safe() -> u8 { +unsafe fn deprecated_safe() -> u8 { 0 } -//- /main.rs crate:main deps:ed2021,ed2024 + +//- /dep1.rs crate:dep1 deps:ed2021,ed2024 edition:2021 +fn main() { + ed2021::deprecated_safe(); + ed2024::deprecated_safe(); +} + +//- /dep2.rs crate:dep2 deps:ed2021,ed2024 edition:2024 +fn main() { + ed2021::deprecated_safe(); + // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 error: call to unsafe function is unsafe and requires an unsafe function or block + ed2024::deprecated_safe(); + // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 error: call to unsafe function is unsafe and requires an unsafe function or block +} + +//- /dep3.rs crate:dep3 deps:ed2021,ed2024 edition:2021 +#![warn(deprecated_safe)] + fn main() { - ed2021::safe(); - ed2024::not_safe(); - //^^^^^^^^^^^^^^^^^^💡 error: call to unsafe function is unsafe and requires an unsafe function or block + ed2021::deprecated_safe(); + // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 warn: call to unsafe function is unsafe and requires an unsafe function or block + ed2024::deprecated_safe(); + // ^^^^^^^^^^^^^^^^^^^^^^^^^💡 warn: call to unsafe function is unsafe and requires an unsafe function or block } "#, ) } #[test] + fn orphan_unsafe_format_args() { + // Checks that we don't place orphan arguments for formatting under an unsafe block. 
+ check_diagnostics( + r#" +//- minicore: fmt +fn foo() { + let p = 0xDEADBEEF as *const i32; + format_args!("", *p); + // ^^ error: dereference of raw pointer is unsafe and requires an unsafe function or block +} + "#, + ); + } + + #[test] fn unsafe_op_in_unsafe_fn_allowed_by_default_in_edition_2021() { check_diagnostics( r#" @@ -812,4 +846,36 @@ fn main() { "#, ) } + + #[test] + fn target_feature() { + check_diagnostics( + r#" +#[target_feature(enable = "avx")] +fn foo() {} + +#[target_feature(enable = "avx,avx2")] +fn bar() { + foo(); +} + +fn baz() { + foo(); + // ^^^^^ 💡 error: call to unsafe function is unsafe and requires an unsafe function or block +} + "#, + ); + } + + #[test] + fn unsafe_fn_ptr_call() { + check_diagnostics( + r#" +fn f(it: unsafe fn()){ + it(); + // ^^^^ 💡 error: call to unsafe function is unsafe and requires an unsafe function or block +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 13979791444..0e3c4c7aa36 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -831,13 +831,14 @@ fn f() { #[test] fn or_pattern() { - // FIXME: `None` is inferred as unknown here for some reason check_diagnostics( r#" //- minicore: option fn f(_: i32) {} fn main() { let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)) else { return }; + //^^^^^ 💡 warn: variable does not need to be mutable + f(x); } "#, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs new file mode 100644 index 00000000000..ccf51723418 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs @@ -0,0 +1,59 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: parenthesized-generic-args-without-fn-trait +// +// This diagnostic is shown when a `Fn`-trait-style generic parameters (`Trait(A, B) -> C`) +// was used on non-`Fn` trait/type. 
+pub(crate) fn parenthesized_generic_args_without_fn_trait( + ctx: &DiagnosticsContext<'_>, + d: &hir::ParenthesizedGenericArgsWithoutFnTrait, +) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0214"), + "parenthesized type parameters may only be used with a `Fn` trait", + d.args.map(Into::into), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn fn_traits_work() { + check_diagnostics( + r#" +//- minicore: async_fn, fn +fn foo< + A: Fn(), + B: FnMut() -> i32, + C: FnOnce(&str, bool), + D: AsyncFn::(u32) -> u32, + E: AsyncFnMut(), + F: AsyncFnOnce() -> bool, +>() {} + "#, + ); + } + + #[test] + fn non_fn_trait() { + check_diagnostics( + r#" +struct Struct<T>(T); +enum Enum<T> { EnumVariant(T) } +type TypeAlias<T> = bool; + +type Foo = TypeAlias() -> bool; + // ^^ error: parenthesized type parameters may only be used with a `Fn` trait + +fn foo(_a: Struct(i32)) { + // ^^^^^ error: parenthesized type parameters may only be used with a `Fn` trait + let _ = <Enum::(u32)>::EnumVariant(0); + // ^^^^^^^ error: parenthesized type parameters may only be used with a `Fn` trait +} + "#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 56afb38cc81..7cf8282d052 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{db::ExpandDatabase, CallableKind, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; +use hir::{db::ExpandDatabase, CallableKind, ClosureStyle, HirDisplay, HirFileIdExt, InFile}; use ide_db::{ famous_defs::FamousDefs, source_change::{SourceChange, SourceChangeBuilder}, @@ -88,7 +88,7 @@ fn add_reference( let range = ctx.sema.diagnostics_display_range((*expr_ptr).map(|it| it.into())); let (_, mutability) = d.expected.as_reference()?; - let actual_with_ref = Type::reference(&d.actual, mutability); + let actual_with_ref = d.actual.add_reference(mutability); if !actual_with_ref.could_coerce_to(ctx.sema.db, &d.expected) { return None; } @@ -1235,4 +1235,25 @@ fn f() { "#, ); } + + #[test] + fn complex_enum_variant_non_ref_pat() { + check_diagnostics( + r#" +enum Enum { Variant } + +trait Trait { + type Assoc; +} +impl Trait for () { + type Assoc = Enum; +} + +fn foo(v: &Enum) { + let <Enum>::Variant = v; + let <() as Trait>::Assoc::Variant = v; +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 4accd181ca4..dfb03eee732 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -1,5 +1,6 @@ use std::iter; +use either::Either; use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union}; use ide_db::text_edit::TextEdit; use ide_db::{ @@ -41,7 +42,7 @@ pub(crate) fn unresolved_field( ), adjusted_display_range(ctx, d.expr, &|expr| { Some( - match expr { + match expr.left()? 
{ ast::Expr::MethodCallExpr(it) => it.name_ref(), ast::Expr::FieldExpr(it) => it.name_ref(), _ => None, @@ -72,7 +73,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<A fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Assist> { // Get the FileRange of the invalid field access let root = ctx.sema.db.parse_or_expand(d.expr.file_id); - let expr = d.expr.value.to_node(&root); + let expr = d.expr.value.to_node(&root).left()?; let error_range = ctx.sema.original_range_opt(expr.syntax())?; let field_name = d.name.as_str(); @@ -263,7 +264,7 @@ fn record_field_layout( // FIXME: We should fill out the call here, move the cursor and trigger signature help fn method_fix( ctx: &DiagnosticsContext<'_>, - expr_ptr: &InFile<AstPtr<ast::Expr>>, + expr_ptr: &InFile<AstPtr<Either<ast::Expr, ast::Pat>>>, ) -> Option<Assist> { let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id); let expr = expr_ptr.value.to_node(&root); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 4ab649cc162..e4de107249b 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, AssocItem, FileRange, HirDisplay, InFile}; +use hir::{db::ExpandDatabase, FileRange, HirDisplay, InFile}; use ide_db::text_edit::TextEdit; use ide_db::{ assists::{Assist, AssistId, AssistKind}, @@ -35,7 +35,7 @@ pub(crate) fn unresolved_method( ), adjusted_display_range(ctx, d.expr, &|expr| { Some( - match expr { + match expr.left()? { ast::Expr::MethodCallExpr(it) => it.name_ref(), ast::Expr::FieldExpr(it) => it.name_ref(), _ => None, @@ -85,7 +85,7 @@ fn field_fix( let expr_ptr = &d.expr; let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id); let expr = expr_ptr.value.to_node(&root); - let (file_id, range) = match expr { + let (file_id, range) = match expr.left()? { ast::Expr::MethodCallExpr(mcall) => { let FileRange { range, file_id } = ctx.sema.original_range_opt(mcall.receiver()?.syntax())?; @@ -112,12 +112,12 @@ fn field_fix( } fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> { - if let Some(assoc_item_id) = d.assoc_func_with_same_name { + if let Some(f) = d.assoc_func_with_same_name { let db = ctx.sema.db; let expr_ptr = &d.expr; let root = db.parse_or_expand(expr_ptr.file_id); - let expr: ast::Expr = expr_ptr.value.to_node(&root); + let expr: ast::Expr = expr_ptr.value.to_node(&root).left()?; let call = ast::MethodCallExpr::cast(expr.syntax().clone())?; let range = InFile::new(expr_ptr.file_id, call.syntax().text_range()) @@ -127,30 +127,25 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) - let receiver = call.receiver()?; let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original; - let need_to_take_receiver_as_first_arg = match hir::AssocItem::from(assoc_item_id) { - AssocItem::Function(f) => { - let assoc_fn_params = f.assoc_fn_params(db); - if assoc_fn_params.is_empty() { - false - } else { - assoc_fn_params - .first() - .map(|first_arg| { - // For generic type, say `Box`, take `Box::into_raw(b: Self)` as example, - // type of `b` is `Self`, which is `Box<T, A>`, containing unspecified generics. 
- // However, type of `receiver` is specified, it could be `Box<i32, Global>` or something like that, - // so `first_arg.ty() == receiver_type` evaluate to `false` here. - // Here add `first_arg.ty().as_adt() == receiver_type.as_adt()` as guard, - // apply `.as_adt()` over `Box<T, A>` or `Box<i32, Global>` gets `Box`, so we get `true` here. - - // FIXME: it fails when type of `b` is `Box` with other generic param different from `receiver` - first_arg.ty() == receiver_type - || first_arg.ty().as_adt() == receiver_type.as_adt() - }) - .unwrap_or(false) - } - } - _ => false, + let assoc_fn_params = f.assoc_fn_params(db); + let need_to_take_receiver_as_first_arg = if assoc_fn_params.is_empty() { + false + } else { + assoc_fn_params + .first() + .map(|first_arg| { + // For generic type, say `Box`, take `Box::into_raw(b: Self)` as example, + // type of `b` is `Self`, which is `Box<T, A>`, containing unspecified generics. + // However, type of `receiver` is specified, it could be `Box<i32, Global>` or something like that, + // so `first_arg.ty() == receiver_type` evaluate to `false` here. + // Here add `first_arg.ty().as_adt() == receiver_type.as_adt()` as guard, + // apply `.as_adt()` over `Box<T, A>` or `Box<i32, Global>` gets `Box`, so we get `true` here. + + // FIXME: it fails when type of `b` is `Box` with other generic param different from `receiver` + first_arg.ty() == receiver_type + || first_arg.ty().as_adt() == receiver_type.as_adt() + }) + .unwrap_or(false) }; let mut receiver_type_adt_name = diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs index 67ece566941..d5caf4de336 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs @@ -263,4 +263,17 @@ fn main() { "#, ); } + + // regression test as we used to panic in this scenario + #[test] + fn unknown_struct_pattern_param_type() { + check_diagnostics( + r#" +struct S { field : u32 } +fn f(S { field }: error) { + // ^^^^^ 💡 warn: unused variable +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 50c91a69602..3ea41aa7e85 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -43,6 +43,7 @@ mod handlers { pub(crate) mod mutability_errors; pub(crate) mod no_such_field; pub(crate) mod non_exhaustive_let; + pub(crate) mod parenthesized_generic_args_without_fn_trait; pub(crate) mod private_assoc_item; pub(crate) mod private_field; pub(crate) mod remove_trailing_return; @@ -466,7 +467,12 @@ pub fn semantic_diagnostics( Some(it) => it, None => continue, }, - AnyDiagnostic::GenericArgsProhibited(d) => handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d) + AnyDiagnostic::GenericArgsProhibited(d) => { + handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d) + } + AnyDiagnostic::ParenthesizedGenericArgsWithoutFnTrait(d) => { + handlers::parenthesized_generic_args_without_fn_trait::parenthesized_generic_args_without_fn_trait(&ctx, &d) + } }; res.push(d) } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index 6b654f89345..889258c94c5 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ 
b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -33,12 +33,10 @@ // // Supported constraints: // -// |=== -// | Constraint | Restricts placeholder -// -// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`) -// | not(a) | Negates the constraint `a` -// |=== +// | Constraint | Restricts placeholder | +// |---------------|------------------------| +// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`) | +// | not(a) | Negates the constraint `a` | // // Available via the command `rust-analyzer.ssr`. // @@ -52,11 +50,9 @@ // String::from((y + 5).foo(z)) // ``` // -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: Structural Search Replace** -// |=== +// | Editor | Action Name | +// |---------|--------------| +// | VS Code | **rust-analyzer: Structural Search Replace** | // // Also available as an assist, by writing a comment containing the structural // search and replace rule. You will only see the assist if the comment can diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs index 18f866eb9fc..e47891bbdfe 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs @@ -1,6 +1,6 @@ use hir::{HasSource, InFile, InRealFile, Semantics}; use ide_db::{ - defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxHashSet, + defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxIndexSet, RootDatabase, }; use itertools::Itertools; @@ -21,7 +21,7 @@ mod fn_references; // Provides user with annotations above items for looking up references or impl blocks // and running/debugging binaries. // -// image::https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png[] +//  #[derive(Debug, Hash, PartialEq, Eq)] pub struct Annotation { pub range: TextRange, @@ -55,7 +55,7 @@ pub(crate) fn annotations( config: &AnnotationConfig, file_id: FileId, ) -> Vec<Annotation> { - let mut annotations = FxHashSet::default(); + let mut annotations = FxIndexSet::default(); if config.annotate_runnables { for runnable in runnables(db, file_id) { @@ -170,7 +170,12 @@ pub(crate) fn annotations( })); } - annotations.into_iter().sorted_by_key(|a| (a.range.start(), a.range.end())).collect() + annotations + .into_iter() + .sorted_by_key(|a| { + (a.range.start(), a.range.end(), matches!(a.kind, AnnotationKind::Runnable(..))) + }) + .collect() } pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation { @@ -537,6 +542,20 @@ fn main() { }, Annotation { range: 69..73, + kind: HasReferences { + pos: FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 69, + }, + data: Some( + [], + ), + }, + }, + Annotation { + range: 69..73, kind: Runnable( Runnable { use_name_in_title: false, @@ -559,20 +578,6 @@ fn main() { }, ), }, - Annotation { - range: 69..73, - kind: HasReferences { - pos: FilePositionWrapper { - file_id: FileId( - 0, - ), - offset: 69, - }, - data: Some( - [], - ), - }, - }, ] "#]], ); @@ -719,6 +724,20 @@ fn main() { }, Annotation { range: 61..65, + kind: HasReferences { + pos: FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 61, + }, + data: Some( + [], + ), + }, + }, + Annotation { + range: 61..65, kind: Runnable( Runnable { use_name_in_title: false, @@ -741,20 +760,6 @@ fn main() { }, ), }, - Annotation { - range: 61..65, - kind: HasReferences { - pos: FilePositionWrapper { - file_id: FileId( - 0, - ), - 
offset: 61, - }, - data: Some( - [], - ), - }, - }, ] "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index cfd8919730a..e35e47e7471 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -122,11 +122,9 @@ pub(crate) fn remove_links(markdown: &str) -> String { // The simplest way to use this feature is via the context menu. Right-click on // the selected item. The context menu opens. Select **Open Docs**. // -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: Open Docs** -// |=== +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Open Docs** | pub(crate) fn external_docs( db: &RootDatabase, FilePosition { file_id, offset }: FilePosition, diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index 0ad894427b2..ad4308e06a1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -19,13 +19,11 @@ pub struct ExpandedMacro { // // Shows the full macro expansion of the macro at the current caret position. // -// |=== -// | Editor | Action Name +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Expand macro recursively at caret** | // -// | VS Code | **rust-analyzer: Expand macro recursively at caret** -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[] +//  pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> { let sema = Semantics::new(db); let file = sema.parse_guess_edition(position.file_id); diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs index 3d49082f285..76414854e91 100644 --- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs +++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs @@ -17,13 +17,11 @@ use crate::FileRange; // Extends or shrinks the current selection to the encompassing syntactic construct // (expression, statement, item, module, etc). It works with multiple cursors. 
// -// |=== -// | Editor | Shortcut +// | Editor | Shortcut | +// |---------|----------| +// | VS Code | <kbd>Alt+Shift+→</kbd>, <kbd>Alt+Shift+←</kbd> | // -// | VS Code | kbd:[Alt+Shift+→], kbd:[Alt+Shift+←] -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif[] +//  pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { let sema = Semantics::new(db); let src = sema.parse_guess_edition(frange.file_id); diff --git a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs index 37b3cb03b33..5ed21444307 100644 --- a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs +++ b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs @@ -14,13 +14,11 @@ pub struct CrateInfo { // // Shows a view tree with all the dependencies of this project // -// |=== -// | Editor | Panel Name +// | Editor | Panel Name | +// |---------|------------| +// | VS Code | **Rust Dependencies** | // -// | VS Code | **Rust Dependencies** -// |=== -// -// image::https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png[] +//  pub(crate) fn fetch_crates(db: &RootDatabase) -> FxIndexSet<CrateInfo> { let crate_graph = db.crate_graph(); crate_graph diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs index 50977ee840c..52fbab6fa12 100644 --- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs +++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs @@ -31,14 +31,11 @@ pub enum StructureNodeKind { // * draw breadcrumbs to describe the context around the cursor // * draw outline of the file // -// |=== -// | Editor | Shortcut +// | Editor | Shortcut | +// |---------|----------| +// | VS Code | <kbd>Ctrl+Shift+O</kbd> | // -// | VS Code | kbd:[Ctrl+Shift+O] -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif[] - +//  pub(crate) fn file_structure(file: &SourceFile) -> Vec<StructureNode> { let mut res = Vec::new(); let mut stack = Vec::new(); diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index d18732a6b84..60a904233a9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -31,13 +31,11 @@ use syntax::{ // // For outline modules, this will navigate to the source file of the module. 
// -// |=== -// | Editor | Shortcut +// | Editor | Shortcut | +// |---------|----------| +// | VS Code | <kbd>F12</kbd> | // -// | VS Code | kbd:[F12] -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[] +//  pub(crate) fn goto_definition( db: &RootDatabase, FilePosition { file_id, offset }: FilePosition, @@ -3274,4 +3272,56 @@ fn f() { "#, ); } + + #[test] + fn use_inside_body() { + check( + r#" +fn main() { + mod nice_module { + pub(super) struct NiceStruct; + // ^^^^^^^^^^ + } + + use nice_module::NiceStruct$0; + + let _ = NiceStruct; +} + "#, + ); + } + + #[test] + fn shadow_builtin_type_by_module() { + check( + r#" +mod Foo{ +pub mod str { + // ^^^ + pub fn foo() {} +} +} + +fn main() { + use Foo::str; + let s = st$0r::foo(); +} +"#, + ); + } + + #[test] + fn not_goto_module_because_str_is_builtin_type() { + check( + r#" +mod str { +pub fn foo() {} +} + +fn main() { + let s = st$0r::f(); +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs index e926378367e..e1d834b5d1c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs @@ -12,13 +12,11 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; // // Navigates to the impl items of types. // -// |=== -// | Editor | Shortcut +// | Editor | Shortcut | +// |---------|----------| +// | VS Code | <kbd>Ctrl+F12</kbd> // -// | VS Code | kbd:[Ctrl+F12] -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[] +//  pub(crate) fn goto_implementation( db: &RootDatabase, FilePosition { file_id, offset }: FilePosition, diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index 2610d6c8863..ddc274a8303 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -8,13 +8,11 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; // // Navigates to the type of an identifier. // -// |=== -// | Editor | Action Name +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **Go to Type Definition** | // -// | VS Code | **Go to Type Definition** -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif[] +//  pub(crate) fn goto_type_definition( db: &RootDatabase, FilePosition { file_id, offset }: FilePosition, diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index 612bc36f628..6463206596a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -43,12 +43,12 @@ pub struct HighlightRelatedConfig { // // Highlights constructs related to the thing under the cursor: // -// . if on an identifier, highlights all references to that identifier in the current file -// .. additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope -// . if on an `async` or `await` token, highlights all yield points for that async context -// . 
if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context -// . if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context -// . if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure. +// 1. if on an identifier, highlights all references to that identifier in the current file +// * additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope +// 1. if on an `async` or `await` token, highlights all yield points for that async context +// 1. if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context +// 1. if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context +// 1. if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure. // // Note: `?`, `|` and `->` do not currently trigger this behavior in the VSCode editor. pub(crate) fn highlight_related( diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 9d4c103fc2e..95a720e7e45 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -118,7 +118,7 @@ pub struct HoverResult { // Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code. // Focusing is usually hovering with a mouse, but can also be triggered with a shortcut. // -// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[] +//  pub(crate) fn hover( db: &RootDatabase, frange @ FileRange { file_id, range }: FileRange, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 40f3406b72d..c996230c3a1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -434,6 +434,7 @@ fn definition_owner_name(db: &RootDatabase, def: Definition, edition: Edition) - None => it.name(db), } } + hir::GenericDef::Static(it) => Some(it.name(db)), }, Definition::DeriveHelper(derive_helper) => Some(derive_helper.derive().name(db)), d => { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 1f723c85df7..63039b1cd34 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -59,7 +59,7 @@ mod range_exclusive; // // Note: inlay hints for function argument names are heuristically omitted to reduce noise and will not appear if // any of the -// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99[following criteria] +// [following criteria](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99) // are met: // // * the parameter name is a suffix of the function's name @@ -68,13 +68,13 @@ mod range_exclusive; // of argument with _ splitting it off // * the parameter name starts with `ra_fixture` // * the parameter name is a -// 
link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200[well known name] +// [well known name](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200) // in a unary function // * the parameter name is a -// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201[single character] +// [single character](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201) // in a unary function // -// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[] +//  pub(crate) fn inlay_hints( db: &RootDatabase, file_id: FileId, @@ -294,6 +294,7 @@ pub struct InlayHintsConfig { pub param_names_for_lifetime_elision_hints: bool, pub hide_named_constructor_hints: bool, pub hide_closure_initialization_hints: bool, + pub hide_closure_parameter_hints: bool, pub range_exclusive_hints: bool, pub closure_style: ClosureStyle, pub max_length: Option<usize>, @@ -860,6 +861,7 @@ mod tests { binding_mode_hints: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, + hide_closure_parameter_hints: false, closure_style: ClosureStyle::ImplFn, param_names_for_lifetime_elision_hints: false, max_length: None, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 2acd4021cc1..d3b95750f7e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -13,11 +13,7 @@ use ide_db::famous_defs::FamousDefs; use ide_db::text_edit::TextEditBuilder; use span::EditionedFileId; -use stdx::never; -use syntax::{ - ast::{self, make, AstNode}, - ted, -}; +use syntax::ast::{self, prec::ExprPrecedence, AstNode}; use crate::{ AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintLabel, InlayHintLabelPart, @@ -104,12 +100,14 @@ pub(super) fn hints( }; let iter: &mut dyn Iterator<Item = _> = iter.as_mut().either(|it| it as _, |it| it as _); + let mut has_adjustments = false; let mut allow_edit = !postfix; for Adjustment { source, target, kind } in iter { if source == target { cov_mark::hit!(same_type_adjustment); continue; } + has_adjustments = true; // FIXME: Add some nicer tooltips to each of these let (text, coercion) = match kind { @@ -172,6 +170,10 @@ pub(super) fn hints( }; if postfix { &mut post } else { &mut pre }.label.append_part(label); } + if !has_adjustments { + return None; + } + if !postfix && needs_inner_parens { pre.label.append_str("("); } @@ -254,71 +256,31 @@ fn mode_and_needs_parens_for_adjustment_hints( /// Returns whatever we need to add parentheses on the inside and/or outside of `expr`, /// if we are going to add (`postfix`) adjustments hints to it. fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) { - // This is a very miserable pile of hacks... - // - // `Expr::needs_parens_in` requires that the expression is the child of the other expression, - // that is supposed to be its parent. - // - // But we want to check what would happen if we add `*`/`.*` to the inner expression. 
- // To check for inner we need `` expr.needs_parens_in(`*expr`) ``, - // to check for outer we need `` `*expr`.needs_parens_in(parent) ``, - // where "expr" is the `expr` parameter, `*expr` is the edited `expr`, - // and "parent" is the parent of the original expression... - // - // For this we utilize mutable trees, which is a HACK, but it works. - // - // FIXME: comeup with a better API for `needs_parens_in`, so that we don't have to do *this* - - // Make `&expr`/`expr?` - let dummy_expr = { - // `make::*` function go through a string, so they parse wrongly. - // for example `` make::expr_try(`|| a`) `` would result in a - // `|| (a?)` and not `(|| a)?`. - // - // Thus we need dummy parens to preserve the relationship we want. - // The parens are then simply ignored by the following code. - let dummy_paren = make::expr_paren(expr.clone()); - if postfix { - make::expr_try(dummy_paren) - } else { - make::expr_ref(dummy_paren, false) - } - }; - - // Do the dark mutable tree magic. - // This essentially makes `dummy_expr` and `expr` switch places (families), - // so that `expr`'s parent is not `dummy_expr`'s parent. - let dummy_expr = dummy_expr.clone_for_update(); - let expr = expr.clone_for_update(); - ted::replace(expr.syntax(), dummy_expr.syntax()); - - let parent = dummy_expr.syntax().parent(); - let Some(expr) = (|| { - if postfix { - let ast::Expr::TryExpr(e) = &dummy_expr else { return None }; - let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None }; - - e.expr() - } else { - let ast::Expr::RefExpr(e) = &dummy_expr else { return None }; - let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None }; - - e.expr() - } - })() else { - never!("broken syntax tree?\n{:?}\n{:?}", expr, dummy_expr); - return (true, true); - }; - - // At this point - // - `parent` is the parent of the original expression - // - `dummy_expr` is the original expression wrapped in the operator we want (`*`/`.*`) - // - `expr` is the clone of the original expression (with `dummy_expr` as the parent) - - let needs_outer_parens = parent.is_some_and(|p| dummy_expr.needs_parens_in(p)); - let needs_inner_parens = expr.needs_parens_in(dummy_expr.syntax().clone()); - - (needs_outer_parens, needs_inner_parens) + let prec = expr.precedence(); + if postfix { + // postfix ops have higher precedence than any other operator, so we need to wrap + // any inner expression that is below (except for jumps if they don't have a value) + let needs_inner_parens = prec < ExprPrecedence::Unambiguous && { + prec != ExprPrecedence::Jump || !expr.is_ret_like_with_no_value() + }; + // given we are the higher precedence, no parent expression will have stronger requirements + let needs_outer_parens = false; + (needs_outer_parens, needs_inner_parens) + } else { + // We need to wrap all binary like things, thats everything below prefix except for jumps + let needs_inner_parens = prec < ExprPrecedence::Prefix && prec != ExprPrecedence::Jump; + let parent = expr + .syntax() + .parent() + .and_then(ast::Expr::cast) + // if we are already wrapped, great, no need to wrap again + .filter(|it| !matches!(it, ast::Expr::ParenExpr(_))) + .map(|it| it.precedence()); + // if we have no parent, we don't need outer parens to disambiguate + // otherwise anything with higher precedence than what we insert needs to wrap us + let needs_outer_parens = parent.is_some_and(|prec| prec > ExprPrecedence::Prefix); + (needs_outer_parens, needs_inner_parens) + } } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs 
b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 01a1a4545c4..c2986a9aa66 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -36,6 +36,9 @@ pub(super) fn hints( if it.ty().is_some() { return None; } + if config.hide_closure_parameter_hints && it.syntax().ancestors().nth(2).is_none_or(|n| matches!(ast::Expr::cast(n), Some(ast::Expr::ClosureExpr(_)))) { + return None; + } Some(it.colon_token()) }, ast::LetStmt(it) => { @@ -950,6 +953,36 @@ fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 { } #[test] + fn skip_closure_parameter_hints() { + check_with_config( + InlayHintsConfig { + type_hints: true, + hide_closure_parameter_hints: true, + ..DISABLED_CONFIG + }, + r#" +//- minicore: fn +struct Foo; +impl Foo { + fn foo(self: Self) {} + fn bar(self: &Self) {} +} +fn main() { + let closure = |x, y| x + y; + // ^^^^^^^ impl Fn(i32, i32) -> {unknown} + closure(2, 3); + let point = (10, 20); + // ^^^^^ (i32, i32) + let (x, y) = point; + // ^ i32 ^ i32 + Foo::foo(Foo); + Foo::bar(&Foo); +} +"#, + ); + } + + #[test] fn hint_truncation() { check_with_config( InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG }, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs index 3e91618d08e..9b981c0a3ac 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs @@ -1,4 +1,4 @@ -//! Implementation of "closure return type" inlay hints. +//! Implementation of "closure captures" inlay hints. //! //! Tests live in [`bind_pat`][super::bind_pat] module. 
use ide_db::famous_defs::FamousDefs; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs index 2bc91b68ed8..652dff0bc56 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs @@ -7,7 +7,7 @@ use crate::{InlayHint, InlayHintsConfig}; pub(super) fn extern_block_hints( acc: &mut Vec<InlayHint>, - FamousDefs(_sema, _): &FamousDefs<'_, '_>, + FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, _file_id: EditionedFileId, extern_block: ast::ExternBlock, @@ -16,6 +16,7 @@ pub(super) fn extern_block_hints( return None; } let abi = extern_block.abi()?; + sema.to_def(&extern_block)?; acc.push(InlayHint { range: abi.syntax().text_range(), position: crate::InlayHintPosition::Before, @@ -33,7 +34,7 @@ pub(super) fn extern_block_hints( pub(super) fn fn_hints( acc: &mut Vec<InlayHint>, - FamousDefs(_sema, _): &FamousDefs<'_, '_>, + FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, _file_id: EditionedFileId, fn_: &ast::Fn, @@ -43,14 +44,16 @@ pub(super) fn fn_hints( if !implicit_unsafe { return None; } - let fn_ = fn_.fn_token()?; - acc.push(item_hint(config, extern_block, fn_)); + let fn_token = fn_.fn_token()?; + if sema.to_def(fn_).is_some_and(|def| def.extern_block(sema.db).is_some()) { + acc.push(item_hint(config, extern_block, fn_token)); + } Some(()) } pub(super) fn static_hints( acc: &mut Vec<InlayHint>, - FamousDefs(_sema, _): &FamousDefs<'_, '_>, + FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, _file_id: EditionedFileId, static_: &ast::Static, @@ -60,8 +63,10 @@ pub(super) fn static_hints( if !implicit_unsafe { return None; } - let static_ = static_.static_token()?; - acc.push(item_hint(config, extern_block, static_)); + let static_token = static_.static_token()?; + if sema.to_def(static_).is_some_and(|def| def.extern_block(sema.db).is_some()) { + acc.push(item_hint(config, extern_block, static_token)); + } Some(()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index 27c7c3d4981..390139d214e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -54,7 +54,8 @@ pub(super) fn hints( }; let range = match terminator.span { MirSpan::ExprId(e) => match source_map.expr_syntax(e) { - Ok(s) => { + // don't show inlay hint for macro + Ok(s) if !s.file_id.is_macro() => { let root = &s.file_syntax(sema.db); let expr = s.value.to_node(root); let expr = expr.syntax(); @@ -69,11 +70,11 @@ pub(super) fn hints( } } } - Err(_) => continue, + _ => continue, }, MirSpan::PatId(p) => match source_map.pat_syntax(p) { - Ok(s) => s.value.text_range(), - Err(_) => continue, + Ok(s) if !s.file_id.is_macro() => s.value.text_range(), + _ => continue, }, MirSpan::BindingId(b) => { match source_map @@ -81,13 +82,13 @@ pub(super) fn hints( .iter() .find_map(|p| source_map.pat_syntax(*p).ok()) { - Some(s) => s.value.text_range(), - None => continue, + Some(s) if !s.file_id.is_macro() => s.value.text_range(), + _ => continue, } } MirSpan::SelfParam => match source_map.self_param_syntax() { - Some(s) => s.value.text_range(), - None => continue, + Some(s) if !s.file_id.is_macro() => s.value.text_range(), + _ => continue, }, MirSpan::Unknown => continue, }; @@ -231,4 +232,25 @@ 
mod tests { "#, ); } + + #[test] + fn ignore_inlay_hint_for_macro_call() { + check_with_config( + ONLY_DROP_CONFIG, + r#" + struct X; + + macro_rules! my_macro { + () => {{ + let bbb = X; + bbb + }}; + } + + fn test() -> X { + my_macro!() + } +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret.rs b/src/tools/rust-analyzer/crates/ide/src/interpret.rs index e0fdc3dd6f9..ae11072e34b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/interpret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/interpret.rs @@ -7,11 +7,9 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange}; // Feature: Interpret A Function, Static Or Const. // -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: Interpret** -// |=== +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Interpret** | pub(crate) fn interpret(db: &RootDatabase, position: FilePosition) -> String { match find_and_interpret(db, position) { Some((duration, mut result)) => { diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs index e4670177ecf..ea18a97070c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs +++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs @@ -21,17 +21,13 @@ pub struct JoinLinesConfig { // // Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces. // -// See -// https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif[this gif] -// for the cases handled specially by joined lines. +// See [this gif](https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif) for the cases handled specially by joined lines. // -// |=== -// | Editor | Action Name +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Join lines** | // -// | VS Code | **rust-analyzer: Join lines** -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[] +//  pub(crate) fn join_lines( config: &JoinLinesConfig, file: &SourceFile, diff --git a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs index 57356152836..67346ea9cf9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs @@ -9,13 +9,11 @@ use syntax::{ // moves cursor to the matching brace. It uses the actual parser to determine // braces, so it won't confuse generics with comparisons. 
// -// |=== -// | Editor | Action Name +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Find matching brace** | // -// | VS Code | **rust-analyzer: Find matching brace** -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[] +//  pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> { const BRACES: &[SyntaxKind] = &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]]; diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index d97c12ebafb..66ea49a98a0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -289,7 +289,10 @@ fn def_to_non_local_moniker( definition: Definition, from_crate: Crate, ) -> Option<Moniker> { - let module = definition.module(db)?; + let module = match definition { + Definition::Module(module) if module.is_crate_root() => module, + _ => definition.module(db)?, + }; let krate = module.krate(); let edition = krate.edition(db); @@ -322,12 +325,18 @@ fn def_to_non_local_moniker( name: name.display(db, edition).to_string(), desc: def_to_kind(db, def).into(), }); - } else if reverse_description.is_empty() { - // Don't allow the last descriptor to be absent. - return None; } else { match def { - Definition::Module(module) if module.is_crate_root() => {} + Definition::Module(module) if module.is_crate_root() => { + // only include `crate` namespace by itself because we prefer + // `rust-analyzer cargo foo . bar/` over `rust-analyzer cargo foo . crate/bar/` + if reverse_description.is_empty() { + reverse_description.push(MonikerDescriptor { + name: "crate".to_owned(), + desc: MonikerDescriptorKind::Namespace, + }); + } + } _ => { tracing::error!(?def, "Encountered enclosing definition with no name"); } @@ -340,6 +349,9 @@ fn def_to_non_local_moniker( }; def = next_def; } + if reverse_description.is_empty() { + return None; + } reverse_description.reverse(); let description = reverse_description; diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs index b0df9257ba1..3fb3a788b91 100644 --- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs +++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs @@ -17,14 +17,12 @@ pub enum Direction { // // Move item under cursor or selection up and down. // -// |=== -// | Editor | Action Name -// +// | Editor | Action Name | +// |---------|-------------| // | VS Code | **rust-analyzer: Move item up** // | VS Code | **rust-analyzer: Move item down** -// |=== // -// image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[] +//  pub(crate) fn move_item( db: &RootDatabase, range: FileRange, diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs index 7a0c28d925a..6d82f9b0634 100644 --- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs +++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs @@ -15,13 +15,11 @@ use crate::NavigationTarget; // // Navigates to the parent module of the current module. 
// -// |=== -// | Editor | Action Name +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Locate parent module** | // -// | VS Code | **rust-analyzer: Locate parent module** -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[] +//  /// This returns `Vec` because a module may be included from several places. pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index b1079312d3b..069818d50e7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -43,13 +43,11 @@ pub struct Declaration { // // Shows all references of the item at the cursor location // -// |=== -// | Editor | Shortcut +// | Editor | Shortcut | +// |---------|----------| +// | VS Code | <kbd>Shift+Alt+F12</kbd> | // -// | VS Code | kbd:[Shift+Alt+F12] -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif[] +//  pub(crate) fn find_all_refs( sema: &Semantics<'_, RootDatabase>, position: FilePosition, diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index 07dfd83c4eb..3e8295e3f08 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -71,13 +71,11 @@ pub(crate) fn prepare_rename( // // Renames the item below the cursor and all of its references // -// |=== -// | Editor | Shortcut +// | Editor | Shortcut | +// |---------|----------| +// | VS Code | <kbd>F2</kbd> | // -// | VS Code | kbd:[F2] -// |=== -// -// image::https://user-images.githubusercontent.com/48062697/113065582-055aae80-91b1-11eb-8ade-2b58e6d81883.gif[] +//  pub(crate) fn rename( db: &RootDatabase, position: FilePosition, @@ -2003,13 +2001,11 @@ impl Foo { "foo", r#" fn f($0self) -> i32 { - use self as _; self.i } "#, r#" fn f(foo: _) -> i32 { - use self as _; foo.i } "#, @@ -2017,6 +2013,26 @@ fn f(foo: _) -> i32 { } #[test] + fn no_type_value_ns_confuse() { + // Test that we don't rename items from different namespaces. 
+ check( + "bar", + r#" +struct foo {} +fn f(foo$0: i32) -> i32 { + use foo as _; +} +"#, + r#" +struct foo {} +fn f(bar: i32) -> i32 { + use foo as _; +} +"#, + ); + } + + #[test] fn test_self_in_path_to_parameter() { check( "foo", diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 32edacee51c..509ae3204c3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -4,8 +4,8 @@ use arrayvec::ArrayVec; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; use hir::{ - db::HirDatabase, sym, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, HirFileIdExt, - ModPath, Name, PathKind, Semantics, Symbol, + db::HirDatabase, sym, symbols::FxIndexSet, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, + HasSource, HirFileIdExt, ModPath, Name, PathKind, Semantics, Symbol, }; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::{ @@ -13,7 +13,7 @@ use ide_db::{ documentation::docs_from_attrs, helpers::visit_file_defs, search::{FileReferenceNode, SearchScope}, - FilePosition, FxHashMap, FxHashSet, RootDatabase, SymbolKind, + FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind, }; use itertools::Itertools; use smallvec::SmallVec; @@ -61,8 +61,8 @@ pub enum RunnableKind { #[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] enum RunnableDiscKind { - Test, TestMod, + Test, DocTest, Bench, Bin, @@ -119,19 +119,18 @@ impl Runnable { // location**. Super useful for repeatedly running just a single test. Do bind this // to a shortcut! // -// |=== -// | Editor | Action Name +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Run** | // -// | VS Code | **rust-analyzer: Run** -// |=== -// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[] +//  pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { let sema = Semantics::new(db); let mut res = Vec::new(); // Record all runnables that come from macro expansions here instead. // In case an expansion creates multiple runnables we want to name them to avoid emitting a bunch of equally named runnables. - let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default(); + let mut in_macro_expansion = FxIndexMap::<hir::HirFileId, Vec<Runnable>>::default(); let mut add_opt = |runnable: Option<Runnable>, def| { if let Some(runnable) = runnable.filter(|runnable| runnable.nav.file_id == file_id) { if let Some(def) = def { @@ -183,20 +182,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { r }) })); - res.sort_by(|Runnable { nav, kind, .. }, Runnable { nav: nav_b, kind: kind_b, .. }| { - // full_range.start < focus_range.start < name, should give us a decent unique ordering - nav.full_range - .start() - .cmp(&nav_b.full_range.start()) - .then_with(|| { - let t_0 = || TextSize::from(0); - nav.focus_range - .map_or_else(t_0, |it| it.start()) - .cmp(&nav_b.focus_range.map_or_else(t_0, |it| it.start())) - }) - .then_with(|| kind.disc().cmp(&kind_b.disc())) - .then_with(|| nav.name.cmp(&nav_b.name)) - }); + res.sort_by(cmp_runnables); res } @@ -207,23 +193,39 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { // The simplest way to use this feature is via the context menu. Right-click on // the selected item. The context menu opens. Select **Peek Related Tests**. 
// -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: Peek Related Tests** -// |=== +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Peek Related Tests** | pub(crate) fn related_tests( db: &RootDatabase, position: FilePosition, search_scope: Option<SearchScope>, ) -> Vec<Runnable> { let sema = Semantics::new(db); - let mut res: FxHashSet<Runnable> = FxHashSet::default(); + let mut res: FxIndexSet<Runnable> = FxIndexSet::default(); let syntax = sema.parse_guess_edition(position.file_id).syntax().clone(); find_related_tests(&sema, &syntax, position, search_scope, &mut res); - res.into_iter().collect() + res.into_iter().sorted_by(cmp_runnables).collect() +} + +fn cmp_runnables( + Runnable { nav, kind, .. }: &Runnable, + Runnable { nav: nav_b, kind: kind_b, .. }: &Runnable, +) -> std::cmp::Ordering { + // full_range.start < focus_range.start < name, should give us a decent unique ordering + nav.full_range + .start() + .cmp(&nav_b.full_range.start()) + .then_with(|| { + let t_0 = || TextSize::from(0); + nav.focus_range + .map_or_else(t_0, |it| it.start()) + .cmp(&nav_b.focus_range.map_or_else(t_0, |it| it.start())) + }) + .then_with(|| kind.disc().cmp(&kind_b.disc())) + .then_with(|| nav.name.cmp(&nav_b.name)) } fn find_related_tests( @@ -231,7 +233,7 @@ fn find_related_tests( syntax: &SyntaxNode, position: FilePosition, search_scope: Option<SearchScope>, - tests: &mut FxHashSet<Runnable>, + tests: &mut FxIndexSet<Runnable>, ) { // FIXME: why is this using references::find_defs, this should use ide_db::search let defs = match references::find_defs(sema, syntax, position.offset) { @@ -271,7 +273,7 @@ fn find_related_tests_in_module( syntax: &SyntaxNode, fn_def: &ast::Fn, parent_module: &hir::Module, - tests: &mut FxHashSet<Runnable>, + tests: &mut FxIndexSet<Runnable>, ) { let fn_name = match fn_def.name() { Some(it) => it, @@ -1231,8 +1233,8 @@ gen_main!(); "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..315, name: \"\", kind: Module })", "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 267..292, focus_range: 271..276, name: \"tests\", kind: Module, description: \"mod tests\" })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 283..290, name: \"foo_test\", kind: Function })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)", "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)", "(Bin, NavigationTarget { file_id: FileId(0), full_range: 302..314, name: \"main\", kind: Function })", ] "#]], @@ -1261,10 +1263,10 @@ foo!(); "#, expect![[r#" [ + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)", "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)", "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)", "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)", ] "#]], ); @@ -1504,18 +1506,18 @@ mod tests { file_id: 
FileId( 0, ), - full_range: 121..185, - focus_range: 136..145, - name: "foo2_test", + full_range: 52..115, + focus_range: 67..75, + name: "foo_test", kind: Function, }, NavigationTarget { file_id: FileId( 0, ), - full_range: 52..115, - focus_range: 67..75, - name: "foo_test", + full_range: 121..185, + focus_range: 136..145, + name: "foo2_test", kind: Function, }, ] diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index f8c60418eb0..f9972116004 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -321,7 +321,9 @@ fn signature_help_for_generics( format_to!(res.signature, "type {}", it.name(db).display(db, edition)); } // These don't have generic args that can be specified - hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None, + hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) | hir::GenericDef::Static(_) => { + return None + } } let params = generics_def.params(sema.db); diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 8050a38b3ca..07553a87d28 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -154,6 +154,7 @@ impl StaticIndex<'_> { implicit_drop_hints: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, + hide_closure_parameter_hints: false, closure_style: hir::ClosureStyle::ImplFn, param_names_for_lifetime_elision_hints: false, binding_mode_hints: false, @@ -169,10 +170,10 @@ impl StaticIndex<'_> { .unwrap(); // hovers let sema = hir::Semantics::new(self.db); - let tokens_or_nodes = sema.parse_guess_edition(file_id).syntax().clone(); + let root = sema.parse_guess_edition(file_id).syntax().clone(); let edition = sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); - let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it { + let tokens = root.descendants_with_tokens().filter_map(|it| match it { syntax::NodeOrToken::Node(_) => None, syntax::NodeOrToken::Token(it) => Some(it), }); @@ -194,24 +195,19 @@ impl StaticIndex<'_> { ) }); let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] }; - for token in tokens { - let range = token.text_range(); - let node = token.parent().unwrap(); - let def = match get_definition(&sema, token.clone()) { - Some(it) => it, - None => continue, - }; + + let mut add_token = |def: Definition, range: TextRange, scope_node: &SyntaxNode| { let id = if let Some(it) = self.def_map.get(&def) { *it } else { let it = self.tokens.insert(TokenStaticData { - documentation: documentation_for_definition(&sema, def, &node), + documentation: documentation_for_definition(&sema, def, scope_node), hover: Some(hover_for_definition( &sema, file_id, def, None, - &node, + scope_node, None, false, &hover_config, @@ -240,6 +236,22 @@ impl StaticIndex<'_> { }, }); result.tokens.push((range, id)); + }; + + if let Some(module) = sema.file_to_module_def(file_id) { + let def = Definition::Module(module); + let range = root.text_range(); + add_token(def, range, &root); + } + + for token in tokens { + let range = token.text_range(); + let node = token.parent().unwrap(); + let def = match get_definition(&sema, token.clone()) { + Some(it) => it, + None => continue, + }; + add_token(def, range, &node); } self.files.push(result); } @@ -300,6 
+312,10 @@ mod tests { let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for f in s.files { for (range, _) in f.tokens { + if range.start() == TextSize::from(0) { + // ignore whole file range corresponding to module definition + continue; + } let it = FileRange { file_id: f.file_id, range }; if !range_set.contains(&it) { panic!("additional range {it:?}"); diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index 9e823daa2be..b0022cfac76 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -29,12 +29,11 @@ use triomphe::Arc; // // Shows internal statistic about memory usage of rust-analyzer. // -// |=== -// | Editor | Action Name +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Status** | // -// | VS Code | **rust-analyzer: Status** -// |=== -// image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[] +//  pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String { let mut buf = String::new(); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index f53f0aec098..1853e3a3407 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -76,113 +76,118 @@ pub struct HighlightConfig { // We also give special modifier for `mut` and `&mut` local variables. // // -// .Token Tags +// #### Token Tags // // Rust-analyzer currently emits the following token tags: // // - For items: -// + -// [horizontal] -// attribute:: Emitted for attribute macros. -// enum:: Emitted for enums. -// function:: Emitted for free-standing functions. -// derive:: Emitted for derive macros. -// macro:: Emitted for function-like macros. -// method:: Emitted for associated functions, also knowns as methods. -// namespace:: Emitted for modules. -// struct:: Emitted for structs. -// trait:: Emitted for traits. -// typeAlias:: Emitted for type aliases and `Self` in `impl`s. -// union:: Emitted for unions. +// +// | | | +// |-----------|--------------------------------| +// | attribute | Emitted for attribute macros. | +// |enum| Emitted for enums. | +// |function| Emitted for free-standing functions. | +// |derive| Emitted for derive macros. | +// |macro| Emitted for function-like macros. | +// |method| Emitted for associated functions, also knowns as methods. | +// |namespace| Emitted for modules. | +// |struct| Emitted for structs.| +// |trait| Emitted for traits.| +// |typeAlias| Emitted for type aliases and `Self` in `impl`s.| +// |union| Emitted for unions.| // // - For literals: -// + -// [horizontal] -// boolean:: Emitted for the boolean literals `true` and `false`. -// character:: Emitted for character literals. -// number:: Emitted for numeric literals. -// string:: Emitted for string literals. -// escapeSequence:: Emitted for escaped sequences inside strings like `\n`. -// formatSpecifier:: Emitted for format specifiers `{:?}` in `format!`-like macros. 
+// +// | | | +// |-----------|--------------------------------| +// | boolean| Emitted for the boolean literals `true` and `false`.| +// | character| Emitted for character literals.| +// | number| Emitted for numeric literals.| +// | string| Emitted for string literals.| +// | escapeSequence| Emitted for escaped sequences inside strings like `\n`.| +// | formatSpecifier| Emitted for format specifiers `{:?}` in `format!`-like macros.| // // - For operators: -// + -// [horizontal] -// operator:: Emitted for general operators. -// arithmetic:: Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`. -// bitwise:: Emitted for the bitwise operators `|`, `&`, `!`, `^`, `|=`, `&=`, `^=`. -// comparison:: Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`. -// logical:: Emitted for the logical operators `||`, `&&`, `!`. +// +// | | | +// |-----------|--------------------------------| +// |operator| Emitted for general operators.| +// |arithmetic| Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.| +// |bitwise| Emitted for the bitwise operators `|`, `&`, `!`, `^`, `|=`, `&=`, `^=`.| +// |comparison| Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.| +// |logical| Emitted for the logical operators `||`, `&&`, `!`.| // // - For punctuation: -// + -// [horizontal] -// punctuation:: Emitted for general punctuation. -// attributeBracket:: Emitted for attribute invocation brackets, that is the `#[` and `]` tokens. -// angle:: Emitted for `<>` angle brackets. -// brace:: Emitted for `{}` braces. -// bracket:: Emitted for `[]` brackets. -// parenthesis:: Emitted for `()` parentheses. -// colon:: Emitted for the `:` token. -// comma:: Emitted for the `,` token. -// dot:: Emitted for the `.` token. -// semi:: Emitted for the `;` token. -// macroBang:: Emitted for the `!` token in macro calls. // -// //- +// | | | +// |-----------|--------------------------------| +// |punctuation| Emitted for general punctuation.| +// |attributeBracket| Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.| +// |angle| Emitted for `<>` angle brackets.| +// |brace| Emitted for `{}` braces.| +// |bracket| Emitted for `[]` brackets.| +// |parenthesis| Emitted for `()` parentheses.| +// |colon| Emitted for the `:` token.| +// |comma| Emitted for the `,` token.| +// |dot| Emitted for the `.` token.| +// |semi| Emitted for the `;` token.| +// |macroBang| Emitted for the `!` token in macro calls.| // -// [horizontal] -// builtinAttribute:: Emitted for names to builtin attributes in attribute path, the `repr` in `#[repr(u8)]` for example. -// builtinType:: Emitted for builtin types like `u32`, `str` and `f32`. -// comment:: Emitted for comments. -// constParameter:: Emitted for const parameters. -// deriveHelper:: Emitted for derive helper attributes. -// enumMember:: Emitted for enum variants. -// generic:: Emitted for generic tokens that have no mapping. -// keyword:: Emitted for keywords. -// label:: Emitted for labels. -// lifetime:: Emitted for lifetimes. -// parameter:: Emitted for non-self function parameters. -// property:: Emitted for struct and union fields. -// selfKeyword:: Emitted for the self function parameter and self path-specifier. -// selfTypeKeyword:: Emitted for the Self type parameter. -// toolModule:: Emitted for tool modules. -// typeParameter:: Emitted for type parameters. -// unresolvedReference:: Emitted for unresolved references, names that rust-analyzer can't find the definition of.
-// variable:: Emitted for locals, constants and statics. +//- // +// | | | +// |-----------|--------------------------------| +// |builtinAttribute| Emitted for names to builtin attributes in attribute path, the `repr` in `#[repr(u8)]` for example.| +// |builtinType| Emitted for builtin types like `u32`, `str` and `f32`.| +// |comment| Emitted for comments.| +// |constParameter| Emitted for const parameters.| +// |deriveHelper| Emitted for derive helper attributes.| +// |enumMember| Emitted for enum variants.| +// |generic| Emitted for generic tokens that have no mapping.| +// |keyword| Emitted for keywords.| +// |label| Emitted for labels.| +// |lifetime| Emitted for lifetimes.| +// |parameter| Emitted for non-self function parameters.| +// |property| Emitted for struct and union fields.| +// |selfKeyword| Emitted for the self function parameter and self path-specifier.| +// |selfTypeKeyword| Emitted for the Self type parameter.| +// |toolModule| Emitted for tool modules.| +// |typeParameter| Emitted for type parameters.| +// |unresolvedReference| Emitted for unresolved references, names that rust-analyzer can't find the definition of.| +// |variable| Emitted for locals, constants and statics.| // -// .Token Modifiers +// +// #### Token Modifiers // // Token modifiers allow to style some elements in the source code more precisely. // // Rust-analyzer currently emits the following token modifiers: // -// [horizontal] -// async:: Emitted for async functions and the `async` and `await` keywords. -// attribute:: Emitted for tokens inside attributes. -// callable:: Emitted for locals whose types implements one of the `Fn*` traits. -// constant:: Emitted for consts. -// consuming:: Emitted for locals that are being consumed when use in a function call. -// controlFlow:: Emitted for control-flow related tokens, this includes the `?` operator. -// crateRoot:: Emitted for crate names, like `serde` and `crate`. -// declaration:: Emitted for names of definitions, like `foo` in `fn foo() {}`. -// defaultLibrary:: Emitted for items from built-in crates (std, core, alloc, test and proc_macro). -// documentation:: Emitted for documentation comments. -// injected:: Emitted for doc-string injected highlighting like rust source blocks in documentation. -// intraDocLink:: Emitted for intra doc links in doc-strings. -// library:: Emitted for items that are defined outside of the current crate. -// macro:: Emitted for tokens inside macro calls. -// mutable:: Emitted for mutable locals and statics as well as functions taking `&mut self`. -// public:: Emitted for items that are from the current crate and are `pub`. -// reference:: Emitted for locals behind a reference and functions taking `self` by reference. -// static:: Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts. -// trait:: Emitted for associated trait items. -// unsafe:: Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token. 
-// +// | | | +// |-----------|--------------------------------| +// |async| Emitted for async functions and the `async` and `await` keywords.| +// |attribute| Emitted for tokens inside attributes.| +// |callable| Emitted for locals whose types implements one of the `Fn*` traits.| +// |constant| Emitted for consts.| +// |consuming| Emitted for locals that are being consumed when use in a function call.| +// |controlFlow| Emitted for control-flow related tokens, this includes the `?` operator.| +// |crateRoot| Emitted for crate names, like `serde` and `crate`.| +// |declaration| Emitted for names of definitions, like `foo` in `fn foo() {}`.| +// |defaultLibrary| Emitted for items from built-in crates (std, core, alloc, test and proc_macro).| +// |documentation| Emitted for documentation comments.| +// |injected| Emitted for doc-string injected highlighting like rust source blocks in documentation.| +// |intraDocLink| Emitted for intra doc links in doc-strings.| +// |library| Emitted for items that are defined outside of the current crate.| +// |macro| Emitted for tokens inside macro calls.| +// |mutable| Emitted for mutable locals and statics as well as functions taking `&mut self`.| +// |public| Emitted for items that are from the current crate and are `pub`.| +// |reference| Emitted for locals behind a reference and functions taking `self` by reference.| +// |static| Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.| +// |trait| Emitted for associated trait items.| +// |unsafe| Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.| // -// image::https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png[] -// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[] +//  +//  pub(crate) fn highlight( db: &RootDatabase, config: HighlightConfig, @@ -478,7 +483,15 @@ fn traverse( { continue; } - highlight_format_string(hl, sema, krate, &string, &expanded_string, range); + highlight_format_string( + hl, + sema, + krate, + &string, + &expanded_string, + range, + file_id.edition(), + ); if !string.is_raw() { highlight_escape_string(hl, &string, range.start()); @@ -526,6 +539,7 @@ fn traverse( &mut bindings_shadow_count, config.syntactic_name_ref_highlighting, name_like, + file_id.edition(), ), NodeOrToken::Token(token) => { highlight::token(sema, token, file_id.edition()).zip(Some(None)) diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs index 7234108701a..43a6bdad7e9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs @@ -4,6 +4,7 @@ use ide_db::{ syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier}, SymbolKind, }; +use span::Edition; use syntax::{ast, TextRange}; use crate::{ @@ -18,6 +19,7 @@ pub(super) fn highlight_format_string( string: &ast::String, expanded_string: &ast::String, range: TextRange, + edition: Edition, ) { if is_format_string(expanded_string) { // FIXME: Replace this with the HIR info we have now.
@@ -39,7 +41,7 @@ pub(super) fn highlight_format_string( if let Some(res) = res { stack.add(HlRange { range, - highlight: highlight_def(sema, krate, Definition::from(res)), + highlight: highlight_def(sema, krate, Definition::from(res), edition), binding_hash: None, }) } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 22a2fe4e9eb..194fde11601 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -58,6 +58,7 @@ pub(super) fn name_like( bindings_shadow_count: &mut FxHashMap<hir::Name, u32>, syntactic_name_ref_highlighting: bool, name_like: ast::NameLike, + edition: Edition, ) -> Option<(Highlight, Option<u64>)> { let mut binding_hash = None; let highlight = match name_like { @@ -68,16 +69,17 @@ pub(super) fn name_like( &mut binding_hash, syntactic_name_ref_highlighting, name_ref, + edition, ), ast::NameLike::Name(name) => { - highlight_name(sema, bindings_shadow_count, &mut binding_hash, krate, name) + highlight_name(sema, bindings_shadow_count, &mut binding_hash, krate, name, edition) } ast::NameLike::Lifetime(lifetime) => match IdentClass::classify_lifetime(sema, &lifetime) { Some(IdentClass::NameClass(NameClass::Definition(def))) => { - highlight_def(sema, krate, def) | HlMod::Definition + highlight_def(sema, krate, def, edition) | HlMod::Definition } Some(IdentClass::NameRefClass(NameRefClass::Definition(def, _))) => { - highlight_def(sema, krate, def) + highlight_def(sema, krate, def, edition) } // FIXME: Fallback for 'static and '_, as we do not resolve these yet _ => SymbolKind::LifetimeParam.into(), @@ -234,16 +236,17 @@ fn highlight_name_ref( binding_hash: &mut Option<u64>, syntactic_name_ref_highlighting: bool, name_ref: ast::NameRef, + edition: Edition, ) -> Highlight { let db = sema.db; - if let Some(res) = highlight_method_call_by_name_ref(sema, krate, &name_ref) { + if let Some(res) = highlight_method_call_by_name_ref(sema, krate, &name_ref, edition) { return res; } let name_class = match NameRefClass::classify(sema, &name_ref) { Some(name_kind) => name_kind, None if syntactic_name_ref_highlighting => { - return highlight_name_ref_by_syntax(name_ref, sema, krate) + return highlight_name_ref_by_syntax(name_ref, sema, krate, edition) } // FIXME: This is required for helper attributes used by proc-macros, as those do not map down // to anything when used. @@ -267,7 +270,7 @@ fn highlight_name_ref( *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) }; - let mut h = highlight_def(sema, krate, def); + let mut h = highlight_def(sema, krate, def, edition); match def { Definition::Local(local) if is_consumed_lvalue(name_ref.syntax(), &local, db) => { @@ -305,7 +308,7 @@ fn highlight_name_ref( h } NameRefClass::FieldShorthand { field_ref, .. 
} => { - highlight_def(sema, krate, field_ref.into()) + highlight_def(sema, krate, field_ref.into(), edition) } NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => { let mut h = HlTag::Symbol(SymbolKind::Module).into(); @@ -341,6 +344,7 @@ fn highlight_name( binding_hash: &mut Option<u64>, krate: hir::Crate, name: ast::Name, + edition: Edition, ) -> Highlight { let name_kind = NameClass::classify(sema, &name); if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind { @@ -351,7 +355,7 @@ fn highlight_name( }; match name_kind { Some(NameClass::Definition(def)) => { - let mut h = highlight_def(sema, krate, def) | HlMod::Definition; + let mut h = highlight_def(sema, krate, def, edition) | HlMod::Definition; if let Definition::Trait(trait_) = &def { if trait_.is_unsafe(sema.db) { h |= HlMod::Unsafe; @@ -359,7 +363,7 @@ fn highlight_name( } h } - Some(NameClass::ConstReference(def)) => highlight_def(sema, krate, def), + Some(NameClass::ConstReference(def)) => highlight_def(sema, krate, def, edition), Some(NameClass::PatFieldShorthand { field_ref, .. }) => { let mut h = HlTag::Symbol(SymbolKind::Field).into(); if let hir::VariantDef::Union(_) = field_ref.parent_def(sema.db) { @@ -379,6 +383,7 @@ pub(super) fn highlight_def( sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, def: Definition, + edition: Edition, ) -> Highlight { let db = sema.db; let mut h = match def { @@ -427,7 +432,12 @@ pub(super) fn highlight_def( } } - if func.is_unsafe_to_call(db) { + // FIXME: Passing `None` here means not-unsafe functions with `#[target_feature]` will be + // highlighted as unsafe, even when the current target features set is a superset (RFC 2396). + // We probably should consider checking the current function, but I found no easy way to do + // that (also I'm worried about perf). There's also an instance below. + // FIXME: This should be the edition of the call. 
+ if func.is_unsafe_to_call(db, None, edition) { h |= HlMod::Unsafe; } if func.is_async(db) { @@ -575,21 +585,23 @@ fn highlight_method_call_by_name_ref( sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, name_ref: &ast::NameRef, + edition: Edition, ) -> Option<Highlight> { let mc = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?; - highlight_method_call(sema, krate, &mc) + highlight_method_call(sema, krate, &mc, edition) } fn highlight_method_call( sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, method_call: &ast::MethodCallExpr, + edition: Edition, ) -> Option<Highlight> { let func = sema.resolve_method_call(method_call)?; let mut h = SymbolKind::Method.into(); - if func.is_unsafe_to_call(sema.db) || sema.is_unsafe_method_call(method_call) { + if func.is_unsafe_to_call(sema.db, None, edition) || sema.is_unsafe_method_call(method_call) { h |= HlMod::Unsafe; } if func.is_async(sema.db) { @@ -665,6 +677,12 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight { STATIC => SymbolKind::Static, IDENT_PAT => SymbolKind::Local, FORMAT_ARGS_ARG => SymbolKind::Local, + RENAME => SymbolKind::Local, + MACRO_RULES => SymbolKind::Macro, + CONST_PARAM => SymbolKind::ConstParam, + SELF_PARAM => SymbolKind::SelfParam, + TRAIT_ALIAS => SymbolKind::TraitAlias, + ASM_OPERAND_NAMED => SymbolKind::Local, _ => return default.into(), }; @@ -675,6 +693,7 @@ fn highlight_name_ref_by_syntax( name: ast::NameRef, sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, + edition: Edition, ) -> Highlight { let default = HlTag::UnresolvedReference; @@ -684,8 +703,9 @@ fn highlight_name_ref_by_syntax( }; match parent.kind() { + EXTERN_CRATE => HlTag::Symbol(SymbolKind::Module) | HlMod::CrateRoot, METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent) - .and_then(|it| highlight_method_call(sema, krate, &it)) + .and_then(|it| highlight_method_call(sema, krate, &it, edition)) .unwrap_or_else(|| SymbolKind::Method.into()), FIELD_EXPR => { let h = HlTag::Symbol(SymbolKind::Field); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html index 5ff96ae2a74..eb77c14c2a5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html @@ -50,6 +50,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd <span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span> <span class="comment documentation">//! ```</span> +<span class="comment documentation">//! 
```rust</span> +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span> +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">std</span><span class="semicolon injected">;</span> +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span> +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">alloc</span><span class="semicolon injected">;</span> +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">proc_macro</span><span class="semicolon injected">;</span> +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">test</span><span class="semicolon injected">;</span> +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">Krate</span><span class="semicolon injected">;</span> +<span class="comment documentation">//! 
```</span> <span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span> <span class="comment documentation">/// ```</span> diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html index 9be7c92fc79..9477d0d1b87 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html @@ -48,17 +48,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd <pre><code><span class="keyword">use</span> <span class="module">inner</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="module declaration">inner_mod</span><span class="brace">}</span><span class="semicolon">;</span> <span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span><span class="brace">}</span> -<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">ops</span> <span class="brace">{</span> - <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_once"</span><span class="attribute_bracket attribute">]</span> - <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnOnce</span><span class="angle"><</span><span class="type_param declaration">Args</span><span class="angle">></span> <span class="brace">{</span><span class="brace">}</span> - - <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_mut"</span><span class="attribute_bracket attribute">]</span> - <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnMut</span><span class="angle"><</span><span class="type_param declaration">Args</span><span class="angle">></span><span class="colon">:</span> <span class="trait public">FnOnce</span><span class="angle"><</span><span class="type_param">Args</span><span class="angle">></span> <span class="brace">{</span><span class="brace">}</span> - - <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn"</span><span class="attribute_bracket attribute">]</span> - <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">Fn</span><span class="angle"><</span><span class="type_param declaration">Args</span><span class="angle">></span><span class="colon">:</span> <span class="trait public">FnMut</span><span class="angle"><</span><span class="type_param">Args</span><span class="angle">></span> <span class="brace">{</span><span class="brace">}</span> -<span class="brace">}</span> - <span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span> <span class="field declaration">x</span><span 
class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span> <span class="brace">}</span> @@ -125,8 +114,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd <span class="const_param const">FOO</span> <span class="brace">}</span> -<span class="keyword">use</span> <span class="module public">ops</span><span class="operator">::</span><span class="trait public">Fn</span><span class="semicolon">;</span> -<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle"><</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait public">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-></span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">></span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span> +<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">ops</span><span class="operator">::</span><span class="trait default_library library">Fn</span><span class="semicolon">;</span> +<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle"><</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait default_library library">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-></span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">></span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span> <span class="value_param callable">f</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">}</span> diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index 0a7e273950d..1794d7dbfe2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -82,6 +82,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd <span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="colon">:</span>literal<span class="parenthesis">)</span> <span class="operator">=</span><span class="operator">></span> <span class="brace">{</span><span class="brace">{</span>stringify<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="semicolon">;</span> format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="brace">}</span><span class="brace">}</span><span class="semicolon">;</span> <span class="brace">}</span> +<span class="keyword">use</span> <span class="unresolved_reference">foo</span><span class="operator">::</span><span 
class="unresolved_reference">bar</span> <span class="keyword">as</span> <span class="variable declaration">baz</span><span class="semicolon">;</span> +<span class="keyword">trait</span> <span class="trait_alias declaration">Bar</span> <span class="operator">=</span> <span class="unresolved_reference">Baz</span><span class="semicolon">;</span> +<span class="keyword">trait</span> <span class="trait_alias declaration">Foo</span> <span class="operator">=</span> <span class="trait_alias">Bar</span><span class="semicolon">;</span> + <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span> <span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\n</span><span class="char_literal">'</span><span class="semicolon">;</span> <span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\t</span><span class="char_literal">'</span><span class="semicolon">;</span> diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index af52b33de64..3775265f234 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -136,22 +136,11 @@ use self::foo as bar; fn test_highlighting() { check_highlighting( r#" -//- minicore: derive, copy +//- minicore: derive, copy, fn //- /main.rs crate:main deps:foo use inner::{self as inner_mod}; mod inner {} -pub mod ops { - #[lang = "fn_once"] - pub trait FnOnce<Args> {} - - #[lang = "fn_mut"] - pub trait FnMut<Args>: FnOnce<Args> {} - - #[lang = "fn"] - pub trait Fn<Args>: FnMut<Args> {} -} - struct Foo { x: u32, } @@ -218,7 +207,7 @@ fn const_param<const FOO: usize>() -> usize { FOO } -use ops::Fn; +use core::ops::Fn; fn baz<F: Fn() -> ()>(f: F) { f() } @@ -466,6 +455,10 @@ macro_rules! reuse_twice { ($literal:literal) => {{stringify!($literal); format_args!($literal)}}; } +use foo::bar as baz; +trait Bar = Baz; +trait Foo = Bar; + fn main() { let a = '\n'; let a = '\t'; @@ -718,6 +711,15 @@ fn test_highlight_doc_comment() { //! fn test() {} //! ``` +//! ```rust +//! extern crate self; +//! extern crate std; +//! extern crate core; +//! extern crate alloc; +//! extern crate proc_macro; +//! extern crate test; +//! extern crate Krate; +//! ``` mod outline_module; /// ``` @@ -1080,6 +1082,9 @@ pub struct Struct; ); } +// Rainbow highlighting uses a deterministic hash (fxhash) but the hashing does differ +// depending on the pointer width so only runs this on 64-bit targets. 
+#[cfg(target_pointer_width = "64")] #[test] fn test_rainbow_highlighting() { check_highlighting( diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs index 47d75f1c957..8c9dd051452 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs @@ -51,16 +51,15 @@ struct ExtendedTextEdit { // - typing `{` in a use item adds a closing `}` in the right place // - typing `>` to complete a return type `->` will insert a whitespace after it // -// VS Code:: +// #### VS Code // // Add the following to `settings.json`: -// [source,json] -// ---- +// ```json // "editor.formatOnType": true, -// ---- +// ``` // -// image::https://user-images.githubusercontent.com/48062697/113166163-69758500-923a-11eb-81ee-eb33ec380399.gif[] -// image::https://user-images.githubusercontent.com/48062697/113171066-105c2000-923f-11eb-87ab-f4a263346567.gif[] +//  +//  pub(crate) fn on_char_typed( db: &RootDatabase, position: FilePosition, diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs index e249c38c73d..c6d1c283f4e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs @@ -16,12 +16,12 @@ use ide_db::text_edit::TextEdit; // Feature: On Enter // -// rust-analyzer can override kbd:[Enter] key to make it smarter: +// rust-analyzer can override <kbd>Enter</kbd> key to make it smarter: // -// - kbd:[Enter] inside triple-slash comments automatically inserts `///` -// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//` -// - kbd:[Enter] inside `//!` doc comments automatically inserts `//!` -// - kbd:[Enter] after `{` indents contents and closing `}` of single-line block +// - <kbd>Enter</kbd> inside triple-slash comments automatically inserts `///` +// - <kbd>Enter</kbd> in the middle or after a trailing space in `//` inserts `//` +// - <kbd>Enter</kbd> inside `//!` doc comments automatically inserts `//!` +// - <kbd>Enter</kbd> after `{` indents contents and closing `}` of single-line block // // This action needs to be assigned to shortcut explicitly. // @@ -29,29 +29,27 @@ use ide_db::text_edit::TextEdit; // Similarly, if rust-analyzer crashes or stops responding, `Enter` might not work. // In that case, you can still press `Shift-Enter` to insert a newline. 
// -// VS Code:: +// #### VS Code // // Add the following to `keybindings.json`: -// [source,json] -// ---- +// ```json // { // "key": "Enter", // "command": "rust-analyzer.onEnter", // "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust" // } -// ---- +// ```` // // When using the Vim plugin: -// [source,json] -// ---- +// ```json // { // "key": "Enter", // "command": "rust-analyzer.onEnter", // "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust && vim.mode == 'Insert'" // } -// ---- +// ```` // -// image::https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif[] +//  pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> { let parse = db.parse(EditionedFileId::current_edition(position.file_id)); let file = parse.tree(); diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs index 9ff099f479e..eb6eb7da1e9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs @@ -12,11 +12,9 @@ use triomphe::Arc; // // Only workspace crates are included, no crates.io dependencies or sysroot crates. // -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: View Crate Graph** -// |=== +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: View Crate Graph** | pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> { let crate_graph = db.crate_graph(); let crates_to_render = crate_graph @@ -86,7 +84,8 @@ impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph { } fn node_label(&'a self, n: &CrateId) -> LabelText<'a> { - let name = self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| name); + let name = + self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| name.as_str()); LabelText::LabelStr(name.into()) } } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs index fe532f4cc55..bfdf9d0f337 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs @@ -4,12 +4,11 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode}; // Feature: View Hir // -// |=== -// | Editor | Action Name -// +// | Editor | Action Name | +// |---------|--------------| // | VS Code | **rust-analyzer: View Hir** -// |=== -// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[] +// +//  pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String { body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs index a6352b99d4f..67c241cbb91 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs @@ -6,11 +6,9 @@ use span::EditionedFileId; // // Displays the ItemTree of the currently open file, for debugging. 
// -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: Debug ItemTree** -// |=== +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Debug ItemTree** | pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String { let sema = Semantics::new(db); let file_id = sema diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs index ff74e05e943..edb83bc4eac 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs @@ -75,11 +75,9 @@ impl FieldOrTupleIdx { // // Displays the recursive memory layout of a datatype. // -// |=== -// | Editor | Action Name -// -// | VS Code | **rust-analyzer: View Memory Layout** -// |=== +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: View Memory Layout** | pub(crate) fn view_memory_layout( db: &RootDatabase, position: FilePosition, diff --git a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs index 7a228375d5e..aa4ff64a819 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs @@ -4,11 +4,9 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode}; // Feature: View Mir // -// |=== -// | Editor | Action Name -// +// | Editor | Action Name | +// |---------|-------------| // | VS Code | **rust-analyzer: View Mir** -// |=== pub(crate) fn view_mir(db: &RootDatabase, position: FilePosition) -> String { body_mir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs index 218ee15a7dd..407720864bf 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs @@ -1,41 +1,50 @@ use hir::Semantics; -use ide_db::{FileId, RootDatabase}; -use span::TextRange; +use ide_db::{ + line_index::{LineCol, LineIndex}, + FileId, LineIndexDatabase, RootDatabase, +}; +use span::{TextRange, TextSize}; use stdx::format_to; use syntax::{ ast::{self, IsString}, AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, WalkEvent, }; +use triomphe::Arc; // Feature: Show Syntax Tree // // Shows a tree view with the syntax tree of the current file // -// |=== -// | Editor | Panel Name -// -// | VS Code | **Rust Syntax Tree** -// |=== +// | Editor | Panel Name | +// |---------|-------------| +// | VS Code | **Rust Syntax Tree** | pub(crate) fn view_syntax_tree(db: &RootDatabase, file_id: FileId) -> String { let sema = Semantics::new(db); + let line_index = db.line_index(file_id); let parse = sema.parse_guess_edition(file_id); - syntax_node_to_json(parse.syntax(), None) + + let ctx = SyntaxTreeCtx { line_index, in_string: None }; + + syntax_node_to_json(parse.syntax(), &ctx) } -fn syntax_node_to_json(node: &SyntaxNode, ctx: Option<InStringCtx>) -> String { +fn syntax_node_to_json(node: &SyntaxNode, ctx: &SyntaxTreeCtx) -> String { let mut result = String::new(); for event in node.preorder_with_tokens() { match event { WalkEvent::Enter(it) => { let kind = it.kind(); - let (text_range, inner_range_str) = match &ctx { - Some(ctx) => { + let (text_range, inner_range_str) = match &ctx.in_string { + Some(in_string) => { + let start_pos = TextPosition::new(&ctx.line_index, 
it.text_range().start()); + let end_pos = TextPosition::new(&ctx.line_index, it.text_range().end()); + let inner_start: u32 = it.text_range().start().into(); - let inner_end: u32 = it.text_range().end().into(); + let inner_end: u32 = it.text_range().start().into(); - let mut true_start = inner_start + ctx.offset; - let mut true_end = inner_end + ctx.offset; - for pos in &ctx.marker_positions { + let mut true_start = inner_start + in_string.offset; + let mut true_end = inner_end + in_string.offset; + for pos in &in_string.marker_positions { if *pos >= inner_end { break; } @@ -48,39 +57,33 @@ fn syntax_node_to_json(node: &SyntaxNode, ctx: Option<InStringCtx>) -> String { let true_range = TextRange::new(true_start.into(), true_end.into()); - ( - true_range, - format!( - r#","istart":{:?},"iend":{:?}"#, - it.text_range().start(), - it.text_range().end() - ), - ) + (true_range, format!(r#","istart":{start_pos},"iend":{end_pos}"#,)) } None => (it.text_range(), "".to_owned()), }; - let start = text_range.start(); - let end = text_range.end(); + + let start = TextPosition::new(&ctx.line_index, text_range.start()); + let end = TextPosition::new(&ctx.line_index, text_range.end()); match it { NodeOrToken::Node(_) => { format_to!( result, - r#"{{"type":"Node","kind":"{kind:?}","start":{start:?},"end":{end:?}{inner_range_str},"children":["# + r#"{{"type":"Node","kind":"{kind:?}","start":{start},"end":{end}{inner_range_str},"children":["# ); } NodeOrToken::Token(token) => { let comma = if token.next_sibling_or_token().is_some() { "," } else { "" }; - match parse_rust_string(token) { + match parse_rust_string(token, ctx) { Some(parsed) => { format_to!( result, - r#"{{"type":"Node","kind":"{kind:?}","start":{start:?},"end":{end:?}{inner_range_str},"children":[{parsed}]}}{comma}"# + r#"{{"type":"Node","kind":"{kind:?}","start":{start},"end":{end}{inner_range_str},"children":[{parsed}]}}{comma}"# ); } None => format_to!( result, - r#"{{"type":"Token","kind":"{kind:?}","start":{start:?},"end":{end:?}{inner_range_str}}}{comma}"# + r#"{{"type":"Token","kind":"{kind:?}","start":{start},"end":{end}{inner_range_str}}}{comma}"# ), } } @@ -99,7 +102,26 @@ fn syntax_node_to_json(node: &SyntaxNode, ctx: Option<InStringCtx>) -> String { result } -fn parse_rust_string(token: SyntaxToken) -> Option<String> { +struct TextPosition { + offset: TextSize, + line: u32, + col: u32, +} + +impl std::fmt::Display for TextPosition { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "[{:?},{},{}]", self.offset, self.line, self.col) + } +} + +impl TextPosition { + pub(crate) fn new(line_index: &LineIndex, offset: TextSize) -> Self { + let LineCol { line, col } = line_index.line_col(offset); + Self { offset, line, col } + } +} + +fn parse_rust_string(token: SyntaxToken, ctx: &SyntaxTreeCtx) -> Option<String> { let string_node = ast::String::cast(token)?; let text = string_node.value().ok()?; @@ -128,13 +150,20 @@ fn parse_rust_string(token: SyntaxToken) -> Option<String> { return None; } - Some(syntax_node_to_json( - node, - Some(InStringCtx { + let ctx = SyntaxTreeCtx { + line_index: ctx.line_index.clone(), + in_string: Some(InStringCtx { offset: string_node.text_range_between_quotes()?.start().into(), marker_positions, }), - )) + }; + + Some(syntax_node_to_json(node, &ctx)) +} + +struct SyntaxTreeCtx { + line_index: Arc<LineIndex>, + in_string: Option<InStringCtx>, } struct InStringCtx { @@ -160,7 +189,7 @@ mod tests { check( r#"fn foo() {}"#, expect![[ - 
r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":11,"children":[{"type":"Node","kind":"FN","start":0,"end":11,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":6,"children":[{"type":"Token","kind":"IDENT","start":3,"end":6}]},{"type":"Node","kind":"PARAM_LIST","start":6,"end":8,"children":[{"type":"Token","kind":"L_PAREN","start":6,"end":7},{"type":"Token","kind":"R_PAREN","start":7,"end":8}]},{"type":"Token","kind":"WHITESPACE","start":8,"end":9},{"type":"Node","kind":"BLOCK_EXPR","start":9,"end":11,"children":[{"type":"Node","kind":"STMT_LIST","start":9,"end":11,"children":[{"type":"Token","kind":"L_CURLY","start":9,"end":10},{"type":"Token","kind":"R_CURLY","start":10,"end":11}]}]}]}]}"# + r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[11,0,11],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[11,0,11],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[6,0,6],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[6,0,6]}]},{"type":"Node","kind":"PARAM_LIST","start":[6,0,6],"end":[8,0,8],"children":[{"type":"Token","kind":"L_PAREN","start":[6,0,6],"end":[7,0,7]},{"type":"Token","kind":"R_PAREN","start":[7,0,7],"end":[8,0,8]}]},{"type":"Token","kind":"WHITESPACE","start":[8,0,8],"end":[9,0,9]},{"type":"Node","kind":"BLOCK_EXPR","start":[9,0,9],"end":[11,0,11],"children":[{"type":"Node","kind":"STMT_LIST","start":[9,0,9],"end":[11,0,11],"children":[{"type":"Token","kind":"L_CURLY","start":[9,0,9],"end":[10,0,10]},{"type":"Token","kind":"R_CURLY","start":[10,0,10],"end":[11,0,11]}]}]}]}]}"# ]], ); @@ -173,7 +202,7 @@ fn test() { ", ""); }"#, expect![[ - 
r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":60,"children":[{"type":"Node","kind":"FN","start":0,"end":60,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":60,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":60,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":58,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":57,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":57,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":57,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":52,"children":[{"type":"Node","kind":"SOURCE_FILE","start":25,"end":51,"istart":0,"iend":26,"children":[{"type":"Token","kind":"WHITESPACE","start":25,"end":30,"istart":0,"iend":5},{"type":"Node","kind":"FN","start":30,"end":46,"istart":5,"iend":21,"children":[{"type":"Token","kind":"FN_KW","start":30,"end":32,"istart":5,"iend":7},{"type":"Token","kind":"WHITESPACE","start":32,"end":33,"istart":7,"iend":8},{"type":"Node","kind":"NAME","start":33,"end":36,"istart":8,"iend":11,"children":[{"type":"Token","kind":"IDENT","start":33,"end":36,"istart":8,"iend":11}]},{"type":"Node","kind":"PARAM_LIST","start":36,"end":38,"istart":11,"iend":13,"children":[{"type":"Token","kind":"L_PAREN","start":36,"end":37,"istart":11,"iend":12},{"type":"Token","kind":"R_PAREN","start":37,"end":38,"istart":12,"iend":13}]},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":13,"iend":14},{"type":"Node","kind":"BLOCK_EXPR","start":39,"end":46,"istart":14,"iend":21,"children":[{"type":"Node","kind":"STMT_LIST","start":39,"end":46,"istart":14,"iend":21,"children":[{"type":"Token","kind":"L_CURLY","start":39,"end":40,"istart":14,"iend":15},{"type":"Token","kind":"WHITESPACE","start":40,"end":45,"istart":15,"iend":20},{"type":"Token","kind":"R_CURLY","start":45,"end":46,"istart":20,"iend":21}]}]}]},{"type":"Token","kind":"WHITESPACE","start":46,"end":51,"istart":21,"iend":26}]}]},{"type":"Token","kind":"COMMA","start":52,"end":53},{"type":"Token","kind":"WHITESPACE","start":53,"end":54},{"type":"Token","kind":"STRING","start":54,"end":56},{"type":"Token","kind":"R_PAREN","start":56,"end":57}]}]}]},{"type":"Token","kind":"SEMICOLON","start":57,"end":58}]},{"type":"Token","kind":"WHITESPACE","start":58,"end":59},{"type":"Token","kind":"R_CURLY","start":59,"end":60}]}]}]}]}"# + 
r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[60,5,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[60,5,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[60,5,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[60,5,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[58,4,11],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[57,4,10],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[57,4,10],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[57,4,10],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[52,4,5],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[25,1,13],"end":[25,1,13],"istart":[0,0,0],"iend":[26,2,0],"children":[{"type":"Token","kind":"WHITESPACE","start":[25,1,13],"end":[25,1,13],"istart":[0,0,0],"iend":[5,0,5]},{"type":"Node","kind":"FN","start":[30,2,4],"end":[30,2,4],"istart":[5,0,5],"iend":[21,1,9],"children":[{"type":"Token","kind":"FN_KW","start":[30,2,4],"end":[30,2,4],"istart":[5,0,5],"iend":[7,0,7]},{"type":"Token","kind":"WHITESPACE","start":[32,2,6],"end":[32,2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Node","kind":"NAME","start":[33,2,7],"end":[33,2,7],"istart":[8,0,8],"iend":[11,0,11],"children":[{"type":"Token","kind":"IDENT","start":[33,2,7],"end":[33,2,7],"istart":[8,0,8],"iend":[11,0,11]}]},{"type":"Node","kind":"PARAM_LIST","start":[36,2,10],"end":[36,2,10],"istart":[11,0,11],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_PAREN","start":[36,2,10],"end":[36,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_PAREN","start":[37,2,11],"end":[37,2,11],"istart":[12,1,0],"iend":[13,1,1]}]},{"type":"Token","kind":"WHITESPACE","start":[38,2,12],"end":[38,2,12],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"BLOCK_EXPR","start":[39,2,13],"end":[39,2,13],"istart":[14,1,2],"iend":[21,1,9],"children":[{"type":"Node","kind":"STMT_LIST","start":[39,2,13],"end":[39,2,13],"istart":[14,1,2],"iend":[21,1,9],"children":[{"type":"Token","kind":"L_CURLY","start":[39,2,13],"end":[39,2,13],"istart":[14,1,2],"iend":[15,1,3]},{"type":"Token","kind":"WHITESPACE","start":[40,2,14],"end":[40,2,14],"istart":[15,1,3],"iend":[20,1,8]},{"type":"Token","kind":"R_CURLY","start":[45,3,4],"end":[45,3,4],"istart":[20,1,8],"iend":[21,1,9]}]}]}]},{"type":"Token","kind":"WHITESPACE",
"start":[46,3,5],"end":[46,3,5],"istart":[21,1,9],"iend":[26,2,0]}]}]},{"type":"Token","kind":"COMMA","start":[52,4,5],"end":[53,4,6]},{"type":"Token","kind":"WHITESPACE","start":[53,4,6],"end":[54,4,7]},{"type":"Token","kind":"STRING","start":[54,4,7],"end":[56,4,9]},{"type":"Token","kind":"R_PAREN","start":[56,4,9],"end":[57,4,10]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[57,4,10],"end":[58,4,11]}]},{"type":"Token","kind":"WHITESPACE","start":[58,4,11],"end":[59,5,0]},{"type":"Token","kind":"R_CURLY","start":[59,5,0],"end":[60,5,1]}]}]}]}]}"# ]], ) } @@ -190,7 +219,7 @@ fn bar() { ", ""); }"#, expect![[ - r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":65,"children":[{"type":"Node","kind":"FN","start":0,"end":65,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":65,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":65,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":63,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":62,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":62,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":62,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":57,"children":[{"type":"Node","kind":"SOURCE_FILE","start":25,"end":56,"istart":0,"iend":31,"children":[{"type":"Token","kind":"WHITESPACE","start":25,"end":26,"istart":0,"iend":1},{"type":"Node","kind":"FN","start":26,"end":38,"istart":1,"iend":13,"children":[{"type":"Token","kind":"FN_KW","start":26,"end":28,"istart":1,"iend":3},{"type":"Token","kind":"WHITESPACE","start":28,"end":29,"istart":3,"iend":4},{"type":"Node","kind":"NAME","start":29,"end":32,"istart":4,"iend":7,"children":[{"type":"Token","kind":"IDENT","start":29,"end":32,"istart":4,"iend":7}]},{"type":"Node","kind":"PARAM_LIST","start":32,"end":34,"istart":7,"iend":9,"children":[{"type":"Token","kind":"L_PAREN","start":32,"end":33,"istart":7,"iend":8},{"type":"Token","kind":"R_PAREN","start":33,"end":34,"istart":8,"iend":9}]},{"type":"Token","kind":"WHITESPACE","start":34,"end":35,"istart":9,"iend":10},{"type":"Node","kind":"BLOCK_EXPR","start":35,"end":38,"istart":10,"iend":13,"children":[{"type":"Node","kind":"STMT_LIST","start":35,"end":38,"istart":10,"iend":13,"children":[{"type":"Token","kind":"L_CURLY","start":35,"end":36,"istart":10,"iend":11},{"type":"Token","kind":"WHITESPACE","start":36,"end":37,"istart":11,"iend":12},{"type":"Token","kind":"R_CURLY","start":37,"end":38,"istart":12,"iend":13}]}]}]},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":13,"iend":14},{"type":"Node","kind":"FN","start":39,"end":51,"istart":14,"iend":26,"
children":[{"type":"Token","kind":"FN_KW","start":39,"end":41,"istart":14,"iend":16},{"type":"Token","kind":"WHITESPACE","start":41,"end":42,"istart":16,"iend":17},{"type":"Node","kind":"NAME","start":42,"end":45,"istart":17,"iend":20,"children":[{"type":"Token","kind":"IDENT","start":42,"end":45,"istart":17,"iend":20}]},{"type":"Node","kind":"PARAM_LIST","start":45,"end":47,"istart":20,"iend":22,"children":[{"type":"Token","kind":"L_PAREN","start":45,"end":46,"istart":20,"iend":21},{"type":"Token","kind":"R_PAREN","start":46,"end":47,"istart":21,"iend":22}]},{"type":"Token","kind":"WHITESPACE","start":47,"end":48,"istart":22,"iend":23},{"type":"Node","kind":"BLOCK_EXPR","start":48,"end":51,"istart":23,"iend":26,"children":[{"type":"Node","kind":"STMT_LIST","start":48,"end":51,"istart":23,"iend":26,"children":[{"type":"Token","kind":"L_CURLY","start":48,"end":49,"istart":23,"iend":24},{"type":"Token","kind":"WHITESPACE","start":49,"end":50,"istart":24,"iend":25},{"type":"Token","kind":"R_CURLY","start":50,"end":51,"istart":25,"iend":26}]}]}]},{"type":"Token","kind":"WHITESPACE","start":51,"end":56,"istart":26,"iend":31}]}]},{"type":"Token","kind":"COMMA","start":57,"end":58},{"type":"Token","kind":"WHITESPACE","start":58,"end":59},{"type":"Token","kind":"STRING","start":59,"end":61},{"type":"Token","kind":"R_PAREN","start":61,"end":62}]}]}]},{"type":"Token","kind":"SEMICOLON","start":62,"end":63}]},{"type":"Token","kind":"WHITESPACE","start":63,"end":64},{"type":"Token","kind":"R_CURLY","start":64,"end":65}]}]}]}]}"# + r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[65,7,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[65,7,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[65,7,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[65,7,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[63,6,11],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[62,6,10],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[62,6,10],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[62,6,10],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[57,6,5],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[25,1,13],"end":[25,1,13],"istart":[0,0,0],"iend":[31,2,5],"children":[{"type":"Token","kind":"WHITESPACE","start":[25,1,13],"end":[25,1,13],"i
start":[0,0,0],"iend":[1,0,1]},{"type":"Node","kind":"FN","start":[26,2,0],"end":[26,2,0],"istart":[1,0,1],"iend":[13,1,1],"children":[{"type":"Token","kind":"FN_KW","start":[26,2,0],"end":[26,2,0],"istart":[1,0,1],"iend":[3,0,3]},{"type":"Token","kind":"WHITESPACE","start":[28,2,2],"end":[28,2,2],"istart":[3,0,3],"iend":[4,0,4]},{"type":"Node","kind":"NAME","start":[29,2,3],"end":[29,2,3],"istart":[4,0,4],"iend":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[29,2,3],"end":[29,2,3],"istart":[4,0,4],"iend":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[32,2,6],"end":[32,2,6],"istart":[7,0,7],"iend":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[32,2,6],"end":[32,2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[33,2,7],"end":[33,2,7],"istart":[8,0,8],"iend":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[34,2,8],"end":[34,2,8],"istart":[9,0,9],"iend":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[35,2,9],"end":[35,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[35,2,9],"end":[35,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_CURLY","start":[35,2,9],"end":[35,2,9],"istart":[10,0,10],"iend":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[36,2,10],"end":[36,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_CURLY","start":[37,3,0],"end":[37,3,0],"istart":[12,1,0],"iend":[13,1,1]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[38,3,1],"end":[38,3,1],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"FN","start":[39,4,0],"end":[39,4,0],"istart":[14,1,2],"iend":[26,2,0],"children":[{"type":"Token","kind":"FN_KW","start":[39,4,0],"end":[39,4,0],"istart":[14,1,2],"iend":[16,1,4]},{"type":"Token","kind":"WHITESPACE","start":[41,4,2],"end":[41,4,2],"istart":[16,1,4],"iend":[17,1,5]},{"type":"Node","kind":"NAME","start":[42,4,3],"end":[42,4,3],"istart":[17,1,5],"iend":[20,1,8],"children":[{"type":"Token","kind":"IDENT","start":[42,4,3],"end":[42,4,3],"istart":[17,1,5],"iend":[20,1,8]}]},{"type":"Node","kind":"PARAM_LIST","start":[45,4,6],"end":[45,4,6],"istart":[20,1,8],"iend":[22,1,10],"children":[{"type":"Token","kind":"L_PAREN","start":[45,4,6],"end":[45,4,6],"istart":[20,1,8],"iend":[21,1,9]},{"type":"Token","kind":"R_PAREN","start":[46,4,7],"end":[46,4,7],"istart":[21,1,9],"iend":[22,1,10]}]},{"type":"Token","kind":"WHITESPACE","start":[47,4,8],"end":[47,4,8],"istart":[22,1,10],"iend":[23,1,11]},{"type":"Node","kind":"BLOCK_EXPR","start":[48,4,9],"end":[48,4,9],"istart":[23,1,11],"iend":[26,2,0],"children":[{"type":"Node","kind":"STMT_LIST","start":[48,4,9],"end":[48,4,9],"istart":[23,1,11],"iend":[26,2,0],"children":[{"type":"Token","kind":"L_CURLY","start":[48,4,9],"end":[48,4,9],"istart":[23,1,11],"iend":[24,1,12]},{"type":"Token","kind":"WHITESPACE","start":[49,4,10],"end":[49,4,10],"istart":[24,1,12],"iend":[25,1,13]},{"type":"Token","kind":"R_CURLY","start":[50,5,0],"end":[50,5,0],"istart":[25,1,13],"iend":[26,2,0]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[51,5,1],"end":[51,5,1],"istart":[26,2,0],"iend":[31,2,5]}]}]},{"type":"Token","kind":"COMMA","start":[57,6,5],"end":[58,6,6]},{"type":"Token","kind":"WHITESPACE","start":[58,6,6],"end":[59,6,7]},{"type":"Token","kind":"STRING","start":[59,6,7],"end":[61,6,9]},{"type":"Token","kind":"R_PAREN","start":[61,6,9],"end":[62,6,10]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[62,6,10],"end":[63,6,11]}]},{"type":"Token","kind":"WHITESPACE",
"start":[63,6,11],"end":[64,7,0]},{"type":"Token","kind":"R_CURLY","start":[64,7,0],"end":[65,7,1]}]}]}]}]}"# ]], ); @@ -205,7 +234,7 @@ fn bar() { "#, ""); }"###, expect![[ - r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":68,"children":[{"type":"Node","kind":"FN","start":0,"end":68,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":68,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":68,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":66,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":65,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":65,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":65,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":60,"children":[{"type":"Node","kind":"SOURCE_FILE","start":27,"end":58,"istart":0,"iend":31,"children":[{"type":"Token","kind":"WHITESPACE","start":27,"end":28,"istart":0,"iend":1},{"type":"Node","kind":"FN","start":28,"end":40,"istart":1,"iend":13,"children":[{"type":"Token","kind":"FN_KW","start":28,"end":30,"istart":1,"iend":3},{"type":"Token","kind":"WHITESPACE","start":30,"end":31,"istart":3,"iend":4},{"type":"Node","kind":"NAME","start":31,"end":34,"istart":4,"iend":7,"children":[{"type":"Token","kind":"IDENT","start":31,"end":34,"istart":4,"iend":7}]},{"type":"Node","kind":"PARAM_LIST","start":34,"end":36,"istart":7,"iend":9,"children":[{"type":"Token","kind":"L_PAREN","start":34,"end":35,"istart":7,"iend":8},{"type":"Token","kind":"R_PAREN","start":35,"end":36,"istart":8,"iend":9}]},{"type":"Token","kind":"WHITESPACE","start":36,"end":37,"istart":9,"iend":10},{"type":"Node","kind":"BLOCK_EXPR","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Node","kind":"STMT_LIST","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Token","kind":"L_CURLY","start":37,"end":38,"istart":10,"iend":11},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":11,"iend":12},{"type":"Token","kind":"R_CURLY","start":39,"end":40,"istart":12,"iend":13}]}]}]},{"type":"Token","kind":"WHITESPACE","start":40,"end":41,"istart":13,"iend":14},{"type":"Node","kind":"FN","start":41,"end":53,"istart":14,"iend":26,"children":[{"type":"Token","kind":"FN_KW","start":41,"end":43,"istart":14,"iend":16},{"type":"Token","kind":"WHITESPACE","start":43,"end":44,"istart":16,"iend":17},{"type":"Node","kind":"NAME","start":44,"end":47,"istart":17,"iend":20,"children":[{"type":"Token","kind":"IDENT","start":44,"end":47,"istart":17,"iend":20}]},{"type":"Node","kind":"PARAM_LIST","start":47,"end":49,"istart":20,"iend":22,"children":[{"type":"Token","kind":"L_PAREN","st
art":47,"end":48,"istart":20,"iend":21},{"type":"Token","kind":"R_PAREN","start":48,"end":49,"istart":21,"iend":22}]},{"type":"Token","kind":"WHITESPACE","start":49,"end":50,"istart":22,"iend":23},{"type":"Node","kind":"BLOCK_EXPR","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Node","kind":"STMT_LIST","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Token","kind":"L_CURLY","start":50,"end":51,"istart":23,"iend":24},{"type":"Token","kind":"WHITESPACE","start":51,"end":52,"istart":24,"iend":25},{"type":"Token","kind":"R_CURLY","start":52,"end":53,"istart":25,"iend":26}]}]}]},{"type":"Token","kind":"WHITESPACE","start":53,"end":58,"istart":26,"iend":31}]}]},{"type":"Token","kind":"COMMA","start":60,"end":61},{"type":"Token","kind":"WHITESPACE","start":61,"end":62},{"type":"Token","kind":"STRING","start":62,"end":64},{"type":"Token","kind":"R_PAREN","start":64,"end":65}]}]}]},{"type":"Token","kind":"SEMICOLON","start":65,"end":66}]},{"type":"Token","kind":"WHITESPACE","start":66,"end":67},{"type":"Token","kind":"R_CURLY","start":67,"end":68}]}]}]}]}"# + r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[68,7,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[68,7,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[68,7,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[68,7,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[66,6,12],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[65,6,11],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[65,6,11],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[65,6,11],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[60,6,6],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[27,1,15],"end":[27,1,15],"istart":[0,0,0],"iend":[31,2,3],"children":[{"type":"Token","kind":"WHITESPACE","start":[27,1,15],"end":[27,1,15],"istart":[0,0,0],"iend":[1,0,1]},{"type":"Node","kind":"FN","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[13,1,1],"children":[{"type":"Token","kind":"FN_KW","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[3,0,3]},{"type":"Token","kind":"WHITESPACE","start":[30,2,2],"end":[30,2,2],"istart":[3,0,3],"iend":[4,0,4]},{"type":"Node","kind":"NAME","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7],"children":[{"type":"To
ken","kind":"IDENT","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[34,2,6],"end":[34,2,6],"istart":[7,0,7],"iend":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[34,2,6],"end":[34,2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[35,2,7],"end":[35,2,7],"istart":[8,0,8],"iend":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[36,2,8],"end":[36,2,8],"istart":[9,0,9],"iend":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_CURLY","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[38,2,10],"end":[38,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_CURLY","start":[39,3,0],"end":[39,3,0],"istart":[12,1,0],"iend":[13,1,1]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[40,3,1],"end":[40,3,1],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"FN","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[26,1,14],"children":[{"type":"Token","kind":"FN_KW","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[16,1,4]},{"type":"Token","kind":"WHITESPACE","start":[43,4,2],"end":[43,4,2],"istart":[16,1,4],"iend":[17,1,5]},{"type":"Node","kind":"NAME","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,1,8],"children":[{"type":"Token","kind":"IDENT","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,1,8]}]},{"type":"Node","kind":"PARAM_LIST","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[22,1,10],"children":[{"type":"Token","kind":"L_PAREN","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[21,1,9]},{"type":"Token","kind":"R_PAREN","start":[48,4,7],"end":[48,4,7],"istart":[21,1,9],"iend":[22,1,10]}]},{"type":"Token","kind":"WHITESPACE","start":[49,4,8],"end":[49,4,8],"istart":[22,1,10],"iend":[23,1,11]},{"type":"Node","kind":"BLOCK_EXPR","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Node","kind":"STMT_LIST","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Token","kind":"L_CURLY","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[24,1,12]},{"type":"Token","kind":"WHITESPACE","start":[51,4,10],"end":[51,4,10],"istart":[24,1,12],"iend":[25,1,13]},{"type":"Token","kind":"R_CURLY","start":[52,5,0],"end":[52,5,0],"istart":[25,1,13],"iend":[26,1,14]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[53,5,1],"end":[53,5,1],"istart":[26,1,14],"iend":[31,2,3]}]}]},{"type":"Token","kind":"COMMA","start":[60,6,6],"end":[61,6,7]},{"type":"Token","kind":"WHITESPACE","start":[61,6,7],"end":[62,6,8]},{"type":"Token","kind":"STRING","start":[62,6,8],"end":[64,6,10]},{"type":"Token","kind":"R_PAREN","start":[64,6,10],"end":[65,6,11]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[65,6,11],"end":[66,6,12]}]},{"type":"Token","kind":"WHITESPACE","start":[66,6,12],"end":[67,7,0]},{"type":"Token","kind":"R_CURLY","start":[67,7,0],"end":[68,7,1]}]}]}]}]}"# ]], ); @@ -219,7 +248,7 @@ fn bar() { }"$0#, ""); }"###, expect![[ - 
r#"{"type":"Node","kind":"SOURCE_FILE","start":0,"end":63,"children":[{"type":"Node","kind":"FN","start":0,"end":63,"children":[{"type":"Token","kind":"FN_KW","start":0,"end":2},{"type":"Token","kind":"WHITESPACE","start":2,"end":3},{"type":"Node","kind":"NAME","start":3,"end":7,"children":[{"type":"Token","kind":"IDENT","start":3,"end":7}]},{"type":"Node","kind":"PARAM_LIST","start":7,"end":9,"children":[{"type":"Token","kind":"L_PAREN","start":7,"end":8},{"type":"Token","kind":"R_PAREN","start":8,"end":9}]},{"type":"Token","kind":"WHITESPACE","start":9,"end":10},{"type":"Node","kind":"BLOCK_EXPR","start":10,"end":63,"children":[{"type":"Node","kind":"STMT_LIST","start":10,"end":63,"children":[{"type":"Token","kind":"L_CURLY","start":10,"end":11},{"type":"Token","kind":"WHITESPACE","start":11,"end":16},{"type":"Node","kind":"EXPR_STMT","start":16,"end":61,"children":[{"type":"Node","kind":"MACRO_EXPR","start":16,"end":60,"children":[{"type":"Node","kind":"MACRO_CALL","start":16,"end":60,"children":[{"type":"Node","kind":"PATH","start":16,"end":22,"children":[{"type":"Node","kind":"PATH_SEGMENT","start":16,"end":22,"children":[{"type":"Node","kind":"NAME_REF","start":16,"end":22,"children":[{"type":"Token","kind":"IDENT","start":16,"end":22}]}]}]},{"type":"Token","kind":"BANG","start":22,"end":23},{"type":"Node","kind":"TOKEN_TREE","start":23,"end":60,"children":[{"type":"Token","kind":"L_PAREN","start":23,"end":24},{"type":"Node","kind":"STRING","start":24,"end":55,"children":[{"type":"Node","kind":"SOURCE_FILE","start":27,"end":53,"istart":0,"iend":26,"children":[{"type":"Token","kind":"WHITESPACE","start":27,"end":28,"istart":0,"iend":1},{"type":"Node","kind":"FN","start":28,"end":40,"istart":1,"iend":13,"children":[{"type":"Token","kind":"FN_KW","start":28,"end":30,"istart":1,"iend":3},{"type":"Token","kind":"WHITESPACE","start":30,"end":31,"istart":3,"iend":4},{"type":"Node","kind":"NAME","start":31,"end":34,"istart":4,"iend":7,"children":[{"type":"Token","kind":"IDENT","start":31,"end":34,"istart":4,"iend":7}]},{"type":"Node","kind":"PARAM_LIST","start":34,"end":36,"istart":7,"iend":9,"children":[{"type":"Token","kind":"L_PAREN","start":34,"end":35,"istart":7,"iend":8},{"type":"Token","kind":"R_PAREN","start":35,"end":36,"istart":8,"iend":9}]},{"type":"Token","kind":"WHITESPACE","start":36,"end":37,"istart":9,"iend":10},{"type":"Node","kind":"BLOCK_EXPR","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Node","kind":"STMT_LIST","start":37,"end":40,"istart":10,"iend":13,"children":[{"type":"Token","kind":"L_CURLY","start":37,"end":38,"istart":10,"iend":11},{"type":"Token","kind":"WHITESPACE","start":38,"end":39,"istart":11,"iend":12},{"type":"Token","kind":"R_CURLY","start":39,"end":40,"istart":12,"iend":13}]}]}]},{"type":"Token","kind":"WHITESPACE","start":40,"end":41,"istart":13,"iend":14},{"type":"Node","kind":"FN","start":41,"end":53,"istart":14,"iend":26,"children":[{"type":"Token","kind":"FN_KW","start":41,"end":43,"istart":14,"iend":16},{"type":"Token","kind":"WHITESPACE","start":43,"end":44,"istart":16,"iend":17},{"type":"Node","kind":"NAME","start":44,"end":47,"istart":17,"iend":20,"children":[{"type":"Token","kind":"IDENT","start":44,"end":47,"istart":17,"iend":20}]},{"type":"Node","kind":"PARAM_LIST","start":47,"end":49,"istart":20,"iend":22,"children":[{"type":"Token","kind":"L_PAREN","start":47,"end":48,"istart":20,"iend":21},{"type":"Token","kind":"R_PAREN","start":48,"end":49,"istart":21,"iend":22}]},{"type":"Token","kind":"WHITESPACE","start":49,"end":50,"i
start":22,"iend":23},{"type":"Node","kind":"BLOCK_EXPR","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Node","kind":"STMT_LIST","start":50,"end":53,"istart":23,"iend":26,"children":[{"type":"Token","kind":"L_CURLY","start":50,"end":51,"istart":23,"iend":24},{"type":"Token","kind":"WHITESPACE","start":51,"end":52,"istart":24,"iend":25},{"type":"Token","kind":"R_CURLY","start":52,"end":53,"istart":25,"iend":26}]}]}]}]}]},{"type":"Token","kind":"COMMA","start":55,"end":56},{"type":"Token","kind":"WHITESPACE","start":56,"end":57},{"type":"Token","kind":"STRING","start":57,"end":59},{"type":"Token","kind":"R_PAREN","start":59,"end":60}]}]}]},{"type":"Token","kind":"SEMICOLON","start":60,"end":61}]},{"type":"Token","kind":"WHITESPACE","start":61,"end":62},{"type":"Token","kind":"R_CURLY","start":62,"end":63}]}]}]}]}"# + r#"{"type":"Node","kind":"SOURCE_FILE","start":[0,0,0],"end":[63,6,1],"children":[{"type":"Node","kind":"FN","start":[0,0,0],"end":[63,6,1],"children":[{"type":"Token","kind":"FN_KW","start":[0,0,0],"end":[2,0,2]},{"type":"Token","kind":"WHITESPACE","start":[2,0,2],"end":[3,0,3]},{"type":"Node","kind":"NAME","start":[3,0,3],"end":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[3,0,3],"end":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[7,0,7],"end":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[7,0,7],"end":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[8,0,8],"end":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[9,0,9],"end":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[10,0,10],"end":[63,6,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[10,0,10],"end":[63,6,1],"children":[{"type":"Token","kind":"L_CURLY","start":[10,0,10],"end":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[11,0,11],"end":[16,1,4]},{"type":"Node","kind":"EXPR_STMT","start":[16,1,4],"end":[61,5,9],"children":[{"type":"Node","kind":"MACRO_EXPR","start":[16,1,4],"end":[60,5,8],"children":[{"type":"Node","kind":"MACRO_CALL","start":[16,1,4],"end":[60,5,8],"children":[{"type":"Node","kind":"PATH","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"PATH_SEGMENT","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Node","kind":"NAME_REF","start":[16,1,4],"end":[22,1,10],"children":[{"type":"Token","kind":"IDENT","start":[16,1,4],"end":[22,1,10]}]}]}]},{"type":"Token","kind":"BANG","start":[22,1,10],"end":[23,1,11]},{"type":"Node","kind":"TOKEN_TREE","start":[23,1,11],"end":[60,5,8],"children":[{"type":"Token","kind":"L_PAREN","start":[23,1,11],"end":[24,1,12]},{"type":"Node","kind":"STRING","start":[24,1,12],"end":[55,5,3],"children":[{"type":"Node","kind":"SOURCE_FILE","start":[27,1,15],"end":[27,1,15],"istart":[0,0,0],"iend":[26,1,14],"children":[{"type":"Token","kind":"WHITESPACE","start":[27,1,15],"end":[27,1,15],"istart":[0,0,0],"iend":[1,0,1]},{"type":"Node","kind":"FN","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[13,1,1],"children":[{"type":"Token","kind":"FN_KW","start":[28,2,0],"end":[28,2,0],"istart":[1,0,1],"iend":[3,0,3]},{"type":"Token","kind":"WHITESPACE","start":[30,2,2],"end":[30,2,2],"istart":[3,0,3],"iend":[4,0,4]},{"type":"Node","kind":"NAME","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7],"children":[{"type":"Token","kind":"IDENT","start":[31,2,3],"end":[31,2,3],"istart":[4,0,4],"iend":[7,0,7]}]},{"type":"Node","kind":"PARAM_LIST","start":[34,2,6],"end":[34,2,6],"istart":[7,0,7],"iend":[9,0,9],"children":[{"type":"Token","kind":"L_PAREN","start":[34,2,6],"end":[34,
2,6],"istart":[7,0,7],"iend":[8,0,8]},{"type":"Token","kind":"R_PAREN","start":[35,2,7],"end":[35,2,7],"istart":[8,0,8],"iend":[9,0,9]}]},{"type":"Token","kind":"WHITESPACE","start":[36,2,8],"end":[36,2,8],"istart":[9,0,9],"iend":[10,0,10]},{"type":"Node","kind":"BLOCK_EXPR","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Node","kind":"STMT_LIST","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[13,1,1],"children":[{"type":"Token","kind":"L_CURLY","start":[37,2,9],"end":[37,2,9],"istart":[10,0,10],"iend":[11,0,11]},{"type":"Token","kind":"WHITESPACE","start":[38,2,10],"end":[38,2,10],"istart":[11,0,11],"iend":[12,1,0]},{"type":"Token","kind":"R_CURLY","start":[39,3,0],"end":[39,3,0],"istart":[12,1,0],"iend":[13,1,1]}]}]}]},{"type":"Token","kind":"WHITESPACE","start":[40,3,1],"end":[40,3,1],"istart":[13,1,1],"iend":[14,1,2]},{"type":"Node","kind":"FN","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[26,1,14],"children":[{"type":"Token","kind":"FN_KW","start":[41,4,0],"end":[41,4,0],"istart":[14,1,2],"iend":[16,1,4]},{"type":"Token","kind":"WHITESPACE","start":[43,4,2],"end":[43,4,2],"istart":[16,1,4],"iend":[17,1,5]},{"type":"Node","kind":"NAME","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,1,8],"children":[{"type":"Token","kind":"IDENT","start":[44,4,3],"end":[44,4,3],"istart":[17,1,5],"iend":[20,1,8]}]},{"type":"Node","kind":"PARAM_LIST","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[22,1,10],"children":[{"type":"Token","kind":"L_PAREN","start":[47,4,6],"end":[47,4,6],"istart":[20,1,8],"iend":[21,1,9]},{"type":"Token","kind":"R_PAREN","start":[48,4,7],"end":[48,4,7],"istart":[21,1,9],"iend":[22,1,10]}]},{"type":"Token","kind":"WHITESPACE","start":[49,4,8],"end":[49,4,8],"istart":[22,1,10],"iend":[23,1,11]},{"type":"Node","kind":"BLOCK_EXPR","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Node","kind":"STMT_LIST","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[26,1,14],"children":[{"type":"Token","kind":"L_CURLY","start":[50,4,9],"end":[50,4,9],"istart":[23,1,11],"iend":[24,1,12]},{"type":"Token","kind":"WHITESPACE","start":[51,4,10],"end":[51,4,10],"istart":[24,1,12],"iend":[25,1,13]},{"type":"Token","kind":"R_CURLY","start":[52,5,0],"end":[52,5,0],"istart":[25,1,13],"iend":[26,1,14]}]}]}]}]}]},{"type":"Token","kind":"COMMA","start":[55,5,3],"end":[56,5,4]},{"type":"Token","kind":"WHITESPACE","start":[56,5,4],"end":[57,5,5]},{"type":"Token","kind":"STRING","start":[57,5,5],"end":[59,5,7]},{"type":"Token","kind":"R_PAREN","start":[59,5,7],"end":[60,5,8]}]}]}]},{"type":"Token","kind":"SEMICOLON","start":[60,5,8],"end":[61,5,9]}]},{"type":"Token","kind":"WHITESPACE","start":[61,5,9],"end":[62,6,0]},{"type":"Token","kind":"R_CURLY","start":[62,6,0],"end":[63,6,1]}]}]}]}]}"# ]], ); } diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index ae1c6efe0cb..be0de6c9366 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -13,15 +13,35 @@ use crate::{ macro_rules! define_symbols { (@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => { - // Ideally we would be emitting `const` here, but then we no longer have stable addresses - // which is what we are relying on for equality! 
In the future if consts can refer to - // statics we should swap these for `const`s and have the string literal being pointed - // to be statics to refer to such that their address is stable. + // We define symbols as both `const`s and `static`s because some const code requires const symbols, + // but code from before the transition relies on the lifetime of the predefined symbols and making them + // `const`s make it error (because now they're temporaries). In the future we probably should only + // use consts. + + /// Predefined symbols as `const`s (instead of the default `static`s). + pub mod consts { + use super::{Symbol, TaggedArcPtr}; + + // The strings should be in `static`s so that symbol equality holds. + $( + pub const $name: Symbol = { + static SYMBOL_STR: &str = stringify!($name); + Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) } + }; + )* + $( + pub const $alias: Symbol = { + static SYMBOL_STR: &str = $value; + Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) } + }; + )* + } + $( - pub static $name: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&stringify!($name)) }; + pub static $name: Symbol = consts::$name; )* $( - pub static $alias: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&$value) }; + pub static $alias: Symbol = consts::$alias; )* @@ -347,6 +367,7 @@ define_symbols! { option, Option, Ord, + Ordering, Output, CallRefFuture, CallOnceFuture, @@ -427,6 +448,7 @@ define_symbols! { rustc_layout_scalar_valid_range_start, rustc_legacy_const_generics, rustc_macro_transparency, + rustc_paren_sugar, rustc_reallocator, rustc_reservation_impl, rustc_safe_intrinsic, @@ -458,6 +480,8 @@ define_symbols! { system, sysv64, Target, + target_feature, + enable, termination, test_case, test, @@ -479,6 +503,7 @@ define_symbols! { u64, u8, unadjusted, + unknown, Unknown, unpin, unreachable_2015, diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 5654c04a592..67ee9d11199 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -94,7 +94,9 @@ pub fn load_workspace( let contents = loader.load_sync(path); let path = vfs::VfsPath::from(path.to_path_buf()); vfs.set_file_contents(path.clone(), contents); - vfs.file_id(&path) + vfs.file_id(&path).and_then(|(file_id, excluded)| { + (excluded == vfs::FileExcluded::No).then_some(file_id) + }) }, extra_env, ); diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs index 389c01933c9..fe1316c9bfd 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs @@ -678,6 +678,8 @@ fn path_expr(p: &mut Parser<'_>, r: Restrictions) -> (CompletedMarker, BlockLike // S { x }; // S { x, y: 32, }; // S { x, y: 32, ..Default::default() }; +// S { x, y: 32, .. }; +// S { .. }; // S { x: ::default() }; // TupleStruct { 0: 1 }; // } @@ -709,6 +711,8 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { // fn main() { // S { field ..S::default() } // S { 0 ..S::default() } + // S { field .. } + // S { 0 .. } // } name_ref_or_index(p); p.error("expected `:`"); @@ -739,7 +743,13 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { // S { .. } = S {}; // } - // We permit `.. }` on the left-hand side of a destructuring assignment. + // test struct_initializer_with_defaults + // fn foo() { + // let _s = S { .. 
}; + // } + + // We permit `.. }` on the left-hand side of a destructuring assignment + // or defaults values. if !p.at(T!['}']) { expr(p); @@ -750,6 +760,12 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { // S { ..x, a: 0 } // } + // test_err comma_after_default_values_syntax + // fn foo() { + // S { .., }; + // S { .., a: 0 } + // } + // Do not bump, so we can support additional fields after this comma. p.error("cannot use a comma after the base struct"); } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs index 21078175c0e..9a16c9db6da 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs @@ -135,6 +135,11 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) { name(p); p.expect(T![:]); types::type_(p); + // test record_field_default_values + // struct S { f: f32 = 0.0 } + if p.eat(T![=]) { + expressions::expr(p); + } m.complete(p, RECORD_FIELD); } else { m.abandon(p); diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 318f71a2d4d..79900425a17 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -1,4 +1,4 @@ -//! Generated by `cargo codegen grammar`, do not edit by hand. +//! Generated by `cargo xtask codegen grammar`, do not edit by hand. #![allow(bad_style, missing_docs, unreachable_pub)] use crate::Edition; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index b9f87b6af24..c8ea8c547a9 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -482,6 +482,10 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/record_field_attrs.rs"); } #[test] + fn record_field_default_values() { + run_and_expect_no_errors("test_data/parser/inline/ok/record_field_default_values.rs"); + } + #[test] fn record_field_list() { run_and_expect_no_errors("test_data/parser/inline/ok/record_field_list.rs"); } @@ -544,6 +548,10 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rs"); } #[test] + fn struct_initializer_with_defaults() { + run_and_expect_no_errors("test_data/parser/inline/ok/struct_initializer_with_defaults.rs"); + } + #[test] fn struct_item() { run_and_expect_no_errors("test_data/parser/inline/ok/struct_item.rs"); } #[test] fn trait_alias() { run_and_expect_no_errors("test_data/parser/inline/ok/trait_alias.rs"); } @@ -719,6 +727,10 @@ mod err { ); } #[test] + fn comma_after_default_values_syntax() { + run_and_expect_errors("test_data/parser/inline/err/comma_after_default_values_syntax.rs"); + } + #[test] fn crate_visibility_empty_recover() { run_and_expect_errors("test_data/parser/inline/err/crate_visibility_empty_recover.rs"); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast new file mode 100644 index 00000000000..feb617e1aa2 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast @@ -0,0 +1,59 @@ 
+SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + DOT2 ".." + ERROR + COMMA "," + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + DOT2 ".." + ERROR + COMMA "," + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "a" + COLON ":" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 21: expected expression +error 36: expected expression +error 37: expected COMMA diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rs new file mode 100644 index 00000000000..f1ecdf89fab --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rs @@ -0,0 +1,4 @@ +fn foo() { + S { .., }; + S { .., a: 0 } +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast index 08ae906421c..12b4e233e30 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rast @@ -44,6 +44,56 @@ SOURCE_FILE WHITESPACE " " R_CURLY "}" WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + INT_NUMBER "0" + WHITESPACE " " + DOT2 ".." + CALL_EXPR + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "default" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + DOT2 ".." + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n " RECORD_EXPR PATH PATH_SEGMENT @@ -58,20 +108,6 @@ SOURCE_FILE INT_NUMBER "0" WHITESPACE " " DOT2 ".." 
- CALL_EXPR - PATH_EXPR - PATH - PATH - PATH_SEGMENT - NAME_REF - IDENT "S" - COLON2 "::" - PATH_SEGMENT - NAME_REF - IDENT "default" - ARG_LIST - L_PAREN "(" - R_PAREN ")" WHITESPACE " " R_CURLY "}" WHITESPACE "\n" @@ -82,3 +118,9 @@ error 25: expected COMMA error 42: expected SEMICOLON error 52: expected `:` error 52: expected COMMA +error 69: expected SEMICOLON +error 83: expected `:` +error 83: expected COMMA +error 88: expected SEMICOLON +error 98: expected `:` +error 98: expected COMMA diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs index 65398ccb88e..416cd763fdb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/record_literal_before_ellipsis_recovery.rs @@ -1,4 +1,6 @@ fn main() { S { field ..S::default() } S { 0 ..S::default() } + S { field .. } + S { 0 .. } } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rast new file mode 100644 index 00000000000..33088f2cabf --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rast @@ -0,0 +1,28 @@ +SOURCE_FILE + STRUCT + STRUCT_KW "struct" + WHITESPACE " " + NAME + IDENT "S" + WHITESPACE " " + RECORD_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_FIELD + NAME + IDENT "f" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "f32" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + FLOAT_NUMBER "0.0" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs new file mode 100644 index 00000000000..d7b38944a8a --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs @@ -0,0 +1 @@ +struct S { f: f32 = 0.0 } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rast index 00948c322f4..b868da55bce 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rast @@ -131,6 +131,53 @@ SOURCE_FILE L_CURLY "{" WHITESPACE " " RECORD_EXPR_FIELD + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + COMMA "," + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "y" + COLON ":" + WHITESPACE " " + LITERAL + INT_NUMBER "32" + COMMA "," + WHITESPACE " " + DOT2 ".." + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + DOT2 ".." 
+ WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD NAME_REF IDENT "x" COLON ":" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rs index 86411fbb7dc..42895f759b2 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_lit.rs @@ -3,6 +3,8 @@ fn foo() { S { x }; S { x, y: 32, }; S { x, y: 32, ..Default::default() }; + S { x, y: 32, .. }; + S { .. }; S { x: ::default() }; TupleStruct { 0: 1 }; } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rast new file mode 100644 index 00000000000..987e219ae82 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rast @@ -0,0 +1,39 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + LET_STMT + LET_KW "let" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "_s" + WHITESPACE " " + EQ "=" + WHITESPACE " " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + DOT2 ".." + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rs new file mode 100644 index 00000000000..e08204f94c4 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/struct_initializer_with_defaults.rs @@ -0,0 +1,3 @@ +fn foo() { + let _s = S { .. }; +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index ba1fcd8e336..569070766f1 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -21,18 +21,16 @@ pub(crate) fn run() -> io::Result<()> { } } - let read_request = - |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf); - + let mut buf = String::new(); + let mut read_request = || msg::Request::read(read_json, &mut io::stdin().lock(), &mut buf); let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock()); let env = EnvSnapshot::default(); - let mut srv = proc_macro_srv::ProcMacroSrv::new(&env); - let mut buf = String::new(); + let srv = proc_macro_srv::ProcMacroSrv::new(&env); let mut span_mode = SpanMode::Id; - while let Some(req) = read_request(&mut buf)? { + while let Some(req) = read_request()? 
{ let res = match req { msg::Request::ListMacros { dylib_path } => { msg::Response::ListMacros(srv.list_macros(&dylib_path).map(|macros| { diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index 7ae75713ebf..f28821b4afc 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -35,6 +35,7 @@ use std::{ ffi::OsString, fs, path::{Path, PathBuf}, + sync::{Arc, Mutex, PoisonError}, thread, }; @@ -53,7 +54,7 @@ pub enum ProcMacroKind { pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION"); pub struct ProcMacroSrv<'env> { - expanders: HashMap<Utf8PathBuf, dylib::Expander>, + expanders: Mutex<HashMap<Utf8PathBuf, Arc<dylib::Expander>>>, env: &'env EnvSnapshot, } @@ -67,7 +68,7 @@ const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; impl ProcMacroSrv<'_> { pub fn expand<S: ProcMacroSrvSpan>( - &mut self, + &self, lib: impl AsRef<Utf8Path>, env: Vec<(String, String)>, current_dir: Option<impl AsRef<Path>>, @@ -118,29 +119,37 @@ impl ProcMacroSrv<'_> { } pub fn list_macros( - &mut self, + &self, dylib_path: &Utf8Path, ) -> Result<Vec<(String, ProcMacroKind)>, String> { let expander = self.expander(dylib_path)?; Ok(expander.list_macros()) } - fn expander(&mut self, path: &Utf8Path) -> Result<&dylib::Expander, String> { + fn expander(&self, path: &Utf8Path) -> Result<Arc<dylib::Expander>, String> { let expander = || { - dylib::Expander::new(path) - .map_err(|err| format!("Cannot create expander for {path}: {err}",)) + let expander = dylib::Expander::new(path) + .map_err(|err| format!("Cannot create expander for {path}: {err}",)); + expander.map(Arc::new) }; - Ok(match self.expanders.entry(path.to_path_buf()) { - Entry::Vacant(v) => v.insert(expander()?), - Entry::Occupied(mut e) => { - let time = fs::metadata(path).and_then(|it| it.modified()).ok(); - if Some(e.get().modified_time()) != time { - e.insert(expander()?); + Ok( + match self + .expanders + .lock() + .unwrap_or_else(PoisonError::into_inner) + .entry(path.to_path_buf()) + { + Entry::Vacant(v) => v.insert(expander()?).clone(), + Entry::Occupied(mut e) => { + let time = fs::metadata(path).and_then(|it| it.modified()).ok(); + if Some(e.get().modified_time()) != time { + e.insert(expander()?); + } + e.get().clone() } - e.into_mut() - } - }) + }, + ) } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 4ce4544243a..1b085520d56 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -107,7 +107,7 @@ fn assert_expand_impl( pub(crate) fn list() -> Vec<String> { let dylib_path = proc_macro_test_dylib_path(); let env = EnvSnapshot::default(); - let mut srv = ProcMacroSrv::new(&env); + let srv = ProcMacroSrv::new(&env); let res = srv.list_macros(&dylib_path).unwrap(); res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect() } diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index e4a61134620..b5f4e43a115 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -277,6 +277,9 @@ impl CargoWorkspace { /// Fetches the metadata for the given `cargo_toml` manifest. 
/// A successful result may contain another metadata error if the initial fetching failed but /// the `--no-deps` retry succeeded. + /// + /// The sysroot is used to set the `RUSTUP_TOOLCHAIN` env var when invoking cargo + /// to ensure that the rustup proxy uses the correct toolchain. pub fn fetch_metadata( cargo_toml: &ManifestPath, current_dir: &AbsPath, diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index fc1fd7b877f..0c734474682 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -260,19 +260,19 @@ fn parse_cfg(s: &str) -> Result<cfg::CfgAtom, String> { } #[derive(Clone, Debug, PartialEq, Eq)] -pub enum SysrootSourceWorkspaceConfig { +pub enum RustSourceWorkspaceConfig { CargoMetadata(CargoMetadataConfig), Stitched, } -impl Default for SysrootSourceWorkspaceConfig { +impl Default for RustSourceWorkspaceConfig { fn default() -> Self { - SysrootSourceWorkspaceConfig::default_cargo() + RustSourceWorkspaceConfig::default_cargo() } } -impl SysrootSourceWorkspaceConfig { +impl RustSourceWorkspaceConfig { pub fn default_cargo() -> Self { - SysrootSourceWorkspaceConfig::CargoMetadata(Default::default()) + RustSourceWorkspaceConfig::CargoMetadata(Default::default()) } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index a3963967610..feee40a1fc9 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -508,5 +508,5 @@ fn serialize_crate_name<S>(name: &CrateName, se: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { - se.serialize_str(name) + se.serialize_str(name.as_str()) } diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 8f633d24be9..544ba43ba66 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -22,38 +22,40 @@ use toolchain::{probe_for_binary, Tool}; use crate::{ cargo_workspace::CargoMetadataConfig, utf8_stdout, CargoWorkspace, ManifestPath, - SysrootSourceWorkspaceConfig, + RustSourceWorkspaceConfig, }; #[derive(Debug, Clone, PartialEq, Eq)] pub struct Sysroot { root: Option<AbsPathBuf>, - src_root: Option<AbsPathBuf>, - workspace: SysrootWorkspace, + rust_lib_src_root: Option<AbsPathBuf>, + workspace: RustLibSrcWorkspace, error: Option<String>, } #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum SysrootWorkspace { +pub enum RustLibSrcWorkspace { Workspace(CargoWorkspace), Stitched(Stitched), Empty, } #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) struct Stitched { - crates: Arena<SysrootCrateData>, +pub struct Stitched { + crates: Arena<RustLibSrcCrateData>, } -impl ops::Index<SysrootCrate> for Stitched { - type Output = SysrootCrateData; - fn index(&self, index: SysrootCrate) -> &SysrootCrateData { +impl ops::Index<RustLibSrcCrate> for Stitched { + type Output = RustLibSrcCrateData; + fn index(&self, index: RustLibSrcCrate) -> &RustLibSrcCrateData { &self.crates[index] } } impl Stitched { - pub(crate) fn public_deps(&self) -> impl Iterator<Item = (CrateName, SysrootCrate, bool)> + '_ { + pub(crate) fn public_deps( + &self, + ) -> impl Iterator<Item = (CrateName, RustLibSrcCrate, bool)> + '_ { // core is added as a dependency before std 
in order to // mimic rustcs dependency order [("core", true), ("alloc", false), ("std", true), ("test", false)].into_iter().filter_map( @@ -63,32 +65,37 @@ impl Stitched { ) } - pub(crate) fn proc_macro(&self) -> Option<SysrootCrate> { + pub(crate) fn proc_macro(&self) -> Option<RustLibSrcCrate> { self.by_name("proc_macro") } - pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = SysrootCrate> + '_ { + pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = RustLibSrcCrate> + '_ { self.crates.iter().map(|(id, _data)| id) } - fn by_name(&self, name: &str) -> Option<SysrootCrate> { + fn by_name(&self, name: &str) -> Option<RustLibSrcCrate> { let (id, _data) = self.crates.iter().find(|(_id, data)| data.name == name)?; Some(id) } } -pub(crate) type SysrootCrate = Idx<SysrootCrateData>; +pub(crate) type RustLibSrcCrate = Idx<RustLibSrcCrateData>; #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) struct SysrootCrateData { +pub(crate) struct RustLibSrcCrateData { pub(crate) name: String, pub(crate) root: ManifestPath, - pub(crate) deps: Vec<SysrootCrate>, + pub(crate) deps: Vec<RustLibSrcCrate>, } impl Sysroot { pub const fn empty() -> Sysroot { - Sysroot { root: None, src_root: None, workspace: SysrootWorkspace::Empty, error: None } + Sysroot { + root: None, + rust_lib_src_root: None, + workspace: RustLibSrcWorkspace::Empty, + error: None, + } } /// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/` @@ -100,15 +107,15 @@ impl Sysroot { /// Returns the sysroot "source" directory, where stdlib sources are located, like: /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` - pub fn src_root(&self) -> Option<&AbsPath> { - self.src_root.as_deref() + pub fn rust_lib_src_root(&self) -> Option<&AbsPath> { + self.rust_lib_src_root.as_deref() } - pub fn is_empty(&self) -> bool { + pub fn is_rust_lib_src_empty(&self) -> bool { match &self.workspace { - SysrootWorkspace::Workspace(ws) => ws.packages().next().is_none(), - SysrootWorkspace::Stitched(stitched) => stitched.crates.is_empty(), - SysrootWorkspace::Empty => true, + RustLibSrcWorkspace::Workspace(ws) => ws.packages().next().is_none(), + RustLibSrcWorkspace::Stitched(stitched) => stitched.crates.is_empty(), + RustLibSrcWorkspace::Empty => true, } } @@ -118,13 +125,13 @@ impl Sysroot { pub fn num_packages(&self) -> usize { match &self.workspace { - SysrootWorkspace::Workspace(ws) => ws.packages().count(), - SysrootWorkspace::Stitched(c) => c.crates().count(), - SysrootWorkspace::Empty => 0, + RustLibSrcWorkspace::Workspace(ws) => ws.packages().count(), + RustLibSrcWorkspace::Stitched(c) => c.crates().count(), + RustLibSrcWorkspace::Empty => 0, } } - pub(crate) fn workspace(&self) -> &SysrootWorkspace { + pub(crate) fn workspace(&self) -> &RustLibSrcWorkspace { &self.workspace } } @@ -133,33 +140,33 @@ impl Sysroot { /// Attempts to discover the toolchain's sysroot from the given `dir`. 
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Sysroot { let sysroot_dir = discover_sysroot_dir(dir, extra_env); - let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| { - discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env) + let rust_lib_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| { + discover_rust_lib_src_dir_or_add_component(sysroot_dir, dir, extra_env) }); - Sysroot::assemble(Some(sysroot_dir), sysroot_src_dir) + Sysroot::assemble(Some(sysroot_dir), rust_lib_src_dir) } pub fn discover_with_src_override( current_dir: &AbsPath, extra_env: &FxHashMap<String, String>, - sysroot_src_dir: AbsPathBuf, + rust_lib_src_dir: AbsPathBuf, ) -> Sysroot { let sysroot_dir = discover_sysroot_dir(current_dir, extra_env); - Sysroot::assemble(Some(sysroot_dir), Some(Ok(sysroot_src_dir))) + Sysroot::assemble(Some(sysroot_dir), Some(Ok(rust_lib_src_dir))) } - pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot { - let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir) + pub fn discover_rust_lib_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot { + let rust_lib_src_dir = discover_rust_lib_src_dir(&sysroot_dir) .ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}")); - Sysroot::assemble(Some(Ok(sysroot_dir)), Some(sysroot_src_dir)) + Sysroot::assemble(Some(Ok(sysroot_dir)), Some(rust_lib_src_dir)) } pub fn discover_rustc_src(&self) -> Option<ManifestPath> { get_rustc_src(self.root()?) } - pub fn new(sysroot_dir: Option<AbsPathBuf>, sysroot_src_dir: Option<AbsPathBuf>) -> Sysroot { - Self::assemble(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok)) + pub fn new(sysroot_dir: Option<AbsPathBuf>, rust_lib_src_dir: Option<AbsPathBuf>) -> Sysroot { + Self::assemble(sysroot_dir.map(Ok), rust_lib_src_dir.map(Ok)) } /// Returns a command to run a tool preferring the cargo proxies if the sysroot exists. 
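The doc comment added to `fetch_metadata` further up and the command-building helper documented just above describe the same mechanism: when the sysroot root is known, the rustup proxy is pointed at that toolchain before cargo is spawned. A minimal sketch of the idea, not code from this patch; the helper name is invented and it assumes rustup accepts an absolute toolchain path in `RUSTUP_TOOLCHAIN`:

use std::path::Path;
use std::process::Command;

// Sketch only: build a cargo invocation that the rustup proxy resolves against
// the toolchain owning `sysroot_root`, as the `fetch_metadata` doc comment describes.
fn cargo_pinned_to(sysroot_root: Option<&Path>) -> Command {
    let mut cmd = Command::new("cargo");
    if let Some(root) = sysroot_root {
        // The proxy reads RUSTUP_TOOLCHAIN and dispatches to that toolchain.
        cmd.env("RUSTUP_TOOLCHAIN", root);
    }
    cmd
}
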
@@ -200,7 +207,7 @@ impl Sysroot { fn assemble( sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>, - sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>, + rust_lib_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>, ) -> Sysroot { let mut errors = String::new(); let root = match sysroot_dir { @@ -211,8 +218,8 @@ impl Sysroot { } None => None, }; - let src_root = match sysroot_src_dir { - Some(Ok(sysroot_src_dir)) => Some(sysroot_src_dir), + let rust_lib_src_root = match rust_lib_src_dir { + Some(Ok(rust_lib_src_dir)) => Some(rust_lib_src_dir), Some(Err(e)) => { format_to!(errors, "{e}\n"); None @@ -221,24 +228,28 @@ impl Sysroot { }; Sysroot { root, - src_root, - workspace: SysrootWorkspace::Empty, + rust_lib_src_root, + workspace: RustLibSrcWorkspace::Empty, error: errors.is_empty().not().then_some(errors), } } - pub fn load_workspace(&mut self, sysroot_source_config: &SysrootSourceWorkspaceConfig) { - assert!(matches!(self.workspace, SysrootWorkspace::Empty), "workspace already loaded"); - let Self { root: _, src_root: Some(src_root), workspace, error: _ } = self else { return }; - if let SysrootSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config { + pub fn load_workspace( + &self, + sysroot_source_config: &RustSourceWorkspaceConfig, + ) -> Option<RustLibSrcWorkspace> { + assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded"); + let Self { root: _, rust_lib_src_root: Some(src_root), workspace: _, error: _ } = self + else { + return None; + }; + if let RustSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config { let library_manifest = ManifestPath::try_from(src_root.join("Cargo.toml")).unwrap(); if fs::metadata(&library_manifest).is_ok() { if let Some(loaded) = - Self::load_library_via_cargo(library_manifest, src_root, cargo_config) + self.load_library_via_cargo(library_manifest, src_root, cargo_config) { - *workspace = loaded; - self.load_core_check(); - return; + return Some(loaded); } } } @@ -255,7 +266,7 @@ impl Sysroot { .find(|it| fs::metadata(it).is_ok()); if let Some(root) = root { - stitched.crates.alloc(SysrootCrateData { + stitched.crates.alloc(RustLibSrcCrateData { name: name.into(), root, deps: Vec::new(), @@ -286,21 +297,23 @@ impl Sysroot { } } } - *workspace = SysrootWorkspace::Stitched(stitched); - self.load_core_check(); + Some(RustLibSrcWorkspace::Stitched(stitched)) } - fn load_core_check(&mut self) { + pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) { + self.workspace = workspace; if self.error.is_none() { - if let Some(src_root) = &self.src_root { + if let Some(src_root) = &self.rust_lib_src_root { let has_core = match &self.workspace { - SysrootWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"), - SysrootWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(), - SysrootWorkspace::Empty => true, + RustLibSrcWorkspace::Workspace(ws) => { + ws.packages().any(|p| ws[p].name == "core") + } + RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(), + RustLibSrcWorkspace::Empty => true, }; if !has_core { - let var_note = if env::var_os("RUST_SRC_PATH").is_some() { - " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)" + let var_note = if env::var_os("rust_lib_src_PATH").is_some() { + " (env var `rust_lib_src_PATH` is set and may be incorrect, try unsetting it)" } else { ", try running `rustup component add rust-src` to possibly fix this" }; @@ -313,10 +326,11 @@ impl Sysroot { } fn 
load_library_via_cargo( + &self, library_manifest: ManifestPath, - sysroot_src_dir: &AbsPathBuf, + rust_lib_src_dir: &AbsPathBuf, cargo_config: &CargoMetadataConfig, - ) -> Option<SysrootWorkspace> { + ) -> Option<RustLibSrcWorkspace> { tracing::debug!("Loading library metadata: {library_manifest}"); let mut cargo_config = cargo_config.clone(); // the sysroot uses `public-dependency`, so we make cargo think it's a nightly @@ -327,9 +341,9 @@ impl Sysroot { let (mut res, _) = match CargoWorkspace::fetch_metadata( &library_manifest, - sysroot_src_dir, + rust_lib_src_dir, &cargo_config, - &Sysroot::empty(), + self, // Make sure we never attempt to write to the sysroot true, &|_| (), @@ -391,7 +405,7 @@ impl Sysroot { }); let cargo_workspace = CargoWorkspace::new(res, library_manifest, Default::default()); - Some(SysrootWorkspace::Workspace(cargo_workspace)) + Some(RustLibSrcWorkspace::Workspace(cargo_workspace)) } } @@ -407,36 +421,38 @@ fn discover_sysroot_dir( Ok(AbsPathBuf::assert(Utf8PathBuf::from(stdout))) } -fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> { - if let Ok(path) = env::var("RUST_SRC_PATH") { +fn discover_rust_lib_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> { + if let Ok(path) = env::var("rust_lib_src_PATH") { if let Ok(path) = AbsPathBuf::try_from(path.as_str()) { let core = path.join("core"); if fs::metadata(&core).is_ok() { - tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {path}"); + tracing::debug!("Discovered sysroot by rust_lib_src_PATH: {path}"); return Some(path); } - tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {core:?}), ignoring"); + tracing::debug!( + "rust_lib_src_PATH is set, but is invalid (no core: {core:?}), ignoring" + ); } else { - tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring"); + tracing::debug!("rust_lib_src_PATH is set, but is invalid, ignoring"); } } - get_rust_src(sysroot_path) + get_rust_lib_src(sysroot_path) } -fn discover_sysroot_src_dir_or_add_component( +fn discover_rust_lib_src_dir_or_add_component( sysroot_path: &AbsPathBuf, current_dir: &AbsPath, extra_env: &FxHashMap<String, String>, ) -> Result<AbsPathBuf> { - discover_sysroot_src_dir(sysroot_path) + discover_rust_lib_src_dir(sysroot_path) .or_else(|| { let mut rustup = toolchain::command(Tool::Rustup.prefer_proxy(), current_dir); rustup.envs(extra_env); rustup.args(["component", "add", "rust-src"]); tracing::info!("adding rust-src component by {:?}", rustup); utf8_stdout(&mut rustup).ok()?; - get_rust_src(sysroot_path) + get_rust_lib_src(sysroot_path) }) .ok_or_else(|| { tracing::error!(%sysroot_path, "can't load standard library, try installing `rust-src`"); @@ -461,11 +477,11 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> { } } -fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> { - let rust_src = sysroot_path.join("lib/rustlib/src/rust/library"); - tracing::debug!("checking sysroot library: {rust_src}"); - if fs::metadata(&rust_src).is_ok() { - Some(rust_src) +fn get_rust_lib_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> { + let rust_lib_src = sysroot_path.join("lib/rustlib/src/rust/library"); + tracing::debug!("checking sysroot library: {rust_lib_src}"); + if fs::metadata(&rust_lib_src).is_ok() { + Some(rust_lib_src) } else { None } diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index f1113831125..54eb0e3478a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ 
b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -12,9 +12,9 @@ use span::FileId; use triomphe::Arc; use crate::{ - sysroot::SysrootWorkspace, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides, - ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, - SysrootSourceWorkspaceConfig, WorkspaceBuildScripts, + sysroot::RustLibSrcWorkspace, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides, + ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, RustSourceWorkspaceConfig, + Sysroot, WorkspaceBuildScripts, }; fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) { @@ -42,7 +42,6 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace { build_scripts: WorkspaceBuildScripts::default(), rustc: Err(None), error: None, - set_test: true, }, cfg_overrides: Default::default(), sysroot: Sysroot::empty(), @@ -50,6 +49,7 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace { toolchain: None, target_layout: Err("target_data_layout not loaded".into()), extra_includes: Vec::new(), + set_test: true, } } @@ -65,6 +65,7 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) { target_layout: Err(Arc::from("test has no data layout")), cfg_overrides: Default::default(), extra_includes: Vec::new(), + set_test: true, }; to_crate_graph(project_workspace, &mut Default::default()) } @@ -125,7 +126,10 @@ fn get_fake_sysroot() -> Sysroot { let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); let mut sysroot = Sysroot::new(Some(sysroot_dir), Some(sysroot_src_dir)); - sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo()); + let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo()); + if let Some(loaded_sysroot) = loaded_sysroot { + sysroot.set_workspace(loaded_sysroot); + } sysroot } @@ -230,7 +234,7 @@ fn rust_project_is_proc_macro_has_proc_macro_dep() { let crate_data = &crate_graph[crate_id]; // Assert that the project crate with `is_proc_macro` has a dependency // on the proc_macro sysroot crate. 
- crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap(); + crate_data.dependencies.iter().find(|&dep| *dep.name.deref() == sym::proc_macro).unwrap(); } #[test] @@ -271,15 +275,17 @@ fn smoke_test_real_sysroot_cargo() { AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))), &Default::default(), ); - sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo()); - assert!(matches!(sysroot.workspace(), SysrootWorkspace::Workspace(_))); + let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo()); + if let Some(loaded_sysroot) = loaded_sysroot { + sysroot.set_workspace(loaded_sysroot); + } + assert!(matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_))); let project_workspace = ProjectWorkspace { kind: ProjectWorkspaceKind::Cargo { cargo: cargo_workspace, build_scripts: WorkspaceBuildScripts::default(), rustc: Err(None), error: None, - set_test: true, }, sysroot, rustc_cfg: Vec::new(), @@ -287,6 +293,7 @@ fn smoke_test_real_sysroot_cargo() { toolchain: None, target_layout: Err("target_data_layout not loaded".into()), extra_includes: Vec::new(), + set_test: true, }; project_workspace.to_crate_graph( &mut { diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index dcd62753cb2..f5d46daa80f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -2,7 +2,7 @@ //! metadata` or `rust-project.json`) into representation stored in the salsa //! database -- `CrateGraph`. -use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync}; +use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread}; use anyhow::Context; use base_db::{ @@ -23,10 +23,10 @@ use crate::{ cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource}, env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, project_json::{Crate, CrateArrayIdx}, - sysroot::{SysrootCrate, SysrootWorkspace}, + sysroot::{RustLibSrcCrate, RustLibSrcWorkspace}, toolchain_info::{rustc_cfg, target_data_layout, target_tuple, version, QueryConfig}, CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package, - ProjectJson, ProjectManifest, Sysroot, SysrootSourceWorkspaceConfig, TargetData, TargetKind, + ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, }; use tracing::{debug, error, info}; @@ -64,6 +64,8 @@ pub struct ProjectWorkspace { pub cfg_overrides: CfgOverrides, /// Additional includes to add for the VFS. pub extra_includes: Vec<AbsPathBuf>, + /// Set `cfg(test)` for local crates + pub set_test: bool, } #[derive(Clone)] @@ -79,7 +81,6 @@ pub enum ProjectWorkspaceKind { /// The rustc workspace loaded for this workspace. An `Err(None)` means loading has been /// disabled or was otherwise not requested. rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>, - set_test: bool, }, /// Project workspace was specified using a `rust-project.json` file. Json(ProjectJson), @@ -98,7 +99,6 @@ pub enum ProjectWorkspaceKind { file: ManifestPath, /// Is this file a cargo script file? 
cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option<Arc<anyhow::Error>>)>, - set_test: bool, }, } @@ -113,9 +113,10 @@ impl fmt::Debug for ProjectWorkspace { target_layout, cfg_overrides, extra_includes, + set_test, } = self; match kind { - ProjectWorkspaceKind::Cargo { cargo, error: _, build_scripts, rustc, set_test } => f + ProjectWorkspaceKind::Cargo { cargo, error: _, build_scripts, rustc } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) .field("n_packages", &cargo.packages().len()) @@ -141,11 +142,12 @@ impl fmt::Debug for ProjectWorkspace { .field("toolchain", &toolchain) .field("data_layout", &target_layout) .field("n_cfg_overrides", &cfg_overrides.len()) - .field("n_extra_includes", &extra_includes.len()); + .field("n_extra_includes", &extra_includes.len()) + .field("set_test", set_test); debug_struct.finish() } - ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test } => f + ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script } => f .debug_struct("DetachedFiles") .field("file", &file) .field("cargo_script", &cargo_script.is_some()) @@ -186,7 +188,7 @@ impl ProjectWorkspace { let project_location = project_json.parent().to_path_buf(); let project_json: ProjectJson = ProjectJson::new(Some(project_json.clone()), &project_location, data); - ProjectWorkspace::load_inline(project_json, config) + ProjectWorkspace::load_inline(project_json, config, progress) } ProjectManifest::CargoScript(rust_file) => { ProjectWorkspace::load_detached_file(rust_file, config)? @@ -204,19 +206,33 @@ impl ProjectWorkspace { config: &CargoConfig, progress: &dyn Fn(String), ) -> Result<ProjectWorkspace, anyhow::Error> { - let mut sysroot = match (&config.sysroot, &config.sysroot_src) { + progress("Discovering sysroot".to_owned()); + let CargoConfig { + features, + rustc_source, + extra_args, + extra_env, + set_test, + cfg_overrides, + extra_includes, + sysroot, + sysroot_src, + target, + .. 
+ } = config; + let mut sysroot = match (sysroot, sysroot_src) { (Some(RustLibSource::Discover), None) => { - Sysroot::discover(cargo_toml.parent(), &config.extra_env) + Sysroot::discover(cargo_toml.parent(), extra_env) } (Some(RustLibSource::Discover), Some(sysroot_src)) => { Sysroot::discover_with_src_override( cargo_toml.parent(), - &config.extra_env, + extra_env, sysroot_src.clone(), ) } (Some(RustLibSource::Path(path)), None) => { - Sysroot::discover_sysroot_src_dir(path.clone()) + Sysroot::discover_rust_lib_src_dir(path.clone()) } (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { Sysroot::new(Some(sysroot.clone()), Some(sysroot_src.clone())) @@ -224,100 +240,147 @@ impl ProjectWorkspace { (None, _) => Sysroot::empty(), }; - let rustc_dir = match &config.rustc_source { - Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) - .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), - Some(RustLibSource::Discover) => sysroot - .discover_rustc_src() - .ok_or_else(|| Some("Failed to discover rustc source for sysroot.".to_owned())), - None => Err(None), - }; - - tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot"); + tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot"); + progress("Querying project metadata".to_owned()); let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml); let targets = - target_tuple::get(toolchain_config, config.target.as_deref(), &config.extra_env) - .unwrap_or_default(); - let toolchain = version::get(toolchain_config, &config.extra_env) - .inspect_err(|e| { - tracing::error!(%e, - "failed fetching toolchain version for {cargo_toml:?} workspace" - ) - }) - .ok() - .flatten(); - let rustc_cfg = - rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), &config.extra_env); - let cfg_overrides = config.cfg_overrides.clone(); - let data_layout = target_data_layout::get( - toolchain_config, - targets.first().map(Deref::deref), - &config.extra_env, - ); - if let Err(e) = &data_layout { - tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace"); - } - sysroot.load_workspace(&SysrootSourceWorkspaceConfig::CargoMetadata( - sysroot_metadata_config(&config.extra_env, &targets), - )); + target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default(); + + // We spawn a bunch of processes to query various information about the workspace's + // toolchain and sysroot + // We can speed up loading a bit by spawning all of these processes in parallel (especially + // on systems were process spawning is delayed) + let join = thread::scope(|s| { + let workspace_dir = cargo_toml.parent(); + let toolchain = s.spawn(|| { + version::get(toolchain_config, extra_env) + .inspect_err(|e| { + tracing::error!(%e, + "failed fetching toolchain version for {cargo_toml:?} workspace" + ) + }) + .ok() + .flatten() + }); - let rustc = rustc_dir.and_then(|rustc_dir| { - info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source"); - match CargoWorkspace::fetch_metadata( - &rustc_dir, - cargo_toml.parent(), - &CargoMetadataConfig { - features: crate::CargoFeatures::default(), - targets: targets.clone(), - extra_args: config.extra_args.clone(), - extra_env: config.extra_env.clone(), - }, - &sysroot, - false, - progress, - ) { - Ok((meta, _error)) => { - let workspace = CargoWorkspace::new(meta, cargo_toml.clone(), Env::default()); - let build_scripts = 
WorkspaceBuildScripts::rustc_crates( - &workspace, - cargo_toml.parent(), - &config.extra_env, + let rustc_cfg = s.spawn(|| { + rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env) + }); + let data_layout = s.spawn(|| { + target_data_layout::get( + toolchain_config, + targets.first().map(Deref::deref), + extra_env, + ).inspect_err(|e| { + tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace") + }) + }); + + let rustc_dir = s.spawn(|| { + let rustc_dir = match rustc_source { + Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) + .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), + Some(RustLibSource::Discover) => { + sysroot.discover_rustc_src().ok_or_else(|| { + Some("Failed to discover rustc source for sysroot.".to_owned()) + }) + } + None => Err(None), + }; + rustc_dir.and_then(|rustc_dir| { + info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source"); + match CargoWorkspace::fetch_metadata( + &rustc_dir, + workspace_dir, + &CargoMetadataConfig { + features: crate::CargoFeatures::default(), + targets: targets.clone(), + extra_args: extra_args.clone(), + extra_env: extra_env.clone(), + }, &sysroot, - ); - Ok(Box::new((workspace, build_scripts))) - } - Err(e) => { - tracing::error!( - %e, - "Failed to read Cargo metadata from rustc source at {rustc_dir}", - ); - Err(Some(format!( - "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}" - ))) - } - } + false, + &|_| (), + ) { + Ok((meta, _error)) => { + let workspace = + CargoWorkspace::new(meta, cargo_toml.clone(), Env::default()); + let build_scripts = WorkspaceBuildScripts::rustc_crates( + &workspace, + workspace_dir, + extra_env, + &sysroot, + ); + Ok(Box::new((workspace, build_scripts))) + } + Err(e) => { + tracing::error!( + %e, + "Failed to read Cargo metadata from rustc source at {rustc_dir}", + ); + Err(Some(format!( + "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}" + ))) + } + } + }) + }); + + let cargo_metadata = s.spawn(|| { + CargoWorkspace::fetch_metadata( + cargo_toml, + workspace_dir, + &CargoMetadataConfig { + features: features.clone(), + targets: targets.clone(), + extra_args: extra_args.clone(), + extra_env: extra_env.clone(), + }, + &sysroot, + false, + &|_| (), + ) + }); + let loaded_sysroot = s.spawn(|| { + sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata( + sysroot_metadata_config(extra_env, &targets), + )) + }); + let cargo_config_extra_env = + s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot)); + thread::Result::Ok(( + toolchain.join()?, + rustc_cfg.join()?, + data_layout.join()?, + rustc_dir.join()?, + loaded_sysroot.join()?, + cargo_metadata.join()?, + cargo_config_extra_env.join()?, + )) }); - let (meta, error) = CargoWorkspace::fetch_metadata( - cargo_toml, - cargo_toml.parent(), - &CargoMetadataConfig { - features: config.features.clone(), - targets, - extra_args: config.extra_args.clone(), - extra_env: config.extra_env.clone(), - }, - &sysroot, - false, - progress, - ) - .with_context(|| { + let ( + toolchain, + rustc_cfg, + data_layout, + rustc, + loaded_sysroot, + cargo_metadata, + cargo_config_extra_env, + ) = match join { + Ok(it) => it, + Err(e) => std::panic::resume_unwind(e), + }; + + let (meta, error) = cargo_metadata.with_context(|| { format!( "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", ) })?; - let cargo_config_extra_env = cargo_config_env(cargo_toml, &config.extra_env, &sysroot); let cargo 
= CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env); + if let Some(loaded_sysroot) = loaded_sysroot { + sysroot.set_workspace(loaded_sysroot); + } Ok(ProjectWorkspace { kind: ProjectWorkspaceKind::Cargo { @@ -325,35 +388,70 @@ impl ProjectWorkspace { build_scripts: WorkspaceBuildScripts::default(), rustc, error: error.map(Arc::new), - set_test: config.set_test, }, sysroot, rustc_cfg, - cfg_overrides, + cfg_overrides: cfg_overrides.clone(), toolchain, target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), - extra_includes: config.extra_includes.clone(), + extra_includes: extra_includes.clone(), + set_test: *set_test, }) } - pub fn load_inline(project_json: ProjectJson, config: &CargoConfig) -> ProjectWorkspace { + pub fn load_inline( + project_json: ProjectJson, + config: &CargoConfig, + progress: &dyn Fn(String), + ) -> ProjectWorkspace { + progress("Discovering sysroot".to_owned()); let mut sysroot = Sysroot::new(project_json.sysroot.clone(), project_json.sysroot_src.clone()); - sysroot.load_workspace(&SysrootSourceWorkspaceConfig::Stitched); + let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::Stitched); + if let Some(loaded_sysroot) = loaded_sysroot { + sysroot.set_workspace(loaded_sysroot); + } + + tracing::info!(workspace = %project_json.manifest_or_root(), src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot"); + progress("Querying project metadata".to_owned()); let query_config = QueryConfig::Rustc(&sysroot, project_json.path().as_ref()); - let toolchain = version::get(query_config, &config.extra_env).ok().flatten(); + let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env) + .unwrap_or_default(); + + // We spawn a bunch of processes to query various information about the workspace's + // toolchain and sysroot + // We can speed up loading a bit by spawning all of these processes in parallel (especially + // on systems were process spawning is delayed) + let join = thread::scope(|s| { + let toolchain = + s.spawn(|| version::get(query_config, &config.extra_env).ok().flatten()); + let rustc_cfg = s.spawn(|| { + rustc_cfg::get(query_config, targets.first().map(Deref::deref), &config.extra_env) + }); + let data_layout = s.spawn(|| { + target_data_layout::get( + query_config, + targets.first().map(Deref::deref), + &config.extra_env, + ) + }); + thread::Result::Ok((toolchain.join()?, rustc_cfg.join()?, data_layout.join()?)) + }); + + let (toolchain, rustc_cfg, target_layout) = match join { + Ok(it) => it, + Err(e) => std::panic::resume_unwind(e), + }; - let target = config.target.as_deref(); - let rustc_cfg = rustc_cfg::get(query_config, target, &config.extra_env); - let data_layout = target_data_layout::get(query_config, target, &config.extra_env); ProjectWorkspace { kind: ProjectWorkspaceKind::Json(project_json), sysroot, rustc_cfg, toolchain, - target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + target_layout: target_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), cfg_overrides: config.cfg_overrides.clone(), extra_includes: config.extra_includes.clone(), + set_test: config.set_test, } } @@ -363,7 +461,7 @@ impl ProjectWorkspace { ) -> anyhow::Result<ProjectWorkspace> { let dir = detached_file.parent(); let mut sysroot = match &config.sysroot { - Some(RustLibSource::Path(path)) => Sysroot::discover_sysroot_src_dir(path.clone()), + Some(RustLibSource::Path(path)) => 
Sysroot::discover_rust_lib_src_dir(path.clone()), Some(RustLibSource::Discover) => Sysroot::discover(dir, &config.extra_env), None => Sysroot::empty(), }; @@ -374,9 +472,12 @@ impl ProjectWorkspace { .unwrap_or_default(); let rustc_cfg = rustc_cfg::get(query_config, None, &config.extra_env); let data_layout = target_data_layout::get(query_config, None, &config.extra_env); - sysroot.load_workspace(&SysrootSourceWorkspaceConfig::CargoMetadata( + let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata( sysroot_metadata_config(&config.extra_env, &targets), )); + if let Some(loaded_sysroot) = loaded_sysroot { + sysroot.set_workspace(loaded_sysroot); + } let cargo_script = CargoWorkspace::fetch_metadata( detached_file, @@ -406,7 +507,6 @@ impl ProjectWorkspace { kind: ProjectWorkspaceKind::DetachedFile { file: detached_file.to_owned(), cargo: cargo_script, - set_test: config.set_test, }, sysroot, rustc_cfg, @@ -414,6 +514,7 @@ impl ProjectWorkspace { target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), cfg_overrides: config.cfg_overrides.clone(), extra_includes: config.extra_includes.clone(), + set_test: config.set_test, }) } @@ -545,7 +646,7 @@ impl ProjectWorkspace { pub fn to_roots(&self) -> Vec<PackageRoot> { let mk_sysroot = || { let mut r = match self.sysroot.workspace() { - SysrootWorkspace::Workspace(ws) => ws + RustLibSrcWorkspace::Workspace(ws) => ws .packages() .filter_map(|pkg| { if ws[pkg].is_local { @@ -566,12 +667,17 @@ impl ProjectWorkspace { Some(PackageRoot { is_local: false, include, exclude }) }) .collect(), - SysrootWorkspace::Stitched(_) | SysrootWorkspace::Empty => vec![], + RustLibSrcWorkspace::Stitched(_) | RustLibSrcWorkspace::Empty => vec![], }; r.push(PackageRoot { is_local: false, - include: self.sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(), + include: self + .sysroot + .rust_lib_src_root() + .map(|it| it.to_path_buf()) + .into_iter() + .collect(), exclude: Vec::new(), }); r @@ -593,7 +699,7 @@ impl ProjectWorkspace { .into_iter() .chain(mk_sysroot()) .collect::<Vec<_>>(), - ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _, set_test: _ } => { + ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _ } => { cargo .packages() .map(|pkg| { @@ -728,8 +834,9 @@ impl ProjectWorkspace { sysroot, extra_env, cfg_overrides, + self.set_test, ), - ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _, set_test } => { + ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _ } => { cargo_to_crate_graph( load, rustc.as_ref().map(|a| a.as_ref()).ok(), @@ -738,10 +845,10 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, - *set_test, + self.set_test, ) } - ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test, .. } => { + ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. 
} => { if let Some((cargo, build_scripts, _)) = cargo_script { cargo_to_crate_graph( &mut |path| load(path), @@ -751,7 +858,7 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, - *set_test, + self.set_test, ) } else { detached_file_to_crate_graph( @@ -760,7 +867,7 @@ impl ProjectWorkspace { file, sysroot, cfg_overrides, - *set_test, + self.set_test, ) } } @@ -782,34 +889,22 @@ impl ProjectWorkspace { } = other; (match (kind, o_kind) { ( - ProjectWorkspaceKind::Cargo { - cargo, - rustc, - build_scripts: _, - error: _, - set_test: _, - }, + ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts: _, error: _ }, ProjectWorkspaceKind::Cargo { cargo: o_cargo, rustc: o_rustc, build_scripts: _, error: _, - set_test: _, }, ) => cargo == o_cargo && rustc == o_rustc, (ProjectWorkspaceKind::Json(project), ProjectWorkspaceKind::Json(o_project)) => { project == o_project } ( - ProjectWorkspaceKind::DetachedFile { - file, - cargo: Some((cargo_script, _, _)), - set_test: _, - }, + ProjectWorkspaceKind::DetachedFile { file, cargo: Some((cargo_script, _, _)) }, ProjectWorkspaceKind::DetachedFile { file: o_file, cargo: Some((o_cargo_script, _, _)), - set_test: _, }, ) => file == o_file && cargo_script == o_cargo_script, _ => return false, @@ -837,13 +932,13 @@ fn project_json_to_crate_graph( sysroot: &Sysroot, extra_env: &FxHashMap<String, String>, override_cfg: &CfgOverrides, + set_test: bool, ) -> (CrateGraph, ProcMacroPaths) { let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let (crate_graph, proc_macros) = &mut res; let (public_deps, libproc_macro) = sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load); - let r_a_cfg_flag = CfgAtom::Flag(sym::rust_analyzer.clone()); let mut cfg_cache: FxHashMap<&str, Vec<CfgAtom>> = FxHashMap::default(); let idx_to_crate_id: FxHashMap<CrateArrayIdx, CrateId> = project @@ -862,6 +957,7 @@ fn project_json_to_crate_graph( proc_macro_dylib_path, is_proc_macro, repository, + is_workspace_member, .. 
}, file_id, @@ -879,19 +975,28 @@ fn project_json_to_crate_graph( None => &rustc_cfg, }; - let mut cfg_options = target_cfgs - .iter() - .chain(cfg.iter()) - .chain(iter::once(&r_a_cfg_flag)) - .cloned() - .collect(); - override_cfg.apply( - &mut cfg_options, - display_name - .as_ref() - .map(|it| it.canonical_name().as_str()) - .unwrap_or_default(), - ); + let cfg_options = { + let mut cfg_options: CfgOptions = + target_cfgs.iter().chain(cfg.iter()).cloned().collect(); + + if *is_workspace_member { + if set_test { + // Add test cfg for local crates + cfg_options.insert_atom(sym::test.clone()); + } + cfg_options.insert_atom(sym::rust_analyzer.clone()); + } + + override_cfg.apply( + &mut cfg_options, + display_name + .as_ref() + .map(|it| it.canonical_name().as_str()) + .unwrap_or_default(), + ); + cfg_options + }; + let crate_graph_crate_id = crate_graph.add_crate_root( file_id, *edition, @@ -1385,7 +1490,7 @@ fn sysroot_to_crate_graph( ) -> (SysrootPublicDeps, Option<CrateId>) { let _p = tracing::info_span!("sysroot_to_crate_graph").entered(); match sysroot.workspace() { - SysrootWorkspace::Workspace(cargo) => { + RustLibSrcWorkspace::Workspace(cargo) => { let (mut cg, mut pm) = cargo_to_crate_graph( load, None, @@ -1460,7 +1565,7 @@ fn sysroot_to_crate_graph( (SysrootPublicDeps { deps: pub_deps }, libproc_macro) } - SysrootWorkspace::Stitched(stitched) => { + RustLibSrcWorkspace::Stitched(stitched) => { let cfg_options = Arc::new({ let mut cfg_options = CfgOptions::default(); cfg_options.extend(rustc_cfg); @@ -1468,7 +1573,7 @@ fn sysroot_to_crate_graph( cfg_options.insert_atom(sym::miri.clone()); cfg_options }); - let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = stitched + let sysroot_crates: FxHashMap<RustLibSrcCrate, CrateId> = stitched .crates() .filter_map(|krate| { let file_id = load(&stitched[krate].root)?; @@ -1513,7 +1618,7 @@ fn sysroot_to_crate_graph( stitched.proc_macro().and_then(|it| sysroot_crates.get(&it).copied()); (public_deps, libproc_macro) } - SysrootWorkspace::Empty => (SysrootPublicDeps { deps: vec![] }, None), + RustLibSrcWorkspace::Empty => (SysrootPublicDeps { deps: vec![] }, None), } } diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt index 2026ab2b8c2..587d3c17827 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt @@ -420,6 +420,7 @@ "group1_other_cfg=other_config", "group2_cfg=yet_another_config", "rust_analyzer", + "test", "true", ], ), @@ -496,6 +497,7 @@ "group2_cfg=fourth_config", "group2_cfg=yet_another_config", "rust_analyzer", + "test", "true", "unrelated_cfg", ], diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index a0e14b8fcb2..00805c79bcc 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -417,6 +417,7 @@ cfg_options: CfgOptions( [ "rust_analyzer", + "test", "true", ], ), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index 
c24cbb4a311..b8ce2b7430b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -37,7 +37,7 @@ rustc-hash.workspace = true serde_json = { workspace = true, features = ["preserve_order"] } serde.workspace = true serde_derive.workspace = true -tenthash = "0.4.0" +tenthash = "1.0.0" num_cpus = "1.15.0" mimalloc = { version = "0.1.30", default-features = false, optional = true } lsp-server.workspace = true diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 18c27c84496..4fc6180920f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -13,7 +13,7 @@ use hir::{ ModuleDef, Name, }; use hir_def::{ - body::BodySourceMap, + expr_store::BodySourceMap, hir::{ExprId, PatId}, SyntheticSyntax, }; @@ -1072,6 +1072,7 @@ impl flags::AnalysisStats { param_names_for_lifetime_elision_hints: true, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, + hide_closure_parameter_hints: false, closure_style: hir::ClosureStyle::ImplFn, max_length: Some(25), closing_brace_hints_min_lines: Some(20), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs index 28f25975d64..6a3ceb640b9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs @@ -4,7 +4,7 @@ use project_model::{CargoConfig, RustLibSource}; use rustc_hash::FxHashSet; -use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; +use hir::{db::HirDatabase, sym, Crate, HirFileIdExt, Module}; use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity}; use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase}; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; @@ -60,7 +60,7 @@ impl flags::Diagnostics { let file_id = module.definition_source_file_id(db).original_file(db); if !visited_files.contains(&file_id) { let crate_name = - module.krate().display_name(db).as_deref().unwrap_or("unknown").to_owned(); + module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned(); println!( "processing crate: {crate_name}, module: {}", _vfs.file_path(file_id.into()) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 199f61e70f0..e9ca12deaf6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -12,8 +12,8 @@ use paths::Utf8PathBuf; use profile::StopWatch; use project_model::toolchain_info::{target_data_layout, QueryConfig}; use project_model::{ - CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource, Sysroot, - SysrootSourceWorkspaceConfig, + CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource, + RustSourceWorkspaceConfig, Sysroot, }; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; @@ -75,7 +75,11 @@ impl Tester { }; let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env); - sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo()); + let loaded_sysroot = 
sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo()); + if let Some(loaded_sysroot) = loaded_sysroot { + sysroot.set_workspace(loaded_sysroot); + } + let data_layout = target_data_layout::get( QueryConfig::Rustc(&sysroot, tmp_file.parent().unwrap().as_ref()), None, @@ -86,7 +90,6 @@ impl Tester { kind: ProjectWorkspaceKind::DetachedFile { file: ManifestPath::try_from(tmp_file).unwrap(), cargo: None, - set_test: true, }, sysroot, rustc_cfg: vec![], @@ -94,6 +97,7 @@ impl Tester { target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), cfg_overrides: Default::default(), extra_includes: vec![], + set_test: true, }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: false, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index dc0f722aae6..fe75872105a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -139,45 +139,42 @@ impl flags::Scip { let mut occurrences = Vec::new(); let mut symbols = Vec::new(); - tokens.into_iter().for_each(|(text_range, id)| { + for (text_range, id) in tokens.into_iter() { let token = si.tokens.get(id).unwrap(); - let (symbol, enclosing_symbol, is_inherent_impl) = - if let Some(TokenSymbols { symbol, enclosing_symbol, is_inherent_impl }) = - symbol_generator.token_symbols(id, token) - { - (symbol, enclosing_symbol, is_inherent_impl) - } else { - ("".to_owned(), None, false) - }; + let Some(TokenSymbols { symbol, enclosing_symbol, is_inherent_impl }) = + symbol_generator.token_symbols(id, token) + else { + // token did not have a moniker, so there is no reasonable occurrence to emit + // see ide::moniker::def_to_moniker + continue; + }; - if !symbol.is_empty() { - let is_defined_in_this_document = match token.definition { - Some(def) => def.file_id == file_id, - _ => false, - }; - if is_defined_in_this_document { - if token_ids_emitted.insert(id) { - // token_ids_emitted does deduplication. This checks that this results - // in unique emitted symbols, as otherwise references are ambiguous. - let should_emit = record_error_if_symbol_already_used( + let is_defined_in_this_document = match token.definition { + Some(def) => def.file_id == file_id, + _ => false, + }; + if is_defined_in_this_document { + if token_ids_emitted.insert(id) { + // token_ids_emitted does deduplication. This checks that this results + // in unique emitted symbols, as otherwise references are ambiguous. + let should_emit = record_error_if_symbol_already_used( + symbol.clone(), + is_inherent_impl, + relative_path.as_str(), + &line_index, + text_range, + ); + if should_emit { + symbols.push(compute_symbol_info( symbol.clone(), - is_inherent_impl, - relative_path.as_str(), - &line_index, - text_range, - ); - if should_emit { - symbols.push(compute_symbol_info( - symbol.clone(), - enclosing_symbol, - token, - )); - } + enclosing_symbol, + token, + )); } - } else { - token_ids_referenced.insert(id); } + } else { + token_ids_referenced.insert(id); } // If the range of the def and the range of the token are the same, this must be the definition. 
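Across project-model and the rustc_tests runner above, the sysroot rework surfaces as one repeated call shape: `load_workspace` now borrows `&self` and returns the loaded `RustLibSrcWorkspace`, and the caller installs it with `set_workspace`, which also runs the `core`-is-present check that `load_core_check` used to perform. Condensed from those call sites into a rough sketch (the wrapper function is invented; the imports assume the rust-analyzer workspace crates):

use paths::AbsPath;
use project_model::{RustSourceWorkspaceConfig, Sysroot};
use rustc_hash::FxHashMap;

// Sketch of the new two-step call sites: loading the stdlib sources no longer
// mutates the Sysroot in place; the caller decides whether to install the result.
fn discover_and_load(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Sysroot {
    let mut sysroot = Sysroot::discover(dir, extra_env);
    let loaded = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
    if let Some(loaded) = loaded {
        // set_workspace records an error if the loaded sources lack `core`.
        sysroot.set_workspace(loaded);
    }
    sysroot
}
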
@@ -202,7 +199,7 @@ impl flags::Scip { special_fields: Default::default(), enclosing_range: Vec::new(), }); - }); + } if occurrences.is_empty() { continue; @@ -444,14 +441,14 @@ impl SymbolGenerator { MonikerResult::Moniker(moniker) => TokenSymbols { symbol: scip::symbol::format_symbol(moniker_to_symbol(moniker)), enclosing_symbol: None, - is_inherent_impl: moniker - .identifier - .description - .get(moniker.identifier.description.len() - 2) - .is_some_and(|descriptor| { + is_inherent_impl: match &moniker.identifier.description[..] { + // inherent impls are represented as impl#[SelfType] + [.., descriptor, _] => { descriptor.desc == MonikerDescriptorKind::Type && descriptor.name == "impl" - }), + } + _ => false, + }, }, MonikerResult::Local { enclosing_moniker } => { let local_symbol = scip::types::Symbol::new_local(local_count); @@ -549,7 +546,9 @@ mod test { continue; } for &(range, id) in &file.tokens { - if range.contains(offset - TextSize::from(1)) { + // check if cursor is within token, ignoring token for the module defined by the file (whose range is the whole file) + if range.start() != TextSize::from(0) && range.contains(offset - TextSize::from(1)) + { let token = si.tokens.get(id).unwrap(); found_symbol = match token.moniker.as_ref() { None => None, @@ -885,7 +884,7 @@ pub mod example_mod { ); let file = si.files.first().unwrap(); - let (_, token_id) = file.tokens.first().unwrap(); + let (_, token_id) = file.tokens.get(1).unwrap(); // first token is file module, second is `bar` let token = si.tokens.get(*token_id).unwrap(); assert_eq!(token.documentation.as_ref().map(|d| d.as_str()), Some("foo")); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs index 986bd018b42..021b1bff393 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs @@ -1,5 +1,5 @@ //! Reports references in code that the IDE layer cannot resolve. -use hir::{db::HirDatabase, AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics}; +use hir::{db::HirDatabase, sym, AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics}; use ide::{AnalysisHost, RootDatabase, TextRange}; use ide_db::{ base_db::{SourceDatabase, SourceRootDatabase}, @@ -66,7 +66,7 @@ impl flags::UnresolvedReferences { let file_id = module.definition_source_file_id(db).original_file(db); if !visited_files.contains(&file_id) { let crate_name = - module.krate().display_name(db).as_deref().unwrap_or("unknown").to_owned(); + module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned(); let file_path = vfs.file_path(file_id.into()); eprintln!("processing crate: {crate_name}, module: {file_path}",); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 44325fa1a29..d7e9a5c586c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -84,10 +84,10 @@ config_data! { completion_snippets_custom: FxHashMap<String, SnippetDef> = Config::completion_snippets_default(), - /// These directories will be ignored by rust-analyzer. They are + /// These paths (file/directories) will be ignored by rust-analyzer. They are /// relative to the workspace root, and globs are not supported. 
You may /// also need to add the folders to Code's `files.watcherExclude`. - files_excludeDirs: Vec<Utf8PathBuf> = vec![], + files_exclude | files_excludeDirs: Vec<Utf8PathBuf> = vec![], @@ -208,6 +208,8 @@ config_data! { /// Whether to hide inlay type hints for `let` statements that initialize to a closure. /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. inlayHints_typeHints_hideClosureInitialization: bool = false, + /// Whether to hide inlay parameter type hints for closures. + inlayHints_typeHints_hideClosureParameter:bool = false, /// Whether to hide inlay type hints for constructors. inlayHints_typeHints_hideNamedConstructor: bool = false, @@ -528,7 +530,7 @@ config_data! { imports_granularity_enforce: bool = false, /// How imports should be grouped into use statements. imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate, - /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines. + /// Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines. imports_group_enable: bool = true, /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. imports_merge_glob: bool = true, @@ -1666,6 +1668,9 @@ impl Config { hide_closure_initialization_hints: self .inlayHints_typeHints_hideClosureInitialization() .to_owned(), + hide_closure_parameter_hints: self + .inlayHints_typeHints_hideClosureParameter() + .to_owned(), closure_style: match self.inlayHints_closureStyle() { ClosureStyle::ImplFn => hir::ClosureStyle::ImplFn, ClosureStyle::RustAnalyzer => hir::ClosureStyle::RANotation, @@ -1787,7 +1792,7 @@ impl Config { fn discovered_projects(&self) -> Vec<ManifestOrProjectJson> { let exclude_dirs: Vec<_> = - self.files_excludeDirs().iter().map(|p| self.root_path.join(p)).collect(); + self.files_exclude().iter().map(|p| self.root_path.join(p)).collect(); let mut projects = vec![]; for fs_proj in &self.discovered_projects_from_filesystem { @@ -1909,10 +1914,14 @@ impl Config { } _ => FilesWatcher::Server, }, - exclude: self.files_excludeDirs().iter().map(|it| self.root_path.join(it)).collect(), + exclude: self.excluded().collect(), } } + pub fn excluded(&self) -> impl Iterator<Item = AbsPathBuf> + use<'_> { + self.files_exclude().iter().map(|it| self.root_path.join(it)) + } + pub fn notifications(&self) -> NotificationsConfig { NotificationsConfig { cargo_toml_not_found: self.notifications_cargoTomlNotFound().to_owned(), @@ -3624,21 +3633,9 @@ fn manual(fields: &[SchemaField]) -> String { let name = format!("rust-analyzer.{}", field.replace('_', ".")); let doc = doc_comment_to_string(doc); if default.contains('\n') { - format_to_acc!( - acc, - r#"[[{name}]]{name}:: -+ --- -Default: ----- -{default} ----- -{doc} --- -"# - ) + format_to_acc!(acc, " **{name}**\n\nDefault:\n\n```{default}\n\n```\n\n {doc}\n\n ") } else { - format_to_acc!(acc, "[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n") + format_to_acc!(acc, "**{name}** (default: {default})\n\n {doc}\n\n") } }) } @@ -3716,7 +3713,7 @@ mod tests { #[test] fn generate_config_documentation() { - let docs_path = project_root().join("docs/user/generated_config.adoc"); + let docs_path = project_root().join("docs/book/src/configuration_generated.md"); let expected = FullConfigInput::manual(); ensure_file_contents(docs_path.as_std_path(), 
&expected); } @@ -3805,8 +3802,10 @@ mod tests { (config, _, _) = config.apply_change(change); assert_eq!(config.cargo_targetDir(None), &Some(TargetDirectory::UseSubdirectory(true))); + let target = + Utf8PathBuf::from(std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_owned())); assert!( - matches!(config.flycheck(None), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer"))) + matches!(config.flycheck(None), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(target.join("rust-analyzer"))) ); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 0f2d7823b7e..70105cda006 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -396,6 +396,7 @@ impl GlobalState { || !self.config.same_source_root_parent_map(&self.local_roots_parent_map) { let config_change = { + let _p = span!(Level::INFO, "GlobalState::process_changes/config_change").entered(); let user_config_path = (|| { let mut p = Config::user_config_dir_path()?; p.push("rust-analyzer.toml"); @@ -569,12 +570,12 @@ impl GlobalState { if let Some((method, start)) = self.req_queue.incoming.complete(&response.id) { if let Some(err) = &response.error { if err.message.starts_with("server panicked") { - self.poke_rust_analyzer_developer(format!("{}, check the log", err.message)) + self.poke_rust_analyzer_developer(format!("{}, check the log", err.message)); } } let duration = start.elapsed(); - tracing::debug!("handled {} - ({}) in {:0.2?}", method, response.id, duration); + tracing::debug!(name: "message response", method, %response.id, duration = format_args!("{:0.2?}", duration)); self.send(response.into()); } } @@ -649,7 +650,8 @@ impl GlobalStateSnapshot { RwLockReadGuard::map(self.vfs.read(), |(it, _)| it) } - pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<FileId> { + /// Returns `None` if the file was excluded. + pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<Option<FileId>> { url_to_file_id(&self.vfs_read(), url) } @@ -657,7 +659,8 @@ impl GlobalStateSnapshot { file_id_to_url(&self.vfs_read(), id) } - pub(crate) fn vfs_path_to_file_id(&self, vfs_path: &VfsPath) -> anyhow::Result<FileId> { + /// Returns `None` if the file was excluded. + pub(crate) fn vfs_path_to_file_id(&self, vfs_path: &VfsPath) -> anyhow::Result<Option<FileId>> { vfs_path_to_file_id(&self.vfs_read(), vfs_path) } @@ -749,14 +752,21 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { url_from_abs_path(path) } -pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<FileId> { +/// Returns `None` if the file was excluded. +pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<Option<FileId>> { let path = from_proto::vfs_path(url)?; - let res = vfs.file_id(&path).ok_or_else(|| anyhow::format_err!("file not found: {path}"))?; - Ok(res) + vfs_path_to_file_id(vfs, &path) } -pub(crate) fn vfs_path_to_file_id(vfs: &vfs::Vfs, vfs_path: &VfsPath) -> anyhow::Result<FileId> { - let res = +/// Returns `None` if the file was excluded. 
+pub(crate) fn vfs_path_to_file_id( + vfs: &vfs::Vfs, + vfs_path: &VfsPath, +) -> anyhow::Result<Option<FileId>> { + let (file_id, excluded) = vfs.file_id(vfs_path).ok_or_else(|| anyhow::format_err!("file not found: {vfs_path}"))?; - Ok(res) + match excluded { + vfs::FileExcluded::Yes => Ok(None), + vfs::FileExcluded::No => Ok(Some(file_id)), + } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index ff50f7533a6..4683877db69 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -118,7 +118,7 @@ impl RequestDispatcher<'_> { } return self; } - self.on_with_thread_intent::<true, ALLOW_RETRYING, R>( + self.on_with_thread_intent::<false, ALLOW_RETRYING, R>( ThreadIntent::Worker, f, Self::content_modified_error, @@ -147,7 +147,7 @@ impl RequestDispatcher<'_> { } return self; } - self.on_with_thread_intent::<true, false, R>(ThreadIntent::Worker, f, on_cancelled) + self.on_with_thread_intent::<false, false, R>(ThreadIntent::Worker, f, on_cancelled) } /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not @@ -166,7 +166,7 @@ impl RequestDispatcher<'_> { } return self; } - self.on_with_thread_intent::<true, ALLOW_RETRYING, R>( + self.on_with_thread_intent::<false, ALLOW_RETRYING, R>( ThreadIntent::Worker, f, Self::content_modified_error, @@ -193,7 +193,7 @@ impl RequestDispatcher<'_> { } return self; } - self.on_with_thread_intent::<true, ALLOW_RETRYING, R>( + self.on_with_thread_intent::<false, ALLOW_RETRYING, R>( ThreadIntent::LatencySensitive, f, Self::content_modified_error, @@ -212,7 +212,7 @@ impl RequestDispatcher<'_> { R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, R::Result: Serialize, { - self.on_with_thread_intent::<false, false, R>( + self.on_with_thread_intent::<true, false, R>( ThreadIntent::LatencySensitive, f, Self::content_modified_error, @@ -231,7 +231,7 @@ impl RequestDispatcher<'_> { } } - fn on_with_thread_intent<const MAIN_POOL: bool, const ALLOW_RETRYING: bool, R>( + fn on_with_thread_intent<const RUSTFMT: bool, const ALLOW_RETRYING: bool, R>( &mut self, intent: ThreadIntent, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, @@ -251,10 +251,10 @@ impl RequestDispatcher<'_> { tracing::debug!(?params); let world = self.global_state.snapshot(); - if MAIN_POOL { - &mut self.global_state.task_pool.handle - } else { + if RUSTFMT { &mut self.global_state.fmt_pool.handle + } else { + &mut self.global_state.task_pool.handle } .spawn(intent, move || { let result = panic::catch_unwind(move || { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 84ba89d9f31..55344a4d6ac 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -22,6 +22,7 @@ use crate::{ mem_docs::DocumentData, reload, target_spec::TargetSpec, + try_default, }; pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> { @@ -74,7 +75,16 @@ pub(crate) fn handle_did_open_text_document( tracing::error!("duplicate DidOpenTextDocument: {}", path); } - state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes())); + if let Some(abs_path) = 
path.as_path() { + if state.config.excluded().any(|excluded| abs_path.starts_with(&excluded)) { + tracing::trace!("opened excluded file {abs_path}"); + state.vfs.write().0.insert_excluded_file(path); + return Ok(()); + } + } + + let contents = params.text_document.text.into_bytes(); + state.vfs.write().0.set_file_contents(path, Some(contents)); if state.config.discover_workspace_config().is_some() { tracing::debug!("queuing task"); let _ = state @@ -126,7 +136,8 @@ pub(crate) fn handle_did_close_text_document( tracing::error!("orphan DidCloseTextDocument: {}", path); } - if let Some(file_id) = state.vfs.read().0.file_id(&path) { + // Clear diagnostics also for excluded files, just in case. + if let Some((file_id, _)) = state.vfs.read().0.file_id(&path) { state.diagnostics.clear_native_for(file_id); } @@ -145,7 +156,7 @@ pub(crate) fn handle_did_save_text_document( ) -> anyhow::Result<()> { if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) { let snap = state.snapshot(); - let file_id = snap.vfs_path_to_file_id(&vfs_path)?; + let file_id = try_default!(snap.vfs_path_to_file_id(&vfs_path)?); let sr = snap.analysis.source_root_id(file_id)?; if state.config.script_rebuild_on_save(Some(sr)) && state.build_deps_changed { @@ -289,7 +300,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let _p = tracing::info_span!("run_flycheck").entered(); let file_id = state.vfs.read().0.file_id(&vfs_path); - if let Some(file_id) = file_id { + if let Some((file_id, vfs::FileExcluded::No)) = file_id { let world = state.snapshot(); let invocation_strategy_once = state.config.flycheck(None).invocation_strategy_once(); let may_flycheck_workspace = state.config.flycheck_workspace(None); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index 39cbf53eaa2..1b144d90732 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -53,6 +53,7 @@ use crate::{ }, target_spec::{CargoTargetSpec, TargetSpec}, test_runner::{CargoTestHandle, TestTarget}, + try_default, }; pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { @@ -83,7 +84,8 @@ pub(crate) fn handle_analyzer_status( let mut file_id = None; if let Some(tdi) = params.text_document { match from_proto::file_id(&snap, &tdi.uri) { - Ok(it) => file_id = Some(it), + Ok(Some(it)) => file_id = Some(it), + Ok(None) => {} Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri), } } @@ -141,7 +143,7 @@ pub(crate) fn handle_view_syntax_tree( params: lsp_ext::ViewSyntaxTreeParams, ) -> anyhow::Result<String> { let _p = tracing::info_span!("handle_view_syntax_tree").entered(); - let id = from_proto::file_id(&snap, &params.text_document.uri)?; + let id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); let res = snap.analysis.view_syntax_tree(id)?; Ok(res) } @@ -151,7 +153,7 @@ pub(crate) fn handle_view_hir( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<String> { let _p = tracing::info_span!("handle_view_hir").entered(); - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let res = snap.analysis.view_hir(position)?; Ok(res) } @@ -161,7 +163,7 @@ pub(crate) fn handle_view_mir( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<String> { let _p =
tracing::info_span!("handle_view_mir").entered(); - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let res = snap.analysis.view_mir(position)?; Ok(res) } @@ -171,7 +173,7 @@ pub(crate) fn handle_interpret_function( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<String> { let _p = tracing::info_span!("handle_interpret_function").entered(); - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let res = snap.analysis.interpret_function(position)?; Ok(res) } @@ -180,7 +182,7 @@ pub(crate) fn handle_view_file_text( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentIdentifier, ) -> anyhow::Result<String> { - let file_id = from_proto::file_id(&snap, &params.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, &params.uri)?); Ok(snap.analysis.file_text(file_id)?.to_string()) } @@ -189,7 +191,7 @@ pub(crate) fn handle_view_item_tree( params: lsp_ext::ViewItemTreeParams, ) -> anyhow::Result<String> { let _p = tracing::info_span!("handle_view_item_tree").entered(); - let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); let res = snap.analysis.view_item_tree(file_id)?; Ok(res) } @@ -315,7 +317,7 @@ pub(crate) fn handle_expand_macro( params: lsp_ext::ExpandMacroParams, ) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> { let _p = tracing::info_span!("handle_expand_macro").entered(); - let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let offset = from_proto::offset(&line_index, params.position)?; @@ -328,7 +330,7 @@ pub(crate) fn handle_selection_range( params: lsp_types::SelectionRangeParams, ) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> { let _p = tracing::info_span!("handle_selection_range").entered(); - let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params .positions @@ -371,7 +373,7 @@ pub(crate) fn handle_matching_brace( params: lsp_ext::MatchingBraceParams, ) -> anyhow::Result<Vec<Position>> { let _p = tracing::info_span!("handle_matching_brace").entered(); - let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; params .positions @@ -395,7 +397,7 @@ pub(crate) fn handle_join_lines( ) -> anyhow::Result<Vec<lsp_types::TextEdit>> { let _p = tracing::info_span!("handle_join_lines").entered(); - let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); let config = snap.config.join_lines(); let line_index = snap.file_line_index(file_id)?; @@ -419,7 +421,7 @@ pub(crate) fn handle_on_enter( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> { let _p = tracing::info_span!("handle_on_enter").entered(); - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let edit = match
snap.analysis.on_enter(position)? { None => return Ok(None), Some(it) => it, @@ -439,7 +441,8 @@ pub(crate) fn handle_on_type_formatting( return Ok(None); } - let mut position = from_proto::file_position(&snap, params.text_document_position)?; + let mut position = + try_default!(from_proto::file_position(&snap, params.text_document_position)?); let line_index = snap.file_line_index(position.file_id)?; // in `ide`, the `on_type` invariant is that @@ -465,32 +468,33 @@ pub(crate) fn handle_on_type_formatting( Ok(Some(change)) } +pub(crate) fn empty_diagnostic_report() -> lsp_types::DocumentDiagnosticReportResult { + lsp_types::DocumentDiagnosticReportResult::Report(lsp_types::DocumentDiagnosticReport::Full( + lsp_types::RelatedFullDocumentDiagnosticReport { + related_documents: None, + full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { + result_id: Some("rust-analyzer".to_owned()), + items: vec![], + }, + }, + )) +} + pub(crate) fn handle_document_diagnostics( snap: GlobalStateSnapshot, params: lsp_types::DocumentDiagnosticParams, ) -> anyhow::Result<lsp_types::DocumentDiagnosticReportResult> { - let empty = || { - lsp_types::DocumentDiagnosticReportResult::Report( - lsp_types::DocumentDiagnosticReport::Full( - lsp_types::RelatedFullDocumentDiagnosticReport { - related_documents: None, - full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { - result_id: Some("rust-analyzer".to_owned()), - items: vec![], - }, - }, - ), - ) + let file_id = match from_proto::file_id(&snap, &params.text_document.uri)? { + Some(it) => it, + None => return Ok(empty_diagnostic_report()), }; - - let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; let source_root = snap.analysis.source_root_id(file_id)?; if !snap.analysis.is_local_source_root(source_root)? { - return Ok(empty()); + return Ok(empty_diagnostic_report()); } let config = snap.config.diagnostics(Some(source_root)); if !config.enabled { - return Ok(empty()); + return Ok(empty_diagnostic_report()); } let line_index = snap.file_line_index(file_id)?; let supports_related = snap.config.text_document_diagnostic_related_document_support(); @@ -546,7 +550,7 @@ pub(crate) fn handle_document_symbol( params: lsp_types::DocumentSymbolParams, ) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> { let _p = tracing::info_span!("handle_document_symbol").entered(); - let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new(); @@ -760,7 +764,7 @@ pub(crate) fn handle_will_rename_files( } }) .filter_map(|(file_id, new_name)| { - snap.analysis.will_rename_file(file_id, &new_name).ok()? + snap.analysis.will_rename_file(file_id?, &new_name).ok()? }) .collect(); @@ -782,7 +786,8 @@ pub(crate) fn handle_goto_definition( params: lsp_types::GotoDefinitionParams, ) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> { let _p = tracing::info_span!("handle_goto_definition").entered(); - let position = from_proto::file_position(&snap, params.text_document_position_params)?; + let position = + try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.goto_definition(position)?
{ None => return Ok(None), Some(it) => it, @@ -797,7 +802,10 @@ pub(crate) fn handle_goto_declaration( params: lsp_types::request::GotoDeclarationParams, ) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> { let _p = tracing::info_span!("handle_goto_declaration").entered(); - let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?; + let position = try_default!(from_proto::file_position( + &snap, + params.text_document_position_params.clone() + )?); let nav_info = match snap.analysis.goto_declaration(position)? { None => return handle_goto_definition(snap, params), Some(it) => it, @@ -812,7 +820,8 @@ pub(crate) fn handle_goto_implementation( params: lsp_types::request::GotoImplementationParams, ) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> { let _p = tracing::info_span!("handle_goto_implementation").entered(); - let position = from_proto::file_position(&snap, params.text_document_position_params)?; + let position = + try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.goto_implementation(position)? { None => return Ok(None), Some(it) => it, @@ -827,7 +836,8 @@ pub(crate) fn handle_goto_type_definition( params: lsp_types::request::GotoTypeDefinitionParams, ) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> { let _p = tracing::info_span!("handle_goto_type_definition").entered(); - let position = from_proto::file_position(&snap, params.text_document_position_params)?; + let position = + try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.goto_type_definition(position)? { None => return Ok(None), Some(it) => it, @@ -880,7 +890,7 @@ pub(crate) fn handle_parent_module( } // check if invoked at the crate root - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let crate_id = match snap.analysis.crates_for(file_id)?.first() { Some(&crate_id) => crate_id, None => return Ok(None), @@ -904,7 +914,7 @@ pub(crate) fn handle_parent_module( } // locate parent module by semantics - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let navs = snap.analysis.parent_module(position)?; let res = to_proto::goto_definition_response(&snap, None, navs)?; Ok(Some(res)) @@ -915,7 +925,7 @@ pub(crate) fn handle_runnables( params: lsp_ext::RunnablesParams, ) -> anyhow::Result<Vec<lsp_ext::Runnable>> { let _p = tracing::info_span!("handle_runnables").entered(); - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let source_root = snap.analysis.source_root_id(file_id).ok(); let line_index = snap.file_line_index(file_id)?; let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok()); @@ -1035,7 +1045,7 @@ pub(crate) fn handle_related_tests( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<Vec<lsp_ext::TestInfo>> { let _p = tracing::info_span!("handle_related_tests").entered(); - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let tests = snap.analysis.related_tests(position, None)?; let mut res = Vec::new(); @@ -1053,7 +1063,8 @@ pub(crate) fn handle_completion( 
lsp_types::CompletionParams { text_document_position, context,.. }: lsp_types::CompletionParams, ) -> anyhow::Result<Option<lsp_types::CompletionResponse>> { let _p = tracing::info_span!("handle_completion").entered(); - let mut position = from_proto::file_position(&snap, text_document_position.clone())?; + let mut position = + try_default!(from_proto::file_position(&snap, text_document_position.clone())?); let line_index = snap.file_line_index(position.file_id)?; let completion_trigger_character = context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next()); @@ -1102,7 +1113,8 @@ pub(crate) fn handle_completion_resolve( let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?; - let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?; + let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)? + .expect("we never provide completions for excluded files"); let line_index = snap.file_line_index(file_id)?; // FIXME: We should fix up the position when retrying the cancelled request instead let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else { @@ -1185,7 +1197,7 @@ pub(crate) fn handle_folding_range( params: FoldingRangeParams, ) -> anyhow::Result<Option<Vec<FoldingRange>>> { let _p = tracing::info_span!("handle_folding_range").entered(); - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let folds = snap.analysis.folding_ranges(file_id)?; let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; @@ -1202,7 +1214,8 @@ pub(crate) fn handle_signature_help( params: lsp_types::SignatureHelpParams, ) -> anyhow::Result<Option<lsp_types::SignatureHelp>> { let _p = tracing::info_span!("handle_signature_help").entered(); - let position = from_proto::file_position(&snap, params.text_document_position_params)?; + let position = + try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); let help = match snap.analysis.signature_help(position)? { Some(it) => it, None => return Ok(None), @@ -1221,7 +1234,7 @@ pub(crate) fn handle_hover( PositionOrRange::Position(position) => Range::new(position, position), PositionOrRange::Range(range) => range, }; - let file_range = from_proto::file_range(&snap, ¶ms.text_document, range)?; + let file_range = try_default!(from_proto::file_range(&snap, ¶ms.text_document, range)?); let hover = snap.config.hover(); let info = match snap.analysis.hover(&hover, file_range)? 
{ @@ -1255,7 +1268,7 @@ pub(crate) fn handle_prepare_rename( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<Option<PrepareRenameResponse>> { let _p = tracing::info_span!("handle_prepare_rename").entered(); - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?; @@ -1269,7 +1282,7 @@ pub(crate) fn handle_rename( params: RenameParams, ) -> anyhow::Result<Option<WorkspaceEdit>> { let _p = tracing::info_span!("handle_rename").entered(); - let position = from_proto::file_position(&snap, params.text_document_position)?; + let position = try_default!(from_proto::file_position(&snap, params.text_document_position)?); let mut change = snap.analysis.rename(position, ¶ms.new_name)?.map_err(to_proto::rename_error)?; @@ -1304,7 +1317,7 @@ pub(crate) fn handle_references( params: lsp_types::ReferenceParams, ) -> anyhow::Result<Option<Vec<Location>>> { let _p = tracing::info_span!("handle_references").entered(); - let position = from_proto::file_position(&snap, params.text_document_position)?; + let position = try_default!(from_proto::file_position(&snap, params.text_document_position)?); let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_tests = snap.config.find_all_refs_exclude_tests(); @@ -1375,9 +1388,9 @@ pub(crate) fn handle_code_action( return Ok(None); } - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; - let frange = from_proto::file_range(&snap, ¶ms.text_document, params.range)?; + let frange = try_default!(from_proto::file_range(&snap, ¶ms.text_document, params.range)?); let source_root = snap.analysis.source_root_id(file_id)?; let mut assists_config = snap.config.assist(Some(source_root)); @@ -1455,7 +1468,8 @@ pub(crate) fn handle_code_action_resolve( return Err(invalid_params_error("code action without data".to_owned()).into()); }; - let file_id = from_proto::file_id(&snap, ¶ms.code_action_params.text_document.uri)?; + let file_id = from_proto::file_id(&snap, ¶ms.code_action_params.text_document.uri)? 
+ .expect("we never provide code actions for excluded files"); if snap.file_version(file_id) != params.version { return Err(invalid_params_error("stale code action".to_owned()).into()); } @@ -1551,7 +1565,7 @@ pub(crate) fn handle_code_lens( return Ok(Some(Vec::default())); } - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let target_spec = TargetSpec::for_file(&snap, file_id)?; let annotations = snap.analysis.annotations( @@ -1613,7 +1627,8 @@ pub(crate) fn handle_document_highlight( params: lsp_types::DocumentHighlightParams, ) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> { let _p = tracing::info_span!("handle_document_highlight").entered(); - let position = from_proto::file_position(&snap, params.text_document_position_params)?; + let position = + try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); let line_index = snap.file_line_index(position.file_id)?; let source_root = snap.analysis.source_root_id(position.file_id)?; @@ -1639,12 +1654,12 @@ pub(crate) fn handle_ssr( params: lsp_ext::SsrParams, ) -> anyhow::Result<lsp_types::WorkspaceEdit> { let _p = tracing::info_span!("handle_ssr").entered(); - let selections = params + let selections = try_default!(params .selections .iter() .map(|range| from_proto::file_range(&snap, ¶ms.position.text_document, *range)) - .collect::<Result<Vec<_>, _>>()?; - let position = from_proto::file_position(&snap, params.position)?; + .collect::<Result<Option<Vec<_>>, _>>()?); + let position = try_default!(from_proto::file_position(&snap, params.position)?); let source_change = snap.analysis.structural_search_replace( ¶ms.query, params.parse_only, @@ -1660,11 +1675,11 @@ pub(crate) fn handle_inlay_hints( ) -> anyhow::Result<Option<Vec<InlayHint>>> { let _p = tracing::info_span!("handle_inlay_hints").entered(); let document_uri = ¶ms.text_document.uri; - let FileRange { file_id, range } = from_proto::file_range( + let FileRange { file_id, range } = try_default!(from_proto::file_range( &snap, &TextDocumentIdentifier::new(document_uri.to_owned()), params.range, - )?; + )?); let line_index = snap.file_line_index(file_id)?; let range = TextRange::new( range.start().min(line_index.index.len()), @@ -1744,7 +1759,8 @@ pub(crate) fn handle_call_hierarchy_prepare( params: CallHierarchyPrepareParams, ) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> { let _p = tracing::info_span!("handle_call_hierarchy_prepare").entered(); - let position = from_proto::file_position(&snap, params.text_document_position_params)?; + let position = + try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.call_hierarchy(position)? 
{ None => return Ok(None), @@ -1769,7 +1785,7 @@ pub(crate) fn handle_call_hierarchy_incoming( let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); - let frange = from_proto::file_range(&snap, &doc, item.selection_range)?; + let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?); let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; let config = snap.config.call_hierarchy(); @@ -1807,7 +1823,7 @@ pub(crate) fn handle_call_hierarchy_outgoing( let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); - let frange = from_proto::file_range(&snap, &doc, item.selection_range)?; + let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?); let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; let line_index = snap.file_line_index(fpos.file_id)?; @@ -1842,7 +1858,7 @@ pub(crate) fn handle_semantic_tokens_full( ) -> anyhow::Result<Option<SemanticTokensResult>> { let _p = tracing::info_span!("handle_semantic_tokens_full").entered(); - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; @@ -1872,7 +1888,7 @@ pub(crate) fn handle_semantic_tokens_full_delta( ) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> { let _p = tracing::info_span!("handle_semantic_tokens_full_delta").entered(); - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; @@ -1915,7 +1931,7 @@ pub(crate) fn handle_semantic_tokens_range( ) -> anyhow::Result<Option<SemanticTokensRangeResult>> { let _p = tracing::info_span!("handle_semantic_tokens_range").entered(); - let frange = from_proto::file_range(&snap, ¶ms.text_document, params.range)?; + let frange = try_default!(from_proto::file_range(&snap, ¶ms.text_document, params.range)?); let text = snap.analysis.file_text(frange.file_id)?; let line_index = snap.file_line_index(frange.file_id)?; @@ -1940,7 +1956,7 @@ pub(crate) fn handle_open_docs( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<ExternalDocsResponse> { let _p = tracing::info_span!("handle_open_docs").entered(); - let position = from_proto::file_position(&snap, params)?; + let position = try_default!(from_proto::file_position(&snap, params)?); let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind { ProjectWorkspaceKind::Cargo { cargo, .. } @@ -1982,7 +1998,7 @@ pub(crate) fn handle_open_cargo_toml( params: lsp_ext::OpenCargoTomlParams, ) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> { let _p = tracing::info_span!("handle_open_cargo_toml").entered(); - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let cargo_spec = match TargetSpec::for_file(&snap, file_id)? 
{ Some(TargetSpec::Cargo(it)) => it, @@ -2000,8 +2016,8 @@ pub(crate) fn handle_move_item( params: lsp_ext::MoveItemParams, ) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> { let _p = tracing::info_span!("handle_move_item").entered(); - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; - let range = from_proto::file_range(&snap, ¶ms.text_document, params.range)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); + let range = try_default!(from_proto::file_range(&snap, ¶ms.text_document, params.range)?); let direction = match params.direction { lsp_ext::MoveItemDirection::Up => ide::Direction::Up, @@ -2022,7 +2038,7 @@ pub(crate) fn handle_view_recursive_memory_layout( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> { let _p = tracing::info_span!("handle_view_recursive_memory_layout").entered(); - let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let offset = from_proto::offset(&line_index, params.position)?; @@ -2210,7 +2226,7 @@ fn run_rustfmt( text_document: TextDocumentIdentifier, range: Option<lsp_types::Range>, ) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> { - let file_id = from_proto::file_id(snap, &text_document.uri)?; + let file_id = try_default!(from_proto::file_id(snap, &text_document.uri)?); let file = snap.analysis.file_text(file_id)?; // Determine the edition of the crate the file belongs to (if there's multiple, we pick the @@ -2275,7 +2291,7 @@ fn run_rustfmt( .into()); } - let frange = from_proto::file_range(snap, &text_document, range)?; + let frange = try_default!(from_proto::file_range(snap, &text_document, range)?); let start_line = line_index.index.line_col(frange.range.start()).line; let end_line = line_index.index.line_col(frange.range.end()).line; @@ -2284,7 +2300,8 @@ fn run_rustfmt( cmd.arg( json!([{ "file": "stdin", - "range": [start_line, end_line] + // LineCol is 0-based, but rustfmt is 1-based. + "range": [start_line + 1, end_line + 1] }]) .to_string(), ); @@ -2318,18 +2335,21 @@ fn run_rustfmt( } }; - tracing::debug!(?command, "created format command"); + let output = { + let _p = tracing::info_span!("rustfmt", ?command).entered(); - let mut rustfmt = command - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn() - .context(format!("Failed to spawn {command:?}"))?; + let mut rustfmt = command + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context(format!("Failed to spawn {command:?}"))?; - rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; + rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; + + rustfmt.wait_with_output()? + }; - let output = rustfmt.wait_with_output()?; let captured_stdout = String::from_utf8(output.stdout)?; let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default(); @@ -2413,15 +2433,15 @@ pub(crate) fn internal_testing_fetch_config( state: GlobalStateSnapshot, params: InternalTestingFetchConfigParams, ) -> anyhow::Result<Option<InternalTestingFetchConfigResponse>> { - let source_root = params - .text_document - .map(|it| { + let source_root = match params.text_document { + Some(it) => Some( state .analysis - .source_root_id(from_proto::file_id(&state, &it.uri)?) 
- .map_err(anyhow::Error::from) - }) - .transpose()?; + .source_root_id(try_default!(from_proto::file_id(&state, &it.uri)?)) + .map_err(anyhow::Error::from)?, + ), + None => None, + }; Ok(Some(match params.config { InternalTestingFetchConfigOption::AssistEmitMustUse => { InternalTestingFetchConfigResponse::AssistEmitMustUse( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index 5cdc51a1c19..c6aa8ba1707 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -25,6 +25,14 @@ use vfs::{AbsPathBuf, VfsPath}; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; +#[track_caller] +fn file_id(vfs: &vfs::Vfs, path: &VfsPath) -> vfs::FileId { + match vfs.file_id(path) { + Some((file_id, vfs::FileExcluded::No)) => file_id, + None | Some((_, vfs::FileExcluded::Yes)) => panic!("can't find virtual file for {path}"), + } +} + #[test] fn integrated_highlighting_benchmark() { if std::env::var("RUN_SLOW_BENCHES").is_err() { @@ -62,7 +70,7 @@ fn integrated_highlighting_benchmark() { let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); - vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) + file_id(&vfs, &path) }; { @@ -130,7 +138,7 @@ fn integrated_completion_benchmark() { let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); - vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) + file_id(&vfs, &path) }; // kick off parsing and index population @@ -324,7 +332,7 @@ fn integrated_diagnostics_benchmark() { let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); - vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) + file_id(&vfs, &path) }; let diagnostics_config = DiagnosticsConfig { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index ccffa7a671e..27d6225cdb7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -50,7 +50,7 @@ mod integrated_benchmarks; use hir::Mutability; use ide::{CompletionItem, CompletionItemRefMode, CompletionRelevance}; use serde::de::DeserializeOwned; -use tenthash::TentHasher; +use tenthash::TentHash; pub use crate::{ lsp::capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, @@ -66,7 +66,7 @@ pub fn from_json<T: DeserializeOwned>( } fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8; 20] { - fn hash_completion_relevance(hasher: &mut TentHasher, relevance: &CompletionRelevance) { + fn hash_completion_relevance(hasher: &mut TentHash, relevance: &CompletionRelevance) { use ide_completion::{ CompletionRelevancePostfixMatch, CompletionRelevanceReturnType, CompletionRelevanceTypeMatch, @@ -79,71 +79,108 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8; u8::from(relevance.requires_import), u8::from(relevance.is_private_editable), ]); - if let Some(type_match) = &relevance.type_match { - let label = match type_match { - CompletionRelevanceTypeMatch::CouldUnify => "could_unify", - CompletionRelevanceTypeMatch::Exact => "exact", - }; - 
hasher.update(label); + + match relevance.type_match { + None => hasher.update([0u8]), + Some(CompletionRelevanceTypeMatch::CouldUnify) => hasher.update([1u8]), + Some(CompletionRelevanceTypeMatch::Exact) => hasher.update([2u8]), } + + hasher.update([u8::from(relevance.trait_.is_some())]); if let Some(trait_) = &relevance.trait_ { hasher.update([u8::from(trait_.is_op_method), u8::from(trait_.notable_trait)]); } - if let Some(postfix_match) = &relevance.postfix_match { - let label = match postfix_match { - CompletionRelevancePostfixMatch::NonExact => "non_exact", - CompletionRelevancePostfixMatch::Exact => "exact", - }; - hasher.update(label); + + match relevance.postfix_match { + None => hasher.update([0u8]), + Some(CompletionRelevancePostfixMatch::NonExact) => hasher.update([1u8]), + Some(CompletionRelevancePostfixMatch::Exact) => hasher.update([2u8]), } + + hasher.update([u8::from(relevance.function.is_some())]); if let Some(function) = &relevance.function { hasher.update([u8::from(function.has_params), u8::from(function.has_self_param)]); - let label = match function.return_type { - CompletionRelevanceReturnType::Other => "other", - CompletionRelevanceReturnType::DirectConstructor => "direct_constructor", - CompletionRelevanceReturnType::Constructor => "constructor", - CompletionRelevanceReturnType::Builder => "builder", + let discriminant: u8 = match function.return_type { + CompletionRelevanceReturnType::Other => 0, + CompletionRelevanceReturnType::DirectConstructor => 1, + CompletionRelevanceReturnType::Constructor => 2, + CompletionRelevanceReturnType::Builder => 3, }; - hasher.update(label); + hasher.update([discriminant]); } } - let mut hasher = TentHasher::new(); + let mut hasher = TentHash::new(); hasher.update([ u8::from(is_ref_completion), u8::from(item.is_snippet), u8::from(item.deprecated), u8::from(item.trigger_call_info), ]); + + hasher.update(item.label.primary.len().to_ne_bytes()); hasher.update(&item.label.primary); + + hasher.update([u8::from(item.label.detail_left.is_some())]); if let Some(label_detail) = &item.label.detail_left { + hasher.update(label_detail.len().to_ne_bytes()); hasher.update(label_detail); } + + hasher.update([u8::from(item.label.detail_right.is_some())]); if let Some(label_detail) = &item.label.detail_right { + hasher.update(label_detail.len().to_ne_bytes()); hasher.update(label_detail); } + // NB: do not hash edits or source range, as those may change between the time the client sends the resolve request // and the time it receives it: some editors do allow changing the buffer between that, leading to ranges being different. // // Documentation hashing is skipped too, as it's a large blob to process, // while not really making completion properties more unique as they are already. 
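Editor's note (illustrative sketch, not part of the upstream diff): the rewritten hash writes an explicit discriminant byte for every Option field and length-prefixes every variable-length field, so neighbouring fields can no longer be confused: without a prefix, ("ab", "c") and ("a", "bc") would feed the hasher identical bytes. A hypothetical helper, update_str, capturing the pattern that the hunks below inline at each call site:

    fn update_str(hasher: &mut TentHash, s: &str) {
        hasher.update(s.len().to_ne_bytes()); // length first ...
        hasher.update(s.as_bytes()); // ... then the bytes themselves
    }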
- hasher.update(item.kind.tag()); + + let kind_tag = item.kind.tag(); + hasher.update(kind_tag.len().to_ne_bytes()); + hasher.update(kind_tag); + + hasher.update(item.lookup.len().to_ne_bytes()); hasher.update(&item.lookup); + + hasher.update([u8::from(item.detail.is_some())]); if let Some(detail) = &item.detail { + hasher.update(detail.len().to_ne_bytes()); hasher.update(detail); } + hash_completion_relevance(&mut hasher, &item.relevance); + + hasher.update([u8::from(item.ref_match.is_some())]); if let Some((ref_mode, text_size)) = &item.ref_match { - let prefix = match ref_mode { - CompletionItemRefMode::Reference(Mutability::Shared) => "&", - CompletionItemRefMode::Reference(Mutability::Mut) => "&mut ", - CompletionItemRefMode::Dereference => "*", + let discriminant = match ref_mode { + CompletionItemRefMode::Reference(Mutability::Shared) => 0u8, + CompletionItemRefMode::Reference(Mutability::Mut) => 1u8, + CompletionItemRefMode::Dereference => 2u8, }; - hasher.update(prefix); - hasher.update(u32::from(*text_size).to_le_bytes()); + hasher.update([discriminant]); + hasher.update(u32::from(*text_size).to_ne_bytes()); } + + hasher.update(item.import_to_add.len().to_ne_bytes()); for import_path in &item.import_to_add { + hasher.update(import_path.len().to_ne_bytes()); hasher.update(import_path); } + hasher.finalize() } + +#[doc(hidden)] +macro_rules! try_default_ { + ($it:expr $(,)?) => { + match $it { + Some(it) => it, + None => return Ok(Default::default()), + } + }; +} +pub(crate) use try_default_ as try_default; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs index 47e9961cf13..6375a1a054b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs @@ -9,7 +9,7 @@ use vfs::AbsPathBuf; use crate::{ global_state::GlobalStateSnapshot, line_index::{LineIndex, PositionEncoding}, - lsp_ext, + lsp_ext, try_default, }; pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> { @@ -61,37 +61,44 @@ pub(crate) fn text_range( } } -pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> anyhow::Result<FileId> { +/// Returns `None` if the file was excluded. +pub(crate) fn file_id( + snap: &GlobalStateSnapshot, + url: &lsp_types::Url, +) -> anyhow::Result<Option<FileId>> { snap.url_to_file_id(url) } +/// Returns `None` if the file was excluded. pub(crate) fn file_position( snap: &GlobalStateSnapshot, tdpp: lsp_types::TextDocumentPositionParams, -) -> anyhow::Result<FilePosition> { - let file_id = file_id(snap, &tdpp.text_document.uri)?; +) -> anyhow::Result<Option<FilePosition>> { + let file_id = try_default!(file_id(snap, &tdpp.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let offset = offset(&line_index, tdpp.position)?; - Ok(FilePosition { file_id, offset }) + Ok(Some(FilePosition { file_id, offset })) } +/// Returns `None` if the file was excluded. pub(crate) fn file_range( snap: &GlobalStateSnapshot, text_document_identifier: &lsp_types::TextDocumentIdentifier, range: lsp_types::Range, -) -> anyhow::Result<FileRange> { +) -> anyhow::Result<Option<FileRange>> { file_range_uri(snap, &text_document_identifier.uri, range) } +/// Returns `None` if the file was excluded. 
pub(crate) fn file_range_uri( snap: &GlobalStateSnapshot, document: &lsp_types::Url, range: lsp_types::Range, -) -> anyhow::Result<FileRange> { - let file_id = file_id(snap, document)?; +) -> anyhow::Result<Option<FileRange>> { + let file_id = try_default!(file_id(snap, document)?); let line_index = snap.file_line_index(file_id)?; let range = text_range(&line_index, range)?; - Ok(FileRange { file_id, range }) + Ok(Some(FileRange { file_id, range })) } pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> { @@ -108,6 +115,7 @@ pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> Some(assist_kind) } +/// Returns `None` if the file was excluded. pub(crate) fn annotation( snap: &GlobalStateSnapshot, range: lsp_types::Range, @@ -121,7 +129,7 @@ pub(crate) fn annotation( return Ok(None); } let pos @ FilePosition { file_id, .. } = - file_position(snap, params.text_document_position_params)?; + try_default!(file_position(snap, params.text_document_position_params)?); let line_index = snap.file_line_index(file_id)?; Ok(Annotation { @@ -133,7 +141,7 @@ pub(crate) fn annotation( if snap.url_file_version(¶ms.text_document.uri) != Some(data.version) { return Ok(None); } - let pos @ FilePosition { file_id, .. } = file_position(snap, params)?; + let pos @ FilePosition { file_id, .. } = try_default!(file_position(snap, params)?); let line_index = snap.file_line_index(file_id)?; Ok(Annotation { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs index 991c10743f7..3c21e199252 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs @@ -24,7 +24,7 @@ macro_rules! define_semantic_token_types { } pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[ - $(SemanticTokenType::$standard,)* + $(self::types::$standard,)* $(self::types::$custom),* ]; @@ -32,7 +32,7 @@ macro_rules! define_semantic_token_types { use self::types::*; $( if token == $custom { - None $(.or(Some(SemanticTokenType::$fallback)))? + None $(.or(Some(self::types::$fallback)))? } else )* { Some(token )} @@ -60,6 +60,7 @@ define_semantic_token_types![ STRUCT, TYPE_PARAMETER, VARIABLE, + TYPE, } custom { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index d6dc8b521fd..f5d9469f262 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -27,7 +27,10 @@ use crate::{ FetchWorkspaceResponse, GlobalState, }, hack_recover_crate_name, - handlers::dispatch::{NotificationDispatcher, RequestDispatcher}, + handlers::{ + dispatch::{NotificationDispatcher, RequestDispatcher}, + request::empty_diagnostic_report, + }, lsp::{ from_proto, to_proto, utils::{notification_is, Progress}, @@ -253,6 +256,11 @@ impl GlobalState { &self, inbox: &Receiver<lsp_server::Message>, ) -> Result<Option<Event>, crossbeam_channel::RecvError> { + // Make sure we reply to formatting requests ASAP so the editor doesn't block + if let Ok(task) = self.fmt_pool.receiver.try_recv() { + return Ok(Some(Event::Task(task))); + } + select! 
{ recv(inbox) -> msg => return Ok(msg.ok().map(Event::Lsp)), @@ -320,26 +328,30 @@ impl GlobalState { } for progress in prime_caches_progress { - let (state, message, fraction); + let (state, message, fraction, title); match progress { PrimeCachesProgress::Begin => { state = Progress::Begin; message = None; fraction = 0.0; + title = "Indexing"; } PrimeCachesProgress::Report(report) => { state = Progress::Report; + title = report.work_type; - message = match &report.crates_currently_indexing[..] { + message = match &*report.crates_currently_indexing { [crate_name] => Some(format!( - "{}/{} ({crate_name})", - report.crates_done, report.crates_total + "{}/{} ({})", + report.crates_done, + report.crates_total, + crate_name.as_str(), )), [crate_name, rest @ ..] => Some(format!( "{}/{} ({} + {} more)", report.crates_done, report.crates_total, - crate_name, + crate_name.as_str(), rest.len() )), _ => None, @@ -351,6 +363,7 @@ impl GlobalState { state = Progress::End; message = None; fraction = 1.0; + title = "Indexing"; self.prime_caches_queue.op_completed(()); if cancelled { @@ -360,7 +373,13 @@ impl GlobalState { } }; - self.report_progress("Indexing", state, message, Some(fraction), None); + self.report_progress( + title, + state, + message, + Some(fraction), + Some("rustAnalyzer/cachePriming".to_owned()), + ); } } Event::Vfs(message) => { @@ -532,6 +551,9 @@ impl GlobalState { self.mem_docs .iter() .map(|path| vfs.file_id(path).unwrap()) + .filter_map(|(file_id, excluded)| { + (excluded == vfs::FileExcluded::No).then_some(file_id) + }) .filter(|&file_id| { let source_root = db.file_source_root(file_id); // Only publish diagnostics for files in the workspace, not from crates.io deps @@ -616,6 +638,9 @@ impl GlobalState { .mem_docs .iter() .map(|path| self.vfs.read().0.file_id(path).unwrap()) + .filter_map(|(file_id, excluded)| { + (excluded == vfs::FileExcluded::No).then_some(file_id) + }) .filter(|&file_id| { let source_root = db.file_source_root(file_id); !db.source_root(source_root).is_library @@ -863,7 +888,10 @@ impl GlobalState { self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| { let _p = tracing::info_span!("GlobalState::check_if_indexed").entered(); tracing::debug!(?uri, "handling uri"); - let id = from_proto::file_id(&snap, &uri).expect("unable to get FileId"); + let Some(id) = from_proto::file_id(&snap, &uri).expect("unable to get FileId") + else { + return; + }; if let Ok(crates) = &snap.analysis.crates_for(id) { if crates.is_empty() { if snap.config.discover_workspace_config().is_some() { @@ -971,13 +999,14 @@ impl GlobalState { ); for diag in diagnostics { match url_to_file_id(&self.vfs.read().0, &diag.url) { - Ok(file_id) => self.diagnostics.add_check_diagnostic( + Ok(Some(file_id)) => self.diagnostics.add_check_diagnostic( id, &package_id, file_id, diag.diagnostic, diag.fix, ), + Ok(None) => {} Err(err) => { error!( "flycheck {id}: File with cargo diagnostic not found in VFS: {}", @@ -1099,17 +1128,7 @@ impl GlobalState { .on_latency_sensitive::<NO_RETRY, lsp_request::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range) // FIXME: Some of these NO_RETRY could be retries if the file they are interested didn't change. 
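Editor's note (illustrative sketch, not part of the upstream diff): Vfs::file_id now reports whether a path is excluded alongside its FileId, and callers that only want indexed files filter on FileExcluded::No, as in the mem_docs iterations above. A hypothetical helper, indexed_file_id, mirroring the one added to integrated_benchmarks.rs:

    fn indexed_file_id(vfs: &vfs::Vfs, path: &vfs::VfsPath) -> Option<vfs::FileId> {
        match vfs.file_id(path) {
            Some((file_id, vfs::FileExcluded::No)) => Some(file_id),
            Some((_, vfs::FileExcluded::Yes)) | None => None,
        }
    }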
// All other request handlers - .on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report( - lsp_types::DocumentDiagnosticReport::Full( - lsp_types::RelatedFullDocumentDiagnosticReport { - related_documents: None, - full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { - result_id: Some("rust-analyzer".to_owned()), - items: vec![], - }, - }, - ), - ), || lsp_server::ResponseError { + .on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, empty_diagnostic_report, || lsp_server::ResponseError { code: lsp_server::ErrorCode::ServerCancelled as i32, message: "server cancelled the request".to_owned(), data: serde_json::to_value(lsp_types::DiagnosticServerCancellationData { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 0add2cdf5a7..ba72ea35df6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -316,6 +316,7 @@ impl GlobalState { let workspace = project_model::ProjectWorkspace::load_inline( it.clone(), &cargo_config, + &progress, ); Ok(workspace) } @@ -701,12 +702,13 @@ impl GlobalState { let (crate_graph, proc_macro_paths, ws_data) = { // Create crate graph from all the workspaces - let vfs = &mut self.vfs.write().0; - + let vfs = &self.vfs.read().0; let load = |path: &AbsPath| { let vfs_path = vfs::VfsPath::from(path.to_path_buf()); self.crate_graph_file_dependencies.insert(vfs_path.clone()); - vfs.file_id(&vfs_path) + vfs.file_id(&vfs_path).and_then(|(file_id, excluded)| { + (excluded == vfs::FileExcluded::No).then_some(file_id) + }) }; ws_to_crate_graph(&self.workspaces, self.config.extra_env(None), load) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs index 2bcd8505e81..c5de69bb9fc 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs @@ -1,6 +1,8 @@ //! A thin wrapper around [`stdx::thread::Pool`] which threads a sender through spawned jobs. //! It is used in [`crate::global_state::GlobalState`] throughout the main loop. 
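Editor's note (minimal sketch, not part of the upstream diff): the new UnwindSafe bounds in the hunk below presumably exist because spawned tasks end up inside std::panic::catch_unwind, which only accepts unwind-safe closures. The hypothetical function run_caught shows the constraint in isolation:

    use std::panic::{self, UnwindSafe};

    fn run_caught<T>(task: impl FnOnce() -> T + UnwindSafe) -> std::thread::Result<T> {
        // Compiles only because the closure is declared UnwindSafe.
        panic::catch_unwind(task)
    }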
+use std::panic::UnwindSafe; + use crossbeam_channel::Sender; use stdx::thread::{Pool, ThreadIntent}; @@ -18,7 +20,7 @@ impl<T> TaskPool<T> { pub(crate) fn spawn<F>(&mut self, intent: ThreadIntent, task: F) where - F: FnOnce() -> T + Send + 'static, + F: FnOnce() -> T + Send + UnwindSafe + 'static, T: Send + 'static, { self.pool.spawn(intent, { @@ -29,7 +31,7 @@ impl<T> TaskPool<T> { pub(crate) fn spawn_with_sender<F>(&mut self, intent: ThreadIntent, task: F) where - F: FnOnce(Sender<T>) + Send + 'static, + F: FnOnce(Sender<T>) + Send + UnwindSafe + 'static, T: Send + 'static, { self.pool.spawn(intent, { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/cli.rs index fba54666912..4ef930e9854 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/cli.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/cli.rs @@ -43,89 +43,93 @@ mod tests { expect![[r#" {"id":2,"type":"vertex","label":"foldingRangeResult","result":[{"startLine":2,"startCharacter":43,"endLine":6,"endCharacter":1},{"startLine":3,"startCharacter":19,"endLine":5,"endCharacter":5},{"startLine":9,"startCharacter":10,"endLine":12,"endCharacter":1}]} {"id":3,"type":"edge","label":"textDocument/foldingRange","inV":2,"outV":1} - {"id":4,"type":"vertex","label":"range","start":{"line":0,"character":3},"end":{"line":0,"character":8}} + {"id":4,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":13,"character":0}} {"id":5,"type":"vertex","label":"resultSet"} {"id":6,"type":"edge","label":"next","inV":5,"outV":4} - {"id":7,"type":"vertex","label":"range","start":{"line":2,"character":13},"end":{"line":2,"character":43}} + {"id":7,"type":"vertex","label":"range","start":{"line":0,"character":3},"end":{"line":0,"character":8}} {"id":8,"type":"vertex","label":"resultSet"} {"id":9,"type":"edge","label":"next","inV":8,"outV":7} - {"id":10,"type":"vertex","label":"range","start":{"line":8,"character":0},"end":{"line":8,"character":30}} - {"id":11,"type":"edge","label":"next","inV":8,"outV":10} - {"id":12,"type":"vertex","label":"range","start":{"line":8,"character":32},"end":{"line":8,"character":39}} - {"id":13,"type":"vertex","label":"resultSet"} - {"id":14,"type":"edge","label":"next","inV":13,"outV":12} - {"id":15,"type":"vertex","label":"range","start":{"line":9,"character":4},"end":{"line":9,"character":9}} + {"id":10,"type":"vertex","label":"range","start":{"line":2,"character":13},"end":{"line":2,"character":43}} + {"id":11,"type":"vertex","label":"resultSet"} + {"id":12,"type":"edge","label":"next","inV":11,"outV":10} + {"id":13,"type":"vertex","label":"range","start":{"line":8,"character":0},"end":{"line":8,"character":30}} + {"id":14,"type":"edge","label":"next","inV":11,"outV":13} + {"id":15,"type":"vertex","label":"range","start":{"line":8,"character":32},"end":{"line":8,"character":39}} {"id":16,"type":"vertex","label":"resultSet"} {"id":17,"type":"edge","label":"next","inV":16,"outV":15} - {"id":18,"type":"vertex","label":"range","start":{"line":10,"character":8},"end":{"line":10,"character":13}} + {"id":18,"type":"vertex","label":"range","start":{"line":9,"character":4},"end":{"line":9,"character":9}} {"id":19,"type":"vertex","label":"resultSet"} {"id":20,"type":"edge","label":"next","inV":19,"outV":18} - {"id":21,"type":"vertex","label":"range","start":{"line":11,"character":4},"end":{"line":11,"character":34}} - {"id":22,"type":"edge","label":"next","inV":8,"outV":21} 
- {"id":23,"type":"vertex","label":"range","start":{"line":11,"character":36},"end":{"line":11,"character":43}} - {"id":24,"type":"vertex","label":"resultSet"} - {"id":25,"type":"edge","label":"next","inV":24,"outV":23} - {"id":26,"type":"edge","label":"contains","inVs":[4,7,10,12,15,18,21,23],"outV":1} - {"id":27,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\n#[allow]\n```\n\n---\n\nValid forms are:\n\n* \\#\\[allow(lint1, lint2, ..., /\\*opt\\*/ reason = \"...\")\\]"}}} - {"id":28,"type":"edge","label":"textDocument/hover","inV":27,"outV":5} - {"id":29,"type":"vertex","label":"referenceResult"} - {"id":30,"type":"edge","label":"textDocument/references","inV":29,"outV":5} - {"id":31,"type":"edge","label":"item","document":1,"property":"references","inVs":[4],"outV":29} - {"id":32,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmacro_rules! generate_const_from_identifier\n```"}}} - {"id":33,"type":"edge","label":"textDocument/hover","inV":32,"outV":8} - {"id":34,"type":"vertex","label":"packageInformation","name":"foo","manager":"cargo","version":"0.0.0"} - {"id":35,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::generate_const_from_identifier","unique":"scheme","kind":"export"} - {"id":36,"type":"edge","label":"packageInformation","inV":34,"outV":35} - {"id":37,"type":"edge","label":"moniker","inV":35,"outV":8} - {"id":38,"type":"vertex","label":"definitionResult"} - {"id":39,"type":"edge","label":"item","document":1,"inVs":[7],"outV":38} - {"id":40,"type":"edge","label":"textDocument/definition","inV":38,"outV":8} - {"id":41,"type":"vertex","label":"referenceResult"} - {"id":42,"type":"edge","label":"textDocument/references","inV":41,"outV":8} - {"id":43,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[7],"outV":41} - {"id":44,"type":"edge","label":"item","document":1,"property":"references","inVs":[10,21],"outV":41} - {"id":45,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nconst REQ_001: &str = \"encoded_data\"\n```"}}} - {"id":46,"type":"edge","label":"textDocument/hover","inV":45,"outV":13} - {"id":47,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::REQ_001","unique":"scheme","kind":"export"} - {"id":48,"type":"edge","label":"packageInformation","inV":34,"outV":47} - {"id":49,"type":"edge","label":"moniker","inV":47,"outV":13} - {"id":50,"type":"vertex","label":"definitionResult"} - {"id":51,"type":"edge","label":"item","document":1,"inVs":[12],"outV":50} - {"id":52,"type":"edge","label":"textDocument/definition","inV":50,"outV":13} - {"id":53,"type":"vertex","label":"referenceResult"} - {"id":54,"type":"edge","label":"textDocument/references","inV":53,"outV":13} - {"id":55,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[12],"outV":53} - {"id":56,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmod tests\n```"}}} - {"id":57,"type":"edge","label":"textDocument/hover","inV":56,"outV":16} - {"id":58,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests","unique":"scheme","kind":"export"} - {"id":59,"type":"edge","label":"packageInformation","inV":34,"outV":58} - {"id":60,"type":"edge","label":"moniker","inV":58,"outV":16} - {"id":61,"type":"vertex","label":"definitionResult"} - 
{"id":62,"type":"edge","label":"item","document":1,"inVs":[15],"outV":61} - {"id":63,"type":"edge","label":"textDocument/definition","inV":61,"outV":16} - {"id":64,"type":"vertex","label":"referenceResult"} - {"id":65,"type":"edge","label":"textDocument/references","inV":64,"outV":16} - {"id":66,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[15],"outV":64} - {"id":67,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nextern crate foo\n```"}}} - {"id":68,"type":"edge","label":"textDocument/hover","inV":67,"outV":19} - {"id":69,"type":"vertex","label":"definitionResult"} - {"id":70,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":13,"character":0}} - {"id":71,"type":"edge","label":"contains","inVs":[70],"outV":1} - {"id":72,"type":"edge","label":"item","document":1,"inVs":[70],"outV":69} - {"id":73,"type":"edge","label":"textDocument/definition","inV":69,"outV":19} - {"id":74,"type":"vertex","label":"referenceResult"} - {"id":75,"type":"edge","label":"textDocument/references","inV":74,"outV":19} - {"id":76,"type":"edge","label":"item","document":1,"property":"references","inVs":[18],"outV":74} - {"id":77,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo::tests\n```\n\n```rust\nconst REQ_002: &str = \"encoded_data\"\n```"}}} - {"id":78,"type":"edge","label":"textDocument/hover","inV":77,"outV":24} - {"id":79,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests::REQ_002","unique":"scheme","kind":"export"} - {"id":80,"type":"edge","label":"packageInformation","inV":34,"outV":79} - {"id":81,"type":"edge","label":"moniker","inV":79,"outV":24} - {"id":82,"type":"vertex","label":"definitionResult"} - {"id":83,"type":"edge","label":"item","document":1,"inVs":[23],"outV":82} - {"id":84,"type":"edge","label":"textDocument/definition","inV":82,"outV":24} - {"id":85,"type":"vertex","label":"referenceResult"} - {"id":86,"type":"edge","label":"textDocument/references","inV":85,"outV":24} - {"id":87,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[23],"outV":85} + {"id":21,"type":"vertex","label":"range","start":{"line":10,"character":8},"end":{"line":10,"character":13}} + {"id":22,"type":"edge","label":"next","inV":5,"outV":21} + {"id":23,"type":"vertex","label":"range","start":{"line":11,"character":4},"end":{"line":11,"character":34}} + {"id":24,"type":"edge","label":"next","inV":11,"outV":23} + {"id":25,"type":"vertex","label":"range","start":{"line":11,"character":36},"end":{"line":11,"character":43}} + {"id":26,"type":"vertex","label":"resultSet"} + {"id":27,"type":"edge","label":"next","inV":26,"outV":25} + {"id":28,"type":"edge","label":"contains","inVs":[4,7,10,13,15,18,21,23,25],"outV":1} + {"id":29,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nextern crate foo\n```"}}} + {"id":30,"type":"edge","label":"textDocument/hover","inV":29,"outV":5} + {"id":31,"type":"vertex","label":"packageInformation","name":"foo","manager":"cargo","version":"0.0.0"} + {"id":32,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::crate","unique":"scheme","kind":"export"} + {"id":33,"type":"edge","label":"packageInformation","inV":31,"outV":32} + {"id":34,"type":"edge","label":"moniker","inV":32,"outV":5} + {"id":35,"type":"vertex","label":"definitionResult"} + {"id":36,"type":"edge","label":"item","document":1,"inVs":[4],"outV":35} 
+ {"id":37,"type":"edge","label":"textDocument/definition","inV":35,"outV":5} + {"id":38,"type":"vertex","label":"referenceResult"} + {"id":39,"type":"edge","label":"textDocument/references","inV":38,"outV":5} + {"id":40,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[4],"outV":38} + {"id":41,"type":"edge","label":"item","document":1,"property":"references","inVs":[21],"outV":38} + {"id":42,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\n#[allow]\n```\n\n---\n\nValid forms are:\n\n* \\#\\[allow(lint1, lint2, ..., /\\*opt\\*/ reason = \"...\")\\]"}}} + {"id":43,"type":"edge","label":"textDocument/hover","inV":42,"outV":8} + {"id":44,"type":"vertex","label":"referenceResult"} + {"id":45,"type":"edge","label":"textDocument/references","inV":44,"outV":8} + {"id":46,"type":"edge","label":"item","document":1,"property":"references","inVs":[7],"outV":44} + {"id":47,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmacro_rules! generate_const_from_identifier\n```"}}} + {"id":48,"type":"edge","label":"textDocument/hover","inV":47,"outV":11} + {"id":49,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::generate_const_from_identifier","unique":"scheme","kind":"export"} + {"id":50,"type":"edge","label":"packageInformation","inV":31,"outV":49} + {"id":51,"type":"edge","label":"moniker","inV":49,"outV":11} + {"id":52,"type":"vertex","label":"definitionResult"} + {"id":53,"type":"edge","label":"item","document":1,"inVs":[10],"outV":52} + {"id":54,"type":"edge","label":"textDocument/definition","inV":52,"outV":11} + {"id":55,"type":"vertex","label":"referenceResult"} + {"id":56,"type":"edge","label":"textDocument/references","inV":55,"outV":11} + {"id":57,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[10],"outV":55} + {"id":58,"type":"edge","label":"item","document":1,"property":"references","inVs":[13,23],"outV":55} + {"id":59,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nconst REQ_001: &str = \"encoded_data\"\n```"}}} + {"id":60,"type":"edge","label":"textDocument/hover","inV":59,"outV":16} + {"id":61,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::REQ_001","unique":"scheme","kind":"export"} + {"id":62,"type":"edge","label":"packageInformation","inV":31,"outV":61} + {"id":63,"type":"edge","label":"moniker","inV":61,"outV":16} + {"id":64,"type":"vertex","label":"definitionResult"} + {"id":65,"type":"edge","label":"item","document":1,"inVs":[15],"outV":64} + {"id":66,"type":"edge","label":"textDocument/definition","inV":64,"outV":16} + {"id":67,"type":"vertex","label":"referenceResult"} + {"id":68,"type":"edge","label":"textDocument/references","inV":67,"outV":16} + {"id":69,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[15],"outV":67} + {"id":70,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmod tests\n```"}}} + {"id":71,"type":"edge","label":"textDocument/hover","inV":70,"outV":19} + {"id":72,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests","unique":"scheme","kind":"export"} + {"id":73,"type":"edge","label":"packageInformation","inV":31,"outV":72} + {"id":74,"type":"edge","label":"moniker","inV":72,"outV":19} + {"id":75,"type":"vertex","label":"definitionResult"} + 
{"id":76,"type":"edge","label":"item","document":1,"inVs":[18],"outV":75} + {"id":77,"type":"edge","label":"textDocument/definition","inV":75,"outV":19} + {"id":78,"type":"vertex","label":"referenceResult"} + {"id":79,"type":"edge","label":"textDocument/references","inV":78,"outV":19} + {"id":80,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[18],"outV":78} + {"id":81,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo::tests\n```\n\n```rust\nconst REQ_002: &str = \"encoded_data\"\n```"}}} + {"id":82,"type":"edge","label":"textDocument/hover","inV":81,"outV":26} + {"id":83,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests::REQ_002","unique":"scheme","kind":"export"} + {"id":84,"type":"edge","label":"packageInformation","inV":31,"outV":83} + {"id":85,"type":"edge","label":"moniker","inV":83,"outV":26} + {"id":86,"type":"vertex","label":"definitionResult"} + {"id":87,"type":"edge","label":"item","document":1,"inVs":[25],"outV":86} + {"id":88,"type":"edge","label":"textDocument/definition","inV":86,"outV":26} + {"id":89,"type":"vertex","label":"referenceResult"} + {"id":90,"type":"edge","label":"textDocument/references","inV":89,"outV":26} + {"id":91,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[25],"outV":89} "#]].assert_eq(stdout); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index 2b3c0a47a22..6f26bdc2cf0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -21,12 +21,14 @@ use lsp_types::{ notification::DidOpenTextDocument, request::{ CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest, - InlayHintRequest, InlayHintResolveRequest, WillRenameFiles, WorkspaceSymbolRequest, + InlayHintRequest, InlayHintResolveRequest, RangeFormatting, WillRenameFiles, + WorkspaceSymbolRequest, }, CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams, - DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams, - InlayHint, InlayHintLabel, InlayHintParams, PartialResultParams, Position, Range, - RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, + DocumentFormattingParams, DocumentRangeFormattingParams, FileRename, FormattingOptions, + GotoDefinitionParams, HoverParams, InlayHint, InlayHintLabel, InlayHintParams, + PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem, + TextDocumentPositionParams, WorkDoneProgressParams, }; use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams}; use serde_json::json; @@ -661,6 +663,70 @@ fn main() {} } #[test] +fn test_format_document_range() { + if skip_slow_tests() { + return; + } + + let server = Project::with_fixture( + r#" +//- /Cargo.toml +[package] +name = "foo" +version = "0.0.0" + +//- /src/lib.rs +fn main() { + let unit_offsets_cache = collect(dwarf.units ()) ?; +} +"#, + ) + .with_config(serde_json::json!({ + "rustfmt": { + "overrideCommand": [ "rustfmt", "+nightly", ], + "rangeFormatting": { "enable": true } + }, + })) + .server() + .wait_until_workspace_is_loaded(); + + server.request::<RangeFormatting>( + DocumentRangeFormattingParams { + range: Range { + end: Position { line: 1, character: 0 }, + start: Position { line: 1, character: 0 }, + }, + 
text_document: server.doc_id("src/lib.rs"), + options: FormattingOptions { + tab_size: 4, + insert_spaces: false, + insert_final_newline: None, + trim_final_newlines: None, + trim_trailing_whitespace: None, + properties: HashMap::new(), + }, + work_done_progress_params: WorkDoneProgressParams::default(), + }, + json!([ + { + "newText": "", + "range": { + "start": { "character": 48, "line": 1 }, + "end": { "character": 50, "line": 1 }, + }, + }, + { + "newText": "", + "range": { + "start": { "character": 53, "line": 1 }, + "end": { "character": 55, "line": 1 }, + }, + } + ]), + ); +} + +#[test] fn test_missing_module_code_action() { if skip_slow_tests() { return; @@ -1086,7 +1152,11 @@ fn resolve_proc_macro() { &AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()), &Default::default(), ); - sysroot.load_workspace(&project_model::SysrootSourceWorkspaceConfig::default_cargo()); + let loaded_sysroot = + sysroot.load_workspace(&project_model::RustSourceWorkspaceConfig::default_cargo()); + if let Some(loaded_sysroot) = loaded_sysroot { + sysroot.set_workspace(loaded_sysroot); + } let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap(); @@ -1372,6 +1442,40 @@ pub fn foo() {} name = "bar" version = "0.0.0" +[dependencies] +foo = { path = "../foo" } + +//- /bar/src/lib.rs +"#, + ) + .root("foo") + .root("bar") + .root("baz") + .with_config(json!({ + "files": { + "exclude": ["foo"] + } + })) + .server() + .wait_until_workspace_is_loaded(); + + server.request::<WorkspaceSymbolRequest>(Default::default(), json!([])); + + let server = Project::with_fixture( + r#" +//- /foo/Cargo.toml +[package] +name = "foo" +version = "0.0.0" + +//- /foo/src/lib.rs +pub fn foo() {} + +//- /bar/Cargo.toml +[package] +name = "bar" +version = "0.0.0" + //- /bar/src/lib.rs pub fn bar() {} @@ -1388,7 +1492,7 @@ version = "0.0.0" .root("baz") .with_config(json!({ "files": { - "excludeDirs": ["foo", "bar"] + "exclude": ["foo", "bar"] } })) .server() diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs index d113bd51278..409be2894fe 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs @@ -43,10 +43,15 @@ impl TestDir { } fs::create_dir_all(&path).unwrap(); - #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))] + #[cfg(any( + target_os = "macos", + target_os = "linux", + target_os = "windows", + target_os = "freebsd" + ))] if symlink { let symlink_path = base.join(format!("{pid}_{cnt}_symlink")); - #[cfg(any(target_os = "macos", target_os = "linux"))] + #[cfg(any(target_os = "macos", target_os = "linux", target_os = "freebsd"))] std::os::unix::fs::symlink(path, &symlink_path).unwrap(); #[cfg(target_os = "windows")] diff --git a/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs index 4ec74c0742a..a35d50b78df 100644 --- a/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs +++ b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs @@ -1,28 +1,25 @@ //! A micro-crate to enhance panic messages with context info. -//! -//! FIXME: upstream to <https://github.com/kriomant/panic-context> ? 
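The testdir change above only widens the platform gate so FreeBSD also exercises the symlinked test directories. A stripped-down sketch of the same cfg gating (the helper name is illustrative):

    use std::{io, path::Path};

    // Unix-like targets create a plain symlink; Windows needs the
    // directory-specific variant.
    #[cfg(any(target_os = "macos", target_os = "linux", target_os = "freebsd"))]
    fn make_symlink(original: &Path, link: &Path) -> io::Result<()> {
        std::os::unix::fs::symlink(original, link)
    }

    #[cfg(target_os = "windows")]
    fn make_symlink(original: &Path, link: &Path) -> io::Result<()> {
        std::os::windows::fs::symlink_dir(original, link)
    }

    fn main() {}
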
use std::{cell::RefCell, panic, sync::Once}; -pub fn enter(context: String) -> PanicContext { - static ONCE: Once = Once::new(); - ONCE.call_once(PanicContext::init); - - with_ctx(|ctx| ctx.push(context)); - PanicContext { _priv: () } -} - +/// Dummy for leveraging RAII cleanup to pop frames. #[must_use] pub struct PanicContext { + // prevent arbitrary construction _priv: (), } -impl PanicContext { +impl Drop for PanicContext { + fn drop(&mut self) { + with_ctx(|ctx| assert!(ctx.pop().is_some())); + } +} + +pub fn enter(frame: String) -> PanicContext { #[allow(clippy::print_stderr)] - fn init() { + fn set_hook() { let default_hook = panic::take_hook(); - #[allow(deprecated)] - let hook = move |panic_info: &panic::PanicInfo<'_>| { + panic::set_hook(Box::new(move |panic_info| { with_ctx(|ctx| { if !ctx.is_empty() { eprintln!("Panic context:"); @@ -30,17 +27,16 @@ impl PanicContext { eprintln!("> {frame}\n"); } } - default_hook(panic_info); }); - }; - panic::set_hook(Box::new(hook)); + default_hook(panic_info); + })); } -} -impl Drop for PanicContext { - fn drop(&mut self) { - with_ctx(|ctx| assert!(ctx.pop().is_some())); - } + static SET_HOOK: Once = Once::new(); + SET_HOOK.call_once(set_hook); + + with_ctx(|ctx| ctx.push(frame)); + PanicContext { _priv: () } } fn with_ctx(f: impl FnOnce(&mut Vec<String>)) { diff --git a/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs b/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs index 2ddd7da74c2..9acc1de922a 100644 --- a/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs +++ b/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs @@ -7,9 +7,12 @@ //! The thread pool is implemented entirely using //! the threading utilities in [`crate::thread`]. -use std::sync::{ - atomic::{AtomicUsize, Ordering}, - Arc, +use std::{ + panic::{self, UnwindSafe}, + sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, + }, }; use crossbeam_channel::{Receiver, Sender}; @@ -25,13 +28,13 @@ pub struct Pool { // so that the channel is actually closed // before we join the worker threads! 
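The rewritten panic_context keeps the same shape as before: a thread-local stack of frames, a panic hook installed exactly once that prints the stack before delegating to the previous hook, and a #[must_use] guard whose Drop pops the frame. A self-contained sketch of that pattern (not the crate's exact code):

    use std::{cell::RefCell, panic, sync::Once};

    thread_local! {
        static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
    }

    /// Guard that pops its frame again when dropped.
    #[must_use]
    struct Frame;

    impl Drop for Frame {
        fn drop(&mut self) {
            CTX.with(|ctx| {
                ctx.borrow_mut().pop();
            });
        }
    }

    fn enter(frame: String) -> Frame {
        static SET_HOOK: Once = Once::new();
        SET_HOOK.call_once(|| {
            let default_hook = panic::take_hook();
            panic::set_hook(Box::new(move |info| {
                CTX.with(|ctx| {
                    let ctx = ctx.borrow();
                    if !ctx.is_empty() {
                        eprintln!("Panic context:");
                        for frame in ctx.iter() {
                            eprintln!("> {frame}");
                        }
                    }
                });
                default_hook(info);
            }));
        });
        CTX.with(|ctx| ctx.borrow_mut().push(frame));
        Frame
    }

    fn main() {
        let _guard = enter("handling request #1".to_owned());
        // A panic anywhere below this point now prints "handling request #1"
        // before the default hook's message; dropping the guard pops it again.
    }
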
job_sender: Sender<Job>, - _handles: Vec<JoinHandle>, + _handles: Box<[JoinHandle]>, extant_tasks: Arc<AtomicUsize>, } struct Job { requested_intent: ThreadIntent, - f: Box<dyn FnOnce() + Send + 'static>, + f: Box<dyn FnOnce() + Send + UnwindSafe + 'static>, } impl Pool { @@ -47,6 +50,7 @@ impl Pool { let handle = Builder::new(INITIAL_INTENT) .stack_size(STACK_SIZE) .name("Worker".into()) + .allow_leak(true) .spawn({ let extant_tasks = Arc::clone(&extant_tasks); let job_receiver: Receiver<Job> = job_receiver.clone(); @@ -58,7 +62,8 @@ impl Pool { current_intent = job.requested_intent; } extant_tasks.fetch_add(1, Ordering::SeqCst); - (job.f)(); + // discard the panic, we should've logged the backtrace already + _ = panic::catch_unwind(job.f); extant_tasks.fetch_sub(1, Ordering::SeqCst); } } @@ -68,12 +73,12 @@ impl Pool { handles.push(handle); } - Pool { _handles: handles, extant_tasks, job_sender } + Pool { _handles: handles.into_boxed_slice(), extant_tasks, job_sender } } pub fn spawn<F>(&self, intent: ThreadIntent, f: F) where - F: FnOnce() + Send + 'static, + F: FnOnce() + Send + UnwindSafe + 'static, { let f = Box::new(move || { if cfg!(debug_assertions) { diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 4e2a70d6cd9..bbb8413cbc0 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -241,7 +241,7 @@ RecordFieldList = RecordField = Attr* Visibility? - Name ':' Type + Name ':' Type ('=' Expr)? TupleFieldList = '(' fields:(TupleField (',' TupleField)* ','?)? ')' diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index 291fc646e21..aedf810b794 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -710,52 +710,6 @@ impl ast::Fn { } } -impl Removable for ast::MatchArm { - fn remove(&self) { - if let Some(sibling) = self.syntax().prev_sibling_or_token() { - if sibling.kind() == SyntaxKind::WHITESPACE { - ted::remove(sibling); - } - } - if let Some(sibling) = self.syntax().next_sibling_or_token() { - if sibling.kind() == T![,] { - ted::remove(sibling); - } - } - ted::remove(self.syntax()); - } -} - -impl ast::MatchArmList { - pub fn add_arm(&self, arm: ast::MatchArm) { - normalize_ws_between_braces(self.syntax()); - let mut elements = Vec::new(); - let position = match self.arms().last() { - Some(last_arm) => { - if needs_comma(&last_arm) { - ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA)); - } - Position::after(last_arm.syntax().clone()) - } - None => match self.l_curly_token() { - Some(it) => Position::after(it), - None => Position::last_child_of(self.syntax()), - }, - }; - let indent = IndentLevel::from_node(self.syntax()) + 1; - elements.push(make::tokens::whitespace(&format!("\n{indent}")).into()); - elements.push(arm.syntax().clone().into()); - if needs_comma(&arm) { - ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA)); - } - ted::insert_all(position, elements); - - fn needs_comma(arm: &ast::MatchArm) -> bool { - arm.expr().is_some_and(|e| !e.is_block_like()) && arm.comma_token().is_none() - } - } -} - impl ast::LetStmt { pub fn set_ty(&self, ty: Option<ast::Type>) { match ty { diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 
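The pool change wraps every job in catch_unwind (and tightens the job bound to UnwindSafe) so a panicking task no longer kills its worker thread. A minimal sketch of that worker loop, using std::sync::mpsc instead of crossbeam to stay self-contained:

    use std::{
        panic::{self, UnwindSafe},
        sync::mpsc,
        thread,
    };

    type Job = Box<dyn FnOnce() + Send + UnwindSafe + 'static>;

    // Each job is unwound in isolation, so one panic cannot take the pool down.
    fn worker(jobs: mpsc::Receiver<Job>) {
        for job in jobs {
            // Discard the payload; the panic hook is assumed to have logged it already.
            let _ = panic::catch_unwind(job);
        }
    }

    fn main() {
        let (tx, rx) = mpsc::channel::<Job>();
        let handle = thread::spawn(move || worker(rx));
        tx.send(Box::new(|| println!("ok"))).unwrap();
        tx.send(Box::new(|| panic!("boom"))).unwrap();
        tx.send(Box::new(|| println!("still running"))).unwrap();
        drop(tx); // closing the channel ends the worker loop
        handle.join().unwrap();
    }
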
69e2a9f9c1b..58c76a456ab 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -1,4 +1,4 @@ -//! Generated by `cargo codegen grammar`, do not edit by hand. +//! Generated by `cargo xtask codegen grammar`, do not edit by hand. #![allow(non_snake_case)] use crate::{ @@ -1539,9 +1539,13 @@ impl ast::HasName for RecordField {} impl ast::HasVisibility for RecordField {} impl RecordField { #[inline] + pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } #[inline] pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) } + #[inline] + pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs index 85d20c2bd8c..df2e9619db1 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs @@ -1,4 +1,4 @@ -//! Generated by `cargo codegen grammar`, do not edit by hand. +//! Generated by `cargo xtask codegen grammar`, do not edit by hand. use crate::{ ast::AstToken, diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index ff027ac5848..9dc2d832530 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -837,7 +837,8 @@ pub fn match_guard(condition: ast::Expr) -> ast::MatchGuard { pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList { let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| { - let needs_comma = arm.expr().is_none_or(|it| !it.is_block_like()); + let needs_comma = + arm.comma_token().is_none() && arm.expr().is_none_or(|it| !it.is_block_like()); let comma = if needs_comma { "," } else { "" }; let arm = arm.syntax(); format_to_acc!(acc, " {arm}{comma}\n") diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs index 28089ffb377..5d33f132ac1 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs @@ -5,7 +5,122 @@ use crate::{ match_ast, AstNode, SyntaxNode, }; +#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)] +pub enum ExprPrecedence { + // return, break, yield, closures + Jump, + // = += -= *= /= %= &= |= ^= <<= >>= + Assign, + // .. ..= + Range, + // || + LOr, + // && + LAnd, + // == != < > <= >= + Compare, + // | + BitOr, + // ^ + BitXor, + // & + BitAnd, + // << >> + Shift, + // + - + Sum, + // * / % + Product, + // as + Cast, + // unary - * ! 
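The grammar change (RecordField now allows a trailing '=' Expr) and the regenerated expr()/eq_token() accessors let the parser represent default values on struct fields. A sketch of the surface syntax this covers, assuming the unstable default_field_values feature (nightly-only; the feature's details may still change):

    #![feature(default_field_values)]

    struct Config {
        width: u32 = 80, // `= 80` sits in the new RecordField `'=' Expr` position
        tab_size: u32 = 4,
        name: String,
    }

    fn main() {
        // Fields with a default may be omitted behind `..`.
        let c = Config { name: "demo".to_owned(), .. };
        assert_eq!((c.width, c.tab_size), (80, 4));
    }
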
& &mut + Prefix, + // paths, loops, function calls, array indexing, field expressions, method calls + Unambiguous, +} + +#[derive(PartialEq, Debug)] +pub enum Fixity { + /// The operator is left-associative + Left, + /// The operator is right-associative + Right, + /// The operator is not associative + None, +} + +pub fn precedence(expr: &ast::Expr) -> ExprPrecedence { + match expr { + Expr::ClosureExpr(closure) => match closure.ret_type() { + None => ExprPrecedence::Jump, + Some(_) => ExprPrecedence::Unambiguous, + }, + + Expr::BreakExpr(_) + | Expr::ContinueExpr(_) + | Expr::ReturnExpr(_) + | Expr::YeetExpr(_) + | Expr::YieldExpr(_) => ExprPrecedence::Jump, + + Expr::RangeExpr(..) => ExprPrecedence::Range, + + Expr::BinExpr(bin_expr) => match bin_expr.op_kind() { + Some(it) => match it { + BinaryOp::LogicOp(logic_op) => match logic_op { + ast::LogicOp::And => ExprPrecedence::LAnd, + ast::LogicOp::Or => ExprPrecedence::LOr, + }, + BinaryOp::ArithOp(arith_op) => match arith_op { + ast::ArithOp::Add | ast::ArithOp::Sub => ExprPrecedence::Sum, + ast::ArithOp::Div | ast::ArithOp::Rem | ast::ArithOp::Mul => { + ExprPrecedence::Product + } + ast::ArithOp::Shl | ast::ArithOp::Shr => ExprPrecedence::Shift, + ast::ArithOp::BitXor => ExprPrecedence::BitXor, + ast::ArithOp::BitOr => ExprPrecedence::BitOr, + ast::ArithOp::BitAnd => ExprPrecedence::BitAnd, + }, + BinaryOp::CmpOp(_) => ExprPrecedence::Compare, + BinaryOp::Assignment { .. } => ExprPrecedence::Assign, + }, + None => ExprPrecedence::Unambiguous, + }, + Expr::CastExpr(_) => ExprPrecedence::Cast, + + Expr::LetExpr(_) | Expr::PrefixExpr(_) | Expr::RefExpr(_) => ExprPrecedence::Prefix, + + Expr::ArrayExpr(_) + | Expr::AsmExpr(_) + | Expr::AwaitExpr(_) + | Expr::BecomeExpr(_) + | Expr::BlockExpr(_) + | Expr::CallExpr(_) + | Expr::FieldExpr(_) + | Expr::ForExpr(_) + | Expr::FormatArgsExpr(_) + | Expr::IfExpr(_) + | Expr::IndexExpr(_) + | Expr::Literal(_) + | Expr::LoopExpr(_) + | Expr::MacroExpr(_) + | Expr::MatchExpr(_) + | Expr::MethodCallExpr(_) + | Expr::OffsetOfExpr(_) + | Expr::ParenExpr(_) + | Expr::PathExpr(_) + | Expr::RecordExpr(_) + | Expr::TryExpr(_) + | Expr::TupleExpr(_) + | Expr::UnderscoreExpr(_) + | Expr::WhileExpr(_) => ExprPrecedence::Unambiguous, + } +} + impl Expr { + pub fn precedence(&self) -> ExprPrecedence { + precedence(self) + } + // Implementation is based on // - https://doc.rust-lang.org/reference/expressions.html#expression-precedence // - https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html @@ -261,7 +376,7 @@ impl Expr { } /// Returns true if self is one of `return`, `break`, `continue` or `yield` with **no associated value**. 
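The ExprPrecedence enum is ordered from loosest to tightest binding, so deriving PartialOrd is enough to answer "does this child need parentheses under this parent?". A cut-down sketch of that idea (the real check also has to consult associativity, which the Fixity enum above exists for):

    // Later variants compare greater, mirroring the ordering above.
    #[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
    enum Prec {
        Assign,
        Compare,
        Sum,
        Product,
        Prefix,
        Unambiguous,
    }

    // A child that binds more loosely than its parent must be wrapped.
    fn needs_parens(parent: Prec, child: Prec) -> bool {
        child < parent
    }

    fn main() {
        // `a + b` used as a factor of `*` becomes `(a + b) * c`.
        assert!(needs_parens(Prec::Product, Prec::Sum));
        // A path or literal never needs wrapping.
        assert!(!needs_parens(Prec::Sum, Prec::Unambiguous));
    }
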
- fn is_ret_like_with_no_value(&self) -> bool { + pub fn is_ret_like_with_no_value(&self) -> bool { use Expr::*; match self { diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index 866379d940e..613f27c7958 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -17,7 +17,7 @@ use hir_expand::{ tt::{Leaf, TokenTree, TopSubtree, TopSubtreeBuilder, TtElement, TtIter}, FileRange, }; -use intern::Symbol; +use intern::{sym, Symbol}; use rustc_hash::FxHashMap; use span::{Edition, EditionedFileId, FileId, Span}; use stdx::itertools::Itertools; @@ -258,15 +258,7 @@ impl ChangeFixture { let to_id = crates[&to]; let sysroot = crate_graph[to_id].origin.is_lang(); crate_graph - .add_dep( - from_id, - Dependency::with_prelude( - CrateName::new(&to).unwrap(), - to_id, - prelude, - sysroot, - ), - ) + .add_dep(from_id, Dependency::with_prelude(to.clone(), to_id, prelude, sysroot)) .unwrap(); } } @@ -519,6 +511,21 @@ pub fn issue_18898(_attr: TokenStream, input: TokenStream) -> TokenStream { disabled: false, }, ), + ( + r#" +#[proc_macro_attribute] +pub fn disallow_cfg(_attr: TokenStream, input: TokenStream) -> TokenStream { + input +} +"# + .into(), + ProcMacro { + name: Symbol::intern("disallow_cfg"), + kind: ProcMacroKind::Attr, + expander: sync::Arc::new(DisallowCfgProcMacroExpander), + disabled: false, + }, + ), ]) } @@ -873,3 +880,30 @@ impl ProcMacroExpander for Issue18898ProcMacroExpander { }) } } + +// Reads ident type within string quotes, for issue #17479. +#[derive(Debug)] +struct DisallowCfgProcMacroExpander; +impl ProcMacroExpander for DisallowCfgProcMacroExpander { + fn expand( + &self, + subtree: &TopSubtree, + _: Option<&TopSubtree>, + _: &Env, + _: Span, + _: Span, + _: Span, + _: Option<String>, + ) -> Result<TopSubtree, ProcMacroExpansionError> { + for tt in subtree.token_trees().flat_tokens() { + if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt { + if ident.sym == sym::cfg || ident.sym == sym::cfg_attr { + return Err(ProcMacroExpansionError::Panic( + "cfg or cfg_attr found in DisallowCfgProcMacroExpander".to_owned(), + )); + } + } + } + Ok(subtree.clone()) + } +} diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs index 36be9937d3f..e7279fa1f66 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs @@ -396,12 +396,19 @@ pub fn skip_slow_tests() -> bool { if should_skip { eprintln!("ignoring slow test"); } else { - let path = project_root().join("./target/.slow_tests_cookie"); + let path = target_dir().join(".slow_tests_cookie"); fs::write(path, ".").unwrap(); } should_skip } +pub fn target_dir() -> Utf8PathBuf { + match std::env::var("CARGO_TARGET_DIR") { + Ok(target) => Utf8PathBuf::from(target), + Err(_) => project_root().join("target"), + } +} + /// Returns the path to the root directory of `rust-analyzer` project. 
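The new disallow_cfg fixture macro simply walks the incoming token trees and errors out if it sees a cfg or cfg_attr identifier. The same check, sketched against a proc-macro2 token stream rather than rust-analyzer's own tt types (assumes the proc-macro2 crate):

    use proc_macro2::{TokenStream, TokenTree};

    // Walk every token, descending into groups, and report whether a
    // `cfg` / `cfg_attr` identifier occurs anywhere.
    fn contains_cfg(stream: TokenStream) -> bool {
        stream.into_iter().any(|tt| match tt {
            TokenTree::Ident(ident) => ident == "cfg" || ident == "cfg_attr",
            TokenTree::Group(group) => contains_cfg(group.stream()),
            _ => false,
        })
    }

    fn main() {
        let ok: TokenStream = "fn f() {}".parse().unwrap();
        let bad: TokenStream = "#[cfg(test)] fn f() {}".parse().unwrap();
        assert!(!contains_cfg(ok));
        assert!(contains_cfg(bad));
    }
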
pub fn project_root() -> Utf8PathBuf { let dir = env!("CARGO_MANIFEST_DIR"); diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 4ed68d18e80..202afebde70 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -647,18 +647,21 @@ pub mod ops { #[lang = "fn"] #[fundamental] + #[rustc_paren_sugar] pub trait Fn<Args: Tuple>: FnMut<Args> { extern "rust-call" fn call(&self, args: Args) -> Self::Output; } #[lang = "fn_mut"] #[fundamental] + #[rustc_paren_sugar] pub trait FnMut<Args: Tuple>: FnOnce<Args> { extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output; } #[lang = "fn_once"] #[fundamental] + #[rustc_paren_sugar] pub trait FnOnce<Args: Tuple> { #[lang = "fn_once_output"] type Output; @@ -736,12 +739,14 @@ pub mod ops { #[lang = "async_fn"] #[fundamental] + #[rustc_paren_sugar] pub trait AsyncFn<Args: Tuple>: AsyncFnMut<Args> { extern "rust-call" fn async_call(&self, args: Args) -> Self::CallRefFuture<'_>; } #[lang = "async_fn_mut"] #[fundamental] + #[rustc_paren_sugar] pub trait AsyncFnMut<Args: Tuple>: AsyncFnOnce<Args> { #[lang = "call_ref_future"] type CallRefFuture<'a>: Future<Output = Self::Output> @@ -752,6 +757,7 @@ pub mod ops { #[lang = "async_fn_once"] #[fundamental] + #[rustc_paren_sugar] pub trait AsyncFnOnce<Args: Tuple> { #[lang = "async_fn_once_output"] type Output; diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs index 0ae8b7baf46..32003341764 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs @@ -280,8 +280,9 @@ impl NotifyActor { return false; } - root == path - || dirs.exclude.iter().chain(&dirs.include).all(|it| it != path) + // We want to filter out subdirectories that are roots themselves, because they will be visited separately. + dirs.exclude.iter().all(|it| it != path) + && (root == path || dirs.include.iter().all(|it| it != path)) }); let files = walkdir.filter_map(|it| it.ok()).filter_map(|entry| { diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs index a26444e9ea2..3feca512e55 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs @@ -100,6 +100,9 @@ pub enum FileState { Exists(u64), /// The file is deleted. Deleted, + /// The file was specifically excluded by the user. We still include excluded files + /// when they're opened (without their contents). + Excluded, } /// Changed file in the [`Vfs`]. @@ -164,10 +167,22 @@ pub enum ChangeKind { Delete, } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FileExcluded { + Yes, + No, +} + impl Vfs { /// Id of the given path if it exists in the `Vfs` and is not deleted. - pub fn file_id(&self, path: &VfsPath) -> Option<FileId> { - self.interner.get(path).filter(|&it| matches!(self.get(it), FileState::Exists(_))) + pub fn file_id(&self, path: &VfsPath) -> Option<(FileId, FileExcluded)> { + let file_id = self.interner.get(path)?; + let file_state = self.get(file_id); + match file_state { + FileState::Exists(_) => Some((file_id, FileExcluded::No)), + FileState::Deleted => None, + FileState::Excluded => Some((file_id, FileExcluded::Yes)), + } } /// File path corresponding to the given `file_id`. 
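#[rustc_paren_sugar] marks a trait as one that may be written with parenthesized, function-like sugar in bounds and trait objects; adding it to minicore's Fn*/AsyncFn* traits lets test fixtures use the same notation as real code. For reference, the sugar in ordinary Rust:

    // `Fn(i32) -> i32` is the parenthesized sugar for the Fn trait with a
    // one-element tuple argument list and `Output = i32`.
    fn apply<F: Fn(i32) -> i32>(f: F, x: i32) -> i32 {
        f(x)
    }

    fn main() {
        assert_eq!(apply(|x| x + 1, 41), 42);
    }
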
@@ -216,6 +231,7 @@ impl Vfs { } Change::Modify(v, new_hash) } + (FileState::Excluded, _) => return false, }; let mut set_data = |change_kind| { @@ -297,6 +313,13 @@ impl Vfs { fn get(&self, file_id: FileId) -> FileState { self.data[file_id.0 as usize] } + + /// We cannot ignore excluded files, because this will lead to errors when the client + /// requests semantic information for them, so we instead mark them specially. + pub fn insert_excluded_file(&mut self, path: VfsPath) { + let file_id = self.alloc_file_id(path); + self.data[file_id.0 as usize] = FileState::Excluded; + } } impl fmt::Debug for Vfs {
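The new FileState::Excluded state and the (FileId, FileExcluded) return type give user-excluded files an id without contents, so client requests against them can still be answered while deleted files keep reporting no id at all. A self-contained sketch of that lookup contract (MiniVfs and its fields are illustrative, not the real vfs types):

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum FileState {
        Exists,
        Deleted,
        Excluded,
    }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum FileExcluded {
        Yes,
        No,
    }

    struct MiniVfs {
        ids: HashMap<String, u32>,
        states: Vec<FileState>,
    }

    impl MiniVfs {
        // Deleted files have no id; excluded files keep theirs but are flagged.
        fn file_id(&self, path: &str) -> Option<(u32, FileExcluded)> {
            let id = *self.ids.get(path)?;
            match self.states[id as usize] {
                FileState::Exists => Some((id, FileExcluded::No)),
                FileState::Excluded => Some((id, FileExcluded::Yes)),
                FileState::Deleted => None,
            }
        }
    }

    fn main() {
        let vfs = MiniVfs {
            ids: HashMap::from([
                ("src/lib.rs".to_owned(), 0),
                ("ignored.rs".to_owned(), 1),
                ("gone.rs".to_owned(), 2),
            ]),
            states: vec![FileState::Exists, FileState::Excluded, FileState::Deleted],
        };
        assert_eq!(vfs.file_id("ignored.rs"), Some((1, FileExcluded::Yes)));
        assert_eq!(vfs.file_id("gone.rs"), None);
        assert_eq!(vfs.file_id("missing.rs"), None);
    }
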
